| Column | Type | Values / Lengths |
|---|---|---|
| lang | stringclasses | 3 values |
| file_path | stringlengths | 5–150 |
| repo_name | stringlengths | 6–110 |
| commit | stringlengths | 40–40 |
| file_code | stringlengths | 1.52k–18.9k |
| prefix | stringlengths | 82–16.5k |
| suffix | stringlengths | 0–15.1k |
| middle | stringlengths | 121–8.18k |
| strategy | stringclasses | 8 values |
| context_items | listlengths | 0–100 |
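The columns appear to describe fill-in-the-middle (FIM) style code-completion examples: each row takes a source file (`file_code`) from a repository at a given `commit`, splits it into `prefix`, `middle`, and `suffix` according to one of eight strategies, and attaches up to 100 retrieved `context_items` (snippets from other files in the same repo, each with `content`, `file_path`, `rank`, and `score` keys, as in the sample row below). The sketch below shows one way such a row could be loaded and assembled into a prompt; the dataset identifier and the FIM sentinel tokens are placeholders (assumptions), while the column and item keys are taken from this schema and the sample row.

```python
# Minimal sketch of consuming one row of this dataset. The dataset path
# "user/rust-fim-completion" is a placeholder (assumption), and the FIM
# sentinel tokens (<PRE>, <SUF>, <MID>) depend on the target model.
from datasets import load_dataset

ds = load_dataset("user/rust-fim-completion", split="train")  # hypothetical id
row = ds[0]

# Scalar metadata columns from the schema above.
print(row["lang"], row["repo_name"], row["file_path"], row["commit"], row["strategy"])

# Cross-file context retrieved for this example; keys match the sample row.
context = "\n\n".join(item["content"] for item in row["context_items"][:5])

# Assemble a prefix/suffix prompt; `middle` is the span the model should produce.
prompt = f"{context}\n<PRE>{row['prefix']}<SUF>{row['suffix']}<MID>"
target = row["middle"]
```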
Example row (scalar fields first; the file_code, prefix, suffix, and middle values follow in that order as raw text, then strategy and the context_items list):

- lang: Rust
- file_path: src/stages/how_to_play.rs
- repo_name: rdrmic/color-columns
- commit: 197a7a46ba314b93401a7882e764c85c844ec967
use ggez::{ graphics::{self, Align, DrawParam, PxScale, Text, TextFragment}, mint::Point2, Context, GameResult, }; use glam::Vec2; use crate::{ blocks::Block, constants::{ BLOCK_COLOR_BLUE, BLOCK_COLOR_GREEN, BLOCK_COLOR_MAGENTA, BLOCK_COLOR_ORANGE, BLOCK_COLOR_RED, BLOCK_COLOR_YELLOW, COLOR_GREEN, COLOR_ORANGE, COLOR_YELLOW, GO_BACK_LABEL_POSITION, HOWTOPLAY_AND_ABOUT_AREA_WIDTH, HOWTOPLAY_CONTROLS_CHAR_SCALE, HOWTOPLAY_CONTROLS_LEFTSIDE_TEXT_POSITION_X, HOWTOPLAY_CONTROLS_RIGHTSIDE_TEXT_POSITION_X, HOWTOPLAY_CONTROLS_TEXT_POSITION_Y, HOWTOPLAY_HEADER_BLOCK_SIZE, HOWTOPLAY_HEADER_CONTROLS_POSITION_Y, HOWTOPLAY_HEADER_POSITION_X, HOWTOPLAY_HEADER_SCORING_RULES_POSITION_Y, HOWTOPLAY_SCORING_RULES_CHAR_SCALE, HOWTOPLAY_SCORING_RULES_TEXT_POSITION, }, input::Event, resources::Resources, }; use super::{Stage, StageTrait}; pub struct HowToPlay { blocks_header_controls: [Block; 3], blocks_header_scoring_rules: [Block; 3], go_back_instruction: Text, controls_leftside: Text, controls_rightside: Text, scoring_rules: Text, } impl HowToPlay { pub fn new(resources: &Resources) -> Self { let font = resources.get_fonts().semi_bold; let controls_leftside_str = "\n\ Right:\n\ Left:\n\ Shuffle up:\n\ Shuffle down:\n\ Drop: "; let mut controls_leftside = Text::new(TextFragment { text: controls_leftside_str.to_string(), color: Some(COLOR_ORANGE), font: Some(font), scale: Some(PxScale::from(HOWTOPLAY_CONTROLS_CHAR_SCALE)), }); controls_leftside.set_bounds( Vec2::new(HOWTOPLAY_AND_ABOUT_AREA_WIDTH, f32::INFINITY), Align::Left, ); let controls_rightside_str = "\n\ RIGHT / D\n\ LEFT / A\n\ UP / W\n\ DOWN / S\n\ SPACE "; let mut controls_rightside = Text::new(TextFragment { text: controls_rightside_str.to_string(), color: Some(COLOR_YELLOW), font: Some(font), scale: Some(PxScale::from(HOWTOPLAY_CONTROLS_CHAR_SCALE)), }); controls_rightside.set_bounds( Vec2::new(HOWTOPLAY_AND_ABOUT_AREA_WIDTH, f32::INFINITY), Align::Left, ); let scoring_rules_str = "\n\ Points are gained by matching\n\ same-colored blocks in all 4\n\ directions.\n\n\ The more matched blocks in a\n\ line - the more points gained.\n\n\ Also, the points are\n\ multiplicated by the number of\n\ sequential cascading matchings. 
"; let mut scoring_rules = Text::new(TextFragment { text: scoring_rules_str.to_string(), color: Some(COLOR_GREEN), font: Some(font), scale: Some(PxScale::from(HOWTOPLAY_SCORING_RULES_CHAR_SCALE)), }); scoring_rules.set_bounds( Vec2::new(HOWTOPLAY_AND_ABOUT_AREA_WIDTH, f32::INFINITY), Align::Left, ); Self { blocks_header_controls: Self::create_header_for_controls(), blocks_header_scoring_rules: Self::create_header_for_scoring_rules(), go_back_instruction: resources .get_navigation_instructions() .get_go_back() .clone(), controls_leftside, controls_rightside, scoring_rules, } } fn create_header_for_controls() -> [Block; 3] { [ Block::new( Point2 { x: HOWTOPLAY_HEADER_POSITION_X, y: HOWTOPLAY_HEADER_CONTROLS_POSITION_Y, }, HOWTOPLAY_HEADER_BLOCK_SIZE, BLOCK_COLOR_BLUE, ), Block::new( Point2 { x: HOWTOPLAY_HEADER_POSITION_X + HOWTOPLAY_HEADER_BLOCK_SIZE, y: HOWTOPLAY_HEADER_CONTROLS_POSITION_Y, }, HOWTOPLAY_HEADER_BLOCK_SIZE, BLOCK_COLOR_YELLOW, ), Block::new( Point2 { x: HOWTOPLAY_HEADER_BLOCK_SIZE.mul_add(2.0, HOWTOPLAY_HEADER_POSITION_X), y: HOWTOPLAY_HEADER_CONTROLS_POSITION_Y, }, HOWTOPLAY_HEADER_BLOCK_SIZE, BLOCK_COLOR_GREEN, ), ] } fn create_header_for_scoring_rules() -> [Block; 3] { [ Block::new( Point2 { x: HOWTOPLAY_HEADER_POSITION_X, y: HOWTOPLAY_HEADER_SCORING_RULES_POSITION_Y, }, HOWTOPLAY_HEADER_BLOCK_SIZE, BLOCK_COLOR_RED, ), Block::new( Point2 { x: HOWTOPLAY_HEADER_POSITION_X + HOWTOPLAY_HEADER_BLOCK_SIZE, y: HOWTOPLAY_HEADER_SCORING_RULES_POSITION_Y, }, HOWTOPLAY_HEADER_BLOCK_SIZE, BLOCK_COLOR_ORANGE, ), Block::new( Point2 { x: HOWTOPLAY_HEADER_BLOCK_SIZE.mul_add(2.0, HOWTOPLAY_HEADER_POSITION_X), y: HOWTOPLAY_HEADER_SCORING_RULES_POSITION_Y, }, HOWTOPLAY_HEADER_BLOCK_SIZE, BLOCK_COLOR_MAGENTA, ), ] } } impl StageTrait for HowToPlay { fn update(&mut self, _ctx: &Context, input_event: Event) -> GameResult<Option<Stage>> { if let Event::Escape = input_event { return Ok(Some(Stage::MainMenu)); } Ok(Some(Stage::HowToPlay)) } fn draw(&mut self, ctx: &mut Context) -> GameResult<()> { for mut block in self.blocks_header_controls { block.draw(ctx)?; } for mut block in self.blocks_header_scoring_rules { block.draw(ctx)?; } graphics::queue_text( ctx, &self.go_back_instruction, Vec2::new(GO_BACK_LABEL_POSITION[0], GO_BACK_LABEL_POSITION[1]), None, ); graphics::queue_text( ctx, &self.controls_leftside, Vec2::new( HOWTOPLAY_CONTROLS_LEFTSIDE_TEXT_POSITION_X, HOWTOPLAY_CONTROLS_TEXT_POSITION_Y, ), None, ); graphics::queue_text( ctx, &self.controls_rightside, Vec2::new( HOWTOPLAY_CONTROLS_RIGHTSIDE_TEXT_POSITION_X, HOWTOPLAY_CONTROLS_TEXT_POSITION_Y, ), None, ); graphics::queue_text( ctx, &self.scoring_rules, Vec2::new( HOWTOPLAY_SCORING_RULES_TEXT_POSITION[0], HOWTOPLAY_SCORING_RULES_TEXT_POSITION[1], ), None, ); graphics::draw_queued_text( ctx, DrawParam::default(), None, graphics::FilterMode::Linear, )?; Ok(()) } }
use ggez::{ graphics::{self, Align, DrawParam, PxScale, Text, TextFragment}, mint::Point2, Context, GameResult, }; use glam::Vec2; use crate::{ blocks::Block, constants::{ BLOCK_COLOR_BLUE, BLOCK_COLOR_GREEN, BLOCK_COLOR_MAGENTA, BLOCK_COLOR_ORANGE, BLOCK_COLOR_RED, BLOCK_COLOR_YELLOW, COLOR_GREEN, COLOR_ORANGE, COLOR_YELLOW, GO_BACK_LABEL_POSITION, HOWTOPLAY_AND_ABOUT_AREA_WIDTH, HOWTOPLAY_CONTROLS_CHAR_SCALE, HOWTOPLAY_CONTROLS_LEFTSIDE_TEXT_POSITION_X, HOWTOPLAY_CONTROLS_RIGHTSIDE_TEXT_POSITION_X, HOWTOPLAY_CONTROLS_TEXT_POSITION_Y, HOWTOPLAY_HEADER_BLOCK_SIZE, HOWTOPLAY_HEADER_CONTROLS_POSITION_Y, HOWTOPLAY_HEADER_POSITION_X, HOWTOPLAY_HEADER_SCORING_RULES_POSITION_Y, HOWTOPLAY_SCORING_RULES_CHAR_SCALE, HOWTOPLAY_SCORING_RULES_TEXT_POSITION, }, input::Event, resources::Resources, }; use super::{Stage, StageTrait}; pub struct HowToPlay { blocks_header_controls: [Block; 3], blocks_header_scoring_rules: [Block; 3], go_back_instruction: Text, controls_leftside: Text, controls_rightside: Text, scoring_rules: Text, } impl HowToPlay { pub fn new(resources: &Resources) -> Self { let font = resources.get_fonts().semi_bold; let controls_leftside_str = "\n\ Right:\n\ Left:\n\ Shuffle up:\n\ Shuffle down:\n\ Drop: "; let mut controls_leftside = Text::new(TextFragment { text: controls_leftside_str.to_string(), color: Some(COLOR_ORANGE), font: Some(font), scale: Some(PxScale::from(HOWTOPLAY_CONTROLS_CHAR_SCALE)), }); controls_leftside.set_bounds( Vec2::new(HOWTOPLAY_AND_ABOUT_AREA_WIDTH, f32::INFINITY), Align::Left, ); let controls_rightside_str = "\n\ RIGHT / D\n\ LEFT / A\n\ UP / W\n\ DOWN / S\n\ SPACE "; let mut controls_rightside = Text::new(TextFragment { text: controls_rightside_str.to_string(), color: Some(COLOR_YELLOW), font: Some(font), scale: Some(PxScale::from(HOWTOPLAY_CONTROLS_CHAR_SCALE)), }); controls_rightside.set_bounds( Vec2::new(HOWTOPLAY_AND_ABOUT_AREA_WIDTH, f32::INFINITY), Align::Left, ); let scoring_rules_str = "\n\ Points are gained by matching\n\ same-colored blocks in all 4\n\ directions.\n\n\ The more matched blocks in a\n\ line - the more points gained.\n\n\ Also, the points are\n\ multiplicated by the number of\n\ sequential cascading matchings. 
"; let mut scoring_rules = Text::new(TextFragment { text: scoring_rules_str.to_string(), color: Some(COLOR_GREEN), font: Some(font), scale: Some(PxScale::from(HOWTOPLAY_SCORING_RULES_CHAR_SCALE)), }); scoring_rules.set_bounds( Vec2::new(HOWTOPLAY_AND_ABOUT_AREA_WIDTH, f32::INFINITY), Align::Left, ); Self { blocks_header_controls: Self::create_header_for_controls(), blocks_header_scoring_rules: Self::create_header_for_scoring_rules(), go_back_instruction: resources .get_navigation_instructions() .get_go_back() .clone(), controls_leftside, controls_rightside, scoring_rules, } } fn create_header_for_controls() -> [Block; 3] { [ Block::new( Point2 { x: HOWTOPLAY_HEADER_POSITION_X, y: HOWTOPLAY_HEADER_CONTROLS_POSITION_Y, }, HOWTOPLAY_HEADER_BLOCK_SIZE, BLOCK_COLOR_BLUE, ), Block::new( Point2 { x: HOWTOPLAY_HEADER_POSITION_X + HOWTOPLAY_HEADER_BLOCK_SIZE, y: HOWTOPLAY_HEADER_CONTROLS_POSITION_Y, }, HOWTOPLAY_HEADER_BLOCK_SIZE, BLOCK_COLOR_YELLOW, ), Block::new( Point2 { x: HOWTOPLAY_HEADER_BLOCK_SIZE.mul_add(2.0, HOWTOPLAY_HEADER_POSITION_X), y: HOWTOPLAY_HEADER_CONTROLS_POSITION_Y, }, HOWTOPLAY_HEADER_BLOCK_SIZE, BLOCK_COLOR_GREEN, ), ] } fn create_header_for_scoring_rules() -> [Block; 3] { [ Block::new( Point2 { x: HOWTOPLAY_HEADER_POSITION_X, y: HOWTOPLAY_HEADER_SCORING_RULES_POSITION_Y, }, HOWTOPLAY_HEADER_BLOCK_SIZE, BLOCK_COLOR_RED, ), Block::new( Point2 { x: HOWTOPLAY_HEADER_POSITION_X + HOWTOPLAY_HEADER_BLOCK_SIZE, y: HOWTOPLAY_HEADER_SCORING_RULES_POSITION_Y, }, HOWTOPLAY_HEADER_BLOCK_SIZE, BLOCK_COLOR_ORANGE, ), Block::new( Point2 { x: HOWTOPLAY_HEADER_BLOCK_SIZE.mul_add(2.0, HOWTOPLAY_HEADER_POSITION_X), y: HOWTOPLAY_HEADER_SCORING_RULES_POSITION_Y, }, HOWTOPLAY_HEADER_BLOCK_SIZE, BLOCK_COLOR_MAGENTA, ), ] } } impl StageTrait for HowToPlay {
fn draw(&mut self, ctx: &mut Context) -> GameResult<()> { for mut block in self.blocks_header_controls { block.draw(ctx)?; } for mut block in self.blocks_header_scoring_rules { block.draw(ctx)?; } graphics::queue_text( ctx, &self.go_back_instruction, Vec2::new(GO_BACK_LABEL_POSITION[0], GO_BACK_LABEL_POSITION[1]), None, ); graphics::queue_text( ctx, &self.controls_leftside, Vec2::new( HOWTOPLAY_CONTROLS_LEFTSIDE_TEXT_POSITION_X, HOWTOPLAY_CONTROLS_TEXT_POSITION_Y, ), None, ); graphics::queue_text( ctx, &self.controls_rightside, Vec2::new( HOWTOPLAY_CONTROLS_RIGHTSIDE_TEXT_POSITION_X, HOWTOPLAY_CONTROLS_TEXT_POSITION_Y, ), None, ); graphics::queue_text( ctx, &self.scoring_rules, Vec2::new( HOWTOPLAY_SCORING_RULES_TEXT_POSITION[0], HOWTOPLAY_SCORING_RULES_TEXT_POSITION[1], ), None, ); graphics::draw_queued_text( ctx, DrawParam::default(), None, graphics::FilterMode::Linear, )?; Ok(()) } }
fn update(&mut self, _ctx: &Context, input_event: Event) -> GameResult<Option<Stage>> { if let Event::Escape = input_event { return Ok(Some(Stage::MainMenu)); } Ok(Some(Stage::HowToPlay)) }
strategy: function_block-full_function
[ { "content": "pub fn idx_pair_to_center_point_of_block(idxs: &[usize; 2]) -> Point2<f32> {\n\n Point2 {\n\n x: BLOCK_SIZE.mul_add(idxs[0] as f32, GAME_ARENA_RECT.left()) + BLOCK_SIZE / 2.0,\n\n y: (GAME_ARENA_RECT.bottom() - BLOCK_SIZE * idxs[1] as f32) - BLOCK_SIZE + BLOCK_SIZE / 2.0,\n\n }\n\n}\n\n\n\n/*******************************************************************************\n\n**** BLOCK\n\n*******************************************************************************/\n\n#[derive(Debug, Copy, Clone)]\n\npub struct Block {\n\n rect: Rect,\n\n pub color: BlockColor,\n\n}\n\n\n\nimpl Block {\n\n // TODO glam::Vec2::new(10.0, 10.0) ?\n\n pub fn new(point: Point2<f32>, size: f32, color: BlockColor) -> Self {\n\n Self {\n", "file_path": "src/blocks.rs", "rank": 0, "score": 140884.92225698597 }, { "content": "pub fn run() {\n\n // GET PATH TO RESOURCES DIRECTORY\n\n let resources_dir_path = create_exe_relative_dir_path(\"resources\");\n\n\n\n // CREATE GAME CONTEXT\n\n let ctx_builder_result = ContextBuilder::new(env!(\"CARGO_PKG_NAME\"), env!(\"CARGO_PKG_AUTHORS\"))\n\n .window_mode(\n\n WindowMode::default()\n\n .dimensions(WINDOW_WIDTH, WINDOW_HEIGHT)\n\n .visible(false),\n\n )\n\n .add_resource_path(&resources_dir_path)\n\n .build();\n\n let (mut ctx, event_loop);\n\n match ctx_builder_result {\n\n Ok(ctx_builder) => {\n\n ctx = ctx_builder.0;\n\n event_loop = ctx_builder.1;\n\n }\n\n Err(error) => {\n", "file_path": "src/app.rs", "rank": 1, "score": 84780.75251630877 }, { "content": "pub fn position_to_idx(pos: f32, axis: char) -> usize {\n\n let idx: usize;\n\n if axis == 'x' {\n\n idx = ((pos - GAME_ARENA_RECT.left()) / BLOCK_SIZE) as usize;\n\n } else if axis == 'y' {\n\n idx = ((GAME_ARENA_RECT.bottom() - pos - BLOCK_SIZE) / BLOCK_SIZE) as usize;\n\n } else {\n\n panic!(\"Wrong axis attribute!\")\n\n }\n\n idx\n\n}\n\n\n", "file_path": "src/blocks.rs", "rank": 2, "score": 84057.12767671299 }, { "content": "pub fn idx_to_position(idx: usize, axis: char) -> f32 {\n\n let position: f32;\n\n if axis == 'x' {\n\n position = BLOCK_SIZE.mul_add(idx as f32, GAME_ARENA_RECT.left());\n\n } else if axis == 'y' {\n\n position = (GAME_ARENA_RECT.bottom() - BLOCK_SIZE * idx as f32) - BLOCK_SIZE;\n\n } else {\n\n panic!(\"Wrong axis attribute!\")\n\n }\n\n position\n\n}\n\n\n", "file_path": "src/blocks.rs", "rank": 3, "score": 84057.12767671299 }, { "content": "pub fn create_pile_from_file() -> Pile {\n\n let snapshot_path = \"snapshots/snapshot.txt\";\n\n let matrix_snapshot = match fs::read_to_string(snapshot_path) {\n\n Ok(matrix_as_string) => matrix_as_string,\n\n Err(error) => {\n\n log_error(\n\n \"snapshot::create_pile_from_file\",\n\n &GameError::CustomError(format!(\"{} -> {}\", error, snapshot_path)),\n\n );\n\n panic!(\"{}\", &error);\n\n }\n\n };\n\n\n\n let mut matrix_snapshot_vec = Vec::with_capacity(GAME_ARENA_ROWS);\n\n for line in matrix_snapshot.lines().rev() {\n\n #[allow(clippy::unwrap_used)]\n\n let row: Vec<char> = line\n\n .split_whitespace()\n\n .map(|str| str.chars().next().unwrap())\n\n .collect();\n", "file_path": "src/snapshot.rs", "rank": 4, "score": 74990.26634041016 }, { "content": "pub fn log_error(origin: &str, error: &GameError) {\n\n let mut error_log = None;\n\n\n\n let error_logs_dir_path = create_exe_relative_dir_path(\"__errors\");\n\n if !error_logs_dir_path.exists() && fs::create_dir(&error_logs_dir_path).is_err() {\n\n eprintln!(\"Error logs directory could not be created!\");\n\n }\n\n if error_logs_dir_path.exists() {\n\n let date_and_time = 
Local::now().format(\"on %Y-%m-%d at %H_%M_%S\").to_string();\n\n let error_log_file_name = format!(\"ERROR {}.log\", date_and_time);\n\n let error_log_file_path = error_logs_dir_path.join(error_log_file_name);\n\n if let Ok(created_file) = OpenOptions::new()\n\n .create(true)\n\n .append(true)\n\n .open(error_log_file_path)\n\n {\n\n error_log = Some(created_file);\n\n } else {\n\n eprintln!(\"Error log file could not be created!\");\n\n }\n", "file_path": "src/app.rs", "rank": 5, "score": 63837.777221752774 }, { "content": "struct SelectedItemBlocksPositions {\n\n play: [Point2<f32>; 2],\n\n how_to_play: [Point2<f32>; 2],\n\n about: [Point2<f32>; 2],\n\n}\n\n\n\nimpl SelectedItemBlocksPositions {\n\n fn new(item_widths: [f32; 3]) -> Self {\n\n let positions = Self::create_blocks_positions(item_widths);\n\n Self {\n\n play: positions[0],\n\n how_to_play: positions[1],\n\n about: positions[2],\n\n }\n\n }\n\n\n\n fn create_blocks_positions(item_widths: [f32; 3]) -> Vec<[Point2<f32>; 2]> {\n\n let mut positions = Vec::with_capacity(3);\n\n for i in 0..item_widths.len() {\n\n let item_width_half = item_widths[i] / 2.0;\n", "file_path": "src/stages/main_menu.rs", "rank": 6, "score": 63682.27288899211 }, { "content": "type ExtractedMatchingData = (\n\n Vec<usize>,\n\n Vec<(Color, [Point2<f32>; 2])>,\n\n HashSet<[usize; 2]>,\n\n Vec<Block>,\n\n);\n\n\n\n#[derive(Debug)]\n\npub struct Matching {\n\n num_of_sequential_matchings: usize,\n\n\n\n num_of_matching_blocks: Vec<usize>,\n\n match_direction_indicators: Vec<(Color, [Point2<f32>; 2])>,\n\n unique_matching_blocks_indexes: HashSet<[usize; 2]>,\n\n blocks: Vec<Block>,\n\n\n\n pub blinking_animation_stage: usize,\n\n}\n\n\n\nimpl Matching {\n", "file_path": "src/blocks/matches.rs", "rank": 7, "score": 60110.10373090046 }, { "content": "use std::collections::HashSet;\n\n\n\nuse ggez::{\n\n graphics::{self, Color, DrawParam, Font, Mesh, PxScale, Text, TextFragment},\n\n mint::Point2,\n\n Context, GameResult,\n\n};\n\n\n\nuse crate::{\n\n blocks::idx_pair_to_center_point_of_block,\n\n constants::{\n\n BLOCK_SIZE, GAME_ARENA_RECT, MATCH_COMBO_POINTS_CHAR_SCALE, MATCH_DIRECTION_INDICATOR_WIDTH,\n\n },\n\n};\n\n\n\nuse super::{\n\n idx_to_position,\n\n pile::{Matches, Pile},\n\n Block,\n\n};\n\n\n\n/*******************************************************************************\n\n**** MATCHING\n\n*******************************************************************************/\n", "file_path": "src/blocks/matches.rs", "rank": 8, "score": 59038.18690316086 }, { "content": "\n\n pub fn start_new_animation(\n\n &mut self,\n\n ctx: &Context,\n\n points: usize,\n\n matching_blocks_indexes: &HashSet<[usize; 2]>,\n\n ) {\n\n let points_bckg = Text::new(TextFragment {\n\n text: points.to_string(),\n\n color: Some(Color::BLACK),\n\n font: Some(self.font),\n\n scale: Some(PxScale::from(MATCH_COMBO_POINTS_CHAR_SCALE)),\n\n });\n\n let points = Text::new(TextFragment {\n\n text: points.to_string(),\n\n color: Some(Color::WHITE),\n\n font: Some(self.font),\n\n scale: Some(PxScale::from(MATCH_COMBO_POINTS_CHAR_SCALE)),\n\n });\n\n\n", "file_path": "src/blocks/matches.rs", "rank": 9, "score": 59036.25770113435 }, { "content": " /*#[inline]\n\n pub fn get_num_of_sequential_matchings(&self) -> usize {\n\n self.num_of_sequential_matchings\n\n }*/\n\n\n\n pub fn get_blocks(&self) -> Vec<Block> {\n\n self.blocks.clone()\n\n }\n\n\n\n pub fn draw(&mut self, ctx: &mut Context) -> GameResult<()> {\n\n if self.blinking_animation_stage % 2 == 0 {\n\n for block in &mut 
self.blocks {\n\n block.draw(ctx)?;\n\n }\n\n } else {\n\n for (color, points) in &self.match_direction_indicators {\n\n let line_mesh =\n\n Mesh::new_line(ctx, points, MATCH_DIRECTION_INDICATOR_WIDTH, *color)?;\n\n graphics::draw(ctx, &line_mesh, DrawParam::default())?;\n\n }\n", "file_path": "src/blocks/matches.rs", "rank": 10, "score": 59035.554326917685 }, { "content": "pub struct ComboPointsAnimation {\n\n points_bckg: Text,\n\n points: Text,\n\n position: Point2<f32>,\n\n color_bckg: Color,\n\n color: Color,\n\n alpha: f32,\n\n}\n\n\n\nimpl ComboPointsAnimation {\n\n pub fn new(points_bckg: Text, points: Text, position: Point2<f32>) -> Self {\n\n Self {\n\n points_bckg,\n\n points,\n\n position,\n\n color_bckg: Color::BLACK,\n\n color: Color::WHITE,\n\n alpha: 1.0,\n\n }\n\n }\n", "file_path": "src/blocks/matches.rs", "rank": 11, "score": 59029.555391224894 }, { "content": " pub fn new(matches: &Matches, pile: &mut Pile) -> Self {\n\n let (num_of_matching_blocks, match_direction_indicators, unique_match_indexes, blocks) =\n\n Self::extract_matching_data_from_matches(matches, pile);\n\n\n\n Self {\n\n num_of_sequential_matchings: 1,\n\n\n\n num_of_matching_blocks,\n\n match_direction_indicators,\n\n unique_matching_blocks_indexes: unique_match_indexes,\n\n blocks,\n\n\n\n blinking_animation_stage: 0,\n\n }\n\n }\n\n\n\n pub fn new_chained_match(&mut self, matches: &Matches, pile: &mut Pile) {\n\n self.num_of_sequential_matchings += 1;\n\n\n\n let (num_of_matching_blocks, match_direction_indicators, unique_match_indexes, blocks) =\n", "file_path": "src/blocks/matches.rs", "rank": 12, "score": 59025.36519754934 }, { "content": " }\n\n Ok(())\n\n }\n\n}\n\n\n\n/*******************************************************************************\n\n**** COMBO POINTS ANIMATION\n\n*******************************************************************************/\n\npub struct ComboPointsAnimationsHolder {\n\n font: Font,\n\n pub current_animations: Vec<ComboPointsAnimation>,\n\n}\n\n\n\nimpl ComboPointsAnimationsHolder {\n\n pub fn new(font: Font) -> Self {\n\n Self {\n\n font,\n\n current_animations: Vec::with_capacity(4),\n\n }\n\n }\n", "file_path": "src/blocks/matches.rs", "rank": 13, "score": 59023.51015352903 }, { "content": " // lower-right corner\n\n bckg_position = Point2 {\n\n x: self.position.x + bckg_offset,\n\n y: self.position.y + bckg_offset,\n\n };\n\n graphics::queue_text(ctx, &self.points_bckg, bckg_position, None);\n\n // down\n\n bckg_position = Point2 {\n\n x: self.position.x,\n\n y: self.position.y + bckg_offset,\n\n };\n\n graphics::queue_text(ctx, &self.points_bckg, bckg_position, None);\n\n // lower-left corner\n\n bckg_position = Point2 {\n\n x: self.position.x - bckg_offset,\n\n y: self.position.y + bckg_offset,\n\n };\n\n graphics::queue_text(ctx, &self.points_bckg, bckg_position, None);\n\n // left\n\n bckg_position = Point2 {\n", "file_path": "src/blocks/matches.rs", "rank": 14, "score": 59023.185132751736 }, { "content": "\n\n pub fn update(&mut self) -> bool {\n\n self.position.x += 0.05;\n\n self.position.y -= 0.5;\n\n\n\n self.color_bckg.a = self.alpha;\n\n self.points_bckg.fragments_mut()[0].color = Some(self.color_bckg);\n\n\n\n self.color.a = self.alpha;\n\n self.points.fragments_mut()[0].color = Some(self.color);\n\n\n\n if self.alpha <= 0.0 {\n\n return true;\n\n }\n\n self.alpha -= 0.016;\n\n false\n\n }\n\n\n\n pub fn draw(&mut self, ctx: &mut Context) -> GameResult {\n\n // OUTLINE\n", "file_path": "src/blocks/matches.rs", "rank": 15, "score": 
59022.583668716645 }, { "content": " pile: &mut Pile,\n\n ) -> ExtractedMatchingData {\n\n let mut num_of_matching_blocks = Vec::new();\n\n let mut match_direction_indicators = Vec::new();\n\n let mut unique_match_indexes = HashSet::new();\n\n for matches in matches.values() {\n\n for r#match in matches {\n\n num_of_matching_blocks.push(r#match.1.len());\n\n\n\n #[allow(clippy::unwrap_used)]\n\n let pos_first = r#match.1.first().unwrap();\n\n let start_point = idx_pair_to_center_point_of_block(pos_first);\n\n\n\n #[allow(clippy::unwrap_used)]\n\n let pos_last = r#match.1.last().unwrap();\n\n let end_point = idx_pair_to_center_point_of_block(pos_last);\n\n\n\n match_direction_indicators.push((r#match.0, [start_point, end_point]));\n\n\n\n for position in &r#match.1 {\n", "file_path": "src/blocks/matches.rs", "rank": 16, "score": 59022.55707551377 }, { "content": " let points_rect = points.dimensions(ctx);\n\n let points_dimensions = [points_rect.w, points_rect.h];\n\n let starting_position =\n\n Self::calculate_animation_starting_position(matching_blocks_indexes, points_dimensions);\n\n\n\n let new_animation = ComboPointsAnimation::new(points_bckg, points, starting_position);\n\n self.current_animations.push(new_animation);\n\n }\n\n\n\n fn calculate_animation_starting_position(\n\n unique_matching_blocks_indexes: &HashSet<[usize; 2]>,\n\n points_dimensions: [f32; 2],\n\n ) -> Point2<f32> {\n\n #[allow(clippy::unwrap_used)]\n\n let leftmost_x_idx = unique_matching_blocks_indexes\n\n .iter()\n\n .map(|point| point[0])\n\n .reduce(|accum, item| if accum <= item { accum } else { item })\n\n .unwrap();\n\n let leftmost_position = idx_to_position(leftmost_x_idx, 'x');\n", "file_path": "src/blocks/matches.rs", "rank": 17, "score": 59021.03950599921 }, { "content": " let bckg_offset = 1.5;\n\n let mut bckg_position;\n\n // up\n\n bckg_position = Point2 {\n\n x: self.position.x,\n\n y: self.position.y - bckg_offset,\n\n };\n\n graphics::queue_text(ctx, &self.points_bckg, bckg_position, None);\n\n // upper-right corner\n\n bckg_position = Point2 {\n\n x: self.position.x + bckg_offset,\n\n y: self.position.y - bckg_offset,\n\n };\n\n graphics::queue_text(ctx, &self.points_bckg, bckg_position, None);\n\n // right\n\n bckg_position = Point2 {\n\n x: self.position.x + bckg_offset,\n\n y: self.position.y,\n\n };\n\n graphics::queue_text(ctx, &self.points_bckg, bckg_position, None);\n", "file_path": "src/blocks/matches.rs", "rank": 18, "score": 59020.8323880255 }, { "content": " Self::extract_matching_data_from_matches(matches, pile);\n\n\n\n self.num_of_matching_blocks = num_of_matching_blocks;\n\n self.match_direction_indicators = match_direction_indicators;\n\n self.unique_matching_blocks_indexes = unique_match_indexes;\n\n self.blocks = blocks;\n\n\n\n self.blinking_animation_stage = 0;\n\n }\n\n\n\n #[inline]\n\n pub fn get_scoring_data(&self) -> (&Vec<usize>, usize) {\n\n (\n\n &self.num_of_matching_blocks,\n\n self.num_of_sequential_matchings,\n\n )\n\n }\n\n\n\n fn extract_matching_data_from_matches(\n\n matches: &Matches,\n", "file_path": "src/blocks/matches.rs", "rank": 19, "score": 59020.4004724509 }, { "content": " #[allow(clippy::unwrap_used)]\n\n let rightmost_x_idx = unique_matching_blocks_indexes\n\n .iter()\n\n .map(|point| point[0])\n\n .reduce(|accum, item| if accum >= item { accum } else { item })\n\n .unwrap();\n\n let rightmost_position = idx_to_position(rightmost_x_idx, 'x');\n\n let mut horizontal_middle_position =\n\n (leftmost_position + rightmost_position + BLOCK_SIZE) / 
2.0;\n\n\n\n #[allow(clippy::unwrap_used)]\n\n let highest_y_idx = unique_matching_blocks_indexes\n\n .iter()\n\n .map(|point| point[1])\n\n .reduce(|accum, item| if accum >= item { accum } else { item })\n\n .unwrap();\n\n let highest_position = idx_to_position(highest_y_idx, 'y');\n\n #[allow(clippy::unwrap_used)]\n\n let lowest_y_idx = unique_matching_blocks_indexes\n\n .iter()\n", "file_path": "src/blocks/matches.rs", "rank": 20, "score": 59020.13116503087 }, { "content": " .map(|point| point[1])\n\n .reduce(|accum, item| if accum <= item { accum } else { item })\n\n .unwrap();\n\n let lowest_position = idx_to_position(lowest_y_idx, 'y');\n\n let mut vertical_middle_position = (highest_position + lowest_position) / 2.0;\n\n\n\n let points_width = points_dimensions[0];\n\n let horizontal_correction = points_width / 2.0;\n\n horizontal_middle_position -= horizontal_correction;\n\n if horizontal_middle_position < GAME_ARENA_RECT.left() + 1.0 {\n\n horizontal_middle_position = GAME_ARENA_RECT.left() + 1.0;\n\n } else if horizontal_middle_position + points_width + 2.0 > GAME_ARENA_RECT.right() {\n\n horizontal_middle_position = GAME_ARENA_RECT.right() - points_width - 2.0;\n\n }\n\n\n\n let vertical_correction = points_dimensions[1] / 6.0;\n\n vertical_middle_position += vertical_correction;\n\n\n\n Point2 {\n\n x: horizontal_middle_position,\n", "file_path": "src/blocks/matches.rs", "rank": 21, "score": 59018.46463768987 }, { "content": " x: self.position.x - bckg_offset,\n\n y: self.position.y,\n\n };\n\n graphics::queue_text(ctx, &self.points_bckg, bckg_position, None);\n\n // upper-left corner\n\n bckg_position = Point2 {\n\n x: self.position.x - bckg_offset,\n\n y: self.position.y - bckg_offset,\n\n };\n\n graphics::queue_text(ctx, &self.points_bckg, bckg_position, None);\n\n\n\n graphics::queue_text(ctx, &self.points, self.position, None);\n\n\n\n graphics::draw_queued_text(\n\n ctx,\n\n DrawParam::default(),\n\n None,\n\n graphics::FilterMode::Linear,\n\n )?;\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/blocks/matches.rs", "rank": 22, "score": 59017.30018229419 }, { "content": " unique_match_indexes.insert(*position);\n\n }\n\n }\n\n }\n\n\n\n let matching_blocks = pile.extract_matching_blocks(&unique_match_indexes);\n\n (\n\n num_of_matching_blocks,\n\n match_direction_indicators,\n\n unique_match_indexes,\n\n matching_blocks,\n\n )\n\n }\n\n\n\n #[inline]\n\n pub fn get_unique_matching_blocks_indexes(&self) -> &HashSet<[usize; 2]> {\n\n &self.unique_matching_blocks_indexes\n\n }\n\n\n\n // TODO remove\n", "file_path": "src/blocks/matches.rs", "rank": 23, "score": 59016.16193635688 }, { "content": " .swap_remove(idx_of_finished_animation);\n\n }\n\n }\n\n }\n\n\n\n pub fn draw_animations(&mut self, ctx: &mut Context) -> GameResult {\n\n if !self.current_animations.is_empty() {\n\n for animation in &mut self.current_animations {\n\n animation.draw(ctx)?;\n\n }\n\n }\n\n Ok(())\n\n }\n\n\n\n pub fn reset(&mut self) {\n\n self.current_animations.clear();\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n", "file_path": "src/blocks/matches.rs", "rank": 24, "score": 59015.18443865417 }, { "content": " y: vertical_middle_position,\n\n }\n\n }\n\n\n\n pub fn update_animations(&mut self) {\n\n if !self.current_animations.is_empty() {\n\n //println!(\"NUM ANIMATIONS: {}\", self.current_animations.len());\n\n let mut idx_of_finished_animation = None;\n\n for idx in 0..self.current_animations.len() {\n\n let is_over = self.current_animations[idx].update();\n\n if is_over {\n\n idx_of_finished_animation = 
Some(idx);\n\n }\n\n }\n\n if let Some(idx_of_finished_animation) = idx_of_finished_animation {\n\n /*println!(\n\n \"---- removing animation with idx {}\",\n\n idx_of_finished_animation\n\n );*/\n\n self.current_animations\n", "file_path": "src/blocks/matches.rs", "rank": 25, "score": 59010.71662437125 }, { "content": "pub trait StageTrait {\n\n fn update(&mut self, ctx: &Context, user_input: Event) -> GameResult<Option<Stage>>;\n\n fn draw(&mut self, ctx: &mut Context) -> GameResult<()>;\n\n}\n", "file_path": "src/stages.rs", "rank": 26, "score": 57560.827633920344 }, { "content": "fn main() {\n\n set_windows_exe_icon();\n\n write_build_time_to_file();\n\n}\n\n\n", "file_path": "build.rs", "rank": 27, "score": 46172.03426889798 }, { "content": "struct Factory {\n\n color: Option<Color>,\n\n font: Option<Font>,\n\n scale: Option<PxScale>,\n\n}\n\n\n\nimpl Factory {\n\n pub fn new(font: Font) -> Self {\n\n Self {\n\n color: Some(COLOR_GRAY),\n\n font: Some(font),\n\n scale: Some(PxScale::from(NAVIGATION_INSTRUCTIONS_CHAR_SCALE)),\n\n }\n\n }\n\n\n\n fn create_label(&self, text: &str) -> Text {\n\n Text::new(TextFragment {\n\n text: text.to_string(),\n\n color: self.color,\n\n font: self.font,\n\n scale: self.scale,\n\n })\n\n }\n\n}\n", "file_path": "src/navigation_instructions.rs", "rank": 28, "score": 44993.33577749179 }, { "content": "fn main() {\n\n app::run();\n\n}\n", "file_path": "src/main.rs", "rank": 29, "score": 44630.52570066055 }, { "content": "struct GameArena {\n\n border_rect: Rect,\n\n border_color: Color,\n\n}\n\n\n\nimpl GameArena {\n\n fn new() -> Self {\n\n // compensating for border line width\n\n let mut border_rect = GAME_ARENA_RECT;\n\n border_rect.w += 1.0;\n\n border_rect.h += 1.0;\n\n\n\n Self {\n\n border_rect,\n\n border_color: COLOR_GRAY,\n\n }\n\n }\n\n\n\n fn draw(&mut self, ctx: &mut Context) -> GameResult<()> {\n\n let game_arena_mesh = Mesh::new_rectangle(\n", "file_path": "src/stages/playing.rs", "rank": 30, "score": 43711.281715748795 }, { "content": "struct HudLabels {\n\n game_info_playing_state_ready: Text,\n\n game_info_playing_state_go: Text,\n\n game_info_playing_state_speedup: Text,\n\n game_info_playing_state_maxspeed: Text,\n\n game_info_playing_state_pause: Text,\n\n game_info_playing_state_gameover: Text,\n\n game_info_instructions_ready: Text,\n\n game_info_instructions_go: Text,\n\n game_info_instructions_pause: Text,\n\n game_info_instructions_gameover: Text,\n\n scoring_score: Text,\n\n scoring_maxcombo: Text,\n\n scoring_highscore: Text,\n\n}\n\n\n\nimpl HudLabels {\n\n pub fn new(resources: &Resources) -> Self {\n\n let fonts: Fonts = resources.get_fonts();\n\n let font_extra_bold = fonts.extra_bold;\n", "file_path": "src/stages/playing/hud.rs", "rank": 31, "score": 42537.16797572453 }, { "content": "struct ScoringValues {\n\n score: Text,\n\n maxcombo: Text,\n\n highscore: Text,\n\n}\n\n\n\nimpl ScoringValues {\n\n // FIXME refactor\n\n pub fn new(font: Font, highscore: usize) -> Self {\n\n Self {\n\n score: Self::set_value(font, 0),\n\n maxcombo: Self::set_value(font, 0),\n\n highscore: Self::set_value(font, highscore),\n\n }\n\n }\n\n\n\n fn set_value(font: Font, value: usize) -> Text {\n\n // FIXME refactor: get rid of the font function argument\n\n Text::new(TextFragment {\n\n text: value.to_string(),\n\n color: Some(COLOR_LIGHT_GRAY),\n\n font: Some(font),\n\n scale: Some(PxScale::from(HUD_SCORING_CHAR_SCALE)),\n\n })\n\n }\n\n}\n", "file_path": "src/stages/playing/hud.rs", "rank": 32, "score": 42537.16797572453 }, { "content": 
"struct ItemLabels {\n\n play: Text,\n\n how_to_play: Text,\n\n about: Text,\n\n}\n\n\n\nimpl ItemLabels {\n\n pub fn new(font: Font) -> Self {\n\n Self {\n\n play: Self::create_item_label(font, \"PLAY\", COLOR_GREEN),\n\n how_to_play: Self::create_item_label(font, \"HOW TO PLAY\", COLOR_YELLOW),\n\n about: Self::create_item_label(font, \"ABOUT\", COLOR_BLUE),\n\n }\n\n }\n\n\n\n fn create_item_label(font: Font, item: &str, text_color: Color) -> Text {\n\n let mut item = Text::new(TextFragment {\n\n text: item.to_string(),\n\n color: Some(text_color),\n\n font: Some(font),\n", "file_path": "src/stages/main_menu.rs", "rank": 33, "score": 42537.16797572453 }, { "content": "fn write_build_time_to_file() {\n\n let file_path = format!(\"{}/build-time\", env::var(\"OUT_DIR\").unwrap());\n\n let mut file = File::create(&file_path).unwrap();\n\n\n\n let build_time = chrono::Local::now()\n\n .format(\"Built on %Y-%m-%d at %H:%M:%S\")\n\n .to_string();\n\n\n\n write!(file, r#\"\"{}\"\"#, build_time).ok();\n\n}\n", "file_path": "build.rs", "rank": 34, "score": 41953.973295141026 }, { "content": "fn set_windows_exe_icon() {\n\n if cfg!(target_os = \"windows\") {\n\n let mut res = winres::WindowsResource::new();\n\n res.set_icon(\"icon.ico\");\n\n res.compile().unwrap();\n\n }\n\n}\n\n\n", "file_path": "build.rs", "rank": 35, "score": 41953.973295141026 }, { "content": "# Color Columns\n\nA falling-blocks-type 2D game with a simple but addictive gameplay\n\n\n\n![Gameplay screenshot](github-resources/cc_gameplay.png)\n\n\n\n## About\n\nThe game is a remake of various old, \"classic\", columns-type games.\n\nAnd it's made in an attempt to learn [Rust programming language](https://www.rust-lang.org/).\n\n\n\nGame-dev framework used is [ggez](https://ggez.rs/).\n\n\n\n## Download\n\nAt the moment, only Windows version is available, and it is [freely downloadable as a .zip file](https://github.com/rdrmic/color-columns/raw/main/dist/color-columns-v018.zip) that contains a standalone / portable / no-installer .exe file.\n\n\n\nHowever, prerequisite is Microsoft C++ runtime, which can be downloaded [here](https://docs.microsoft.com/en-us/cpp/windows/latest-supported-vc-redist?view=msvc-170#visual-studio-2015-2017-2019-and-2022) in case you don't have it on your computer already.\n", "file_path": "README.md", "rank": 36, "score": 41539.8797621695 }, { "content": "struct SelectedItemIndicator {\n\n blocks_positions: SelectedItemBlocksPositions,\n\n}\n\n\n\nimpl SelectedItemIndicator {\n\n fn new(item_widths: [f32; 3]) -> Self {\n\n Self {\n\n blocks_positions: SelectedItemBlocksPositions::new(item_widths),\n\n }\n\n }\n\n\n\n fn create_blocks(&self, selected_item: Stage) -> [Block; 2] {\n\n let blocks_positions;\n\n let color;\n\n match selected_item {\n\n Stage::HowToPlay => {\n\n blocks_positions = self.blocks_positions.how_to_play;\n\n color = BLOCK_COLOR_YELLOW;\n\n }\n\n Stage::About => {\n", "file_path": "src/stages/main_menu.rs", "rank": 37, "score": 41457.9134550831 }, { "content": "use ggez::Context;\n\n\n\nuse crate::{fonts::Fonts, navigation_instructions::NavigationInstructions};\n\n\n\npub struct Resources {\n\n fonts: Fonts,\n\n navigation_instructions: NavigationInstructions,\n\n}\n\n\n\nimpl Resources {\n\n pub fn new(ctx: &mut Context) -> Self {\n\n let fonts = Fonts::load(ctx);\n\n Self {\n\n fonts,\n\n navigation_instructions: NavigationInstructions::new(fonts.light_italic),\n\n }\n\n }\n\n\n\n pub const fn get_fonts(&self) -> Fonts {\n\n self.fonts\n\n }\n\n\n\n pub const fn 
get_navigation_instructions(&self) -> &NavigationInstructions {\n\n &self.navigation_instructions\n\n }\n\n}\n", "file_path": "src/resources.rs", "rank": 38, "score": 32355.88656571045 }, { "content": "use ggez::{graphics::Font, Context};\n\n\n\nuse crate::app::log_error;\n\n\n\n#[derive(Clone, Copy)]\n\npub struct Fonts {\n\n pub extra_bold: Font,\n\n pub bold: Font,\n\n pub semi_bold: Font,\n\n pub light_italic: Font,\n\n}\n\n\n\nimpl Fonts {\n\n pub fn load(ctx: &mut Context) -> Self {\n\n Self {\n\n extra_bold: Self::load_font(ctx, \"/ArgentumSans-ExtraBold.otf\"),\n\n bold: Self::load_font(ctx, \"/ArgentumSans-ExtraBold.otf\"),\n\n semi_bold: Self::load_font(ctx, \"/ArgentumSans-SemiBold.otf\"),\n\n light_italic: Self::load_font(ctx, \"/ArgentumSans-LightItalic.otf\"),\n\n }\n", "file_path": "src/fonts.rs", "rank": 39, "score": 32317.04638953072 }, { "content": " }\n\n\n\n fn load_font(ctx: &mut Context, filename: &str) -> Font {\n\n match Font::new(ctx, filename) {\n\n Ok(font) => font,\n\n Err(error) => {\n\n log_error(\"load_font\", &error);\n\n panic!(\"{}\", &error);\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/fonts.rs", "rank": 40, "score": 32304.04215952955 }, { "content": "fn create_exe_relative_dir_path(dir: &str) -> PathBuf {\n\n let root_path = if let Ok(manifest_dir) = env::var(\"CARGO_MANIFEST_DIR\") {\n\n PathBuf::from(manifest_dir)\n\n } else {\n\n PathBuf::from(\"./\")\n\n };\n\n root_path.join(dir)\n\n}\n\n\n", "file_path": "src/app.rs", "rank": 41, "score": 32187.57147670118 }, { "content": "\n\n// HOW TO PLAY & ABOUT\n\npub const GO_BACK_LABEL_POSITION: [f32; 2] = [GAME_ARENA_RECT.left() + 50.0, 30.0];\n\npub const HOWTOPLAY_HEADER_BLOCK_SIZE: f32 = 17.5;\n\npub const HOWTOPLAY_HEADER_POSITION_X: f32 =\n\n WINDOW_WIDTH / 2.0 - (HOWTOPLAY_HEADER_BLOCK_SIZE * 3.0 / 2.0);\n\npub const HOWTOPLAY_HEADER_CONTROLS_POSITION_Y: f32 = 115.0;\n\npub const HOWTOPLAY_HEADER_SCORING_RULES_POSITION_Y: f32 = 275.0;\n\npub const HOWTOPLAY_AND_ABOUT_AREA_WIDTH: f32 = 300.0;\n\npub const HOWTOPLAY_AND_ABOUT_TEXT_POSITION_X: f32 = 73.0;\n\npub const HOWTOPLAY_CONTROLS_CHAR_SCALE: f32 = 19.5;\n\npub const HOWTOPLAY_CONTROLS_TEXT_POSITION_Y: f32 = 132.5;\n\npub const HOWTOPLAY_CONTROLS_LEFTSIDE_TEXT_POSITION_X: f32 = HOWTOPLAY_AND_ABOUT_TEXT_POSITION_X;\n\npub const HOWTOPLAY_CONTROLS_RIGHTSIDE_TEXT_POSITION_X: f32 =\n\n HOWTOPLAY_AND_ABOUT_TEXT_POSITION_X + 177.0;\n\npub const HOWTOPLAY_SCORING_RULES_CHAR_SCALE: f32 = 19.5;\n\npub const HOWTOPLAY_SCORING_RULES_TEXT_POSITION: [f32; 2] =\n\n [HOWTOPLAY_AND_ABOUT_TEXT_POSITION_X, 290.0];\n\npub const ABOUT_CHAR_SCALE: f32 = 22.0;\n\npub const ABOUT_TEXT_POSITION: [f32; 2] = [HOWTOPLAY_AND_ABOUT_TEXT_POSITION_X, 155.0];\n", "file_path": "src/constants.rs", "rank": 42, "score": 32115.703766642557 }, { "content": "#![allow(clippy::cast_precision_loss, clippy::suboptimal_flops)]\n\n\n\nuse ggez::graphics::{Color, Rect};\n\n\n\nuse crate::blocks::BlockColor;\n\n\n\n// APP\n\npub const APP_NAME: &str = \"Color Columns\";\n\npub const BUILD_TIME: &str = include!(concat!(env!(\"OUT_DIR\"), \"/build-time\"));\n\n\n\n// WINDOW\n\npub const WINDOW_WIDTH: f32 = 400.0;\n\npub const WINDOW_HEIGHT: f32 = 600.0;\n\n\n\n// COLORS\n\npub const COLOR_GRAY: Color = Color::new(1.0, 1.0, 1.0, 0.2);\n\npub const COLOR_LIGHT_GRAY: Color = Color::new(1.0, 1.0, 1.0, 0.4);\n\npub const COLOR_RED: Color = Color::new(1.0, 0.0, 0.1, 1.0);\n\npub const COLOR_GREEN: Color = Color::new(0.0, 0.72, 0.0, 1.0);\n\npub const COLOR_BLUE: Color = Color::new(0.0, 0.15, 1.0, 
1.0);\n", "file_path": "src/constants.rs", "rank": 43, "score": 32110.521214452758 }, { "content": "];\n\n\n\n// GAME ARENA\n\npub const GAME_ARENA_COLUMNS: usize = 9;\n\npub const GAME_ARENA_ROWS: usize = 18;\n\npub const GAME_ARENA_MARGIN: f32 = 30.0;\n\npub const GAME_ARENA_RECT: Rect = Rect::new(\n\n WINDOW_WIDTH - GAME_ARENA_COLUMNS as f32 * BLOCK_SIZE - GAME_ARENA_MARGIN,\n\n WINDOW_HEIGHT - GAME_ARENA_ROWS as f32 * BLOCK_SIZE - GAME_ARENA_MARGIN,\n\n GAME_ARENA_COLUMNS as f32 * BLOCK_SIZE,\n\n GAME_ARENA_ROWS as f32 * BLOCK_SIZE,\n\n);\n\npub const GAME_ARENA_MARGIN_LEFT: f32 = BLOCK_SIZE * 2.0 + 4.0;\n\n\n\n// GAMEPLAY\n\npub const FPS: u32 = 60;\n\npub const STARTING_NUM_TICKS_FOR_CARGO_DESCENT: usize = 45;\n\npub const NUM_DESCENDED_CARGOES_GAMEPLAY_ACCELERATION: usize = 9;\n\npub const NUM_TICKS_GAMEPLAY_ACCELERATION_LIMIT: usize = 18;\n\npub const MATCH_DIRECTION_INDICATOR_WIDTH: f32 = 2.0;\n\npub const MATCH_COMBO_POINTS_CHAR_SCALE: f32 = 18.0;\n\npub const NUM_TICKS_SEQUENCE_FOR_MATCHES_REMOVAL: [usize; 4] = [15, 12, 12, 14];\n\npub const NUM_TICKS_FOR_PAUSED_BLOCKS_SHUFFLE: usize = 8;\n", "file_path": "src/constants.rs", "rank": 44, "score": 32108.91806982061 }, { "content": "pub const ABOUT_VERSION_AND_BUILDTIME_CHAR_SCALE: f32 = 17.0;\n\npub const ABOUT_VERSION_AND_BUILDTIME_POSITION: [f32; 2] =\n\n [HOWTOPLAY_AND_ABOUT_TEXT_POSITION_X, 470.0];\n\npub const ABOUT_VERSION_AND_BUILDTIME_AREA_WIDTH: f32 = 260.0;\n\n\n\n// HUD\n\npub const HUD_LABEL_PLAYING_STATE_CHAR_SCALE: f32 = 40.0;\n\npub const HUD_LABEL_PLAYING_STATE_POSITION: [f32; 2] =\n\n [GAME_ARENA_RECT.left() + 1.0, GAME_ARENA_MARGIN];\n\npub const NUM_TICKS_FOR_PLAYING_STATE_GO_BLINKING: usize = 18;\n\npub const NUM_TICKS_FOR_PLAYING_STATE_SPEEDUP_BLINKING: usize = 15;\n\npub const HUD_LABEL_INSTRUCTIONS_POSITION: [f32; 2] = [GAME_ARENA_RECT.left() + 1.0, 85.0];\n\npub const HUD_LABEL_SCORING_CHAR_SCALE: f32 = 19.0;\n\npub const HUD_LABEL_SCORING_POSITION_X: f32 = GAME_ARENA_MARGIN;\n\npub const HUD_LABEL_SCORING_DELTA_POSITION_Y: f32 = 80.0;\n\npub const HUD_LABEL_SCORE_POSITION_Y: f32 = 345.0;\n\npub const HUD_LABEL_SCORE_POSITION: [f32; 2] =\n\n [HUD_LABEL_SCORING_POSITION_X, HUD_LABEL_SCORE_POSITION_Y];\n\npub const HUD_LABEL_MAXCOMBO_POSITION: [f32; 2] = [\n\n HUD_LABEL_SCORING_POSITION_X,\n", "file_path": "src/constants.rs", "rank": 45, "score": 32107.93869982798 }, { "content": " code: 'M',\n\n color: COLOR_MAGENTA,\n\n};\n\npub const BLOCK_COLOR_ORANGE: BlockColor = BlockColor {\n\n code: 'O',\n\n color: COLOR_ORANGE,\n\n};\n\npub const BLOCK_COLOR_YELLOW: BlockColor = BlockColor {\n\n code: 'Y',\n\n color: COLOR_YELLOW,\n\n};\n\n\n\n// TITLE SCREEN\n\npub const TITLE_SCREEN_AREA_WIDTH: f32 = 300.0;\n\npub const TITLE_SCREEN_TITLE_CHAR_SCALE: f32 = 63.0;\n\npub const TITLE_SCREEN_TITLE_POSITION: [f32; 2] = [50.0, 135.0];\n\npub const TITLE_SCREEN_NAVIGATION_INSTRUCTIONS_CHAR_SCALE: f32 = 16.0;\n\npub const TITLE_SCREEN_NAVIGATION_INSTRUCTIONS_POSITION: [f32; 2] = [50.0, 274.0];\n\npub const TITLE_SCREEN_NUM_FRAMES_FOR_ANIMATION: usize = 15;\n\n\n", "file_path": "src/constants.rs", "rank": 46, "score": 32107.72747547714 }, { "content": "pub const COLOR_MAGENTA: Color = Color::new(0.7, 0.0, 0.65, 1.0);\n\npub const COLOR_ORANGE: Color = Color::new(1.0, 0.45, 0.0, 1.0);\n\npub const COLOR_YELLOW: Color = Color::new(0.85, 0.75, 0.0, 1.0);\n\n\n\n// BLOCKS\n\npub const BLOCK_SIZE: f32 = 23.0;\n\npub const NO_BLOCK_CODE: char = '.';\n\npub const BLOCK_COLOR_RED: BlockColor = BlockColor {\n\n code: 'R',\n\n 
color: COLOR_RED,\n\n};\n\npub const BLOCK_COLOR_GREEN: BlockColor = BlockColor {\n\n code: 'G',\n\n color: COLOR_GREEN,\n\n};\n\npub const BLOCK_COLOR_BLUE: BlockColor = BlockColor {\n\n code: 'B',\n\n color: COLOR_BLUE,\n\n};\n\npub const BLOCK_COLOR_MAGENTA: BlockColor = BlockColor {\n", "file_path": "src/constants.rs", "rank": 47, "score": 32105.898091646188 }, { "content": "// MAIN MENU\n\npub const MAIN_MENU_ITEM_CHAR_SCALE: f32 = 30.0;\n\npub const MAIN_MENU_ITEM_AREA_X: f32 = 50.0;\n\npub const MAIN_MENU_ITEM_AREA_WIDTH: f32 = 300.0;\n\npub const MAIN_MENU_ITEM_AREA_CENTER: f32 = MAIN_MENU_ITEM_AREA_X + MAIN_MENU_ITEM_AREA_WIDTH / 2.0;\n\npub const MAIN_MENU_SELECTED_ITEM_BLOCK_SIZE: f32 = 18.0;\n\npub const MAIN_MENU_SELECTED_ITEM_BLOCK_FADE_IN_TRESHOLD: f32 = 0.75;\n\npub const MAIN_MENU_SELECTED_ITEM_BLOCK_MARGIN_X: f32 = MAIN_MENU_SELECTED_ITEM_BLOCK_SIZE * 0.725;\n\npub const MAIN_MENU_SELECTED_ITEM_BLOCK_MARGIN_Y: f32 = 6.0;\n\npub const MAIN_MENU_ITEM_DELTA_Y: f32 = 125.0;\n\npub const MAIN_MENU_TOP_ITEM_Y: f32 = 160.0;\n\npub const MAIN_MENU_ITEMS_Y_POSITIONS: [f32; 3] = [\n\n MAIN_MENU_TOP_ITEM_Y,\n\n MAIN_MENU_TOP_ITEM_Y + MAIN_MENU_ITEM_DELTA_Y,\n\n MAIN_MENU_TOP_ITEM_Y + MAIN_MENU_ITEM_DELTA_Y * 2.0,\n\n];\n\npub const MAIN_MENU_SELECTED_ITEM_BLOCK_ALPHA_INCREMENT_ACCELERATION: f32 = 0.001;\n\n\n\n// NAVIGATION INSTRUCTIONS\n\npub const NAVIGATION_INSTRUCTIONS_CHAR_SCALE: f32 = 19.0;\n", "file_path": "src/constants.rs", "rank": 48, "score": 32104.8185875748 }, { "content": " HUD_LABEL_SCORE_POSITION_Y + HUD_LABEL_SCORING_DELTA_POSITION_Y,\n\n];\n\npub const HUD_LABEL_HIGHSCORE_POSITION: [f32; 2] = [\n\n HUD_LABEL_SCORING_POSITION_X,\n\n HUD_LABEL_SCORE_POSITION_Y + HUD_LABEL_SCORING_DELTA_POSITION_Y * 2.0,\n\n];\n\npub const HUD_SCORING_CHAR_SCALE: f32 = HUD_LABEL_SCORING_CHAR_SCALE;\n\npub const HUD_SCORING_POSITION_X: f32 = HUD_LABEL_SCORING_POSITION_X + 1.0;\n\npub const HUD_SCORING_DELTA_POSITION_Y: f32 = 20.0;\n\npub const HUD_SCORE_POSITION: [f32; 2] = [\n\n HUD_SCORING_POSITION_X,\n\n HUD_LABEL_SCORE_POSITION_Y + HUD_SCORING_DELTA_POSITION_Y,\n\n];\n\npub const HUD_MAXCOMBO_POSITION: [f32; 2] = [\n\n HUD_SCORING_POSITION_X,\n\n HUD_LABEL_MAXCOMBO_POSITION[1] + HUD_SCORING_DELTA_POSITION_Y,\n\n];\n\npub const HUD_HIGHSCORE_POSITION: [f32; 2] = [\n\n HUD_SCORING_POSITION_X,\n\n HUD_LABEL_HIGHSCORE_POSITION[1] + HUD_SCORING_DELTA_POSITION_Y,\n", "file_path": "src/constants.rs", "rank": 49, "score": 32102.800822367073 }, { "content": "#![allow(\n\n clippy::float_cmp,\n\n clippy::cast_possible_truncation,\n\n clippy::cast_possible_wrap,\n\n clippy::cast_precision_loss,\n\n clippy::cast_sign_loss,\n\n clippy::missing_const_for_fn\n\n)]\n\n\n\nuse std::fmt::{Display, Formatter, Result};\n\n\n\nuse rand::prelude::SliceRandom;\n\nuse rand::rngs::ThreadRng;\n\nuse rand::Rng;\n\n\n\nuse ggez::graphics::{self, mint::Point2, Color, DrawMode, DrawParam, Mesh, Rect};\n\nuse ggez::{Context, GameResult};\n\n\n\nuse crate::constants::{\n\n BLOCK_COLOR_BLUE, BLOCK_COLOR_GREEN, BLOCK_COLOR_MAGENTA, BLOCK_COLOR_ORANGE, BLOCK_COLOR_RED,\n\n BLOCK_COLOR_YELLOW, BLOCK_SIZE, GAME_ARENA_COLUMNS, GAME_ARENA_MARGIN_LEFT, GAME_ARENA_RECT,\n\n};\n\n\n\nuse self::cargo::Cargo;\n\n\n\npub mod cargo;\n\npub mod matches;\n\npub mod pile;\n\n\n", "file_path": "src/blocks.rs", "rank": 50, "score": 30297.19302093277 }, { "content": " panic!(\"Block color with code '{}' does not exist!\", code);\n\n }\n\n\n\n pub fn create_next_cargo(&mut self) -> Cargo {\n\n let mut color_block_randomly = |point| 
{\n\n #[allow(clippy::unwrap_used)]\n\n let random_color = Self::COLORS.choose(&mut self.rng).unwrap();\n\n Block::new(point, BLOCK_SIZE, *random_color)\n\n };\n\n\n\n let x = GAME_ARENA_RECT.left() - GAME_ARENA_MARGIN_LEFT;\n\n let y = GAME_ARENA_RECT.top();\n\n let blocks = [\n\n color_block_randomly(Point2 {\n\n x,\n\n y: BLOCK_SIZE.mul_add(0.0, y),\n\n }),\n\n color_block_randomly(Point2 {\n\n x,\n\n y: BLOCK_SIZE.mul_add(1.0, y),\n", "file_path": "src/blocks.rs", "rank": 51, "score": 30297.025422946183 }, { "content": " rect: Rect::new(point.x, point.y, size, size),\n\n color,\n\n }\n\n }\n\n\n\n pub fn draw(&mut self, ctx: &mut Context) -> GameResult<()> {\n\n let block_mesh = Mesh::new_rectangle(ctx, DrawMode::fill(), self.rect, self.color.color)?;\n\n graphics::draw(ctx, &block_mesh, DrawParam::default())?;\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl Display for Block {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> Result {\n\n write!(\n\n f,\n\n \"{} ({},{})\",\n\n self.color.code,\n\n position_to_idx(self.rect.x, 'x'),\n", "file_path": "src/blocks.rs", "rank": 52, "score": 30291.695356700213 }, { "content": " cargo.blocks[i].rect.y = BLOCK_SIZE.mul_add(i as f32, y);\n\n }\n\n cargo\n\n }\n\n\n\n pub fn change_block_color_randomly(&mut self, block: &mut Block) {\n\n let mut new_random_block_color;\n\n #[allow(clippy::unwrap_used)]\n\n loop {\n\n new_random_block_color = Self::COLORS.choose(&mut self.rng).unwrap();\n\n if new_random_block_color.code != block.color.code {\n\n break;\n\n }\n\n }\n\n block.color = *new_random_block_color;\n\n }\n\n}\n", "file_path": "src/blocks.rs", "rank": 53, "score": 30289.87181456848 }, { "content": " position_to_idx(self.rect.y, 'y')\n\n )\n\n }\n\n}\n\n\n\n#[derive(Debug, Copy, Clone)]\n\npub struct BlockColor {\n\n pub code: char,\n\n pub color: Color,\n\n}\n\n\n\n/*******************************************************************************\n\n**** BLOCKS FACTORY\n\n*******************************************************************************/\n\npub struct Factory {\n\n rng: ThreadRng,\n\n}\n\n\n\nimpl Factory {\n\n pub const COLORS: [BlockColor; 6] = [\n", "file_path": "src/blocks.rs", "rank": 54, "score": 30289.186802950033 }, { "content": " BLOCK_COLOR_RED,\n\n BLOCK_COLOR_GREEN,\n\n BLOCK_COLOR_BLUE,\n\n BLOCK_COLOR_ORANGE,\n\n BLOCK_COLOR_MAGENTA,\n\n BLOCK_COLOR_YELLOW,\n\n ];\n\n\n\n pub fn new() -> Self {\n\n Self {\n\n rng: rand::thread_rng(),\n\n }\n\n }\n\n\n\n pub fn get_block_color_by_code(code: char) -> Color {\n\n for block_color in Self::COLORS {\n\n if block_color.code == code {\n\n return block_color.color;\n\n }\n\n }\n", "file_path": "src/blocks.rs", "rank": 55, "score": 30287.013603057178 }, { "content": " }),\n\n color_block_randomly(Point2 {\n\n x,\n\n y: BLOCK_SIZE.mul_add(2.0, y),\n\n }),\n\n ];\n\n Cargo::new(blocks)\n\n }\n\n\n\n pub fn put_cargo_in_arena(&mut self, mut cargo: Cargo) -> Cargo {\n\n let x = BLOCK_SIZE.mul_add(\n\n self.rng.gen_range(0..GAME_ARENA_COLUMNS) as f32,\n\n GAME_ARENA_RECT.left(),\n\n );\n\n let y = GAME_ARENA_RECT.top() - BLOCK_SIZE * 3.0;\n\n cargo.rect.x = x;\n\n cargo.rect.y = y;\n\n cargo.column_idx = position_to_idx(cargo.rect.x, 'x');\n\n for i in 0..3 {\n\n cargo.blocks[i].rect.x = x;\n", "file_path": "src/blocks.rs", "rank": 56, "score": 30286.862025905863 }, { "content": "use std::fmt::{Display, Formatter, Result};\n\n\n\nuse ggez::{graphics::Rect, Context, GameResult};\n\n\n\nuse crate::constants::{BLOCK_SIZE, GAME_ARENA_COLUMNS, GAME_ARENA_RECT};\n\n\n\nuse super::{pile::Pile, 
Block};\n\n\n\n/*******************************************************************************\n\n**** CARGO\n\n*******************************************************************************/\n\n#[derive(Debug)]\n\npub struct Cargo {\n\n pub blocks: [Block; 3],\n\n pub rect: Rect,\n\n pub column_idx: usize,\n\n}\n\n\n\nimpl Cargo {\n\n pub fn new(blocks: [Block; 3]) -> Self {\n", "file_path": "src/blocks/cargo.rs", "rank": 57, "score": 28835.821859857442 }, { "content": "use std::{\n\n cmp::{max, min, Ord},\n\n collections::{HashMap, HashSet},\n\n mem,\n\n};\n\n\n\nuse ggez::{graphics::Color, Context, GameResult};\n\n\n\nuse crate::{\n\n constants::{BLOCK_SIZE, GAME_ARENA_COLUMNS, GAME_ARENA_RECT, GAME_ARENA_ROWS, NO_BLOCK_CODE},\n\n stages::playing::Direction,\n\n};\n\n\n\nuse super::{cargo::Cargo, Block, Factory};\n\n\n\n/*******************************************************************************\n\n**** PILE\n\n*******************************************************************************/\n\npub type Matches = HashMap<Direction, Vec<(Color, Vec<[usize; 2]>)>>;\n\n\n", "file_path": "src/blocks/pile.rs", "rank": 58, "score": 28833.87798632353 }, { "content": " if self.rect.right() < GAME_ARENA_RECT.left() {\n\n // next cargo\n\n for i in 0..3 {\n\n blocks.push(self.blocks[i]);\n\n }\n\n } else {\n\n // descending cargo\n\n for i in 0..3 {\n\n let block = self.blocks[i];\n\n if block.rect.bottom() > GAME_ARENA_RECT.top() {\n\n blocks.push(block);\n\n }\n\n }\n\n }\n\n blocks\n\n }\n\n\n\n pub fn draw(&mut self, ctx: &mut Context) -> GameResult<()> {\n\n for i in 0..3 {\n\n if self.blocks[i].rect.bottom() > GAME_ARENA_RECT.top() {\n", "file_path": "src/blocks/cargo.rs", "rank": 59, "score": 28830.02007255693 }, { "content": " let code_color_0 = self.blocks[0].color;\n\n self.blocks[0].color = self.blocks[1].color;\n\n self.blocks[1].color = self.blocks[2].color;\n\n self.blocks[2].color = code_color_0;\n\n }\n\n\n\n pub fn rearrange_down(&mut self) {\n\n let code_color_2 = self.blocks[2].color;\n\n self.blocks[2].color = self.blocks[1].color;\n\n self.blocks[1].color = self.blocks[0].color;\n\n self.blocks[0].color = code_color_2;\n\n }\n\n\n\n pub fn drop(&mut self, pile: &Pile) {\n\n let pile_column_top = pile.column_tops[self.column_idx].1;\n\n self.rect.y = pile_column_top - self.rect.h;\n\n for i in 0..3 {\n\n self.blocks[i].rect.y = BLOCK_SIZE.mul_add(i as f32, self.rect.y);\n\n }\n\n }\n", "file_path": "src/blocks/cargo.rs", "rank": 60, "score": 28829.39012395615 }, { "content": " }\n\n }\n\n Self::take_match_from_collector(direction, collector, matches);\n\n }\n\n\n\n fn take_match_from_collector(\n\n direction: Direction,\n\n collector: &mut Vec<(char, usize, usize)>,\n\n matches: &mut Matches,\n\n ) {\n\n if collector.len() >= 3 {\n\n let vec_of_matches = matches.entry(direction).or_insert_with(Vec::new);\n\n\n\n let match_color = Factory::get_block_color_by_code(collector[0].0);\n\n let r#match = collector\n\n .iter()\n\n .map(|block_repr| [block_repr.1, block_repr.2])\n\n .collect();\n\n\n\n vec_of_matches.push((match_color, r#match));\n", "file_path": "src/blocks/pile.rs", "rank": 61, "score": 28829.187000833805 }, { "content": " }\n\n collector.clear();\n\n }\n\n /*** SEARCHING FOR MATCHES [END] ***/\n\n\n\n pub fn extract_matching_blocks(\n\n &mut self,\n\n unique_match_positions: &HashSet<[usize; 2]>,\n\n ) -> Vec<Block> {\n\n let mut matched_blocks = Vec::with_capacity(unique_match_positions.len());\n\n for pos_idxs in unique_match_positions {\n\n let block = 
mem::take(&mut self.matrix[pos_idxs[0]][pos_idxs[1]]);\n\n #[allow(clippy::unwrap_used)]\n\n matched_blocks.push(block.unwrap());\n\n }\n\n matched_blocks\n\n }\n\n\n\n pub fn remove_matches(&mut self, matches: &HashSet<[usize; 2]>) -> bool {\n\n // COLLECT A MAP OF ROW (VERTICAL) INDEXES OF MATCHED BLOCKS BY COLUMN (HORIZONTAL) INDEX\n", "file_path": "src/blocks/pile.rs", "rank": 62, "score": 28829.03453827401 }, { "content": " fn get_block_code(block: &Option<Block>) -> char {\n\n if let Some(block) = block {\n\n return block.color.code;\n\n }\n\n NO_BLOCK_CODE\n\n }\n\n\n\n fn search_sequence_for_vertical_matches(\n\n direction: Direction,\n\n sequence: &[(char, usize, usize)],\n\n collector: &mut Vec<(char, usize, usize)>,\n\n matches: &mut Matches,\n\n ) {\n\n for block_repr in sequence {\n\n if let Some(previous_match) = collector.last() {\n\n if block_repr.0 != previous_match.0 {\n\n Self::take_match_from_collector(direction, collector, matches);\n\n }\n\n }\n\n collector.push(*block_repr);\n", "file_path": "src/blocks/pile.rs", "rank": 63, "score": 28828.304054557786 }, { "content": " }\n\n Self::take_match_from_collector(direction, collector, matches);\n\n }\n\n\n\n fn search_sequence_for_matches(\n\n direction: Direction,\n\n sequence: &[(char, usize, usize)],\n\n collector: &mut Vec<(char, usize, usize)>,\n\n matches: &mut Matches,\n\n ) {\n\n for block_repr in sequence {\n\n if block_repr.0 == NO_BLOCK_CODE {\n\n Self::take_match_from_collector(direction, collector, matches);\n\n } else {\n\n if let Some(previous_block_repr) = collector.last() {\n\n if block_repr.0 != previous_block_repr.0 {\n\n Self::take_match_from_collector(direction, collector, matches);\n\n }\n\n }\n\n collector.push(*block_repr);\n", "file_path": "src/blocks/pile.rs", "rank": 64, "score": 28827.277959720963 }, { "content": " let block = self.matrix[col_idx][row_idx as usize].unwrap();\n\n sequence.push((block.color.code, col_idx, row_idx as usize));\n\n }\n\n Self::search_sequence_for_vertical_matches(\n\n Direction::Vertical,\n\n &sequence,\n\n &mut match_collector,\n\n matches,\n\n );\n\n sequence.clear();\n\n }\n\n }\n\n }\n\n\n\n fn collect_horizontal_matches(&self, matches: &mut Matches) {\n\n let mut sequence = Vec::with_capacity(GAME_ARENA_COLUMNS);\n\n let mut match_collector = Vec::<(char, usize, usize)>::with_capacity(5);\n\n\n\n let topmost_column_idx = self.get_topmost_column_idx();\n\n if topmost_column_idx > -1 {\n", "file_path": "src/blocks/pile.rs", "rank": 65, "score": 28827.18897854784 }, { "content": " self.blocks[i].rect.x += BLOCK_SIZE;\n\n }\n\n }\n\n }\n\n }\n\n\n\n pub fn move_to_left(&mut self, pile: &Pile) {\n\n if self.column_idx > 0 {\n\n let pile_previous_column_top = pile.column_tops[self.column_idx - 1].1;\n\n if self.rect.bottom() <= pile_previous_column_top {\n\n self.rect.x -= BLOCK_SIZE;\n\n self.column_idx -= 1;\n\n for i in 0..3 {\n\n self.blocks[i].rect.x -= BLOCK_SIZE;\n\n }\n\n }\n\n }\n\n }\n\n\n\n pub fn rearrange_up(&mut self) {\n", "file_path": "src/blocks/cargo.rs", "rank": 66, "score": 28825.79206188187 }, { "content": " let first_block_rect = blocks[0].rect;\n\n Self {\n\n blocks,\n\n rect: Rect::new(\n\n first_block_rect.x,\n\n first_block_rect.y,\n\n first_block_rect.w,\n\n first_block_rect.h * 3.0,\n\n ),\n\n column_idx: usize::MAX,\n\n }\n\n }\n\n\n\n pub fn move_to_right(&mut self, pile: &Pile) {\n\n if self.column_idx + 1 < GAME_ARENA_COLUMNS {\n\n let pile_next_column_top = pile.column_tops[self.column_idx + 1].1;\n\n if self.rect.bottom() <= 
pile_next_column_top {\n\n self.rect.x += BLOCK_SIZE;\n\n self.column_idx += 1;\n\n for i in 0..3 {\n", "file_path": "src/blocks/cargo.rs", "rank": 67, "score": 28825.464829124307 }, { "content": " }\n\n }\n\n }\n\n blocks\n\n }\n\n\n\n pub fn draw(&mut self, ctx: &mut Context) -> GameResult<()> {\n\n for col_idx in 0..GAME_ARENA_COLUMNS {\n\n for row_idx in 0..GAME_ARENA_ROWS {\n\n if let Some(mut block) = self.matrix[col_idx][row_idx] {\n\n block.draw(ctx)?;\n\n }\n\n }\n\n }\n\n Ok(())\n\n }\n\n\n\n pub fn __print(&self) {\n\n for row_idx in (0..GAME_ARENA_ROWS).rev() {\n\n for col_idx in 0..GAME_ARENA_COLUMNS {\n", "file_path": "src/blocks/pile.rs", "rank": 68, "score": 28825.445359613812 }, { "content": " for row_idx in 0..=topmost_column_idx as usize {\n\n for col_idx in 0..GAME_ARENA_COLUMNS {\n\n let code = Self::get_block_code(&self.matrix[col_idx][row_idx as usize]);\n\n sequence.push((code, col_idx, row_idx as usize));\n\n }\n\n Self::search_sequence_for_matches(\n\n Direction::Horizontal,\n\n &sequence,\n\n &mut match_collector,\n\n matches,\n\n );\n\n sequence.clear();\n\n }\n\n }\n\n }\n\n\n\n fn collect_diagonal_slash_matches(&self, matches: &mut Matches) {\n\n let mut sequence = Vec::with_capacity(max(GAME_ARENA_COLUMNS, GAME_ARENA_ROWS));\n\n let mut match_collector = Vec::<(char, usize, usize)>::with_capacity(5);\n\n\n", "file_path": "src/blocks/pile.rs", "rank": 69, "score": 28825.06109219088 }, { "content": "pub struct Pile {\n\n matrix: [[Option<Block>; GAME_ARENA_ROWS]; GAME_ARENA_COLUMNS],\n\n pub column_tops: [(isize, f32); GAME_ARENA_COLUMNS],\n\n}\n\n\n\nimpl Pile {\n\n pub fn new() -> Self {\n\n Self {\n\n matrix: [[None; GAME_ARENA_ROWS]; GAME_ARENA_COLUMNS],\n\n column_tops: [(-1, GAME_ARENA_RECT.bottom()); GAME_ARENA_COLUMNS],\n\n }\n\n }\n\n\n\n pub fn from_snapshot(\n\n matrix: &[[Option<Block>; GAME_ARENA_ROWS]; GAME_ARENA_COLUMNS],\n\n column_tops: [(isize, f32); GAME_ARENA_COLUMNS],\n\n ) -> Self {\n\n Self {\n\n matrix: *matrix,\n\n column_tops,\n", "file_path": "src/blocks/pile.rs", "rank": 70, "score": 28825.00390865302 }, { "content": " let mut matched_blocks_row_idxs_by_col_idx = HashMap::new();\n\n for pos_idxs in matches {\n\n let col_idx = pos_idxs[0];\n\n let row_idx = pos_idxs[1];\n\n\n\n let row_idxs = matched_blocks_row_idxs_by_col_idx\n\n .entry(col_idx)\n\n .or_insert_with(Vec::new);\n\n row_idxs.push(row_idx);\n\n }\n\n // MAKE DANGLING BLOCKS FALL\n\n for (col_idx, row_idxs) in matched_blocks_row_idxs_by_col_idx {\n\n #[allow(clippy::unwrap_used)]\n\n let lowest_matched_block_idx = *row_idxs.iter().min_by(Ord::cmp).unwrap();\n\n let mut num_empty_slots: usize = 0;\n\n for row_idx in lowest_matched_block_idx..=self.column_tops[col_idx].0 as usize {\n\n match mem::take(&mut self.matrix[col_idx][row_idx]) {\n\n None => num_empty_slots += 1,\n\n Some(mut block) => {\n\n block.rect.y += num_empty_slots as f32 * BLOCK_SIZE;\n", "file_path": "src/blocks/pile.rs", "rank": 71, "score": 28824.996705291374 }, { "content": "\n\n /*** SEARCHING FOR MATCHES [BEGIN] ***/\n\n pub fn search_for_matches(&self) -> Matches {\n\n let mut matches = Matches::new();\n\n self.collect_vertical_matches(&mut matches);\n\n self.collect_horizontal_matches(&mut matches);\n\n self.collect_diagonal_slash_matches(&mut matches);\n\n self.collect_diagonal_backslash_matches(&mut matches);\n\n matches\n\n }\n\n\n\n fn collect_vertical_matches(&self, matches: &mut Matches) {\n\n let mut sequence = Vec::with_capacity(GAME_ARENA_ROWS);\n\n let mut match_collector = Vec::<(char, 
usize, usize)>::with_capacity(5);\n\n\n\n for col_idx in 0..GAME_ARENA_COLUMNS {\n\n let column_top_idx = self.column_tops[col_idx].0;\n\n if column_top_idx >= 2 {\n\n for row_idx in (0..=column_top_idx as usize).rev() {\n\n #[allow(clippy::unwrap_used)]\n", "file_path": "src/blocks/pile.rs", "rank": 72, "score": 28824.934182978446 }, { "content": " let mut row_idx = row_idx_start;\n\n let mut col_idx = col_idx_start;\n\n while row_idx < GAME_ARENA_ROWS && col_idx < GAME_ARENA_COLUMNS {\n\n let code = Self::get_block_code(&self.matrix[col_idx][row_idx as usize]);\n\n sequence.push((code, col_idx, row_idx as usize));\n\n row_idx += 1;\n\n col_idx += 1;\n\n }\n\n Self::search_sequence_for_matches(\n\n Direction::DiagonalSlash,\n\n &sequence,\n\n &mut match_collector,\n\n matches,\n\n );\n\n sequence.clear();\n\n }\n\n }\n\n\n\n fn collect_diagonal_backslash_matches(&self, matches: &mut Matches) {\n\n let mut sequence = Vec::with_capacity(max(GAME_ARENA_COLUMNS, GAME_ARENA_ROWS));\n", "file_path": "src/blocks/pile.rs", "rank": 73, "score": 28824.787631879255 }, { "content": " let mut match_collector = Vec::<(char, usize, usize)>::with_capacity(5);\n\n\n\n let row_idx_start: usize = 0;\n\n for col_idx_start in 2..GAME_ARENA_COLUMNS as isize {\n\n let mut row_idx = row_idx_start;\n\n let mut col_idx = col_idx_start;\n\n while row_idx < GAME_ARENA_ROWS && col_idx >= 0 {\n\n let code = Self::get_block_code(&self.matrix[col_idx as usize][row_idx]);\n\n sequence.push((code, col_idx as usize, row_idx as usize));\n\n row_idx += 1;\n\n col_idx -= 1;\n\n }\n\n Self::search_sequence_for_matches(\n\n Direction::DiagonalBackslash,\n\n &sequence,\n\n &mut match_collector,\n\n matches,\n\n );\n\n sequence.clear();\n\n }\n", "file_path": "src/blocks/pile.rs", "rank": 74, "score": 28824.26429676644 }, { "content": " let col_idx_start = GAME_ARENA_COLUMNS as isize - 1;\n\n for row_idx_start in 1..GAME_ARENA_ROWS - 2 {\n\n let mut row_idx = row_idx_start;\n\n let mut col_idx = col_idx_start;\n\n while row_idx < GAME_ARENA_ROWS && col_idx >= 0 {\n\n let code = Self::get_block_code(&self.matrix[col_idx as usize][row_idx]);\n\n sequence.push((code, col_idx as usize, row_idx as usize));\n\n row_idx += 1;\n\n col_idx -= 1;\n\n }\n\n Self::search_sequence_for_matches(\n\n Direction::DiagonalBackslash,\n\n &sequence,\n\n &mut match_collector,\n\n matches,\n\n );\n\n sequence.clear();\n\n }\n\n }\n\n\n", "file_path": "src/blocks/pile.rs", "rank": 75, "score": 28823.974717865316 }, { "content": " }\n\n }\n\n\n\n pub fn take_cargo(&mut self, cargo: &Cargo) -> isize {\n\n let pile_column_top = self.column_tops[cargo.column_idx];\n\n let mut pile_column_top_row_idx = pile_column_top.0;\n\n\n\n let spaces_to_fill = GAME_ARENA_ROWS as isize - 1 - pile_column_top_row_idx;\n\n let row_increment = min(3, spaces_to_fill) as usize;\n\n for cargo_block_idx in (3 - row_increment..3).rev() {\n\n pile_column_top_row_idx += 1;\n\n self.matrix[cargo.column_idx][pile_column_top_row_idx as usize] =\n\n Some(cargo.blocks[cargo_block_idx]);\n\n }\n\n\n\n self.column_tops[cargo.column_idx].0 += row_increment as isize;\n\n self.column_tops[cargo.column_idx].1 -= BLOCK_SIZE * row_increment as f32;\n\n\n\n spaces_to_fill - 3\n\n }\n", "file_path": "src/blocks/pile.rs", "rank": 76, "score": 28823.61143339745 }, { "content": " let col_idx_start: usize = 0;\n\n for row_idx_start in (0..GAME_ARENA_ROWS - 2).rev() {\n\n let mut row_idx = row_idx_start;\n\n let mut col_idx = col_idx_start;\n\n while row_idx < GAME_ARENA_ROWS && col_idx < 
GAME_ARENA_COLUMNS {\n\n let code = Self::get_block_code(&self.matrix[col_idx][row_idx as usize]);\n\n sequence.push((code, col_idx, row_idx as usize));\n\n row_idx += 1;\n\n col_idx += 1;\n\n }\n\n Self::search_sequence_for_matches(\n\n Direction::DiagonalSlash,\n\n &sequence,\n\n &mut match_collector,\n\n matches,\n\n );\n\n sequence.clear();\n\n }\n\n let row_idx_start: usize = 0;\n\n for col_idx_start in 1..GAME_ARENA_COLUMNS - 2 {\n", "file_path": "src/blocks/pile.rs", "rank": 77, "score": 28823.497450885443 }, { "content": "\n\n pub fn descend_one_step(&mut self, pile: &Pile) -> bool {\n\n let pile_column_top = pile.column_tops[self.column_idx].1;\n\n let is_descending_over = self.rect.bottom() == pile_column_top;\n\n if !is_descending_over {\n\n self.rect.y += BLOCK_SIZE;\n\n for i in 0..3 {\n\n self.blocks[i].rect.y += BLOCK_SIZE;\n\n }\n\n }\n\n self.rect.bottom() == pile_column_top\n\n }\n\n\n\n #[inline]\n\n pub fn is_at_bottom(&self, pile: &Pile) -> bool {\n\n self.rect.bottom() == pile.column_tops[self.column_idx].1\n\n }\n\n\n\n pub fn get_visible_blocks(&self) -> Vec<Block> {\n\n let mut blocks = Vec::with_capacity(3);\n", "file_path": "src/blocks/cargo.rs", "rank": 78, "score": 28823.039767822385 }, { "content": " self.blocks[i].draw(ctx)?;\n\n }\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl Display for Cargo {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> Result {\n\n let repr = self\n\n .blocks\n\n .iter()\n\n .map(|block| format!(\"{}\", block))\n\n .fold(String::new(), |accumulator, element| {\n\n accumulator + &element + \", \"\n\n });\n\n let mut repr_iter = repr.chars();\n\n repr_iter.next_back();\n\n repr_iter.next_back();\n\n write!(f, \"{}\", repr_iter.as_str())\n\n }\n\n}\n", "file_path": "src/blocks/cargo.rs", "rank": 79, "score": 28822.378752374276 }, { "content": " self.matrix[col_idx][row_idx - num_empty_slots] = Some(block);\n\n }\n\n }\n\n }\n\n\n\n let num_matched_blocks = row_idxs.len();\n\n self.column_tops[col_idx].0 -= num_matched_blocks as isize;\n\n self.column_tops[col_idx].1 += num_matched_blocks as f32 * BLOCK_SIZE;\n\n }\n\n // CHECK IF PILE IS FULL (TOP OF UPMOST CARGO == TOP OF ARENA)\n\n self.get_topmost_column_idx() >= (GAME_ARENA_ROWS - 1) as isize\n\n }\n\n\n\n #[inline]\n\n fn get_topmost_column_idx(&self) -> isize {\n\n #[allow(clippy::unwrap_used)]\n\n self.column_tops\n\n .iter()\n\n .max_by(|top1, top2| top1.0.cmp(&top2.0))\n\n .unwrap()\n", "file_path": "src/blocks/pile.rs", "rank": 80, "score": 28822.00312645313 }, { "content": " .0\n\n }\n\n\n\n pub fn get_blocks(&self) -> Vec<Block> {\n\n let mut num_blocks: usize = 0;\n\n for col_idx in 0..GAME_ARENA_COLUMNS {\n\n let column_top = self.column_tops[col_idx].0;\n\n if column_top > -1 {\n\n num_blocks += column_top as usize + 1;\n\n }\n\n }\n\n\n\n let mut blocks = Vec::with_capacity(num_blocks);\n\n for col_idx in 0..GAME_ARENA_COLUMNS {\n\n let column_top = self.column_tops[col_idx].0;\n\n if column_top > -1 {\n\n for row_idx in 0..=column_top as usize {\n\n if let Some(block) = self.matrix[col_idx][row_idx] {\n\n blocks.push(block);\n\n }\n", "file_path": "src/blocks/pile.rs", "rank": 81, "score": 28821.970296801734 }, { "content": " if let Some(block) = self.matrix[col_idx][row_idx] {\n\n print!(\"{} \", block.color.code);\n\n } else {\n\n print!(\"{} \", NO_BLOCK_CODE);\n\n }\n\n }\n\n println!();\n\n }\n\n }\n\n}\n", "file_path": "src/blocks/pile.rs", "rank": 82, "score": 28820.40377489299 }, { "content": "use ggez::graphics::{Align, Color, DrawParam, Font, PxScale, Text, 
TextFragment};\n\nuse ggez::mint::Point2;\n\nuse ggez::{graphics, Context, GameResult};\n\nuse glam::Vec2;\n\n\n\nuse super::StageTrait;\n\nuse crate::blocks::Block;\n\nuse crate::constants::{\n\n BLOCK_COLOR_BLUE, BLOCK_COLOR_GREEN, BLOCK_COLOR_YELLOW, COLOR_BLUE, COLOR_GREEN, COLOR_YELLOW,\n\n MAIN_MENU_ITEMS_Y_POSITIONS, MAIN_MENU_ITEM_AREA_CENTER, MAIN_MENU_ITEM_AREA_WIDTH,\n\n MAIN_MENU_ITEM_AREA_X, MAIN_MENU_ITEM_CHAR_SCALE,\n\n MAIN_MENU_SELECTED_ITEM_BLOCK_ALPHA_INCREMENT_ACCELERATION,\n\n MAIN_MENU_SELECTED_ITEM_BLOCK_FADE_IN_TRESHOLD, MAIN_MENU_SELECTED_ITEM_BLOCK_MARGIN_X,\n\n MAIN_MENU_SELECTED_ITEM_BLOCK_MARGIN_Y, MAIN_MENU_SELECTED_ITEM_BLOCK_SIZE,\n\n};\n\nuse crate::input::Event;\n\nuse crate::resources::Resources;\n\nuse crate::stages::Stage;\n\n\n\n/*******************************************************************************\n\n**** ITEM LABELS CACHE\n\n*******************************************************************************/\n", "file_path": "src/stages/main_menu.rs", "rank": 87, "score": 30.832963001647098 }, { "content": "use ggez::graphics::{Color, Font, PxScale, Text, TextFragment};\n\n\n\nuse crate::constants::{COLOR_GRAY, NAVIGATION_INSTRUCTIONS_CHAR_SCALE};\n\n\n\n/*******************************************************************************\n\n**** NAVIGATION INSTRUCTIONS\n\n*******************************************************************************/\n\n#[derive(Clone)]\n\npub struct NavigationInstructions {\n\n playing_ready: Text,\n\n playing_go: Text,\n\n playing_pause: Text,\n\n playing_gameover: Text,\n\n\n\n go_back: Text,\n\n}\n\n\n\nimpl NavigationInstructions {\n\n pub fn new(font: Font) -> Self {\n\n let label_factory = Factory::new(font);\n", "file_path": "src/navigation_instructions.rs", "rank": 88, "score": 30.409095157399097 }, { "content": "use ggez::{\n\n graphics::{self, Align, DrawParam, PxScale, Text, TextFragment},\n\n Context, GameResult,\n\n};\n\nuse glam::Vec2;\n\n\n\nuse crate::{\n\n constants::{\n\n ABOUT_CHAR_SCALE, ABOUT_TEXT_POSITION, ABOUT_VERSION_AND_BUILDTIME_AREA_WIDTH,\n\n ABOUT_VERSION_AND_BUILDTIME_CHAR_SCALE, ABOUT_VERSION_AND_BUILDTIME_POSITION, BUILD_TIME,\n\n COLOR_GRAY, COLOR_LIGHT_GRAY, GO_BACK_LABEL_POSITION, HOWTOPLAY_AND_ABOUT_AREA_WIDTH,\n\n },\n\n input::Event,\n\n resources::Resources,\n\n};\n\n\n\nuse super::{Stage, StageTrait};\n\n\n\npub struct About {\n\n go_back_instruction: Text,\n", "file_path": "src/stages/about.rs", "rank": 89, "score": 28.223620925366287 }, { "content": "use ggez::{\n\n graphics::{self, Align, DrawParam, Font, PxScale, Text, TextFragment},\n\n Context, GameResult,\n\n};\n\nuse glam::Vec2;\n\nuse rand::prelude::{SliceRandom, ThreadRng};\n\n\n\nuse crate::{\n\n blocks,\n\n constants::{\n\n APP_NAME, COLOR_ORANGE, COLOR_RED, NO_BLOCK_CODE, TITLE_SCREEN_AREA_WIDTH,\n\n TITLE_SCREEN_NAVIGATION_INSTRUCTIONS_CHAR_SCALE,\n\n TITLE_SCREEN_NAVIGATION_INSTRUCTIONS_POSITION, TITLE_SCREEN_NUM_FRAMES_FOR_ANIMATION,\n\n TITLE_SCREEN_TITLE_CHAR_SCALE, TITLE_SCREEN_TITLE_POSITION,\n\n },\n\n input::Event,\n\n resources::Resources,\n\n};\n\n\n\nuse super::{Stage, StageTrait};\n", "file_path": "src/stages/title_screen.rs", "rank": 90, "score": 26.503650571769864 }, { "content": "\n\npub struct TitleScreen {\n\n title: Text,\n\n navigation_instructions: Text,\n\n num_frames: usize,\n\n rng: ThreadRng,\n\n previous_frame_title_colors_codes: [char; APP_NAME.len()],\n\n}\n\n\n\nimpl TitleScreen {\n\n pub fn new(resources: &Resources) -> Self {\n\n let fonts = resources.get_fonts();\n\n\n\n let mut 
title = Text::default();\n\n for char in APP_NAME.chars() {\n\n title.add(TextFragment {\n\n text: char.to_string(),\n\n color: None,\n\n font: Some(fonts.bold),\n\n scale: Some(PxScale::from(TITLE_SCREEN_TITLE_CHAR_SCALE)),\n", "file_path": "src/stages/title_screen.rs", "rank": 92, "score": 25.32706839528866 }, { "content": "use std::{env, mem};\n\n\n\nuse ggez::graphics::{self, Color, DrawMode, DrawParam, Mesh, Rect, StrokeOptions};\n\nuse ggez::{Context, GameResult};\n\n\n\nuse self::hud::{GameInfo, GameInfoType};\n\nuse self::scoring::Scoring;\n\n\n\nuse super::{Stage, StageTrait};\n\nuse crate::blocks::cargo::Cargo;\n\nuse crate::blocks::matches::{ComboPointsAnimationsHolder, Matching};\n\nuse crate::blocks::pile::Pile;\n\nuse crate::blocks::{Block, Factory};\n\nuse crate::constants::{\n\n COLOR_GRAY, GAME_ARENA_RECT, NUM_DESCENDED_CARGOES_GAMEPLAY_ACCELERATION,\n\n NUM_TICKS_FOR_PAUSED_BLOCKS_SHUFFLE, NUM_TICKS_GAMEPLAY_ACCELERATION_LIMIT,\n\n NUM_TICKS_SEQUENCE_FOR_MATCHES_REMOVAL, STARTING_NUM_TICKS_FOR_CARGO_DESCENT,\n\n};\n\nuse crate::input::Event;\n\nuse crate::resources::Resources;\n\nuse crate::snapshot;\n\nuse crate::stages::playing::hud::Hud;\n\n\n\nmod hud;\n\nmod scoring;\n\n\n\n/*******************************************************************************\n\n**** GAME ARENA\n\n*******************************************************************************/\n", "file_path": "src/stages/playing.rs", "rank": 93, "score": 24.91991920878068 }, { "content": " scale: Some(PxScale::from(\n\n TITLE_SCREEN_NAVIGATION_INSTRUCTIONS_CHAR_SCALE,\n\n )),\n\n }\n\n }\n\n\n\n // FIXME avoid font as parameter\n\n fn create_navigation_instructions_textfragment_key(font: Font, key: &str) -> TextFragment {\n\n TextFragment {\n\n text: format!(\"[{}]\", key),\n\n color: Some(COLOR_RED),\n\n font: Some(font),\n\n scale: Some(PxScale::from(\n\n TITLE_SCREEN_NAVIGATION_INSTRUCTIONS_CHAR_SCALE,\n\n )),\n\n }\n\n }\n\n\n\n fn shuffle_title_colors(&mut self) {\n\n let mut prev_color_code = NO_BLOCK_CODE;\n", "file_path": "src/stages/title_screen.rs", "rank": 94, "score": 24.20026577013515 }, { "content": "use ggez::event::KeyCode;\n\n\n\n#[derive(Debug)]\n\npub enum Event {\n\n None,\n\n Enter,\n\n Escape,\n\n LostFocus,\n\n SaveScoreOnQuit,\n\n Right,\n\n Left,\n\n Up,\n\n Down,\n\n Drop,\n\n}\n\n\n\nimpl Default for Event {\n\n fn default() -> Self {\n\n Self::None\n\n }\n", "file_path": "src/input.rs", "rank": 95, "score": 22.793245714118196 }, { "content": " color: Some(COLOR_LIGHT_GRAY),\n\n font: Some(font),\n\n scale: Some(PxScale::from(ABOUT_CHAR_SCALE)),\n\n });\n\n about.set_bounds(\n\n Vec2::new(HOWTOPLAY_AND_ABOUT_AREA_WIDTH, f32::INFINITY),\n\n Align::Left,\n\n );\n\n\n\n let mut version = env!(\"CARGO_PKG_VERSION\").to_string();\n\n if cfg!(debug_assertions) {\n\n version = format!(\"{}_dev\", version);\n\n }\n\n let version_and_buildtime_str = format!(\"Version: {}\\n{}\", version, BUILD_TIME);\n\n let mut version_and_buildtime = Text::new(TextFragment {\n\n text: version_and_buildtime_str,\n\n color: Some(COLOR_GRAY),\n\n font: Some(font),\n\n scale: Some(PxScale::from(ABOUT_VERSION_AND_BUILDTIME_CHAR_SCALE)),\n\n });\n", "file_path": "src/stages/about.rs", "rank": 96, "score": 22.669801170619436 }, { "content": "use glam::Vec2;\n\n\n\nuse ggez::{\n\n graphics::{self, Color, DrawParam, Font, PxScale, Text, TextFragment},\n\n Context, GameResult,\n\n};\n\n\n\nuse crate::{\n\n constants::{\n\n COLOR_BLUE, COLOR_GREEN, COLOR_LIGHT_GRAY, COLOR_ORANGE, COLOR_RED, COLOR_YELLOW,\n\n 
HUD_HIGHSCORE_POSITION, HUD_LABEL_HIGHSCORE_POSITION, HUD_LABEL_INSTRUCTIONS_POSITION,\n\n HUD_LABEL_MAXCOMBO_POSITION, HUD_LABEL_PLAYING_STATE_CHAR_SCALE,\n\n HUD_LABEL_PLAYING_STATE_POSITION, HUD_LABEL_SCORE_POSITION, HUD_LABEL_SCORING_CHAR_SCALE,\n\n HUD_MAXCOMBO_POSITION, HUD_SCORE_POSITION, HUD_SCORING_CHAR_SCALE,\n\n NUM_TICKS_FOR_PLAYING_STATE_GO_BLINKING, NUM_TICKS_FOR_PLAYING_STATE_SPEEDUP_BLINKING,\n\n },\n\n fonts::Fonts,\n\n resources::Resources,\n\n};\n\n\n\nuse super::scoring::Scoring;\n\n\n\n/*******************************************************************************\n\n**** HUD LABELS CACHE\n\n*******************************************************************************/\n", "file_path": "src/stages/playing/hud.rs", "rank": 97, "score": 22.292407716071313 }, { "content": " [left_block, right_block]\n\n }\n\n}\n\n\n\n/*******************************************************************************\n\n**** MAIN MENU\n\n*******************************************************************************/\n\npub struct MainMenu {\n\n item_labels: ItemLabels,\n\n selected_item_indicator: SelectedItemIndicator,\n\n selected_item_idx: usize,\n\n selected_item_blocks: [Block; 2],\n\n selected_item_blocks_alpha: f32,\n\n selected_item_blocks_alpha_increment: f32,\n\n}\n\n\n\nimpl MainMenu {\n\n pub const ITEMS: [Stage; 3] = [Stage::Playing, Stage::HowToPlay, Stage::About];\n\n\n\n pub fn new(resources: &Resources, ctx: &mut Context) -> Self {\n", "file_path": "src/stages/main_menu.rs", "rank": 98, "score": 22.227904614744006 }, { "content": "}\n\n\n\nimpl Event {\n\n pub fn map_input(keycode: KeyCode) -> Self {\n\n match keycode {\n\n KeyCode::Right | KeyCode::D => Self::Right,\n\n KeyCode::Left | KeyCode::A => Self::Left,\n\n KeyCode::Up | KeyCode::W => Self::Up,\n\n KeyCode::Down | KeyCode::S => Self::Down,\n\n KeyCode::Space => Self::Drop,\n\n KeyCode::Return => Self::Enter,\n\n KeyCode::Escape => Self::Escape,\n\n _ => Self::default(),\n\n }\n\n }\n\n\n\n /*pub fn __print(input_event: &InputEvent) {\n\n match input_event {\n\n InputEvent::None => (),\n\n _ => {\n\n println!(\"--> {:?}\", input_event);\n\n }\n\n }\n\n }*/\n\n}\n", "file_path": "src/input.rs", "rank": 99, "score": 20.82604922533002 } ]
lang: Rust
file_path: src/model/yaml/null.rs
repo_name: dnsl48/yamlette
commit: 593f172622d393086450023433549bf23843933f
extern crate skimmer; use model::{ model_issue_rope, EncodedString, Model, Node, Rope, Renderer, Tagged, TaggedValue }; use model::style::CommonStyles; use std::any::Any; use std::borrow::Cow; use std::default::Default; use std::iter::Iterator; pub static TAG: &'static str = "tag:yaml.org,2002:null"; #[derive (Copy, Clone, Debug)] pub struct Null; impl Null { pub fn get_tag () -> Cow<'static, str> { Cow::from (TAG) } fn read_null (&self, value: &[u8], ptr: usize) -> usize { match value.get (ptr).map (|b| *b) { Some (b'~') => 1, Some (b'n') => if value[ptr .. ].starts_with ("null".as_bytes ()) { 4 } else { 0 }, Some (b'N') => if value[ptr .. ].starts_with ("Null".as_bytes ()) || value[ptr .. ].starts_with ("NULL".as_bytes ()) { 4 } else { 0 }, _ => 0 } } } impl Model for Null { fn get_tag (&self) -> Cow<'static, str> { Self::get_tag () } fn as_any (&self) -> &Any { self } fn as_mut_any (&mut self) -> &mut Any { self } fn is_decodable (&self) -> bool { true } fn is_encodable (&self) -> bool { true } fn has_default (&self) -> bool { true } fn get_default (&self) -> TaggedValue { TaggedValue::from (NullValue::default ()) } fn encode (&self, _renderer: &Renderer, value: TaggedValue, tags: &mut Iterator<Item=&(Cow<'static, str>, Cow<'static, str>)>) -> Result<Rope, TaggedValue> { let mut val: NullValue = match <TaggedValue as Into<Result<NullValue, TaggedValue>>>::into (value) { Ok (value) => value, Err (value) => return Err (value) }; let issue_tag = val.issue_tag (); let alias = val.take_alias (); let node = Node::String (EncodedString::from ("~".as_bytes ())); Ok (model_issue_rope (self, node, issue_tag, alias, tags)) } fn decode (&self, explicit: bool, value: &[u8]) -> Result<TaggedValue, ()> { if value.len () == 0 { return Ok ( TaggedValue::from (NullValue::default ()) ) } let mut ptr: usize = 0; let mut quote_state: u8 = 0; if explicit { match value.get (ptr).map (|b| *b) { Some (b'\'') => { ptr += 1; quote_state = 1; } Some (b'"') => { ptr += 1; quote_state = 2; } _ => () }; /* if self.s_quote.contained_at_start (value) { ptr += self.s_quote.len (); quote_state = 1; } else if self.d_quote.contained_at_start (value) { ptr += self.d_quote.len (); quote_state = 2; } */ } let maybe_null = self.read_null (value, ptr); if maybe_null > 0 { ptr += maybe_null; if quote_state > 0 { match value.get (ptr).map (|b| *b) { Some (b'\'') if quote_state == 1 => (), Some (b'"') if quote_state == 2 => (), _ => return Err ( () ) }; /* if quote_state == 1 { if self.s_quote.contained_at (value, ptr) { // ptr += self.s_quote.len (); ?? } else { return Err ( () ) } } else if quote_state == 2 { if self.d_quote.contained_at (value, ptr) { // ptr += self.d_quote.len (); ?? 
} else { return Err ( () ) } } */ } return Ok ( TaggedValue::from (NullValue::default ()) ) } if quote_state > 0 { match value.get (ptr).map (|b| *b) { Some (b'\'') if quote_state == 1 => Ok ( TaggedValue::from (NullValue::default ()) ), Some (b'"') if quote_state == 2 => Ok ( TaggedValue::from (NullValue::default ()) ), _ => Err ( () ) } /* if quote_state == 1 && ptr == self.s_quote.len () { if self.s_quote.contained_at (value, ptr) { return Ok ( TaggedValue::from (NullValue::default ()) ) } } else if quote_state == 2 && ptr == self.d_quote.len () { if self.d_quote.contained_at (value, ptr) { return Ok ( TaggedValue::from (NullValue::default ()) ) } } */ } else { Err ( () ) } } } #[derive (Clone, Debug)] pub struct NullValue { style: u8, alias: Option<Cow<'static, str>> } impl NullValue { pub fn new (styles: CommonStyles, alias: Option<Cow<'static, str>>) -> NullValue { NullValue { style: if styles.issue_tag () { 1 } else { 0 }, alias: alias } } pub fn take_alias (&mut self) -> Option<Cow<'static, str>> { self.alias.take () } pub fn issue_tag (&self) -> bool { self.style & 1 == 1 } pub fn set_issue_tag (&mut self, val: bool) { if val { self.style |= 1; } else { self.style &= !1; } } } impl Default for NullValue { fn default () -> NullValue { NullValue { style: 0, alias: None } } } impl Tagged for NullValue { fn get_tag (&self) -> Cow<'static, str> { Cow::from (TAG) } fn as_any (&self) -> &Any { self as &Any } fn as_mut_any (&mut self) -> &mut Any { self as &mut Any } } impl AsRef<str> for NullValue { fn as_ref (&self) -> &'static str { "~" } } #[cfg (all (test, not (feature = "dev")))] mod tests { use super::*; use model::{ Tagged, Renderer }; use std::iter; #[test] fn tag () { let null = Null; assert_eq! (null.get_tag (), TAG); } #[test] fn encode () { let renderer = Renderer; let null = Null; if let Ok (rope) = null.encode (&renderer, TaggedValue::from (NullValue::default ()), &mut iter::empty ()) { let encode = rope.render (&renderer); assert_eq! (encode, "~".as_bytes ()); } else { assert! (false) } } #[test] fn decode () { let null = Null; let options = ["", "~", "null", "Null", "NULL"]; for i in 0 .. options.len () { if let Ok (tagged) = null.decode (true, options[i].as_bytes ()) { assert_eq! (tagged.get_tag (), Cow::from (TAG)); if let None = tagged.as_any ().downcast_ref::<NullValue> () { assert! (false) } } else { assert! (false) } } let decode = null.decode (true, "nil".as_bytes ()); assert! (decode.is_err ()); } }
extern crate skimmer; use model::{ model_issue_rope, EncodedString, Model, Node, Rope, Renderer, Tagged, TaggedValue }; use model::style::CommonStyles; use std::any::Any; use std::borrow::Cow; use std::default::Default; use std::iter::Iterator; pub static TAG: &'static str = "tag:yaml.org,2002:null"; #[derive (Copy, Clone, Debug)] pub struct Null; impl Null { pub fn get_tag () -> Cow<'static, str> { Cow::from (TAG) } fn read_null (&self, value: &[u8], ptr: usize) -> usize { match value.get (ptr).map (|b| *b) { Some (b'~') => 1, Some (b'n') => if value[ptr .. ].starts_with ("null".as_bytes ()) { 4 } else { 0 }, Some (b'N') => if value[ptr .. ].starts_with ("Null".as_bytes ()) || value[ptr .. ].starts_with ("NULL".as_bytes ()) { 4 } else { 0 }, _ => 0 } } } impl Model for Null { fn get_tag (&self) -> Cow<'static, str> { Self::get_tag () } fn as_any (&self) -> &Any { self } fn as_mut_any (&mut self) -> &mut Any { self } fn is_decodable (&self) -> bool { true } fn is_encodable (&self) -> bool { true } fn has_default (&self) -> bool { true } fn get_default (&self) -> TaggedValue { TaggedValue::from (NullValue::default ()) } fn encode (&self, _renderer: &Renderer, value: TaggedValue, tags: &mut Iterator<Item=&(Cow<'static, str>, Cow<'static, str>)>) -> Result<Rope, TaggedValue> { let mut val: NullValue = match <TaggedValue as Into<Result<NullValue, TaggedValue>>>::into (value) { Ok (value) => value, Err (value) => return Err (value) }; let issue_tag = val.issue_tag (); let alias = val.take_alias (); let node = Node::String (EncodedString::from ("~".as_bytes ())); Ok (model_issue_rope (self, node, issue_tag, alias, tags)) } fn decode (&self, explicit: bool, value: &[u8]) -> Result<TaggedValue, ()> { if value.len () == 0 { return Ok ( TaggedValue::from (NullValue::default ()) ) } let mut ptr: usize = 0; let mut quote_state: u8 = 0; if explicit { match value.get (ptr).map (|b| *b) { Some (b'\'') => { ptr += 1; quote_state = 1; } Some (b'"') => { ptr += 1; quote_state = 2; } _ => () }; /* if self.s_quote.contained_at_start (value) { ptr += self.s_quote.len (); quote_state = 1; } else if self.d_quote.contained_at_start (value) { ptr += self.d_quote.len (); quote_state = 2; } */ } let maybe_null = self.read_null (value, ptr); if maybe_null > 0 { ptr += maybe_null; if quote_state > 0 { match value.get (ptr).map (|b| *b) { Some (b'\'') if quote_state == 1 => (), Some (b'"') if quote_state == 2 => (), _ => return Err ( () ) }; /* if quote_state == 1 { if self.s_quote.contained_at (value, ptr) { // ptr += self.s_quote.len (); ?? } else { return Err ( () ) } } else if quote_state == 2 { if self.d_quote.contained_at (value, ptr) { // ptr += self.d_quote.len (); ?? } else { return Err ( () ) } } */ } return Ok ( Tagg
} #[derive (Clone, Debug)] pub struct NullValue { style: u8, alias: Option<Cow<'static, str>> } impl NullValue { pub fn new (styles: CommonStyles, alias: Option<Cow<'static, str>>) -> NullValue { NullValue { style: if styles.issue_tag () { 1 } else { 0 }, alias: alias } } pub fn take_alias (&mut self) -> Option<Cow<'static, str>> { self.alias.take () } pub fn issue_tag (&self) -> bool { self.style & 1 == 1 } pub fn set_issue_tag (&mut self, val: bool) { if val { self.style |= 1; } else { self.style &= !1; } } } impl Default for NullValue { fn default () -> NullValue { NullValue { style: 0, alias: None } } } impl Tagged for NullValue { fn get_tag (&self) -> Cow<'static, str> { Cow::from (TAG) } fn as_any (&self) -> &Any { self as &Any } fn as_mut_any (&mut self) -> &mut Any { self as &mut Any } } impl AsRef<str> for NullValue { fn as_ref (&self) -> &'static str { "~" } } #[cfg (all (test, not (feature = "dev")))] mod tests { use super::*; use model::{ Tagged, Renderer }; use std::iter; #[test] fn tag () { let null = Null; assert_eq! (null.get_tag (), TAG); } #[test] fn encode () { let renderer = Renderer; let null = Null; if let Ok (rope) = null.encode (&renderer, TaggedValue::from (NullValue::default ()), &mut iter::empty ()) { let encode = rope.render (&renderer); assert_eq! (encode, "~".as_bytes ()); } else { assert! (false) } } #[test] fn decode () { let null = Null; let options = ["", "~", "null", "Null", "NULL"]; for i in 0 .. options.len () { if let Ok (tagged) = null.decode (true, options[i].as_bytes ()) { assert_eq! (tagged.get_tag (), Cow::from (TAG)); if let None = tagged.as_any ().downcast_ref::<NullValue> () { assert! (false) } } else { assert! (false) } } let decode = null.decode (true, "nil".as_bytes ()); assert! (decode.is_err ()); } }
edValue::from (NullValue::default ()) ) } if quote_state > 0 { match value.get (ptr).map (|b| *b) { Some (b'\'') if quote_state == 1 => Ok ( TaggedValue::from (NullValue::default ()) ), Some (b'"') if quote_state == 2 => Ok ( TaggedValue::from (NullValue::default ()) ), _ => Err ( () ) } /* if quote_state == 1 && ptr == self.s_quote.len () { if self.s_quote.contained_at (value, ptr) { return Ok ( TaggedValue::from (NullValue::default ()) ) } } else if quote_state == 2 && ptr == self.d_quote.len () { if self.d_quote.contained_at (value, ptr) { return Ok ( TaggedValue::from (NullValue::default ()) ) } } */ } else { Err ( () ) } }
strategy: function_block-function_prefixed
[ { "content": "pub fn model_issue_rope (model: &Model, node: Node, issue_tag: bool, alias: Option<Cow<'static, str>>, tags: &mut Iterator<Item=&(Cow<'static, str>, Cow<'static, str>)>) -> Rope {\n\n if let Some (alias) = alias {\n\n if issue_tag {\n\n Rope::from (vec! [model_tag (model, tags), Node::Space, model_alias (model, alias), Node::Space, node])\n\n } else {\n\n Rope::from (vec! [model_alias (model, alias), Node::Space, node])\n\n }\n\n } else {\n\n if issue_tag {\n\n Rope::from (vec! [model_tag (model, tags), Node::Space, node])\n\n } else {\n\n Rope::from (node)\n\n }\n\n }\n\n}\n\n\n\n\n\n\n", "file_path": "src/model/mod.rs", "rank": 0, "score": 396812.373893331 }, { "content": "pub fn compose (model: &Model, renderer: &Renderer, value: TaggedValue, tags: &mut Iterator<Item=&(Cow<'static, str>, Cow<'static, str>)>, children: &mut [Rope]) -> Rope {\n\n let value = match <TaggedValue as Into<Result<MapValue, TaggedValue>>>::into (value) {\n\n Ok (value) => value,\n\n Err (_) => panic! (\"Not a MapValue\")\n\n };\n\n\n\n if children.len () == 0 {\n\n compose_empty (model, value, tags)\n\n } else if value.styles.flow () {\n\n if value.styles.multiline () {\n\n compose_flow_multiline (model, value, tags, children)\n\n } else if value.styles.respect_threshold () {\n\n compose_flow_respect_threshold (model, renderer, value, tags, children)\n\n } else {\n\n compose_flow_no_threshold (model, value, tags, children)\n\n }\n\n } else {\n\n compose_block (model, value, tags, children)\n\n }\n\n}\n\n\n\n\n", "file_path": "src/model/yaml/map.rs", "rank": 1, "score": 377870.7761388682 }, { "content": "pub fn compose (model: &Model, renderer: &Renderer, value: TaggedValue, tags: &mut Iterator<Item=&(Cow<'static, str>, Cow<'static, str>)>, children: &mut [Rope]) -> Rope {\n\n let value: SetValue = match <TaggedValue as Into<Result<SetValue, TaggedValue>>>::into (value) {\n\n Ok (value) => value,\n\n Err (_) => panic! (\"Not a SeqValue\")\n\n };\n\n\n\n if children.len () == 0 {\n\n return compose_empty (model, value, tags);\n\n }\n\n\n\n if value.styles.flow () {\n\n if value.styles.multiline () {\n\n compose_flow_multiline (model, value, tags, children)\n\n } else if value.styles.respect_threshold () {\n\n compose_flow_respect_threshold (model, renderer, value, tags, children)\n\n } else {\n\n compose_flow_no_threshold (model, value, tags, children)\n\n }\n\n } else {\n\n compose_block (model, value, tags, children)\n\n }\n\n}\n\n\n\n\n", "file_path": "src/model/yaml/set.rs", "rank": 2, "score": 377870.7761388682 }, { "content": "pub fn compose (model: &Model, renderer: &Renderer, value: TaggedValue, tags: &mut Iterator<Item=&(Cow<'static, str>, Cow<'static, str>)>, children: &mut [Rope]) -> Rope {\n\n let value: PairsValue = match <TaggedValue as Into<Result<PairsValue, TaggedValue>>>::into (value) {\n\n Ok (value) => value,\n\n Err (_) => panic! 
(\"Not a PairsValue\")\n\n };\n\n\n\n if children.len () == 0 {\n\n return compose_empty (model, value, tags);\n\n }\n\n\n\n if value.styles.flow () {\n\n if value.styles.multiline () {\n\n compose_flow_multiline (model, value, tags, children)\n\n } else if value.styles.respect_threshold () {\n\n compose_flow_respect_threshold (model, renderer, value, tags, children)\n\n } else {\n\n compose_flow_no_threshold (model, value, tags, children)\n\n }\n\n } else {\n\n compose_block (model, value, tags, children)\n\n }\n\n}\n\n\n\n\n\n\n", "file_path": "src/model/yaml/pairs.rs", "rank": 3, "score": 377870.7761388682 }, { "content": "pub fn compose (model: &Model, renderer: &Renderer, value: TaggedValue, tags: &mut Iterator<Item=&(Cow<'static, str>, Cow<'static, str>)>, children: &mut [Rope]) -> Rope {\n\n let value: SeqValue = match <TaggedValue as Into<Result<SeqValue, TaggedValue>>>::into (value) {\n\n Ok (value) => value,\n\n Err (_) => panic! (\"Not a SeqValue\")\n\n };\n\n\n\n if children.len () == 0 {\n\n return compose_empty (model, value, tags);\n\n }\n\n\n\n if value.styles.flow () {\n\n if value.styles.multiline () {\n\n compose_flow_multiline (model, value, tags, children)\n\n } else if value.styles.respect_threshold () {\n\n compose_flow_respect_threshold (model, renderer, value, tags, children)\n\n } else {\n\n compose_flow_no_threshold (model, value, tags, children)\n\n }\n\n } else {\n\n compose_block (model, value, tags, children)\n\n }\n\n}\n\n\n\n\n\n\n", "file_path": "src/model/yaml/seq.rs", "rank": 4, "score": 377870.7761388682 }, { "content": "pub fn model_tag (model: &Model, tags: &mut Iterator<Item=&(Cow<'static, str>, Cow<'static, str>)>) -> Node {\n\n match model.get_tag () {\n\n Cow::Borrowed (tag) => _model_tag_static_str (tag, tags),\n\n Cow::Owned (ref tag) => _model_tag_string (tag, tags)\n\n }\n\n}\n\n\n\n\n\n\n", "file_path": "src/model/mod.rs", "rank": 5, "score": 345735.6108719012 }, { "content": "fn compose_flow_respect_threshold (model: &Model, renderer: &Renderer, mut value: PairsValue, tags: &mut Iterator<Item=&(Cow<'static, str>, Cow<'static, str>)>, children: &mut [Rope]) -> Rope {\n\n let indent_len = value.styles.indent () as usize;\n\n let compact = value.styles.compact ();\n\n let threshold = value.styles.threshold () as usize;\n\n let issue_tag = value.styles.issue_tag ();\n\n let alias = value.take_alias ();\n\n\n\n let mut rope_length = 3;\n\n\n\n if issue_tag { rope_length += 2; }\n\n if alias.is_some () { rope_length += 2; }\n\n\n\n for child in children.iter () {\n\n rope_length += child.len () + 2;\n\n }\n\n\n\n let mut rope = Rope::with_capacity (rope_length);\n\n\n\n if issue_tag {\n\n rope.push (model_tag (model, tags));\n", "file_path": "src/model/yaml/pairs.rs", "rank": 6, "score": 337247.7383599436 }, { "content": "fn compose_flow_respect_threshold (model: &Model, renderer: &Renderer, mut value: MapValue, tags: &mut Iterator<Item=&(Cow<'static, str>, Cow<'static, str>)>, children: &mut [Rope]) -> Rope {\n\n let indent_len = value.styles.indent () as usize;\n\n let compact = value.styles.compact ();\n\n let threshold = value.styles.threshold () as usize;\n\n let issue_tag = value.styles.issue_tag ();\n\n let alias = value.take_alias ();\n\n\n\n let mut rope_length = 3;\n\n\n\n if issue_tag { rope_length += 2; }\n\n if alias.is_some () { rope_length += 2; }\n\n\n\n for child in children.iter () {\n\n rope_length += child.len () + 2;\n\n }\n\n\n\n let mut rope = Rope::with_capacity (rope_length);\n\n\n\n if issue_tag {\n\n rope.push (model_tag 
(model, tags));\n", "file_path": "src/model/yaml/map.rs", "rank": 7, "score": 337247.7383599436 }, { "content": "fn compose_flow_respect_threshold (model: &Model, renderer: &Renderer, mut value: SeqValue, tags: &mut Iterator<Item=&(Cow<'static, str>, Cow<'static, str>)>, children: &mut [Rope]) -> Rope {\n\n let indent_len = value.styles.indent () as usize;\n\n let compact = value.styles.compact ();\n\n let threshold = value.styles.threshold () as usize;\n\n let issue_tag = value.styles.issue_tag ();\n\n let alias = value.take_alias ();\n\n\n\n let mut rope_length = if compact { 2 } else { 3 }; // brackets\n\n\n\n if issue_tag { rope_length += 2; }\n\n if alias.is_some () { rope_length += 2; }\n\n\n\n for child in children.iter () {\n\n rope_length += child.len () + 2; // comma, space/newline\n\n }\n\n\n\n\n\n let mut rope = Rope::with_capacity (rope_length);\n\n\n\n if issue_tag {\n", "file_path": "src/model/yaml/seq.rs", "rank": 8, "score": 337247.7383599436 }, { "content": "fn compose_flow_respect_threshold (model: &Model, renderer: &Renderer, mut value: SetValue, tags: &mut Iterator<Item=&(Cow<'static, str>, Cow<'static, str>)>, children: &mut [Rope]) -> Rope {\n\n let indent_len = value.styles.indent () as usize;\n\n let compact = value.styles.compact ();\n\n let threshold = value.styles.threshold () as usize;\n\n let issue_tag = value.styles.issue_tag ();\n\n let alias = value.take_alias ();\n\n\n\n let mut rope_length = if compact { 2 } else { 3 }; // brackets\n\n\n\n if issue_tag { rope_length += 2; }\n\n if alias.is_some () { rope_length += 2; }\n\n\n\n for child in children.iter () {\n\n rope_length += child.len () + 2; // comma, space/newline\n\n }\n\n\n\n\n\n let mut rope = Rope::with_capacity (rope_length);\n\n\n\n if issue_tag {\n", "file_path": "src/model/yaml/set.rs", "rank": 9, "score": 337247.7383599436 }, { "content": "fn compose_block (model: &Model, mut value: SeqValue, tags: &mut Iterator<Item=&(Cow<'static, str>, Cow<'static, str>)>, children: &mut [Rope]) -> Rope {\n\n let indent_len = value.styles.indent () as usize;\n\n let issue_tag = value.styles.issue_tag ();\n\n let alias = value.take_alias ();\n\n\n\n let mut rope_length = 1;\n\n\n\n if issue_tag { rope_length += 2; }\n\n if alias.is_some () { rope_length += 2; }\n\n\n\n for child in children.iter () {\n\n rope_length += child.len () + 1;\n\n }\n\n\n\n let mut rope = Rope::with_capacity (rope_length);\n\n\n\n if issue_tag {\n\n rope.push (model_tag (model, tags));\n\n if let Some (alias) = alias {\n\n rope.push (Node::Space);\n", "file_path": "src/model/yaml/seq.rs", "rank": 10, "score": 329472.0379618025 }, { "content": "fn compose_block (model: &Model, mut value: SetValue, tags: &mut Iterator<Item=&(Cow<'static, str>, Cow<'static, str>)>, children: &mut [Rope]) -> Rope {\n\n let indent_len = value.styles.indent () as usize;\n\n let issue_tag = value.styles.issue_tag ();\n\n let alias = value.take_alias ();\n\n\n\n let mut rope_length = 1;\n\n\n\n if issue_tag { rope_length += 2; }\n\n if alias.is_some () { rope_length += 2; }\n\n\n\n for child in children.iter () {\n\n rope_length += child.len () + 1;\n\n }\n\n\n\n let mut rope = Rope::with_capacity (rope_length);\n\n\n\n if issue_tag {\n\n rope.push (model_tag (model, tags));\n\n if let Some (alias) = alias {\n\n rope.push (Node::Space);\n", "file_path": "src/model/yaml/set.rs", "rank": 11, "score": 329472.0379618025 }, { "content": "fn compose_block (model: &Model, mut value: PairsValue, tags: &mut Iterator<Item=&(Cow<'static, str>, Cow<'static, str>)>, 
children: &mut [Rope]) -> Rope {\n\n let indent_len = value.styles.indent () as usize;\n\n let issue_tag = value.styles.issue_tag ();\n\n let alias = value.take_alias ();\n\n\n\n let mut rope_length = if issue_tag { 3 } else { 1 };\n\n for child in children.iter () { rope_length += child.len () + 3; }\n\n if alias.is_some () { rope_length += 2; }\n\n\n\n let mut rope = Rope::with_capacity (rope_length);\n\n\n\n if issue_tag {\n\n rope.push (model_tag (model, tags));\n\n if let Some (alias) = alias {\n\n rope.push (Node::Space);\n\n rope.push (model_alias (model, alias));\n\n }\n\n rope.push (Node::NewlineIndent (0));\n\n } else if let Some (alias) = alias {\n\n rope.push (model_alias (model, alias));\n", "file_path": "src/model/yaml/pairs.rs", "rank": 12, "score": 329472.0379618025 }, { "content": "fn compose_block (model: &Model, mut value: MapValue, tags: &mut Iterator<Item=&(Cow<'static, str>, Cow<'static, str>)>, children: &mut [Rope]) -> Rope {\n\n let indent_len = value.styles.indent () as usize;\n\n let issue_tag = value.styles.issue_tag ();\n\n let alias = value.take_alias ();\n\n\n\n let mut rope_length = if issue_tag { 3 } else { 1 };\n\n for child in children.iter () { rope_length += child.len () + 2; }\n\n if alias.is_some () { rope_length += 2; }\n\n\n\n let mut rope = Rope::with_capacity (rope_length);\n\n\n\n if issue_tag {\n\n rope.push (model_tag (model, tags));\n\n if let Some (alias) = alias {\n\n rope.push (Node::Space);\n\n rope.push (model_alias (model, alias));\n\n }\n\n rope.push (Node::NewlineIndent (0));\n\n } else if let Some (alias) = alias {\n\n rope.push (model_alias (model, alias));\n", "file_path": "src/model/yaml/map.rs", "rank": 13, "score": 329472.0379618025 }, { "content": "fn _model_tag_static_str (tag: &'static str, tags: &mut Iterator<Item=&(Cow<'static, str>, Cow<'static, str>)>) -> Node {\n\n for &(ref shortcut, ref handle) in tags {\n\n let h = handle.as_ref ();\n\n if h.len () == 0 || h.contains (' ') { continue; }\n\n\n\n if tag.starts_with (h) {\n\n return match *shortcut {\n\n Cow::Borrowed (s) => {\n\n let f = s.as_bytes ();\n\n let l = tag[h.len () ..].as_bytes ();\n\n Node::StringConcat (EncodedString::from (f), EncodedString::from (l))\n\n }\n\n Cow::Owned (ref s) => {\n\n let mut string = Vec::with_capacity (s.len () + (tag.len () - h.len ()));\n\n\n\n string.extend (s.as_bytes ());\n\n string.extend (tag[h.len () ..].as_bytes ());\n\n\n\n Node::String (EncodedString::from (string))\n\n }\n\n }\n\n }\n\n }\n\n\n\n Node::StringSpecificTag (EncodedString::from (tag.as_bytes ()))\n\n}\n\n\n\n\n\n\n", "file_path": "src/model/mod.rs", "rank": 14, "score": 327321.7598120811 }, { "content": "pub fn model_alias (_model: &Model, alias: Cow<'static, str>) -> Node {\n\n match alias {\n\n Cow::Borrowed (alias) => Node::AmpersandString (EncodedString::from (alias.as_bytes ())),\n\n Cow::Owned (alias) => Node::AmpersandString (EncodedString::from (alias.into_bytes ()))\n\n }\n\n}\n\n\n\n\n\n\n", "file_path": "src/model/mod.rs", "rank": 15, "score": 325184.50878053973 }, { "content": "fn compose_flow_multiline (model: &Model, mut value: SeqValue, tags: &mut Iterator<Item=&(Cow<'static, str>, Cow<'static, str>)>, children: &mut [Rope]) -> Rope {\n\n let indent_len = value.styles.indent () as usize;\n\n let issue_tag = value.styles.issue_tag ();\n\n let alias = value.take_alias ();\n\n\n\n let mut rope_length = 3;\n\n\n\n if issue_tag { rope_length += 2; }\n\n if alias.is_some () { rope_length += 2; }\n\n\n\n for child in children.iter () {\n\n rope_length 
+= child.len () + 1; // comma/comma+newline\n\n }\n\n\n\n let mut rope = Rope::with_capacity (rope_length);\n\n\n\n if issue_tag {\n\n rope.push (model_tag (model, tags));\n\n if let Some (alias) = alias {\n\n rope.push (Node::Space);\n", "file_path": "src/model/yaml/seq.rs", "rank": 16, "score": 324947.46816216304 }, { "content": "fn compose_flow_no_threshold (model: &Model, mut value: MapValue, tags: &mut Iterator<Item=&(Cow<'static, str>, Cow<'static, str>)>, children: &mut [Rope]) -> Rope {\n\n let indent_len = value.styles.indent () as usize;\n\n let compact = value.styles.compact ();\n\n let issue_tag = value.styles.issue_tag ();\n\n let alias = value.take_alias ();\n\n\n\n let mut rope_length = 3;\n\n\n\n if issue_tag { rope_length += 2; }\n\n if alias.is_some () { rope_length += 2; }\n\n\n\n for child in children.iter () {\n\n rope_length += child.len () + 1; // colon+space / comma+newline\n\n }\n\n\n\n let mut rope = Rope::with_capacity (rope_length);\n\n\n\n if issue_tag {\n\n rope.push (model_tag (model, tags));\n\n if let Some (alias) = alias {\n", "file_path": "src/model/yaml/map.rs", "rank": 17, "score": 324947.4681621631 }, { "content": "fn compose_flow_no_threshold (model: &Model, mut value: PairsValue, tags: &mut Iterator<Item=&(Cow<'static, str>, Cow<'static, str>)>, children: &mut [Rope]) -> Rope {\n\n let indent_len = value.styles.indent () as usize;\n\n let compact = value.styles.compact ();\n\n let issue_tag = value.styles.issue_tag ();\n\n let alias = value.take_alias ();\n\n\n\n let mut rope_length = 3;\n\n\n\n if issue_tag { rope_length += 2; }\n\n if alias.is_some () { rope_length += 2; }\n\n\n\n for child in children.iter () {\n\n rope_length += child.len () + 1; // colon+space / comma+newline\n\n }\n\n\n\n let mut rope = Rope::with_capacity (rope_length);\n\n\n\n if issue_tag {\n\n rope.push (model_tag (model, tags));\n\n if let Some (alias) = alias {\n", "file_path": "src/model/yaml/pairs.rs", "rank": 18, "score": 324947.4681621631 }, { "content": "fn compose_flow_no_threshold (model: &Model, mut value: SeqValue, tags: &mut Iterator<Item=&(Cow<'static, str>, Cow<'static, str>)>, children: &mut [Rope]) -> Rope {\n\n let indent_len = value.styles.indent () as usize;\n\n let compact = value.styles.compact ();\n\n let issue_tag = value.styles.issue_tag ();\n\n let alias = value.take_alias ();\n\n\n\n let mut rope_length = if compact { 2 } else { 3 };\n\n\n\n if issue_tag { rope_length += 2; }\n\n if alias.is_some () { rope_length += 2; }\n\n\n\n for child in children.iter () {\n\n rope_length += child.len () + 1; // comma/comma+space\n\n }\n\n\n\n let mut rope = Rope::with_capacity (rope_length);\n\n\n\n if issue_tag {\n\n rope.push (model_tag (model, tags));\n\n if let Some (alias) = alias {\n", "file_path": "src/model/yaml/seq.rs", "rank": 19, "score": 324947.4681621631 }, { "content": "fn compose_flow_multiline (model: &Model, mut value: PairsValue, tags: &mut Iterator<Item=&(Cow<'static, str>, Cow<'static, str>)>, children: &mut [Rope]) -> Rope {\n\n let indent_len = value.styles.indent () as usize;\n\n let issue_tag = value.styles.issue_tag ();\n\n let alias = value.take_alias ();\n\n\n\n let mut rope_length = 3;\n\n\n\n if issue_tag { rope_length += 2; }\n\n if alias.is_some () { rope_length += 2; }\n\n\n\n for child in children.iter () {\n\n rope_length += child.len () + 1; // colon+space / comma+newline\n\n }\n\n\n\n let mut rope = Rope::with_capacity (rope_length);\n\n\n\n if issue_tag {\n\n rope.push (model_tag (model, tags));\n\n if let Some (alias) = 
alias {\n\n rope.push (Node::Space);\n", "file_path": "src/model/yaml/pairs.rs", "rank": 20, "score": 324947.4681621631 }, { "content": "fn compose_flow_no_threshold (model: &Model, mut value: SetValue, tags: &mut Iterator<Item=&(Cow<'static, str>, Cow<'static, str>)>, children: &mut [Rope]) -> Rope {\n\n let indent_len = value.styles.indent () as usize;\n\n let compact = value.styles.compact ();\n\n let issue_tag = value.styles.issue_tag ();\n\n let alias = value.take_alias ();\n\n\n\n let mut rope_length = if compact { 2 } else { 3 };\n\n\n\n if issue_tag { rope_length += 2; }\n\n if alias.is_some () { rope_length += 2; }\n\n\n\n for child in children.iter () {\n\n rope_length += child.len () + 1; // comma/comma+space\n\n }\n\n\n\n let mut rope = Rope::with_capacity (rope_length);\n\n\n\n if issue_tag {\n\n rope.push (model_tag (model, tags));\n\n if let Some (alias) = alias {\n", "file_path": "src/model/yaml/set.rs", "rank": 21, "score": 324947.4681621631 }, { "content": "fn compose_flow_multiline (model: &Model, mut value: SetValue, tags: &mut Iterator<Item=&(Cow<'static, str>, Cow<'static, str>)>, children: &mut [Rope]) -> Rope {\n\n let indent_len = value.styles.indent () as usize;\n\n let issue_tag = value.styles.issue_tag ();\n\n let alias = value.take_alias ();\n\n\n\n let mut rope_length = 3;\n\n\n\n if issue_tag { rope_length += 2; }\n\n if alias.is_some () { rope_length += 2; }\n\n\n\n for child in children.iter () {\n\n rope_length += child.len () + 1; // comma/comma+newline\n\n }\n\n\n\n let mut rope = Rope::with_capacity (rope_length);\n\n\n\n if issue_tag {\n\n rope.push (model_tag (model, tags));\n\n if let Some (alias) = alias {\n\n rope.push (Node::Space);\n", "file_path": "src/model/yaml/set.rs", "rank": 22, "score": 324947.4681621631 }, { "content": "fn compose_flow_multiline (model: &Model, mut value: MapValue, tags: &mut Iterator<Item=&(Cow<'static, str>, Cow<'static, str>)>, children: &mut [Rope]) -> Rope {\n\n let indent_len = value.styles.indent () as usize;\n\n let issue_tag = value.styles.issue_tag ();\n\n let alias = value.take_alias ();\n\n\n\n let mut rope_length = 3;\n\n\n\n if issue_tag { rope_length += 2; }\n\n if alias.is_some () { rope_length += 2; }\n\n\n\n for child in children.iter () {\n\n rope_length += child.len () + 1; // colon+space / comma+newline\n\n }\n\n\n\n let mut rope = Rope::with_capacity (rope_length);\n\n\n\n if issue_tag {\n\n rope.push (model_tag (model, tags));\n\n if let Some (alias) = alias {\n\n rope.push (Node::Space);\n", "file_path": "src/model/yaml/map.rs", "rank": 23, "score": 324947.4681621631 }, { "content": "fn compose_empty (model: &Model, mut value: SeqValue, tags: &mut Iterator<Item=&(Cow<'static, str>, Cow<'static, str>)>) -> Rope {\n\n if let Some (alias) = value.take_alias () {\n\n if value.styles.issue_tag () {\n\n Rope::from (vec! [model_tag (model, tags), Node::Space, model_alias (model, alias), Node::Space, Node::SquareBrackets])\n\n } else {\n\n Rope::from (vec! [model_alias (model, alias), Node::Space, Node::SquareBrackets])\n\n }\n\n } else {\n\n if value.styles.issue_tag () {\n\n Rope::from (vec! 
[model_tag (model, tags), Node::Space, Node::SquareBrackets])\n\n } else {\n\n Rope::from (Node::SquareBrackets)\n\n }\n\n }\n\n}\n\n\n\n\n\n\n", "file_path": "src/model/yaml/seq.rs", "rank": 24, "score": 321774.3524052104 }, { "content": "fn compose_empty (model: &Model, mut value: MapValue, tags: &mut Iterator<Item=&(Cow<'static, str>, Cow<'static, str>)>) -> Rope {\n\n if let Some (alias) = value.take_alias () {\n\n if value.styles.issue_tag () {\n\n Rope::from (vec! [model_tag (model, tags), Node::Space, model_alias (model, alias), Node::Space, Node::CurlyBrackets])\n\n } else {\n\n Rope::from (vec! [model_alias (model, alias), Node::Space, Node::CurlyBrackets])\n\n }\n\n } else {\n\n if value.styles.issue_tag () {\n\n Rope::from (vec! [model_tag (model, tags), Node::Space, Node::CurlyBrackets])\n\n } else {\n\n Rope::from (Node::CurlyBrackets)\n\n }\n\n }\n\n}\n\n\n\n\n", "file_path": "src/model/yaml/map.rs", "rank": 25, "score": 321774.3524052104 }, { "content": "fn compose_empty (model: &Model, mut value: PairsValue, tags: &mut Iterator<Item=&(Cow<'static, str>, Cow<'static, str>)>) -> Rope {\n\n if let Some (alias) = value.take_alias () {\n\n if value.styles.issue_tag () {\n\n Rope::from (vec! [model_tag (model, tags), Node::Space, model_alias (model, alias), Node::Space, Node::SquareBrackets])\n\n } else {\n\n Rope::from (vec! [model_alias (model, alias), Node::Space, Node::SquareBrackets])\n\n }\n\n } else {\n\n if value.styles.issue_tag () {\n\n Rope::from (vec! [model_tag (model, tags), Node::Space, Node::SquareBrackets])\n\n } else {\n\n Rope::from(Node::SquareBrackets)\n\n }\n\n }\n\n}\n\n\n\n\n\n\n", "file_path": "src/model/yaml/pairs.rs", "rank": 26, "score": 321774.3524052104 }, { "content": "fn compose_empty (model: &Model, mut value: SetValue, tags: &mut Iterator<Item=&(Cow<'static, str>, Cow<'static, str>)>) -> Rope {\n\n if let Some (alias) = value.take_alias () {\n\n if value.styles.issue_tag () {\n\n Rope::from (vec! [model_tag (model, tags), Node::Space, model_alias (model, alias), Node::Space, Node::CurlyBrackets])\n\n } else {\n\n Rope::from (vec! [model_alias (model, alias), Node::Space, Node::CurlyBrackets])\n\n }\n\n } else {\n\n if value.styles.issue_tag () {\n\n Rope::from (vec! 
[model_tag (model, tags), Node::Space, Node::CurlyBrackets])\n\n } else {\n\n Rope::from(Node::CurlyBrackets)\n\n }\n\n }\n\n}\n\n\n\n\n", "file_path": "src/model/yaml/set.rs", "rank": 27, "score": 321774.3524052104 }, { "content": "fn _model_tag_string (tag: &str, tags: &mut Iterator<Item=&(Cow<'static, str>, Cow<'static, str>)>) -> Node {\n\n for &(ref shortcut, ref handle) in tags {\n\n let h = handle.as_ref ();\n\n if h.len () == 0 || h.contains (' ') { continue; }\n\n\n\n if tag.starts_with (h) {\n\n return match *shortcut {\n\n Cow::Borrowed (s) => {\n\n let f = s.as_bytes ();\n\n let l = tag[h.len () ..].as_bytes ();\n\n\n\n let mut string = Vec::with_capacity (f.len () + l.len ());\n\n string.extend (f);\n\n string.extend (l);\n\n Node::String (EncodedString::from (string))\n\n\n\n }\n\n Cow::Owned (ref s) => {\n\n let mut string = Vec::with_capacity (s.len () + (tag.len () - h.len ()));\n\n\n", "file_path": "src/model/mod.rs", "rank": 28, "score": 316804.75057305914 }, { "content": "pub fn apply_styles (tagged: &mut Tagged, styles: &mut [&mut Style]) {\n\n for style in styles {\n\n style.tagged_styles_apply (tagged);\n\n }\n\n}\n\n\n\n\n\n\n", "file_path": "src/orchestra/chord.rs", "rank": 29, "score": 207584.61471611806 }, { "content": "pub fn line_at<Reader: Read> (reader: &mut Reader, at: usize) -> usize {\n\n let mut scanned = at;\n\n loop {\n\n match reader.get_byte_at (scanned) {\n\n None |\n\n Some (b'\\n') |\n\n Some (b'\\r') => break,\n\n _ => scanned += 1\n\n };\n\n }\n\n scanned - at\n\n}\n\n\n\n\n\n\n\n\n", "file_path": "src/reader/tokenizer.rs", "rank": 30, "score": 173092.94386841904 }, { "content": "pub fn line<Reader: Read> (reader: &mut Reader) -> usize {\n\n let mut scanned = 0;\n\n loop {\n\n match reader.get_byte_at (scanned) {\n\n None |\n\n Some (b'\\n') |\n\n Some (b'\\r') => break,\n\n _ => scanned += 1\n\n };\n\n }\n\n scanned\n\n}\n\n\n\n\n\n\n", "file_path": "src/reader/tokenizer.rs", "rank": 31, "score": 170083.83097554918 }, { "content": "pub fn anchor_stops<Reader: Read> (reader: &mut Reader, at: usize) -> usize {\n\n let mut scanned = at;\n\n\n\n loop {\n\n match reader.get_byte_at (scanned) {\n\n None |\n\n Some (b' ') |\n\n Some (b'\\t') |\n\n Some (b'\\n') |\n\n Some (b'\\r') |\n\n Some (b'{') |\n\n Some (b'}') |\n\n Some (b'[') |\n\n Some (b']') => break,\n\n _ => scanned += 1\n\n };\n\n }\n\n\n\n scanned - at\n\n}\n\n\n\n\n\n\n", "file_path": "src/reader/tokenizer.rs", "rank": 32, "score": 168906.23453205446 }, { "content": "pub fn raw_stops<Reader: Read> (reader: &mut Reader) -> usize {\n\n let mut scanned = 0;\n\n\n\n loop {\n\n match reader.get_byte_at (scanned) {\n\n None |\n\n Some (b'\\n') |\n\n Some (b'\\r') |\n\n Some (b'{') |\n\n Some (b'}') |\n\n Some (b'[') |\n\n Some (b']') |\n\n Some (b'#') |\n\n Some (b':') |\n\n Some (b',') => break,\n\n _ => scanned += 1\n\n };\n\n }\n\n\n\n scanned\n\n}\n\n\n\n\n\n\n", "file_path": "src/reader/tokenizer.rs", "rank": 33, "score": 165640.28264790485 }, { "content": "#[inline (always)]\n\nfn is<T: BitAnd<Output=T> + Eq + Copy> (state: T, val: T) -> bool { val == state & val }\n\n\n", "file_path": "src/reader/reader.rs", "rank": 34, "score": 165567.60790742666 }, { "content": "pub fn scan_while_spaces_and_tabs<Reader: Read> (reader: &mut Reader, at: usize) -> usize {\n\n let mut scanned = at;\n\n\n\n loop {\n\n match reader.get_byte_at (scanned) {\n\n Some (b' ') |\n\n Some (b'\\t') => { scanned += 1; continue }\n\n _ => break\n\n }\n\n }\n\n\n\n scanned - at\n\n}\n\n\n\n\n\n\n", "file_path": 
"src/reader/tokenizer.rs", "rank": 35, "score": 164974.8721617015 }, { "content": "pub fn scan_single_quoted<R: Read> (reader: &mut R) -> usize {\n\n let mut scanned = if let Some (b'\\'') = reader.get_byte_at_start () {\n\n 1\n\n } else { return 0 };\n\n\n\n loop {\n\n match reader.get_byte_at (scanned) {\n\n None => break,\n\n Some (b'\\'') => match reader.get_byte_at (scanned + 1) {\n\n Some (b'\\'') => { scanned += 2 }\n\n _ => { scanned += 1; break }\n\n },\n\n _ => scanned += 1\n\n }\n\n }\n\n\n\n scanned\n\n}\n\n\n\n\n\n\n", "file_path": "src/reader/tokenizer.rs", "rank": 36, "score": 161497.75659105464 }, { "content": "pub fn scan_double_quoted<R: Read> (reader: &mut R) -> usize {\n\n let mut scanned = if let Some (b'\"') = reader.get_byte_at_start () {\n\n 1\n\n } else { return 0 };\n\n\n\n loop {\n\n match reader.get_byte_at (scanned) {\n\n None => break,\n\n Some (b'\"') => { scanned += 1; break },\n\n Some (b'\\\\') => match reader.get_byte_at (scanned + 1) {\n\n Some (b'\"') |\n\n Some (b'\\\\') => { scanned += 2; },\n\n None => { scanned += 1; break; }\n\n _ => { scanned += 1; }\n\n },\n\n _ => scanned += 1\n\n };\n\n }\n\n\n\n scanned\n\n}\n\n\n\n\n", "file_path": "src/reader/tokenizer.rs", "rank": 37, "score": 161497.7565910546 }, { "content": "pub fn scan_until_colon_and_line_breakers<Reader: Read> (reader: &mut Reader, at: usize) -> usize {\n\n let mut scanned = at;\n\n loop {\n\n match reader.get_byte_at (scanned) {\n\n None |\n\n Some (b':') |\n\n Some (b'\\n') |\n\n Some (b'\\r') => break,\n\n _ => scanned += 1\n\n };\n\n }\n\n scanned - at\n\n}\n\n\n\n\n\n\n", "file_path": "src/reader/tokenizer.rs", "rank": 38, "score": 161275.8474052527 }, { "content": "pub fn scan_one_line_breaker<Reader: Read> (reader: &mut Reader, at: usize) -> usize {\n\n match reader.get_byte_at (at) {\n\n Some (b'\\n') => 1,\n\n Some (b'\\r') => if let Some (b'\\n') = reader.get_byte_at (at + 1) { 2 } else { 1 },\n\n _ => 0\n\n }\n\n}\n\n\n\n\n\n\n\n\n", "file_path": "src/reader/tokenizer.rs", "rank": 39, "score": 161275.8474052527 }, { "content": "pub fn scan_until_question_and_line_breakers<Reader: Read> (reader: &mut Reader, at: usize) -> usize {\n\n let mut scanned = at;\n\n loop {\n\n match reader.get_byte_at (scanned) {\n\n None |\n\n Some (b'?') |\n\n Some (b'\\n') |\n\n Some (b'\\r') => break,\n\n _ => scanned += 1\n\n };\n\n }\n\n scanned - at\n\n}\n\n\n\n\n\n\n\n\n", "file_path": "src/reader/tokenizer.rs", "rank": 40, "score": 161275.8474052527 }, { "content": "pub fn scan_while_spaces_and_line_breakers<Reader: Read> (reader: &mut Reader, at: usize) -> usize {\n\n let mut scanned = at;\n\n\n\n loop {\n\n match reader.get_byte_at (scanned) {\n\n Some (b' ') => scanned += 1,\n\n Some (b'\\n') => scanned += 1,\n\n Some (b'\\t') => scanned += 1,\n\n Some (b'\\r') => scanned += if let Some (b'\\n') = reader.get_byte_at (scanned + 1) { 2 } else { 1 },\n\n _ => break\n\n };\n\n }\n\n\n\n scanned - at\n\n}\n\n\n\n\n\n\n", "file_path": "src/reader/tokenizer.rs", "rank": 41, "score": 161275.8474052527 }, { "content": "pub fn get_token<Reader: Read> (reader: &mut Reader) -> Option<(Token, usize, usize)> {\n\n match reader.get_byte_at_start () {\n\n None => return None,\n\n Some (b',') => return Some ((Token::Comma, 1, 1)),\n\n Some (b':') => return Some ((Token::Colon, 1, 1)),\n\n Some (b'{') => return Some ((Token::DictionaryStart, 1, 1)),\n\n Some (b'}') => return Some ((Token::DictionaryEnd, 1, 1)),\n\n Some (b'[') => return Some ((Token::SequenceStart, 1, 1)),\n\n Some (b']') => return Some 
((Token::SequenceEnd, 1, 1)),\n\n Some (b'>') => return Some ((Token::GT, 1, 1)),\n\n Some (b'|') => return Some ((Token::Pipe, 1, 1)),\n\n Some (b'\"') => return Some ((Token::StringDouble, scan_double_quoted (reader), 1)),\n\n Some (b'\\'') => return Some ((Token::StringSingle, scan_single_quoted (reader), 1)),\n\n Some (b'#') => return Some ((Token::Comment, line_at (reader, 1) + 1, 1)),\n\n Some (b'*') => return Some ((Token::Alias, alias_stops (reader), 1)),\n\n Some (b'&') => return Some ((Token::Anchor, anchor_stops (reader, 1) + 1, 1)),\n\n Some (b'.') => {\n\n if let Some ((b'.', b'.')) = reader.get_bytes_2_at (1) { return Some ((Token::DocumentEnd, 3, 0)) };\n\n },\n\n\n", "file_path": "src/reader/tokenizer.rs", "rank": 42, "score": 158879.86459906332 }, { "content": "fn tag_flow_stops<Reader: Read> (reader: &mut Reader, at: usize) -> usize {\n\n let mut scanned = at;\n\n\n\n loop {\n\n match reader.get_byte_at (scanned) {\n\n None |\n\n Some (b' ') |\n\n Some (b'\\t') |\n\n Some (b'\\n') |\n\n Some (b'\\r') |\n\n Some (b'{') |\n\n Some (b'}') |\n\n Some (b'[') |\n\n Some (b']') |\n\n Some (b':') |\n\n Some (b',') => break,\n\n _ => scanned += 1\n\n };\n\n }\n\n\n\n scanned - at\n\n}\n\n\n\n\n\n\n", "file_path": "src/reader/tokenizer.rs", "rank": 43, "score": 158146.27592950643 }, { "content": "fn alias_stops<Reader: Read> (reader: &mut Reader) -> usize {\n\n let mut scanned = 0;\n\n\n\n loop {\n\n match reader.get_byte_at (scanned) {\n\n None => break,\n\n\n\n Some (b' ') |\n\n Some (b'\\t') |\n\n Some (b'\\n') |\n\n Some (b'\\r') |\n\n Some (b'{') |\n\n Some (b'}') |\n\n Some (b'[') |\n\n Some (b']') => break,\n\n\n\n Some (b':') => match reader.get_byte_at (scanned + 1) {\n\n Some (b' ') |\n\n Some (b'\\t') |\n\n Some (b'\\n') |\n", "file_path": "src/reader/tokenizer.rs", "rank": 44, "score": 158096.36410824515 }, { "content": "pub fn scan_one_spaces_and_line_breakers<Reader: Read> (reader: &mut Reader, at: usize) -> usize {\n\n match reader.get_byte_at (at) {\n\n Some (b' ') => 1,\n\n Some (b'\\n') => 1,\n\n Some (b'\\t') => 1,\n\n Some (b'\\r') => if let Some (b'\\n') = reader.get_byte_at (at + 1) { 2 } else { 1 },\n\n _ => 0\n\n }\n\n}\n\n\n\n\n\n\n", "file_path": "src/reader/tokenizer.rs", "rank": 45, "score": 157788.86621832446 }, { "content": "pub fn scan_one_colon_and_line_breakers<Reader: Read> (reader: &mut Reader, at: usize) -> usize {\n\n match reader.get_byte_at (at) {\n\n Some (b':') => 1,\n\n Some (b'\\n') => 1,\n\n Some (b'\\r') => {\n\n if let Some (b'\\n') = reader.get_byte_at (at + 1) { 2 } else { 1 }\n\n }\n\n _ => 0\n\n }\n\n}\n\n\n\n\n\n\n", "file_path": "src/reader/tokenizer.rs", "rank": 46, "score": 157788.86621832446 }, { "content": "#[inline (always)]\n\nfn on<T: BitOr<Output=T> + Copy> (state: &mut T, val: T) { *state = *state | val; }\n\n\n\n#[inline (always)]\n", "file_path": "src/reader/reader.rs", "rank": 47, "score": 157187.68942518308 }, { "content": "#[inline (always)]\n\nfn not<T: BitAnd<Output=T> + Eq + Copy> (state: T, val: T) -> bool { !is (state, val) }\n\n\n", "file_path": "src/reader/reader.rs", "rank": 48, "score": 155542.01877051685 }, { "content": "pub trait Tagged : Any {\n\n fn get_tag (&self) -> Cow<'static, str>;\n\n\n\n fn as_any (&self) -> &Any;\n\n\n\n fn as_mut_any (&mut self) -> &mut Any;\n\n}\n\n\n\n\n\n\n\n\n", "file_path": "src/model/mod.rs", "rank": 49, "score": 144544.3053263571 }, { "content": "// fn off<T: BitAnd<Output=T> + Eq + BitXor<Output=T> + Copy> (state: &mut T, val: T) { *state = *state ^ (val & *state) 
}\n\nfn off<T: BitAnd<Output=T> + Not<Output=T> + Eq + Copy> (state: &mut T, val: T) { *state = *state & !val }\n\n\n\n\n\n\n\n#[derive (Debug)]\n\npub struct ReadError {\n\n pub position: usize,\n\n pub description: Cow<'static, str>\n\n}\n\n\n\n\n\n\n\nimpl fmt::Display for ReadError {\n\n fn fmt (&self, fmtter: &mut fmt::Formatter) -> fmt::Result {\n\n write! (fmtter, \"{}\", self.description)\n\n }\n\n}\n\n\n\n\n\n\n", "file_path": "src/reader/reader.rs", "rank": 50, "score": 141676.78998884096 }, { "content": "pub trait Model : Send + Sync {\n\n // type Char: CopySymbol;\n\n // type DoubleChar: CopySymbol + Combo;\n\n\n\n fn get_tag (&self) -> Cow<'static, str>;\n\n\n\n fn as_any (&self) -> &Any;\n\n\n\n fn as_mut_any (&mut self) -> &mut Any;\n\n\n\n\n\n // fn get_encoding (&self) -> Encoding;\n\n\n\n\n\n fn is_collection (&self) -> bool { self.is_dictionary () || self.is_sequence () }\n\n\n\n fn is_sequence (&self) -> bool { false }\n\n\n\n fn is_dictionary (&self) -> bool { false }\n\n\n", "file_path": "src/model/mod.rs", "rank": 51, "score": 128613.53505760942 }, { "content": "\n\n TaggedValue::Other (_, ref mut value) => value\n\n }\n\n }\n\n}\n\n\n\n\n\n\n\nmacro_rules! impl_from_into {\n\n ( $constructor:path => $value:ty ) => {\n\n impl From<$value> for TaggedValue {\n\n fn from (value: $value) -> Self { $constructor (value) }\n\n }\n\n\n\n\n\n impl Into<Result<$value, TaggedValue>> for TaggedValue {\n\n fn into (self) -> Result<$value, Self> {\n\n match self {\n\n $constructor (v) => Ok (v),\n\n _ => Err (self)\n", "file_path": "src/model/tagged_value.rs", "rank": 52, "score": 126376.58283721741 }, { "content": "\n\n\n\nuse std::any::Any;\n\nuse std::borrow::Cow;\n\n\n\n\n\n\n\n#[derive (Debug)]\n\npub enum TaggedValue {\n\n Binary (BinaryValue),\n\n Bool (BoolValue),\n\n Float (FloatValue),\n\n Int (IntValue),\n\n Map (MapValue),\n\n Merge (MergeValue),\n\n Null (NullValue),\n\n Omap (OmapValue),\n\n Pairs (PairsValue),\n\n Seq (SeqValue),\n\n Set (SetValue),\n", "file_path": "src/model/tagged_value.rs", "rank": 53, "score": 126373.31842730497 }, { "content": " Str (StrValue),\n\n Timestamp (TimestampValue),\n\n Value (ValueValue),\n\n Yaml (YamlValue),\n\n\n\n Literal (LiteralValue),\n\n Incognitum (IncognitumValue),\n\n\n\n Other (Cow<'static, str>, Box<Any + Send>)\n\n}\n\n\n\n\n\n\n\nimpl TaggedValue {\n\n pub fn new (tag: Cow<'static, str>, value: Box<Any + Send>) -> Self {\n\n TaggedValue::Other (tag, value)\n\n }\n\n\n\n\n\n pub fn get_boxed (self) -> Box<Any + Send> {\n", "file_path": "src/model/tagged_value.rs", "rank": 54, "score": 126372.7153958452 }, { "content": "use model::Tagged;\n\n\n\nuse model::yaml::binary::BinaryValue;\n\nuse model::yaml::bool::BoolValue;\n\nuse model::yaml::float::FloatValue;\n\nuse model::yaml::int::IntValue;\n\nuse model::yaml::map::MapValue;\n\nuse model::yaml::merge::MergeValue;\n\nuse model::yaml::null::NullValue;\n\nuse model::yaml::omap::OmapValue;\n\nuse model::yaml::pairs::PairsValue;\n\nuse model::yaml::seq::SeqValue;\n\nuse model::yaml::set::SetValue;\n\nuse model::yaml::str::StrValue;\n\nuse model::yaml::timestamp::TimestampValue;\n\nuse model::yaml::value::ValueValue;\n\nuse model::yaml::yaml::YamlValue;\n\n\n\nuse model::yamlette::literal::LiteralValue;\n\nuse model::yamlette::incognitum::IncognitumValue;\n", "file_path": "src/model/tagged_value.rs", "rank": 55, "score": 126370.67785667023 }, { "content": " }\n\n }\n\n }\n\n };\n\n}\n\n\n\n\n\nimpl_from_into! ( TaggedValue::Binary => BinaryValue );\n\nimpl_from_into! 
( TaggedValue::Bool => BoolValue );\n\nimpl_from_into! ( TaggedValue::Float => FloatValue );\n\nimpl_from_into! ( TaggedValue::Int => IntValue );\n\nimpl_from_into! ( TaggedValue::Map => MapValue );\n\nimpl_from_into! ( TaggedValue::Merge => MergeValue );\n\nimpl_from_into! ( TaggedValue::Null => NullValue );\n\nimpl_from_into! ( TaggedValue::Omap => OmapValue );\n\nimpl_from_into! ( TaggedValue::Pairs => PairsValue );\n\nimpl_from_into! ( TaggedValue::Seq => SeqValue );\n\nimpl_from_into! ( TaggedValue::Set => SetValue );\n\nimpl_from_into! ( TaggedValue::Str => StrValue );\n\nimpl_from_into! ( TaggedValue::Timestamp => TimestampValue );\n\nimpl_from_into! ( TaggedValue::Value => ValueValue );\n\nimpl_from_into! ( TaggedValue::Yaml => YamlValue );\n\nimpl_from_into! ( TaggedValue::Literal => LiteralValue );\n\nimpl_from_into! ( TaggedValue::Incognitum => IncognitumValue );\n", "file_path": "src/model/tagged_value.rs", "rank": 56, "score": 126368.69711240227 }, { "content": " TaggedValue::Other (_, b) => b\n\n }\n\n }\n\n}\n\n\n\n\n\n\n\nimpl Tagged for TaggedValue {\n\n fn get_tag (&self) -> Cow<'static, str> {\n\n match *self {\n\n TaggedValue::Binary (ref v) => v.get_tag (),\n\n TaggedValue::Bool (ref v) => v.get_tag (),\n\n TaggedValue::Float (ref v) => v.get_tag (),\n\n TaggedValue::Int (ref v) => v.get_tag (),\n\n TaggedValue::Map (ref v) => v.get_tag (),\n\n TaggedValue::Merge (ref v) => v.get_tag (),\n\n TaggedValue::Null (ref v) => v.get_tag (),\n\n TaggedValue::Omap (ref v) => v.get_tag (),\n\n TaggedValue::Pairs (ref v) => v.get_tag (),\n\n TaggedValue::Seq (ref v) => v.get_tag (),\n", "file_path": "src/model/tagged_value.rs", "rank": 57, "score": 126368.13400587256 }, { "content": " fn as_mut_any (&mut self) -> &mut Any {\n\n match *self {\n\n TaggedValue::Binary (ref mut v) => v.as_mut_any (),\n\n TaggedValue::Bool (ref mut v) => v.as_mut_any (),\n\n TaggedValue::Float (ref mut v) => v.as_mut_any (),\n\n TaggedValue::Int (ref mut v) => v.as_mut_any (),\n\n TaggedValue::Map (ref mut v) => v.as_mut_any (),\n\n TaggedValue::Merge (ref mut v) => v.as_mut_any (),\n\n TaggedValue::Null (ref mut v) => v.as_mut_any (),\n\n TaggedValue::Omap (ref mut v) => v.as_mut_any (),\n\n TaggedValue::Pairs (ref mut v) => v.as_mut_any (),\n\n TaggedValue::Seq (ref mut v) => v.as_mut_any (),\n\n TaggedValue::Set (ref mut v) => v.as_mut_any (),\n\n TaggedValue::Str (ref mut v) => v.as_mut_any (),\n\n TaggedValue::Timestamp (ref mut v) => v.as_mut_any (),\n\n TaggedValue::Value (ref mut v) => v.as_mut_any (),\n\n TaggedValue::Yaml (ref mut v) => v.as_mut_any (),\n\n\n\n TaggedValue::Literal (ref mut v) => v.as_mut_any (),\n\n TaggedValue::Incognitum (ref mut v) => v.as_mut_any (),\n", "file_path": "src/model/tagged_value.rs", "rank": 58, "score": 126366.81003355622 }, { "content": " TaggedValue::Set (ref v) => v.get_tag (),\n\n TaggedValue::Str (ref v) => v.get_tag (),\n\n TaggedValue::Timestamp (ref v) => v.get_tag (),\n\n TaggedValue::Value (ref v) => v.get_tag (),\n\n TaggedValue::Yaml (ref v) => v.get_tag (),\n\n\n\n TaggedValue::Literal (ref v) => v.get_tag (),\n\n TaggedValue::Incognitum (ref v) => Tagged::get_tag (v),\n\n\n\n TaggedValue::Other (ref tag, _) => tag.clone ()\n\n }\n\n }\n\n\n\n\n\n fn as_any (&self) -> &Any {\n\n match *self {\n\n TaggedValue::Binary (ref v) => v.as_any (),\n\n TaggedValue::Bool (ref v) => v.as_any (),\n\n TaggedValue::Float (ref v) => v.as_any (),\n\n TaggedValue::Int (ref v) => v.as_any (),\n", "file_path": "src/model/tagged_value.rs", "rank": 59, "score": 
126363.11652458276 }, { "content": " match self {\n\n TaggedValue::Binary (v) => Box::new (v),\n\n TaggedValue::Bool (v) => Box::new (v),\n\n TaggedValue::Float (v) => Box::new (v),\n\n TaggedValue::Int (v) => Box::new (v),\n\n TaggedValue::Map (v) => Box::new (v),\n\n TaggedValue::Merge (v) => Box::new (v),\n\n TaggedValue::Null (v) => Box::new (v),\n\n TaggedValue::Omap (v) => Box::new (v),\n\n TaggedValue::Pairs (v) => Box::new (v),\n\n TaggedValue::Seq (v) => Box::new (v),\n\n TaggedValue::Set (v) => Box::new (v),\n\n TaggedValue::Str (v) => Box::new (v),\n\n TaggedValue::Timestamp (v) => Box::new (v),\n\n TaggedValue::Value (v) => Box::new (v),\n\n TaggedValue::Yaml (v) => Box::new (v),\n\n\n\n TaggedValue::Literal (v) => Box::new (v),\n\n TaggedValue::Incognitum (v) => Box::new (v),\n\n\n", "file_path": "src/model/tagged_value.rs", "rank": 60, "score": 126361.3730916838 }, { "content": " TaggedValue::Map (ref v) => v.as_any (),\n\n TaggedValue::Merge (ref v) => v.as_any (),\n\n TaggedValue::Null (ref v) => v.as_any (),\n\n TaggedValue::Omap (ref v) => v.as_any (),\n\n TaggedValue::Pairs (ref v) => v.as_any (),\n\n TaggedValue::Seq (ref v) => v.as_any (),\n\n TaggedValue::Set (ref v) => v.as_any (),\n\n TaggedValue::Str (ref v) => v.as_any (),\n\n TaggedValue::Timestamp (ref v) => v.as_any (),\n\n TaggedValue::Value (ref v) => v.as_any (),\n\n TaggedValue::Yaml (ref v) => v.as_any (),\n\n\n\n TaggedValue::Literal (ref v) => v.as_any (),\n\n TaggedValue::Incognitum (ref v) => v.as_any (),\n\n\n\n TaggedValue::Other (_, ref value) => value\n\n }\n\n }\n\n\n\n\n", "file_path": "src/model/tagged_value.rs", "rank": 61, "score": 126357.70987441382 }, { "content": "#[test]\n\nfn example_02_01_block_tagged_no_tagged () {\n\n let should_be = \n\nr#\"!!seq\n\n- Mark McGwire\n\n- !!str \"Sammy Sosa\"\n\n- Ken Griffey\n\n\"#;\n\n\n\n let mark = \"Mark McGwire\";\n\n let sammy = \"Sammy Sosa\";\n\n let ken = \"Ken Griffey\";\n\n\n\n let orc = get_orc ();\n\n\n\n use yamlette::model::style::{ IssueTag };\n\n use yamlette::model::yaml::str::{ ForceQuotes, PreferDoubleQuotes };\n\n\n\n\n\n yamlette_compose! ( orchestra ; orc ; [[ ( # IssueTag (true) => [ # IssueTag (false) =>\n\n mark,\n", "file_path": "tests/examples/orchestra.rs", "rank": 62, "score": 119472.31212177276 }, { "content": "#[test]\n\nfn example_02_01_block_tagged () {\n\n let should_be = \n\nr#\"!!seq\n\n- !!str Mark McGwire\n\n- !!str Sammy Sosa\n\n- !!str Ken Griffey\n\n\"#;\n\n\n\n let mark = \"Mark McGwire\";\n\n let sammy = \"Sammy Sosa\";\n\n let ken = \"Ken Griffey\";\n\n\n\n let orc = get_orc ();\n\n\n\n use yamlette::model::style::{ IssueTag };\n\n\n\n yamlette_compose! ( orchestra ; orc ; [[ # IssueTag (true) => [ mark, sammy, ken ] ]] );\n\n\n\n let maybe_music = orc.listen ();\n\n\n\n let result = unsafe { String::from_utf8_unchecked (maybe_music.ok ().unwrap ()) };\n\n\n\n assert_eq! 
(should_be, result);\n\n}\n\n\n\n\n", "file_path": "tests/examples/orchestra.rs", "rank": 63, "score": 109577.89037187006 }, { "content": "pub trait Style {\n\n fn common_styles_apply (&mut self, _: &mut CommonStyles) {}\n\n\n\n fn tagged_styles_apply (&mut self, _: &mut Tagged) {}\n\n}\n\n\n\n\n\n\n\nimpl<'a, 'b> Style for &'a mut [&'b mut Style] {\n\n fn common_styles_apply (&mut self, styles: &mut CommonStyles) { for style in self.into_iter () { style.common_styles_apply (styles); } }\n\n\n\n fn tagged_styles_apply (&mut self, value: &mut Tagged) { for style in self.into_iter () { style.tagged_styles_apply (value); } }\n\n}\n\n\n\n\n\n\n\n\n\n\n\n#[derive (Copy, Clone, Debug)]\n\npub struct CommonStyles {\n", "file_path": "src/model/style.rs", "rank": 64, "score": 107715.311023988 }, { "content": "#[test]\n\nfn example_02_01_block_dir_tags_01 () {\n\n let should_be = \n", "file_path": "tests/examples/orchestra.rs", "rank": 65, "score": 106291.18349177085 }, { "content": "pub trait Unicode {\n\n fn char_max_bytes_len (self) -> u8;\n\n\n\n fn check_bom (self, bom: &[u8]) -> bool;\n\n\n\n unsafe fn to_unicode_ptr (self, ptr: *const u8, len: usize) -> (u32, u8);\n\n\n\n fn to_unicode (self, stream: &[u8]) -> (u32, u8);\n\n\n\n fn from_unicode (self, point: u32) -> [u8; 5];\n\n\n\n fn check_is_dec_num (self, stream: &[u8]) -> bool;\n\n\n\n fn check_is_flo_num (self, stream: &[u8]) -> bool;\n\n\n\n fn extract_bin_digit (self, &[u8]) -> Option<(u8, u8)>;\n\n\n\n fn extract_dec_digit (self, &[u8]) -> Option<(u8, u8)>;\n\n\n\n fn extract_oct_digit (self, &[u8]) -> Option<(u8, u8)>;\n", "file_path": "src/txt/encoding/unicode.rs", "rank": 66, "score": 105189.39335935068 }, { "content": "pub trait Schema: Send + Sync {\n\n fn get_common_styles (&self) -> CommonStyles;\n\n\n\n fn get_yaml_version (&self) -> (u8, u8);\n\n\n\n fn get_tag_handles (&self) -> &[(Cow<'static, str>, Cow<'static, str>)];\n\n\n\n fn look_up_model<'a, 'b> (&'a self, &'b str) -> Option<&'a Model>;\n\n\n\n fn try_decodable_models (&self, &[u8]) -> Option<TaggedValue>;\n\n\n\n fn try_decodable_models_11 (&self, &[u8]) -> Option<TaggedValue>;\n\n\n\n fn look_up_model_callback (&self, &mut (FnMut (&Model) -> bool)) -> Option<&Model>;\n\n\n\n fn get_metamodel (&self) -> Option<&Model>;\n\n\n\n fn get_model_literal (&self) -> Literal;\n\n\n\n fn get_model_null (&self) -> Null;\n\n\n\n fn get_tag_model_map (&self) -> Cow<'static, str>;\n\n\n\n fn get_tag_model_seq (&self) -> Cow<'static, str>;\n\n}\n", "file_path": "src/model/schema/mod.rs", "rank": 67, "score": 91496.19861208822 }, { "content": "extern crate skimmer;\n\n\n\n// use self::skimmer::symbol::CopySymbol;\n\n// use txt::CharSet;\n\n\n\nuse std::ptr;\n\n\n\n\n\n\n\n#[derive (Debug, Clone)]\n\npub enum EncodedString {\n\n Static (&'static [u8]),\n\n String (Vec<u8>)\n\n}\n\n\n\n\n\nimpl EncodedString {\n\n pub fn len (&self) -> usize { match *self {\n\n EncodedString::Static (s) => s.len (),\n\n EncodedString::String (ref v) => v.len ()\n", "file_path": "src/model/renderer.rs", "rank": 68, "score": 88291.18953276148 }, { "content": " } }\n\n\n\n pub fn as_ptr (&self) -> *const u8 { match *self {\n\n EncodedString::Static (s) => s.as_ptr (),\n\n EncodedString::String (ref v) => v.as_ptr ()\n\n } }\n\n}\n\n\n\n\n\nimpl From<&'static [u8]> for EncodedString {\n\n fn from (val: &'static [u8]) -> EncodedString { EncodedString::Static (val) }\n\n}\n\n\n\n\n\nimpl From<Vec<u8>> for EncodedString {\n\n fn from (val: Vec<u8>) -> EncodedString { EncodedString::String (val) 
}\n\n}\n\n\n\n\n\n\n", "file_path": "src/model/renderer.rs", "rank": 69, "score": 88266.3928758338 }, { "content": " Node::DoubleQuotedString (_) => true,\n\n _ => false\n\n }\n\n }\n\n\n\n pub fn is_flow_dict_opening (&self) -> bool {\n\n match *self {\n\n Node::CurlyBrackets |\n\n Node::CurlyBracketOpen => true,\n\n _ => false\n\n }\n\n }\n\n}\n\n\n\n\n\n\n\n#[derive(Copy, Clone)]\n\npub struct Renderer;\n\n\n\n\n", "file_path": "src/model/renderer.rs", "rank": 70, "score": 88265.77717339057 }, { "content": "extern crate skimmer;\n\n\n\n// use self::skimmer::symbol::CopySymbol;\n\n\n\n\n\nuse std::mem;\n\n\n\n\n\nuse model::renderer::{ Renderer, Node };\n\n\n\n\n\n\n\n#[derive (Debug)]\n\npub enum Rope {\n\n Empty,\n\n Node ([Node; 1]),\n\n Many (Vec<Node>)\n\n}\n\n\n\n\n", "file_path": "src/model/rope.rs", "rank": 71, "score": 88264.52941578567 }, { "content": "impl Renderer {\n\n pub fn render_into_vec (&self, vec: &mut Vec<u8>, node: Node) {\n\n let node_len = self.node_len (&node);\n\n let vec_len = vec.len ();\n\n vec.reserve (node_len);\n\n\n\n unsafe {\n\n vec.set_len (vec_len + node_len);\n\n let ptr = vec.as_mut_ptr ().offset (vec_len as isize);\n\n self.render_onto_ptr (ptr, &node);\n\n }\n\n }\n\n\n\n\n\n pub fn node_len (&self, node: &Node) -> usize {\n\n match *node {\n\n Node::Empty => 0,\n\n\n\n Node::Indent (size) => size,\n\n Node::NewlineIndent (size) => 1 + size,\n", "file_path": "src/model/renderer.rs", "rank": 72, "score": 88261.31251038105 }, { "content": " Node::QuestionNewlineIndent (size) => 2 + size,\n\n Node::ColonNewlineIndent (size) => 2 + size,\n\n\n\n Node::TripleHyphenNewline => 3 + 1,\n\n Node::TripleDotNewline => 3 + 1\n\n }\n\n }\n\n\n\n\n\n pub unsafe fn render_onto_ptr (&self, mut dst_ptr: *mut u8, node: &Node) -> *mut u8 {\n\n match *node {\n\n Node::Empty => (),\n\n\n\n // Node::Indent (size) => { dst_ptr = self.space.copy_to_ptr_times (dst_ptr, size); }\n\n Node::Indent (size) => { dst_ptr = copy_to_ptr_times (b' ', dst_ptr, size); }\n\n\n\n Node::NewlineIndent (size) => {\n\n // dst_ptr = self.newline.copy_to_ptr (dst_ptr);\n\n dst_ptr = copy_to_ptr (b'\\n', dst_ptr);\n\n // dst_ptr = self.space.copy_to_ptr_times (dst_ptr, size);\n", "file_path": "src/model/renderer.rs", "rank": 73, "score": 88258.08531090303 }, { "content": "#[derive (Debug)]\n\npub enum Node {\n\n Empty,\n\n\n\n StringSpecificTag (EncodedString),\n\n\n\n String (EncodedString),\n\n SingleQuotedString (EncodedString),\n\n DoubleQuotedString (EncodedString),\n\n\n\n StringConcat (EncodedString, EncodedString),\n\n\n\n StringNewline (EncodedString),\n\n\n\n AmpersandString (EncodedString),\n\n AsteriskString (EncodedString),\n\n\n\n Indent (usize),\n\n NewlineIndent (usize),\n\n CommaNewlineIndent (usize),\n", "file_path": "src/model/renderer.rs", "rank": 74, "score": 88252.38269068686 }, { "content": "\n\n match *self {\n\n Rope::Many (ref mut vec) => {\n\n match *rope {\n\n Rope::Empty => (),\n\n Rope::Node (ref mut node) => {\n\n let node = mem::replace (&mut node[0], Node::Empty);\n\n vec.push (node);\n\n }\n\n Rope::Many (ref mut other) => vec.append (other)\n\n };\n\n\n\n mem::replace (rope, Rope::Empty);\n\n }\n\n _ => unreachable! 
()\n\n }\n\n }\n\n\n\n\n\n pub fn unrope<'a, 'b, 'c> (&'a self, ptr: &'b mut *const [Node], renderer: &'c Renderer, index: usize, threshold: usize) -> (usize, usize, bool) {\n", "file_path": "src/model/rope.rs", "rank": 75, "score": 88250.91127751637 }, { "content": "\n\n\n\n#[inline (always)]\n\nunsafe fn copy_to_ptr (byte: u8, dst: *mut u8) -> *mut u8 {\n\n *dst = byte;\n\n dst.offset (1)\n\n}\n\n\n\n\n\n\n\n#[inline (always)]\n\nunsafe fn copy_to_ptr_times (byte: u8, mut dst: *mut u8, times: usize) -> *mut u8 {\n\n for _ in 0 .. times {\n\n *dst = byte;\n\n dst = dst.offset (1);\n\n }\n\n dst\n\n}\n", "file_path": "src/model/renderer.rs", "rank": 76, "score": 88249.90296635499 }, { "content": " }\n\n }\n\n\n\n\n\n pub fn render (self, renderer: &Renderer) -> Vec<u8> {\n\n let mut vec: Vec<u8> = Vec::with_capacity (self.bytes_len (renderer));\n\n\n\n match self {\n\n Rope::Empty => (),\n\n Rope::Node (mut node) => renderer.render_into_vec (&mut vec, mem::replace (&mut node[0], Node::Empty)),\n\n Rope::Many (nodes) => for node in nodes { renderer.render_into_vec (&mut vec, node); }\n\n }\n\n\n\n vec\n\n }\n\n\n\n\n\n pub fn push (&mut self, node: Node) {\n\n let is_many = match *self {\n\n Rope::Many (_) => true,\n", "file_path": "src/model/rope.rs", "rank": 77, "score": 88248.85552585083 }, { "content": " pub fn indent (&mut self, len: usize) {\n\n match *self {\n\n Node::Indent (ref mut size) => { *size += len; }\n\n Node::NewlineIndent (ref mut size) => { *size += len; }\n\n Node::CommaNewlineIndent (ref mut size) => { *size += len; }\n\n Node::IndentHyphenSpace (ref mut size) => { *size += len; }\n\n Node::NewlineIndentHyphenSpace (ref mut size) => { *size += len; }\n\n Node::IndentQuestionSpace (ref mut size) => { *size += len; }\n\n Node::NewlineIndentQuestionSpace (ref mut size) => { *size += len; }\n\n Node::QuestionNewlineIndent (ref mut size) => { *size += len; }\n\n Node::ColonNewlineIndent (ref mut size) => { *size += len; }\n\n _ => ()\n\n }\n\n }\n\n\n\n pub fn is_newline (&self) -> bool {\n\n match *self {\n\n Node::Newline |\n\n Node::ColonNewline |\n\n Node::ColonNewlineIndent (_) |\n", "file_path": "src/model/renderer.rs", "rank": 78, "score": 88247.77779465093 }, { "content": " Node::QuestionNewline |\n\n Node::QuestionNewlineIndent (_) |\n\n Node::TripleDotNewline |\n\n Node::StringNewline (_) |\n\n Node::TripleHyphenNewline |\n\n Node::NewlineIndent (_) |\n\n Node::CommaNewlineIndent (_) |\n\n Node::NewlineIndentHyphenSpace (_) => true,\n\n Node::NewlineIndentQuestionSpace (_) => true,\n\n _ => false\n\n }\n\n }\n\n\n\n pub fn is_flow_opening (&self) -> bool {\n\n match *self {\n\n Node::CurlyBrackets |\n\n Node::CurlyBracketOpen |\n\n Node::SquareBrackets |\n\n Node::SquareBracketOpen |\n\n Node::SingleQuotedString (_) |\n", "file_path": "src/model/renderer.rs", "rank": 79, "score": 88243.88897278508 }, { "content": " Node::Question => { dst_ptr = copy_to_ptr (b'?', dst_ptr); }\n\n // Node::Comma => { dst_ptr = self.comma.copy_to_ptr (dst_ptr); }\n\n Node::Comma => { dst_ptr = copy_to_ptr (b',', dst_ptr); }\n\n // Node::Colon => { dst_ptr = self.colon.copy_to_ptr (dst_ptr); }\n\n Node::Colon => { dst_ptr = copy_to_ptr (b':', dst_ptr); }\n\n // Node::Space => { dst_ptr = self.space.copy_to_ptr (dst_ptr); }\n\n Node::Space => { dst_ptr = copy_to_ptr (b' ', dst_ptr); }\n\n // Node::Newline => { dst_ptr = self.newline.copy_to_ptr (dst_ptr); }\n\n Node::Newline => { dst_ptr = copy_to_ptr (b'\\n', dst_ptr); }\n\n\n\n\n\n Node::TripleHyphenNewline => {\n\n // dst_ptr = 
self.hyphen.copy_to_ptr_times (dst_ptr, 3);\n\n dst_ptr = copy_to_ptr_times (b'-', dst_ptr, 3);\n\n // dst_ptr = self.newline.copy_to_ptr (dst_ptr);\n\n dst_ptr = copy_to_ptr (b'\\n', dst_ptr);\n\n }\n\n\n\n Node::TripleDotNewline => {\n\n // dst_ptr = self.dot.copy_to_ptr_times (dst_ptr, 3);\n", "file_path": "src/model/renderer.rs", "rank": 80, "score": 88243.29347562049 }, { "content": " dst_ptr = copy_to_ptr_times (b'.', dst_ptr, 3);\n\n // dst_ptr = self.newline.copy_to_ptr (dst_ptr);\n\n dst_ptr = copy_to_ptr (b'\\n', dst_ptr);\n\n }\n\n\n\n\n\n Node::StringSpecificTag (ref vec) => {\n\n // dst_ptr = self.exclamation.copy_to_ptr (dst_ptr);\n\n dst_ptr = copy_to_ptr (b'!', dst_ptr);\n\n // dst_ptr = self.lt.copy_to_ptr (dst_ptr);\n\n dst_ptr = copy_to_ptr (b'<', dst_ptr);\n\n let len = vec.len ();\n\n ptr::copy_nonoverlapping (vec.as_ptr (), dst_ptr, len);\n\n dst_ptr = dst_ptr.offset (len as isize);\n\n // dst_ptr = self.gt.copy_to_ptr (dst_ptr);\n\n dst_ptr = copy_to_ptr (b'>', dst_ptr);\n\n }\n\n\n\n\n\n Node::StringConcat (ref former, ref latter) => {\n", "file_path": "src/model/renderer.rs", "rank": 81, "score": 88243.28566534293 }, { "content": " // Node::SquareBracketClose => { dst_ptr = self.square_bracket_close.copy_to_ptr (dst_ptr); }\n\n Node::SquareBracketClose => { dst_ptr = copy_to_ptr (b']', dst_ptr); }\n\n\n\n\n\n Node::CurlyBrackets => {\n\n // dst_ptr = self.curly_bracket_open.copy_to_ptr (dst_ptr);\n\n dst_ptr = copy_to_ptr (b'{', dst_ptr);\n\n // dst_ptr = self.curly_bracket_close.copy_to_ptr (dst_ptr);\n\n dst_ptr = copy_to_ptr (b'}', dst_ptr);\n\n }\n\n // Node::CurlyBracketOpen => { dst_ptr = self.curly_bracket_open.copy_to_ptr (dst_ptr); }\n\n Node::CurlyBracketOpen => { dst_ptr = copy_to_ptr (b'{', dst_ptr); }\n\n // Node::CurlyBracketClose => { dst_ptr = self.curly_bracket_close.copy_to_ptr (dst_ptr); }\n\n Node::CurlyBracketClose => { dst_ptr = copy_to_ptr (b'}', dst_ptr); }\n\n\n\n // Node::Hyphen => { dst_ptr = self.hyphen.copy_to_ptr (dst_ptr); }\n\n Node::Hyphen => { dst_ptr = copy_to_ptr (b'-', dst_ptr); }\n\n // Node::Dot => { dst_ptr = self.dot.copy_to_ptr (dst_ptr); }\n\n Node::Dot => { dst_ptr = copy_to_ptr (b'.', dst_ptr); }\n\n // Node::Question => { dst_ptr = self.question.copy_to_ptr (dst_ptr); }\n", "file_path": "src/model/renderer.rs", "rank": 82, "score": 88243.04239650493 }, { "content": " }\n\n\n\n\n\n pub fn indent (&mut self, len: usize) {\n\n match *self {\n\n Rope::Empty => (),\n\n Rope::Node (ref mut node) => node[0].indent (len),\n\n Rope::Many (ref mut nodes) => for node in nodes { node.indent (len); }\n\n }\n\n }\n\n\n\n\n\n pub fn knit (&mut self, rope: &mut Rope) {\n\n let is_empty = match *self {\n\n Rope::Empty => true,\n\n _ => false\n\n };\n\n\n\n if is_empty {\n\n let ro = mem::replace (rope, Rope::Empty);\n", "file_path": "src/model/rope.rs", "rank": 83, "score": 88242.89293657178 }, { "content": "\n\nimpl Rope {\n\n pub fn with_capacity (size: usize) -> Rope { Rope::Many (Vec::with_capacity (size)) }\n\n\n\n pub fn clear (&mut self) {\n\n *self = match *self {\n\n Rope::Empty => Rope::Empty,\n\n Rope::Node (_) => Rope::Empty,\n\n Rope::Many (ref mut vec) => {\n\n let mut v = mem::replace (vec, Vec::new ());\n\n v.clear ();\n\n Rope::Many (v)\n\n }\n\n };\n\n }\n\n\n\n pub fn len (&self) -> usize {\n\n match *self {\n\n Rope::Empty => 0,\n\n Rope::Node (_) => 1,\n", "file_path": "src/model/rope.rs", "rank": 84, "score": 88242.58188857377 }, { "content": " Rope::Empty => false,\n\n Rope::Node (ref nodes) => 
nodes[0].is_flow_dict_opening (),\n\n Rope::Many (ref nodes) => nodes.len () > 0 && nodes[0].is_flow_dict_opening ()\n\n }\n\n }\n\n\n\n\n\n pub fn last_line_bytes_len (&self, renderer: &Renderer) -> (usize, bool) {\n\n match *self {\n\n Rope::Empty => (0, false),\n\n Rope::Node (ref nodes) => self._line_bytes_len (renderer, nodes.iter ()),\n\n Rope::Many (ref nodes) => self._line_bytes_len (renderer, nodes.iter ().rev ())\n\n }\n\n }\n\n\n\n\n\n pub fn first_line_bytes_len (&self, renderer: &Renderer) -> (usize, bool) {\n\n match *self {\n\n Rope::Empty => (0, false),\n\n Rope::Node (ref nodes) => self._line_bytes_len (renderer, nodes.iter ()),\n", "file_path": "src/model/rope.rs", "rank": 85, "score": 88242.21306135693 }, { "content": " dst_ptr = copy_to_ptr (b'\\n', dst_ptr);\n\n // dst_ptr = self.space.copy_to_ptr_times (dst_ptr, size);\n\n dst_ptr = copy_to_ptr_times (b' ', dst_ptr, size);\n\n }\n\n\n\n Node::HyphenSpace => {\n\n // dst_ptr = self.hyphen.copy_to_ptr (dst_ptr);\n\n dst_ptr = copy_to_ptr (b'-', dst_ptr);\n\n // dst_ptr = self.space.copy_to_ptr (dst_ptr);\n\n dst_ptr = copy_to_ptr (b' ', dst_ptr);\n\n }\n\n\n\n Node::SquareBrackets => {\n\n // dst_ptr = self.square_bracket_open.copy_to_ptr (dst_ptr);\n\n dst_ptr = copy_to_ptr (b'[', dst_ptr);\n\n // dst_ptr = self.square_bracket_close.copy_to_ptr (dst_ptr);\n\n dst_ptr = copy_to_ptr (b']', dst_ptr);\n\n }\n\n // Node::SquareBracketOpen => { dst_ptr = self.square_bracket_open.copy_to_ptr (dst_ptr); }\n\n Node::SquareBracketOpen => { dst_ptr = copy_to_ptr (b'[', dst_ptr); }\n", "file_path": "src/model/renderer.rs", "rank": 86, "score": 88241.9523916021 }, { "content": " }\n\n\n\n Node::QuestionSpace => {\n\n // dst_ptr = self.question.copy_to_ptr (dst_ptr);\n\n dst_ptr = copy_to_ptr (b'?', dst_ptr);\n\n // dst_ptr = self.space.copy_to_ptr (dst_ptr);\n\n dst_ptr = copy_to_ptr (b' ', dst_ptr);\n\n }\n\n\n\n Node::ColonNewline => {\n\n // dst_ptr = self.colon.copy_to_ptr (dst_ptr);\n\n dst_ptr = copy_to_ptr (b':', dst_ptr);\n\n // dst_ptr = self.newline.copy_to_ptr (dst_ptr);\n\n dst_ptr = copy_to_ptr (b'\\n', dst_ptr);\n\n }\n\n\n\n Node::ColonNewlineIndent (size) => {\n\n // dst_ptr = self.colon.copy_to_ptr (dst_ptr);\n\n dst_ptr = copy_to_ptr (b':', dst_ptr);\n\n // dst_ptr = self.newline.copy_to_ptr (dst_ptr);\n", "file_path": "src/model/renderer.rs", "rank": 87, "score": 88241.92836007859 }, { "content": " dst_ptr = copy_to_ptr (b' ', dst_ptr);\n\n }\n\n\n\n Node::CommaNewlineIndent (size) => {\n\n // dst_ptr = self.comma.copy_to_ptr (dst_ptr);\n\n dst_ptr = copy_to_ptr (b',', dst_ptr);\n\n // dst_ptr = self.newline.copy_to_ptr (dst_ptr);\n\n dst_ptr = copy_to_ptr (b'\\n', dst_ptr);\n\n // dst_ptr = self.space.copy_to_ptr_times (dst_ptr, size);\n\n dst_ptr = copy_to_ptr_times (b' ', dst_ptr, size);\n\n }\n\n\n\n Node::CommaSpace => {\n\n // dst_ptr = self.comma.copy_to_ptr (dst_ptr);\n\n dst_ptr = copy_to_ptr (b',', dst_ptr);\n\n // dst_ptr = self.space.copy_to_ptr (dst_ptr);\n\n dst_ptr = copy_to_ptr (b' ', dst_ptr);\n\n }\n\n\n\n Node::ColonSpace => {\n", "file_path": "src/model/renderer.rs", "rank": 88, "score": 88241.83364944172 }, { "content": " Node::TripleHyphenNewline => { len += renderer.node_len (&Node::Hyphen) * 3; nl = true; break; }\n\n Node::TripleDotNewline => { len += renderer.node_len (&Node::Dot) * 3; nl = true; break; }\n\n\n\n ref node @ _ => len += renderer.node_len (node)\n\n }\n\n }\n\n\n\n (len, nl)\n\n }\n\n\n\n\n\n pub fn bytes_len (&self, renderer: &Renderer) -> usize {\n\n match *self {\n\n 
Rope::Empty => 0,\n\n Rope::Node (ref node) => renderer.node_len (&node[0]),\n\n Rope::Many (ref nodes) => {\n\n let mut size = 0;\n\n for node in nodes { size += renderer.node_len (node); }\n\n size\n\n }\n", "file_path": "src/model/rope.rs", "rank": 89, "score": 88241.36244063824 }, { "content": " // dst_ptr = self.colon.copy_to_ptr (dst_ptr);\n\n dst_ptr = copy_to_ptr (b':', dst_ptr);\n\n // dst_ptr = self.space.copy_to_ptr (dst_ptr);\n\n dst_ptr = copy_to_ptr (b' ', dst_ptr);\n\n }\n\n\n\n Node::QuestionNewline => {\n\n // dst_ptr = self.question.copy_to_ptr (dst_ptr);\n\n dst_ptr = copy_to_ptr (b'?', dst_ptr);\n\n // dst_ptr = self.newline.copy_to_ptr (dst_ptr);\n\n dst_ptr = copy_to_ptr (b'\\n', dst_ptr);\n\n }\n\n\n\n Node::QuestionNewlineIndent (size) => {\n\n // dst_ptr = self.question.copy_to_ptr (dst_ptr);\n\n dst_ptr = copy_to_ptr (b'?', dst_ptr);\n\n // dst_ptr = self.newline.copy_to_ptr (dst_ptr);\n\n dst_ptr = copy_to_ptr (b'\\n', dst_ptr);\n\n // dst_ptr = self.space.copy_to_ptr_times (dst_ptr, size);\n\n dst_ptr = copy_to_ptr_times (b' ', dst_ptr, size);\n", "file_path": "src/model/renderer.rs", "rank": 90, "score": 88240.92387214472 }, { "content": "\n\n Node::AmpersandString (ref s) => {\n\n // dst_ptr = self.ampersand.copy_to_ptr (dst_ptr);\n\n dst_ptr = copy_to_ptr (b'&', dst_ptr);\n\n let len = s.len ();\n\n ptr::copy_nonoverlapping (s.as_ptr (), dst_ptr, len);\n\n dst_ptr = dst_ptr.offset (len as isize);\n\n }\n\n Node::AsteriskString (ref s) => {\n\n // dst_ptr = self.asterisk.copy_to_ptr (dst_ptr);\n\n dst_ptr = copy_to_ptr (b'*', dst_ptr);\n\n let len = s.len ();\n\n ptr::copy_nonoverlapping (s.as_ptr (), dst_ptr, len);\n\n dst_ptr = dst_ptr.offset (len as isize);\n\n }\n\n };\n\n\n\n dst_ptr\n\n }\n\n}\n", "file_path": "src/model/renderer.rs", "rank": 91, "score": 88240.91400521799 }, { "content": " dst_ptr = copy_to_ptr (b' ', dst_ptr);\n\n }\n\n\n\n Node::IndentQuestionSpace (size) => {\n\n // dst_ptr = self.space.copy_to_ptr_times (dst_ptr, size);\n\n dst_ptr = copy_to_ptr_times (b' ', dst_ptr, size);\n\n // dst_ptr = self.question.copy_to_ptr (dst_ptr);\n\n dst_ptr = copy_to_ptr (b'?', dst_ptr);\n\n // dst_ptr = self.space.copy_to_ptr (dst_ptr);\n\n dst_ptr = copy_to_ptr (b' ', dst_ptr);\n\n }\n\n\n\n Node::NewlineIndentQuestionSpace (size) => {\n\n // dst_ptr = self.newline.copy_to_ptr (dst_ptr);\n\n dst_ptr = copy_to_ptr (b'\\n', dst_ptr);\n\n // dst_ptr = self.space.copy_to_ptr_times (dst_ptr, size);\n\n dst_ptr = copy_to_ptr_times (b' ', dst_ptr, size);\n\n // dst_ptr = self.question.copy_to_ptr (dst_ptr);\n\n dst_ptr = copy_to_ptr (b'?', dst_ptr);\n\n // dst_ptr = self.space.copy_to_ptr (dst_ptr);\n", "file_path": "src/model/renderer.rs", "rank": 92, "score": 88240.74124466577 }, { "content": " dst_ptr = copy_to_ptr (b'\\n', dst_ptr);\n\n }\n\n Node::SingleQuotedString (ref s) => {\n\n // dst_ptr = self.apostrophe.copy_to_ptr (dst_ptr);\n\n dst_ptr = copy_to_ptr (b'\\'', dst_ptr);\n\n let len = s.len ();\n\n ptr::copy_nonoverlapping (s.as_ptr (), dst_ptr, len);\n\n dst_ptr = dst_ptr.offset (len as isize);\n\n // dst_ptr = self.apostrophe.copy_to_ptr (dst_ptr);\n\n dst_ptr = copy_to_ptr (b'\\'', dst_ptr);\n\n }\n\n Node::DoubleQuotedString (ref s) => {\n\n // dst_ptr = self.quotation.copy_to_ptr (dst_ptr);\n\n dst_ptr = copy_to_ptr (b'\"', dst_ptr);\n\n let len = s.len ();\n\n ptr::copy_nonoverlapping (s.as_ptr (), dst_ptr, len);\n\n dst_ptr = dst_ptr.offset (len as isize);\n\n // dst_ptr = self.quotation.copy_to_ptr (dst_ptr);\n\n dst_ptr = copy_to_ptr 
(b'\"', dst_ptr);\n\n }\n", "file_path": "src/model/renderer.rs", "rank": 93, "score": 88240.7140937695 }, { "content": " dst_ptr = copy_to_ptr_times (b' ', dst_ptr, size);\n\n }\n\n\n\n Node::IndentHyphenSpace (size) => {\n\n // dst_ptr = self.space.copy_to_ptr_times (dst_ptr, size);\n\n dst_ptr = copy_to_ptr_times (b' ', dst_ptr, size);\n\n // dst_ptr = self.hyphen.copy_to_ptr (dst_ptr);\n\n dst_ptr = copy_to_ptr (b'-', dst_ptr);\n\n // dst_ptr = self.space.copy_to_ptr (dst_ptr);\n\n dst_ptr = copy_to_ptr (b' ', dst_ptr);\n\n }\n\n\n\n Node::NewlineIndentHyphenSpace (size) => {\n\n // dst_ptr = self.newline.copy_to_ptr (dst_ptr);\n\n dst_ptr = copy_to_ptr (b'\\n', dst_ptr);\n\n // dst_ptr = self.space.copy_to_ptr_times (dst_ptr, size);\n\n dst_ptr = copy_to_ptr_times (b' ', dst_ptr, size);\n\n // dst_ptr = self.hyphen.copy_to_ptr (dst_ptr);\n\n dst_ptr = copy_to_ptr (b'-', dst_ptr);\n\n // dst_ptr = self.space.copy_to_ptr (dst_ptr);\n", "file_path": "src/model/renderer.rs", "rank": 94, "score": 88240.69043845465 }, { "content": " let len = former.len ();\n\n ptr::copy_nonoverlapping (former.as_ptr (), dst_ptr, len);\n\n dst_ptr = dst_ptr.offset (len as isize);\n\n\n\n let len = latter.len ();\n\n ptr::copy_nonoverlapping (latter.as_ptr (), dst_ptr, len);\n\n dst_ptr = dst_ptr.offset (len as isize);\n\n }\n\n\n\n\n\n Node::String (ref s) => {\n\n let len = s.len ();\n\n ptr::copy_nonoverlapping (s.as_ptr (), dst_ptr, len);\n\n dst_ptr = dst_ptr.offset (len as isize);\n\n }\n\n Node::StringNewline (ref s) => {\n\n let len = s.len ();\n\n ptr::copy_nonoverlapping (s.as_ptr (), dst_ptr, len);\n\n dst_ptr = dst_ptr.offset (len as isize);\n\n // dst_ptr = self.newline.copy_to_ptr (dst_ptr);\n", "file_path": "src/model/renderer.rs", "rank": 95, "score": 88240.07285921728 }, { "content": " match *self {\n\n Rope::Empty => {\n\n *ptr = &[] as *const [Node];\n\n (0, 0, true)\n\n }\n\n Rope::Node (ref node) => {\n\n if index == 0 {\n\n *ptr = node as *const [Node];\n\n (renderer.node_len (&node[0]), 0, true)\n\n } else {\n\n *ptr = &[] as *const [Node];\n\n (0, 0, true)\n\n }\n\n }\n\n Rope::Many (ref nodes) => {\n\n if index >= nodes.len () {\n\n *ptr = &[] as *const [Node];\n\n (0, 0, true)\n\n } else {\n\n let len = nodes.len ();\n", "file_path": "src/model/rope.rs", "rank": 96, "score": 88239.94994724458 }, { "content": " Rope::Many (ref nodes) => nodes.len ()\n\n }\n\n }\n\n\n\n\n\n pub fn is_multiline (&self) -> bool {\n\n match *self {\n\n Rope::Empty => (),\n\n Rope::Node (_) => (),\n\n Rope::Many (ref nodes) => {\n\n let len = nodes.len ();\n\n if len <= 1 { return false; }\n\n\n\n let mut passed_nls = false;\n\n\n\n for node in nodes.iter ().rev () {\n\n if node.is_newline () {\n\n if passed_nls { return true }\n\n } else if !passed_nls {\n\n passed_nls = true;\n", "file_path": "src/model/rope.rs", "rank": 97, "score": 88239.1878220128 }, { "content": " Rope::Many (ref nodes) => self._line_bytes_len (renderer, nodes.iter ())\n\n }\n\n }\n\n\n\n fn _line_bytes_len<'a, 'b, 'c, Iter: Iterator<Item=&'a Node>> (&'b self, renderer: &'c Renderer, nodes: Iter) -> (usize, bool) {\n\n let mut len = 0;\n\n let mut nl = false;\n\n\n\n for node in nodes {\n\n match *node {\n\n Node::StringNewline (ref s) => { len += s.len (); nl = true; break; }\n\n Node::Newline => { nl = true; break; }\n\n Node::NewlineIndent (_) => { nl = true; break; }\n\n Node::NewlineIndentHyphenSpace (_) => { nl = true; break; }\n\n Node::NewlineIndentQuestionSpace (_) => { nl = true; break; }\n\n Node::CommaNewlineIndent (_) 
=> { len += renderer.node_len (&Node::Comma); nl = true; break; }\n\n Node::ColonNewlineIndent (_) |\n\n Node::ColonNewline => { len += renderer.node_len (&Node::Colon); nl = true; break; }\n\n Node::QuestionNewlineIndent (_) |\n\n Node::QuestionNewline => { len += renderer.node_len (&Node::Question); nl = true; break; }\n", "file_path": "src/model/rope.rs", "rank": 98, "score": 88237.24714931138 }, { "content": " CurlyBrackets,\n\n CurlyBracketOpen,\n\n CurlyBracketClose,\n\n\n\n QuestionSpace,\n\n CommaSpace,\n\n ColonSpace,\n\n HyphenSpace,\n\n Space,\n\n\n\n ColonNewline,\n\n ColonNewlineIndent (usize),\n\n\n\n TripleHyphenNewline,\n\n TripleDotNewline\n\n}\n\n\n\n\n\n\n\nimpl Node {\n", "file_path": "src/model/renderer.rs", "rank": 99, "score": 88235.55530650514 } ]
Rust
tss-esapi/tests/integration_tests/context_tests/tpm_commands/object_commands_tests.rs
Superhepper/rust-tss-esapi
a6ae84793e73b10dd672b613ada820566c84fe85
mod test_create { use crate::common::{create_ctx_with_session, decryption_key_pub}; use std::convert::TryFrom; use tss_esapi::{interface_types::resource_handles::Hierarchy, structures::Auth}; #[test] fn test_create() { let mut context = create_ctx_with_session(); let random_digest = context.get_random(16).unwrap(); let key_auth = Auth::try_from(random_digest.value().to_vec()).unwrap(); let prim_key_handle = context .create_primary( Hierarchy::Owner, &decryption_key_pub(), Some(&key_auth), None, None, None, ) .unwrap() .key_handle; let _ = context .create( prim_key_handle, &decryption_key_pub(), Some(&key_auth), None, None, None, ) .unwrap(); } } mod test_load { use crate::common::{create_ctx_with_session, decryption_key_pub, signing_key_pub}; use std::convert::TryFrom; use tss_esapi::{interface_types::resource_handles::Hierarchy, structures::Auth}; #[test] fn test_load() { let mut context = create_ctx_with_session(); let random_digest = context.get_random(16).unwrap(); let key_auth = Auth::try_from(random_digest.value().to_vec()).unwrap(); let prim_key_handle = context .create_primary( Hierarchy::Owner, &decryption_key_pub(), Some(&key_auth), None, None, None, ) .unwrap() .key_handle; let result = context .create( prim_key_handle, &signing_key_pub(), Some(&key_auth), None, None, None, ) .unwrap(); let _ = context .load(prim_key_handle, result.out_private, &result.out_public) .unwrap(); } } mod test_load_external_public { use crate::common::{create_ctx_with_session, KEY}; use std::convert::TryFrom; use tss_esapi::{ attributes::ObjectAttributesBuilder, interface_types::{ algorithm::{HashingAlgorithm, PublicAlgorithm, RsaSchemeAlgorithm}, key_bits::RsaKeyBits, resource_handles::Hierarchy, }, structures::{Public, PublicBuilder, PublicKeyRsa, PublicRsaParametersBuilder, RsaScheme}, }; pub fn get_ext_rsa_pub() -> Public { let object_attributes = ObjectAttributesBuilder::new() .with_user_with_auth(true) .with_decrypt(false) .with_sign_encrypt(true) .with_restricted(false) .build() .expect("Failed to build object attributes"); PublicBuilder::new() .with_public_algorithm(PublicAlgorithm::Rsa) .with_name_hashing_algorithm(HashingAlgorithm::Sha256) .with_object_attributes(object_attributes) .with_rsa_parameters( PublicRsaParametersBuilder::new_unrestricted_signing_key( RsaScheme::create(RsaSchemeAlgorithm::RsaSsa, Some(HashingAlgorithm::Sha256)) .expect("Failed to create rsa scheme"), RsaKeyBits::Rsa2048, Default::default(), ) .build() .expect("Failed to create rsa parameters for public structure"), ) .with_rsa_unique_identifier( &PublicKeyRsa::try_from(&KEY[..256]) .expect("Failed to create Public RSA key from buffer"), ) .build() .expect("Failed to build Public structure") } #[test] fn test_load_external_public() { let mut context = create_ctx_with_session(); let pub_key = get_ext_rsa_pub(); context .load_external_public(&pub_key, Hierarchy::Owner) .unwrap(); } } mod test_load_external { use crate::common::create_ctx_with_session; use std::convert::TryFrom; use tss_esapi::{ attributes::ObjectAttributesBuilder, interface_types::{ algorithm::{HashingAlgorithm, PublicAlgorithm, RsaSchemeAlgorithm}, key_bits::RsaKeyBits, resource_handles::Hierarchy, }, structures::{Public, PublicBuilder, PublicKeyRsa, PublicRsaParametersBuilder, RsaScheme}, }; use tss_esapi_sys::{ TPM2B_PRIVATE_KEY_RSA, TPM2B_SENSITIVE, TPMT_SENSITIVE, TPMU_SENSITIVE_COMPOSITE, }; const KEY: [u8; 256] = [ 0xc9, 0x75, 0xf8, 0xb2, 0x30, 0xf4, 0x24, 0x6e, 0x95, 0xb1, 0x3c, 0x55, 0x0f, 0xe4, 0x48, 0xe9, 0xac, 0x06, 0x1f, 0xa8, 0xbe, 0xa4, 
0xd7, 0x1c, 0xa5, 0x5e, 0x2a, 0xbf, 0x60, 0xc2, 0x98, 0x63, 0x6c, 0xb4, 0xe2, 0x61, 0x54, 0x31, 0xc3, 0x3e, 0x9d, 0x1a, 0x83, 0x84, 0x18, 0x51, 0xe9, 0x8c, 0x24, 0xcf, 0xac, 0xc6, 0x0d, 0x26, 0x2c, 0x9f, 0x2b, 0xd5, 0x91, 0x98, 0x89, 0xe3, 0x68, 0x97, 0x36, 0x02, 0xec, 0x16, 0x37, 0x24, 0x08, 0xb4, 0x77, 0xd1, 0x56, 0x10, 0x3e, 0xf0, 0x64, 0xf6, 0x68, 0x50, 0x68, 0x31, 0xf8, 0x9b, 0x88, 0xf2, 0xc5, 0xfb, 0xc9, 0x21, 0xd2, 0xdf, 0x93, 0x6f, 0x98, 0x94, 0x53, 0x68, 0xe5, 0x25, 0x8d, 0x8a, 0xf1, 0xd7, 0x5b, 0xf3, 0xf9, 0xdf, 0x8c, 0x77, 0x24, 0x9e, 0x28, 0x09, 0x36, 0xf0, 0xa2, 0x93, 0x17, 0xad, 0xbb, 0x1a, 0xd7, 0x6f, 0x25, 0x6b, 0x0c, 0xd3, 0x76, 0x7f, 0xcf, 0x3a, 0xe3, 0x1a, 0x84, 0x57, 0x62, 0x71, 0x8a, 0x6a, 0x42, 0x94, 0x71, 0x21, 0x6a, 0x13, 0x73, 0x17, 0x56, 0xa2, 0x38, 0xc1, 0x5e, 0x76, 0x0b, 0x67, 0x6b, 0x6e, 0xcd, 0xd3, 0xe2, 0x8a, 0x80, 0x61, 0x6c, 0x1c, 0x60, 0x9d, 0x65, 0xbd, 0x5a, 0x4e, 0xeb, 0xa2, 0x06, 0xd6, 0xbe, 0xf5, 0x49, 0xc1, 0x7d, 0xd9, 0x46, 0x3e, 0x9f, 0x2f, 0x92, 0xa4, 0x1a, 0x14, 0x2c, 0x1e, 0xb7, 0x6d, 0x71, 0x29, 0x92, 0x43, 0x7b, 0x76, 0xa4, 0x8b, 0x33, 0xf3, 0xd0, 0xda, 0x7c, 0x7f, 0x73, 0x50, 0xe2, 0xc5, 0x30, 0xad, 0x9e, 0x0f, 0x61, 0x73, 0xa0, 0xbb, 0x87, 0x1f, 0x0b, 0x70, 0xa9, 0xa6, 0xaa, 0x31, 0x2d, 0x62, 0x2c, 0xaf, 0xea, 0x49, 0xb2, 0xce, 0x6c, 0x23, 0x90, 0xdd, 0x29, 0x37, 0x67, 0xb1, 0xc9, 0x99, 0x3a, 0x3f, 0xa6, 0x69, 0xc9, 0x0d, 0x24, 0x3f, ]; const PRIV_KEY: [u8; 256] = [ 0xcf, 0x7c, 0xe8, 0xa1, 0x9c, 0x47, 0xe1, 0x70, 0xbd, 0x38, 0x0a, 0xaf, 0x26, 0x5c, 0x48, 0x94, 0x48, 0x54, 0x98, 0x07, 0xae, 0xb9, 0x5c, 0x46, 0xaf, 0x8f, 0x59, 0xc8, 0x30, 0x1b, 0x98, 0xe3, 0x2a, 0x93, 0xb2, 0xdb, 0xab, 0x81, 0xbf, 0xd2, 0xad, 0x0d, 0xb6, 0x5b, 0x57, 0xbf, 0x98, 0xcb, 0xbc, 0x97, 0xb8, 0xc3, 0xa4, 0xb0, 0xc9, 0xf1, 0x05, 0x46, 0xed, 0x06, 0xdf, 0xdc, 0x58, 0xf4, 0xe0, 0x23, 0x15, 0x77, 0x25, 0x7b, 0x46, 0x6f, 0xea, 0x0c, 0xeb, 0xa5, 0x49, 0x53, 0x1d, 0xa0, 0x2e, 0x3a, 0x7e, 0x8e, 0x8d, 0xec, 0xdd, 0xa6, 0x07, 0x95, 0x40, 0xab, 0x3e, 0x10, 0x9b, 0x07, 0xce, 0xe9, 0xf3, 0xdb, 0x99, 0xb7, 0x52, 0xab, 0xa6, 0x22, 0x43, 0x70, 0xc2, 0x2c, 0xdc, 0x98, 0x4e, 0x05, 0x62, 0xdf, 0xe4, 0x6a, 0xba, 0xbd, 0x28, 0x4c, 0xbe, 0xbd, 0xb9, 0x80, 0x54, 0xed, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, ]; pub fn get_ext_rsa_priv() -> TPM2B_SENSITIVE { TPM2B_SENSITIVE { size: std::mem::size_of::<TPM2B_SENSITIVE>() as u16, sensitiveArea: TPMT_SENSITIVE { sensitiveType: PublicAlgorithm::Rsa.into(), sensitive: TPMU_SENSITIVE_COMPOSITE { rsa: TPM2B_PRIVATE_KEY_RSA { size: 128, buffer: PRIV_KEY, }, }, ..Default::default() }, } } pub fn get_ext_rsa_pub() -> Public { let object_attributes = ObjectAttributesBuilder::new() .with_user_with_auth(true) .with_decrypt(false) .with_sign_encrypt(true) .with_restricted(false) .build() .expect("Failed to build object attributes"); 
PublicBuilder::new() .with_public_algorithm(PublicAlgorithm::Rsa) .with_name_hashing_algorithm(HashingAlgorithm::Sha256) .with_object_attributes(object_attributes) .with_rsa_parameters( PublicRsaParametersBuilder::new_unrestricted_signing_key( RsaScheme::create(RsaSchemeAlgorithm::RsaSsa, Some(HashingAlgorithm::Sha256)) .expect("Failed to create rsa scheme"), RsaKeyBits::Rsa2048, Default::default(), ) .build() .expect("Failed to create rsa parameters for public structure"), ) .with_rsa_unique_identifier( &PublicKeyRsa::try_from(&KEY[..]) .expect("Failed to create Public RSA key from buffer"), ) .build() .expect("Failed to build Public structure") } #[test] fn test_load_external() { let mut context = create_ctx_with_session(); let pub_key = get_ext_rsa_pub(); let priv_key = get_ext_rsa_priv(); let key_handle = context .load_external(&priv_key, &pub_key, Hierarchy::Null) .unwrap(); context.flush_context(key_handle.into()).unwrap(); } } mod test_read_public { use crate::common::{create_ctx_with_session, signing_key_pub}; use std::convert::TryFrom; use tss_esapi::{interface_types::resource_handles::Hierarchy, structures::Auth}; #[test] fn test_read_public() { let mut context = create_ctx_with_session(); let random_digest = context.get_random(16).unwrap(); let key_auth = Auth::try_from(random_digest.value().to_vec()).unwrap(); let key_handle = context .create_primary( Hierarchy::Owner, &signing_key_pub(), Some(&key_auth), None, None, None, ) .unwrap() .key_handle; let _ = context.read_public(key_handle).unwrap(); } } mod test_make_credential { use crate::common::{create_ctx_with_session, decryption_key_pub}; use std::convert::TryInto; use tss_esapi::interface_types::resource_handles::Hierarchy; #[test] fn test_make_credential() { let mut context = create_ctx_with_session(); let key_handle = context .create_primary( Hierarchy::Owner, &decryption_key_pub(), None, None, None, None, ) .unwrap() .key_handle; let (_, key_name, _) = context.read_public(key_handle).unwrap(); let cred = vec![1, 2, 3, 4, 5]; context .execute_without_session(|ctx| { ctx.make_credential(key_handle, cred.try_into().unwrap(), key_name) }) .unwrap(); } } mod test_activate_credential { use crate::common::{create_ctx_with_session, decryption_key_pub}; use std::convert::{TryFrom, TryInto}; use tss_esapi::{ attributes::SessionAttributesBuilder, constants::SessionType, interface_types::{algorithm::HashingAlgorithm, resource_handles::Hierarchy}, structures::{Digest, SymmetricDefinition}, }; #[test] fn test_make_activate_credential() { let mut context = create_ctx_with_session(); let (session_attributes, session_attributes_mask) = SessionAttributesBuilder::new().build(); let session_1 = context .execute_without_session(|ctx| { ctx.start_auth_session( None, None, None, SessionType::Hmac, SymmetricDefinition::AES_256_CFB, HashingAlgorithm::Sha256, ) }) .expect("session_1: Call to start_auth_session failed.") .expect("session_1: The auth session returned was NONE"); context .tr_sess_set_attributes(session_1, session_attributes, session_attributes_mask) .expect("Call to tr_sess_set_attributes failed"); let session_2 = context .execute_without_session(|ctx| { ctx.start_auth_session( None, None, None, SessionType::Hmac, SymmetricDefinition::AES_256_CFB, HashingAlgorithm::Sha256, ) }) .expect("session_2: Call to start_auth_session failed.") .expect("session_2: The auth session returned was NONE"); context .tr_sess_set_attributes(session_2, session_attributes, session_attributes_mask) .unwrap(); let key_handle = context .create_primary( 
Hierarchy::Owner, &decryption_key_pub(), None, None, None, None, ) .unwrap() .key_handle; let (_, key_name, _) = context .read_public(key_handle) .expect("Call to read_public failed"); let cred = vec![1, 2, 3, 4, 5]; let (credential_blob, secret) = context .execute_without_session(|ctx| { ctx.make_credential(key_handle, cred.try_into().unwrap(), key_name) }) .expect("Call to make_credential failed"); context.set_sessions((Some(session_1), Some(session_2), None)); let decrypted = context .activate_credential(key_handle, key_handle, credential_blob, secret) .expect("Call to active_credential failed"); let expected = Digest::try_from(vec![1, 2, 3, 4, 5]).expect("Failed to create digest for expected"); assert_eq!(expected, decrypted); } } mod test_unseal { use crate::common::{create_ctx_with_session, create_public_sealed_object, decryption_key_pub}; use std::convert::TryFrom; use tss_esapi::{interface_types::resource_handles::Hierarchy, structures::SensitiveData}; #[test] fn unseal() { let testbytes: [u8; 5] = [0x01, 0x02, 0x03, 0x04, 0x42]; let mut context = create_ctx_with_session(); let key_handle_seal = context .create_primary( Hierarchy::Owner, &decryption_key_pub(), None, None, None, None, ) .unwrap() .key_handle; let key_handle_unseal = context .create_primary( Hierarchy::Owner, &decryption_key_pub(), None, None, None, None, ) .unwrap() .key_handle; let key_pub = create_public_sealed_object(); let result = context .create( key_handle_seal, &key_pub, None, Some(SensitiveData::try_from(testbytes.to_vec()).unwrap()).as_ref(), None, None, ) .unwrap(); let loaded_key = context .load(key_handle_unseal, result.out_private, &result.out_public) .unwrap(); let unsealed = context.unseal(loaded_key.into()).unwrap(); let unsealed = unsealed.value(); assert!(unsealed == testbytes); } }
mod test_create { use crate::common::{create_ctx_with_session, decryption_key_pub}; use std::convert::TryFrom; use tss_esapi::{interface_types::resource_handles::Hierarchy, structures::Auth}; #[test] fn test_create() { let mut context = create_ctx_with_session(); let random_digest = context.get_random(16).unwrap(); let key_auth = Auth::try_from(random_digest.value().to_vec()).unwrap(); let prim_key_handle = context .create_primary( Hierarchy::Owner, &decryption_key_pub(), Some(&key_auth), None, None, None, ) .unwrap() .key_handle; let _ = context .create( prim_key_handle, &decryption_key_pub(), Some(&key_auth), None, None, None, ) .unwrap(); } } mod test_load { use crate::common::{create_ctx_with_session, decryption_key_pub, signing_key_pub}; use std::convert::TryFrom; use tss_esapi::{interface_types::resource_handles::Hierarchy, structures::Auth}; #[test] fn test_load() { let mut context = create_ctx_with_session(); let random_digest = context.get_random(16).unwrap(); let key_auth = Auth::try_from(random_digest.value().to_vec()).unwrap(); let prim_key_handle = context .create_primary( Hierarchy::Owner, &decryption_key_pub(), Some(&key_auth), None, None, None, ) .unwrap() .key_handle; let result = context .create( prim_key_handle, &signing_key_pub(), Some(&key_auth), None, None, None, ) .unwrap(); let _ = context .load(prim_key_handle, result.out_private, &result.out_public) .unwrap(); } } mod test_load_external_public { use crate::common::{create_ctx_with_session, KEY}; use std::convert::TryFrom; use tss_esapi::{ attributes::ObjectAttributesBuilder, interface_types::{ algorithm::{HashingAlgorithm, PublicAlgorithm, RsaSchemeAlgorithm}, key_bits::RsaKeyBits, resource_handles::Hierarchy, }, structures::{Public, PublicBuilder, PublicKeyRsa, PublicRsaParametersBuilder, RsaScheme}, }; pub fn get_ext_rsa_pub() -> Public { let object_attributes = ObjectAttributesBuilder::new() .with_user_with_auth(true) .with_decrypt(false) .with_sign_encrypt(true) .with_restricted(false) .build() .expect("Failed to build object attributes"); PublicBuilder::new() .with_public_algorithm(PublicAlgorithm::Rsa) .with_name_hashing_algorithm(HashingAlgorithm::Sha256) .with_object_attributes(object_attributes) .with_rsa_parameters( PublicRsaParametersBuilder::new_unrestricted_signing_key( RsaScheme::create(RsaSchemeAlgorithm::RsaSsa, Some(HashingAlgorithm::Sha256)) .expect("Failed to create rsa scheme"), RsaKeyBits::Rsa2048, Default::default(), ) .build() .expect("Failed to create rsa parameters for public structure"), ) .with_rsa_unique_identifier( &PublicKeyRsa::try_from(&KEY[..256]) .expect("Failed to create Public RSA key from buffer"), ) .build() .expect("Failed to build Public structure") } #[test] fn test_load_external_public() { let mut context = create_ctx_with_session(); let pub_key = get_ext_rsa_pub(); context .load_external_public(&pub_key, Hierarchy::Owner) .unwrap(); } } mod test_load_external { use crate::common::create_ctx_with_session; use std::convert::TryFrom; use tss_esapi::{ attributes::ObjectAttributesBuilder, interface_types::{ algorithm::{HashingAlgorithm, PublicAlgorithm, RsaSchemeAlgorithm}, key_bits::RsaKeyBits, resource_handles::Hierarchy, }, structures::{Public, PublicBuilder, PublicKeyRsa, PublicRsaParametersBuilder, RsaScheme}, }; use tss_esapi_sys::{ TPM2B_PRIVATE_KEY_RSA, TPM2B_SENSITIVE, TPMT_SENSITIVE, TPMU_SENSITIVE_COMPOSITE, }; const KEY: [u8; 256] = [ 0xc9, 0x75, 0xf8, 0xb2, 0x30, 0xf4, 0x24, 0x6e, 0x95, 0xb1, 0x3c, 0x55, 0x0f, 0xe4, 0x48, 0xe9, 0xac, 0x06, 0x1f, 0xa8, 0xbe, 0xa4, 
0xd7, 0x1c, 0xa5, 0x5e, 0x2a, 0xbf, 0x60, 0xc2, 0x98, 0x63, 0x6c, 0xb4, 0xe2, 0x61, 0x54, 0x31, 0xc3, 0x3e, 0x9d, 0x1a, 0x83, 0x84, 0x18, 0x51, 0xe9, 0x8c, 0x24, 0xcf, 0xac, 0xc6, 0x0d, 0x26, 0x2c, 0x9f, 0x2b, 0xd5, 0x91, 0x98, 0x89, 0xe3, 0x68, 0x97, 0x36, 0x02, 0xec, 0x16, 0x37, 0x24, 0x08, 0xb4, 0x77, 0xd1, 0x56, 0x10, 0x3e, 0xf0, 0x64, 0xf6, 0x68, 0x50, 0x68, 0x31, 0xf8, 0x9b, 0x88, 0xf2, 0xc5, 0xfb, 0xc9, 0x21, 0xd2, 0xdf, 0x93, 0x6f, 0x98, 0x94, 0x53, 0x68, 0xe5, 0x25, 0x8d, 0x8a, 0xf1, 0xd7, 0x5b, 0xf3, 0xf9, 0xdf, 0x8c, 0x77, 0x24, 0x9e, 0x28, 0x09, 0x36, 0xf0, 0xa2, 0x93, 0x17, 0xad, 0xbb, 0x1a, 0xd7, 0x6f, 0x25, 0x6b, 0x0c, 0xd3, 0x76, 0x7f, 0xcf, 0x3a, 0xe3, 0x1a, 0x84, 0x57, 0x62, 0x71, 0x8a, 0x6a, 0x42, 0x94, 0x71, 0x21, 0x6a, 0x13, 0x73, 0x17, 0x56, 0xa2, 0x38, 0xc1, 0x5e, 0x76, 0x0b, 0x67, 0x6b, 0x6e, 0xcd, 0xd3, 0xe2, 0x8a, 0x80, 0x61, 0x6c, 0x1c, 0x60, 0x9d, 0x65, 0xbd, 0x5a, 0x4e, 0xeb, 0xa2, 0x06, 0xd6, 0xbe, 0xf5, 0x49, 0xc1, 0x7d, 0xd9, 0x46, 0x3e, 0x9f, 0x2f, 0x92, 0xa4, 0x1a, 0x14, 0x2c, 0x1e, 0xb7, 0x6d, 0x71, 0x29, 0x92, 0x43, 0x7b, 0x76, 0xa4, 0x8b, 0x33, 0xf3, 0xd0, 0xda, 0x7c, 0x7f, 0x73, 0x50, 0xe2, 0xc5, 0x30, 0xad, 0x9e, 0x0f, 0x61, 0x73, 0xa0, 0xbb, 0x87, 0x1f, 0x0b, 0x70, 0xa9, 0xa6, 0xaa, 0x31, 0x2d, 0x62, 0x2c, 0xaf, 0xea, 0x49, 0xb2, 0xce, 0x6c, 0x23, 0x90, 0xdd, 0x29, 0x37, 0x67, 0xb1, 0xc9, 0x99, 0x3a, 0x3f, 0xa6, 0x69, 0xc9, 0x0d, 0x24, 0x3f, ]; const PRIV_KEY: [u8; 256] = [ 0xcf, 0x7c, 0xe8, 0xa1, 0x9c, 0x47, 0xe1, 0x70, 0xbd, 0x38, 0x0a, 0xaf, 0x26, 0x5c, 0x48, 0x94, 0x48, 0x54, 0x98, 0x07, 0xae, 0xb9, 0x5c, 0x46, 0xaf, 0x8f, 0x59, 0xc8, 0x30, 0x1b, 0x98, 0xe3, 0x2a, 0x93, 0xb2, 0xdb, 0xab, 0x81, 0xbf, 0xd2, 0xad, 0x0d, 0xb6, 0x5b, 0x57, 0xbf, 0x98, 0xcb, 0xbc, 0x97, 0xb8, 0xc3, 0xa4, 0xb0, 0xc9, 0xf1, 0x05, 0x46, 0xed, 0x06, 0xdf, 0xdc, 0x58, 0xf4, 0xe0, 0x23, 0x15, 0x77, 0x25, 0x7b, 0x46, 0x6f, 0xea, 0x0c, 0xeb, 0xa5, 0x49, 0x53, 0x1d, 0xa0, 0x2e, 0x3a, 0x7e, 0x8e, 0x8d, 0xec, 0xdd, 0xa6, 0x07, 0x95, 0x40, 0xab, 0x3e, 0x10, 0x9b, 0x07, 0xce, 0xe9, 0xf3, 0xdb, 0x99, 0xb7, 0x52, 0xab, 0xa6, 0x22, 0x43, 0x70, 0xc2, 0x2c, 0xdc, 0x98, 0x4e, 0x05, 0x62, 0xdf, 0xe4, 0x6a, 0xba, 0xbd, 0x28, 0x4c, 0xbe, 0xbd, 0xb9, 0x80, 0x54, 0xed, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, ]; pub fn get_ext_rsa_priv() -> TPM2B_SENSITIVE { TPM2B_SENSITIVE { size: std::mem::size_of::<TPM2B_SENSITIVE>() as u16, sensitiveArea: TPMT_SENSITIVE { sensitiveType: PublicAlgorithm::Rsa.into(), sensitive: TPMU_SENSITIVE_COMPOSITE { rsa: TPM2B_PRIVATE_KEY_RSA { size: 128, buffer: PRIV_KEY, }, }, ..Default::default() }, } } pub fn get_ext_rsa_pub() -> Public { let object_attributes = ObjectAttributesBuilder::new() .with_user_with_auth(true) .with_decrypt(false) .with_sign_encrypt(true) .with_restricted(false) .build() .expect("Failed to build object attributes"); 
PublicBuilder::new() .with_public_algorithm(PublicAlgorithm::Rsa) .with_name_hashing_algorithm(HashingAlgorithm::Sha256) .with_object_attributes(object_attributes) .with_rsa_parameters( PublicRsaParametersBuilder::new_unrestricted_signing_key( RsaScheme::create(RsaSchemeAlgorithm::RsaSsa, Some(HashingAlgorithm::Sha256)) .expect("Failed to create rsa scheme"), RsaKeyBits::Rsa2048, Default::default(), ) .build() .expect("Failed to create rsa parameters for public structure"), ) .with_rsa_unique_identifier( &PublicKeyRsa::try_from(&KEY[..]) .expect("Failed to create Public RSA key from buffer"), ) .build() .expect("Failed to build Public structure") } #[test] fn test_load_external() { let mut context = create_ctx_with_session(); let pub_key = get_ext_rsa_pub(); let priv_key = get_ext_rsa_priv(); let key_handle = context .load_external(&priv_key, &pub_key, Hierarchy::Null) .unwrap(); context.flush_context(key_handle.into()).unwrap(); } } mod test_read_public { use crate::common::{create_ctx_with_session, signing_key_pub}; use std::convert::TryFrom; use tss_esapi::{interface_types::resource_handles::Hierarchy, structures::Auth}; #[test] fn test_read_public() { let mut context = create_ctx_with_session(); let random_digest = context.get_random(16).unwrap(); let key_auth = Auth::try_from(random_digest.value().to_vec()).unwrap(); let ke
}

mod test_make_credential {
    use crate::common::{create_ctx_with_session, decryption_key_pub};
    use std::convert::TryInto;
    use tss_esapi::interface_types::resource_handles::Hierarchy;

    #[test]
    fn test_make_credential() {
        let mut context = create_ctx_with_session();
        let key_handle = context
            .create_primary(
                Hierarchy::Owner,
                &decryption_key_pub(),
                None,
                None,
                None,
                None,
            )
            .unwrap()
            .key_handle;

        let (_, key_name, _) = context.read_public(key_handle).unwrap();

        let cred = vec![1, 2, 3, 4, 5];

        context
            .execute_without_session(|ctx| {
                ctx.make_credential(key_handle, cred.try_into().unwrap(), key_name)
            })
            .unwrap();
    }
}

mod test_activate_credential {
    use crate::common::{create_ctx_with_session, decryption_key_pub};
    use std::convert::{TryFrom, TryInto};
    use tss_esapi::{
        attributes::SessionAttributesBuilder,
        constants::SessionType,
        interface_types::{algorithm::HashingAlgorithm, resource_handles::Hierarchy},
        structures::{Digest, SymmetricDefinition},
    };

    #[test]
    fn test_make_activate_credential() {
        let mut context = create_ctx_with_session();
        let (session_attributes, session_attributes_mask) =
            SessionAttributesBuilder::new().build();

        let session_1 = context
            .execute_without_session(|ctx| {
                ctx.start_auth_session(
                    None,
                    None,
                    None,
                    SessionType::Hmac,
                    SymmetricDefinition::AES_256_CFB,
                    HashingAlgorithm::Sha256,
                )
            })
            .expect("session_1: Call to start_auth_session failed.")
            .expect("session_1: The auth session returned was NONE");
        context
            .tr_sess_set_attributes(session_1, session_attributes, session_attributes_mask)
            .expect("Call to tr_sess_set_attributes failed");

        let session_2 = context
            .execute_without_session(|ctx| {
                ctx.start_auth_session(
                    None,
                    None,
                    None,
                    SessionType::Hmac,
                    SymmetricDefinition::AES_256_CFB,
                    HashingAlgorithm::Sha256,
                )
            })
            .expect("session_2: Call to start_auth_session failed.")
            .expect("session_2: The auth session returned was NONE");
        context
            .tr_sess_set_attributes(session_2, session_attributes, session_attributes_mask)
            .unwrap();

        let key_handle = context
            .create_primary(
                Hierarchy::Owner,
                &decryption_key_pub(),
                None,
                None,
                None,
                None,
            )
            .unwrap()
            .key_handle;

        let (_, key_name, _) = context
            .read_public(key_handle)
            .expect("Call to read_public failed");

        let cred = vec![1, 2, 3, 4, 5];

        let (credential_blob, secret) = context
            .execute_without_session(|ctx| {
                ctx.make_credential(key_handle, cred.try_into().unwrap(), key_name)
            })
            .expect("Call to make_credential failed");

        context.set_sessions((Some(session_1), Some(session_2), None));

        let decrypted = context
            .activate_credential(key_handle, key_handle, credential_blob, secret)
            .expect("Call to active_credential failed");

        let expected =
            Digest::try_from(vec![1, 2, 3, 4, 5]).expect("Failed to create digest for expected");

        assert_eq!(expected, decrypted);
    }
}

mod test_unseal {
    use crate::common::{create_ctx_with_session, create_public_sealed_object, decryption_key_pub};
    use std::convert::TryFrom;
    use tss_esapi::{interface_types::resource_handles::Hierarchy, structures::SensitiveData};

    #[test]
    fn unseal() {
        let testbytes: [u8; 5] = [0x01, 0x02, 0x03, 0x04, 0x42];

        let mut context = create_ctx_with_session();

        let key_handle_seal = context
            .create_primary(
                Hierarchy::Owner,
                &decryption_key_pub(),
                None,
                None,
                None,
                None,
            )
            .unwrap()
            .key_handle;
        let key_handle_unseal = context
            .create_primary(
                Hierarchy::Owner,
                &decryption_key_pub(),
                None,
                None,
                None,
                None,
            )
            .unwrap()
            .key_handle;

        let key_pub = create_public_sealed_object();

        let result = context
            .create(
                key_handle_seal,
                &key_pub,
                None,
                Some(SensitiveData::try_from(testbytes.to_vec()).unwrap()).as_ref(),
                None,
                None,
            )
            .unwrap();

        let loaded_key = context
            .load(key_handle_unseal, result.out_private, &result.out_public)
            .unwrap();

        let unsealed = context.unseal(loaded_key.into()).unwrap();
        let unsealed = unsealed.value();

        assert!(unsealed == testbytes);
    }
}
y_handle = context
            .create_primary(
                Hierarchy::Owner,
                &signing_key_pub(),
                Some(&key_auth),
                None,
                None,
                None,
            )
            .unwrap()
            .key_handle;

        let _ = context.read_public(key_handle).unwrap();
    }
function_block-function_prefixed
[ { "content": "#[allow(dead_code)]\n\npub fn create_public_sealed_object() -> Public {\n\n let object_attributes = ObjectAttributesBuilder::new()\n\n .with_fixed_tpm(true)\n\n .with_fixed_parent(true)\n\n .with_no_da(true)\n\n .with_admin_with_policy(true)\n\n .with_user_with_auth(true)\n\n .build()\n\n .expect(\"Failed to create object attributes\");\n\n\n\n PublicBuilder::new()\n\n .with_public_algorithm(PublicAlgorithm::KeyedHash)\n\n .with_name_hashing_algorithm(HashingAlgorithm::Sha256)\n\n .with_object_attributes(object_attributes)\n\n .with_auth_policy(&Default::default())\n\n .with_keyed_hash_parameters(PublicKeyedHashParameters::new(KeyedHashScheme::Null))\n\n .with_keyed_hash_unique_identifier(&Default::default())\n\n .build()\n\n .expect(\"Failed to create public structure.\")\n\n}\n", "file_path": "tss-esapi/tests/integration_tests/common/mod.rs", "rank": 0, "score": 408684.8991893703 }, { "content": "fn get_ek_object_public(context: &mut crate::Context) -> Result<PublicKey> {\n\n let key_handle = ek::create_ek_object(context, AsymmetricAlgorithm::Rsa, None)?;\n\n let (attesting_key_pub, _, _) = context.read_public(key_handle).or_else(|e| {\n\n context.flush_context(key_handle.into())?;\n\n Err(e)\n\n })?;\n\n context.flush_context(key_handle.into())?;\n\n\n\n PublicKey::try_from(attesting_key_pub)\n\n}\n", "file_path": "tss-esapi/src/abstraction/transient/key_attestation.rs", "rank": 1, "score": 393008.3198443566 }, { "content": "#[allow(dead_code)]\n\npub fn signing_key_pub() -> Public {\n\n utils::create_unrestricted_signing_rsa_public(\n\n RsaScheme::create(RsaSchemeAlgorithm::RsaSsa, Some(HashingAlgorithm::Sha256))\n\n .expect(\"Failed to create RSA scheme\"),\n\n RsaKeyBits::Rsa2048,\n\n RsaExponent::default(),\n\n )\n\n .expect(\"Failed to create an unrestricted signing rsa public structure\")\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/common/mod.rs", "rank": 2, "score": 385500.05408678896 }, { "content": "#[allow(dead_code)]\n\npub fn decryption_key_pub() -> Public {\n\n utils::create_restricted_decryption_rsa_public(\n\n Cipher::aes_256_cfb()\n\n .try_into()\n\n .expect(\"Failed to create symmetric object\"),\n\n RsaKeyBits::Rsa2048,\n\n RsaExponent::default(),\n\n )\n\n .expect(\"Failed to create a restricted decryption rsa public structure\")\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/common/mod.rs", "rank": 3, "score": 385500.05408678896 }, { "content": "#[test]\n\nfn rsa_exponent_create_test() {\n\n let expected_error = Err(Error::WrapperError(WrapperErrorKind::InvalidParam));\n\n // Valid values for RsaExponent are only 0 or a prime number value larger then 2.\n\n assert_eq!(expected_error, RsaExponent::create(1));\n\n\n\n // The specification says that 0 or any prime number larger then 2 should be accepted.\n\n let _ = RsaExponent::create(0).expect(\"Failed to create a RsaExponent from the value 0\");\n\n let _ = RsaExponent::create(5).expect(\"Failed to create a RsaExponent from the value 5\");\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/structures_tests/buffers_tests/public_rsa_exponent_tests.rs", "rank": 4, "score": 373685.7864703021 }, { "content": "#[allow(dead_code)]\n\npub fn encryption_decryption_key_pub() -> Public {\n\n utils::create_unrestricted_encryption_decryption_rsa_public(\n\n RsaKeyBits::Rsa2048,\n\n RsaExponent::default(),\n\n )\n\n .expect(\"Failed to create an unrestricted encryption decryption rsa public structure\")\n\n}\n\n\n", "file_path": 
"tss-esapi/tests/integration_tests/common/mod.rs", "rank": 5, "score": 361667.7918831455 }, { "content": "#[test]\n\nfn test_signing_with_default_symmetric() {\n\n assert!(PublicRsaParametersBuilder::new()\n\n .with_restricted(false)\n\n .with_is_decryption_key(false)\n\n .with_is_signing_key(true)\n\n .with_scheme(RsaScheme::Null)\n\n .with_symmetric(SymmetricDefinitionObject::Null)\n\n .with_key_bits(RsaKeyBits::Rsa1024)\n\n .build()\n\n .is_ok());\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/structures_tests/buffers_tests/public_rsa_parameters_tests.rs", "rank": 6, "score": 359315.09137569944 }, { "content": "#[test]\n\nfn test_signing_with_wrong_symmetric() {\n\n assert!(matches!(\n\n PublicRsaParametersBuilder::new()\n\n .with_restricted(false)\n\n .with_is_decryption_key(false)\n\n .with_is_signing_key(true)\n\n .with_scheme(RsaScheme::Null)\n\n .with_symmetric(SymmetricDefinitionObject::AES_128_CFB)\n\n .with_key_bits(RsaKeyBits::Rsa1024)\n\n .build(),\n\n Err(Error::WrapperError(WrapperErrorKind::InconsistentParams))\n\n ));\n\n}\n", "file_path": "tss-esapi/tests/integration_tests/structures_tests/buffers_tests/public_rsa_parameters_tests.rs", "rank": 7, "score": 359315.09137569944 }, { "content": "#[test]\n\nfn test_restricted_decryption_with_null_symmetric() {\n\n assert!(matches!(\n\n PublicRsaParametersBuilder::new()\n\n .with_restricted(true)\n\n .with_is_decryption_key(true)\n\n .with_scheme(RsaScheme::Null)\n\n .with_symmetric(SymmetricDefinitionObject::Null)\n\n .with_key_bits(RsaKeyBits::Rsa1024)\n\n .build(),\n\n Err(Error::WrapperError(WrapperErrorKind::InconsistentParams))\n\n ));\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/structures_tests/buffers_tests/public_rsa_parameters_tests.rs", "rank": 8, "score": 354306.67274968943 }, { "content": "#[test]\n\nfn test_restricted_decryption_with_default_symmetric() {\n\n assert!(matches!(\n\n PublicRsaParametersBuilder::new()\n\n .with_restricted(true)\n\n .with_is_decryption_key(true)\n\n .with_scheme(RsaScheme::Null)\n\n .with_key_bits(RsaKeyBits::Rsa1024)\n\n .build(),\n\n Err(Error::WrapperError(WrapperErrorKind::ParamsMissing))\n\n ));\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/structures_tests/buffers_tests/public_rsa_parameters_tests.rs", "rank": 9, "score": 354306.67274968943 }, { "content": "#[test]\n\nfn test_restricted_decryption_with_wrong_symmetric() {\n\n assert!(PublicRsaParametersBuilder::new()\n\n .with_restricted(true)\n\n .with_is_decryption_key(true)\n\n .with_scheme(RsaScheme::Null)\n\n .with_symmetric(SymmetricDefinitionObject::AES_128_CFB)\n\n .with_key_bits(RsaKeyBits::Rsa1024)\n\n .build()\n\n .is_ok());\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/structures_tests/buffers_tests/public_rsa_parameters_tests.rs", "rank": 10, "score": 354306.67274968943 }, { "content": "#[allow(dead_code)]\n\npub fn create_ctx_with_session() -> Context {\n\n let mut ctx = create_ctx_without_session();\n\n let session = ctx\n\n .start_auth_session(\n\n None,\n\n None,\n\n None,\n\n SessionType::Hmac,\n\n SymmetricDefinition::AES_256_CFB,\n\n HashingAlgorithm::Sha256,\n\n )\n\n .unwrap();\n\n let (session_attributes, session_attributes_mask) = SessionAttributesBuilder::new()\n\n .with_decrypt(true)\n\n .with_encrypt(true)\n\n .build();\n\n ctx.tr_sess_set_attributes(\n\n session.unwrap(),\n\n session_attributes,\n\n session_attributes_mask,\n\n )\n\n .unwrap();\n\n ctx.set_sessions((session, None, None));\n\n\n\n ctx\n\n}\n\n\n", "file_path": 
"tss-esapi/tests/integration_tests/common/mod.rs", "rank": 11, "score": 352794.7820164545 }, { "content": "/// Get the TPM vendor name\n\npub fn get_tpm_vendor(context: &mut Context) -> Result<String> {\n\n // Retrieve the TPM property values\n\n Ok([\n\n PropertyTag::VendorString1,\n\n PropertyTag::VendorString2,\n\n PropertyTag::VendorString3,\n\n PropertyTag::VendorString4,\n\n ]\n\n .iter()\n\n // Retrieve property values\n\n .map(|propid| context.get_tpm_property(*propid))\n\n // Collect and return an error if we got one\n\n .collect::<Result<Vec<Option<u32>>>>()?\n\n .iter()\n\n // Filter out the Option::None values\n\n .filter_map(|x| *x)\n\n // Filter out zero values\n\n .filter(|x| *x != 0)\n\n // Map through int_to_string\n\n .map(tpm_int_to_string)\n\n // Collect to a single string\n\n .collect())\n\n}\n", "file_path": "tss-esapi/src/utils/mod.rs", "rank": 12, "score": 351537.5897205756 }, { "content": "#[allow(dead_code)]\n\npub fn create_ctx_without_session() -> Context {\n\n let tcti = create_tcti();\n\n Context::new(tcti).unwrap()\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/common/mod.rs", "rank": 13, "score": 347372.24987556506 }, { "content": "/// Create the [Public] structure for an RSA unrestricted signing key.\n\n///\n\n/// * `scheme` - RSA scheme to be used for signing\n\n/// * `key_bits` - Size in bits of the decryption key\n\n/// * `pub_exponent` - Public exponent of the RSA key\n\npub fn create_unrestricted_signing_rsa_public(\n\n scheme: RsaScheme,\n\n rsa_key_bits: RsaKeyBits,\n\n rsa_pub_exponent: RsaExponent,\n\n) -> Result<Public> {\n\n let object_attributes = ObjectAttributesBuilder::new()\n\n .with_fixed_tpm(true)\n\n .with_fixed_parent(true)\n\n .with_sensitive_data_origin(true)\n\n .with_user_with_auth(true)\n\n .with_decrypt(false)\n\n .with_sign_encrypt(true)\n\n .with_restricted(false)\n\n .build()?;\n\n\n\n PublicBuilder::new()\n\n .with_public_algorithm(PublicAlgorithm::Rsa)\n\n .with_name_hashing_algorithm(HashingAlgorithm::Sha256)\n\n .with_object_attributes(object_attributes)\n\n .with_rsa_parameters(\n", "file_path": "tss-esapi/src/utils/mod.rs", "rank": 14, "score": 336297.8503966811 }, { "content": "/// Create the [Public] structure for a restricted decryption key.\n\n///\n\n/// * `symmetric` - Cipher to be used for decrypting children of the key\n\n/// * `key_bits` - Size in bits of the decryption key\n\n/// * `pub_exponent` - Public exponent of the RSA key\n\npub fn create_restricted_decryption_rsa_public(\n\n symmetric: SymmetricDefinitionObject,\n\n rsa_key_bits: RsaKeyBits,\n\n rsa_pub_exponent: RsaExponent,\n\n) -> Result<Public> {\n\n let object_attributes = ObjectAttributesBuilder::new()\n\n .with_fixed_tpm(true)\n\n .with_fixed_parent(true)\n\n .with_sensitive_data_origin(true)\n\n .with_user_with_auth(true)\n\n .with_decrypt(true)\n\n .with_sign_encrypt(false)\n\n .with_restricted(true)\n\n .build()?;\n\n\n\n PublicBuilder::new()\n\n .with_public_algorithm(PublicAlgorithm::Rsa)\n\n .with_name_hashing_algorithm(HashingAlgorithm::Sha256)\n\n .with_object_attributes(object_attributes)\n\n .with_rsa_parameters(\n", "file_path": "tss-esapi/src/utils/mod.rs", "rank": 15, "score": 336289.73355632194 }, { "content": "/// Lists all the currently defined NV Indexes' names and public components\n\npub fn list(context: &mut Context) -> Result<Vec<(NvPublic, Name)>> {\n\n context.execute_without_session(|ctx| {\n\n ctx.get_capability(\n\n CapabilityType::Handles,\n\n TPM2_NV_INDEX_FIRST,\n\n TPM2_PT_NV_INDEX_MAX,\n\n )\n\n 
.and_then(|(capability_data, _)| match capability_data {\n\n CapabilityData::Handles(tpm_handles) => Ok(tpm_handles),\n\n _ => Err(Error::local_error(WrapperErrorKind::WrongValueFromTpm)),\n\n })\n\n .and_then(|tpm_handles| {\n\n tpm_handles\n\n .iter()\n\n .map(|&tpm_handle| get_nv_index_info(ctx, NvIndexTpmHandle::try_from(tpm_handle)?))\n\n .collect()\n\n })\n\n })\n\n}\n", "file_path": "tss-esapi/src/abstraction/nv.rs", "rank": 16, "score": 336177.92742693465 }, { "content": "/// Create the [Public] structure for an RSA unrestricted signing key.\n\n///\n\n/// * `scheme` - RSA scheme to be used for signing\n\n/// * `key_bits` - Size in bits of the decryption key\n\n/// * `pub_exponent` - Public exponent of the RSA key\n\n/// * `rsa_public_key` - The public part of the RSA key that is going to be used as unique identifier.\n\npub fn create_unrestricted_signing_rsa_public_with_unique(\n\n scheme: RsaScheme,\n\n rsa_key_bits: RsaKeyBits,\n\n rsa_pub_exponent: RsaExponent,\n\n rsa_public_key: &PublicKeyRsa,\n\n) -> Result<Public> {\n\n let object_attributes = ObjectAttributesBuilder::new()\n\n .with_fixed_tpm(true)\n\n .with_fixed_parent(true)\n\n .with_sensitive_data_origin(true)\n\n .with_user_with_auth(true)\n\n .with_decrypt(false)\n\n .with_sign_encrypt(true)\n\n .with_restricted(false)\n\n .build()?;\n\n\n\n PublicBuilder::new()\n\n .with_public_algorithm(PublicAlgorithm::Rsa)\n\n .with_name_hashing_algorithm(HashingAlgorithm::Sha256)\n\n .with_object_attributes(object_attributes)\n", "file_path": "tss-esapi/src/utils/mod.rs", "rank": 17, "score": 330458.479191295 }, { "content": "/// Create the [Public] structure for an unrestricted encryption/decryption key.\n\n///\n\n/// * `symmetric` - Cipher to be used for decrypting children of the key\n\n/// * `key_bits` - Size in bits of the decryption key\n\n/// * `pub_exponent` - Public exponent of the RSA key\n\npub fn create_unrestricted_encryption_decryption_rsa_public(\n\n rsa_key_bits: RsaKeyBits,\n\n rsa_pub_exponent: RsaExponent,\n\n) -> Result<Public> {\n\n let object_attributes = ObjectAttributesBuilder::new()\n\n .with_fixed_tpm(true)\n\n .with_fixed_parent(true)\n\n .with_sensitive_data_origin(true)\n\n .with_user_with_auth(true)\n\n .with_decrypt(true)\n\n .with_sign_encrypt(true)\n\n .with_restricted(false)\n\n .build()?;\n\n\n\n PublicBuilder::new()\n\n .with_public_algorithm(PublicAlgorithm::Rsa)\n\n .with_name_hashing_algorithm(HashingAlgorithm::Sha256)\n\n .with_object_attributes(object_attributes)\n\n .with_rsa_parameters(\n\n PublicRsaParametersBuilder::new()\n", "file_path": "tss-esapi/src/utils/mod.rs", "rank": 18, "score": 330450.1030907588 }, { "content": "#[test]\n\nfn rsa_exponent_is_valid_test() {\n\n assert!(!RsaExponent::is_valid(1));\n\n assert!(RsaExponent::is_valid(17));\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/structures_tests/buffers_tests/public_rsa_exponent_tests.rs", "rank": 19, "score": 330089.9207058163 }, { "content": "#[test]\n\nfn rsa_exponent_value_test() {\n\n let expected_value = 97;\n\n\n\n let rsa_exponent = RsaExponent::create(expected_value).unwrap_or_else(|_| {\n\n panic!(\n\n \"Failed to create a RsaExponent from the value {}\",\n\n expected_value\n\n )\n\n });\n\n\n\n assert_eq!(expected_value, rsa_exponent.value());\n\n}\n", "file_path": "tss-esapi/tests/integration_tests/structures_tests/buffers_tests/public_rsa_exponent_tests.rs", "rank": 20, "score": 330089.9207058163 }, { "content": "#[test]\n\nfn create_ecc_key_decryption_scheme() {\n\n let mut ctx = 
create_ctx();\n\n let _ = ctx\n\n .create_key(\n\n KeyParams::Ecc {\n\n curve: EccCurve::NistP256,\n\n scheme: EccScheme::create(\n\n EccSchemeAlgorithm::EcDh,\n\n Some(HashingAlgorithm::Sha256),\n\n None,\n\n )\n\n .expect(\"Failed to create ecc scheme\"),\n\n },\n\n 16,\n\n )\n\n .unwrap_err();\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/transient_key_context_tests.rs", "rank": 21, "score": 329741.0902366916 }, { "content": "/// Retrieve the Endorsement Key public certificate from the TPM\n\npub fn retrieve_ek_pubcert(context: &mut Context, alg: AsymmetricAlgorithm) -> Result<Vec<u8>> {\n\n let nv_idx = match alg {\n\n AsymmetricAlgorithm::Rsa => RSA_2048_EK_CERTIFICATE_NV_INDEX,\n\n AsymmetricAlgorithm::Ecc => ECC_P256_EK_CERTIFICATE_NV_INDEX,\n\n AsymmetricAlgorithm::Null => {\n\n // TDOD: Figure out what to with Null.\n\n return Err(Error::local_error(WrapperErrorKind::UnsupportedParam));\n\n }\n\n };\n\n\n\n let nv_idx = NvIndexTpmHandle::new(nv_idx).unwrap();\n\n\n\n let nv_auth_handle = TpmHandle::NvIndex(nv_idx);\n\n let nv_auth_handle = context.execute_without_session(|ctx| {\n\n ctx.tr_from_tpm_public(nv_auth_handle)\n\n .map(|v| NvAuth::NvIndex(v.into()))\n\n })?;\n\n\n\n context.execute_with_nullauth_session(|ctx| nv::read_full(ctx, nv_auth_handle, nv_idx))\n\n}\n", "file_path": "tss-esapi/src/abstraction/ek.rs", "rank": 22, "score": 326541.5310083525 }, { "content": "#[test]\n\nfn test_signing_with_wrong_symmetric() {\n\n assert!(matches!(\n\n PublicEccParametersBuilder::new()\n\n .with_restricted(false)\n\n .with_is_decryption_key(false)\n\n .with_is_signing_key(true)\n\n .with_ecc_scheme(\n\n EccScheme::create(\n\n EccSchemeAlgorithm::EcDsa,\n\n Some(HashingAlgorithm::Sha256),\n\n None\n\n )\n\n .unwrap()\n\n )\n\n .with_curve(EccCurve::NistP256)\n\n .with_key_derivation_function_scheme(KeyDerivationFunctionScheme::Null)\n\n .with_symmetric(SymmetricDefinitionObject::AES_128_CFB)\n\n .build(),\n\n Err(Error::WrapperError(WrapperErrorKind::InconsistentParams))\n\n ));\n\n}\n", "file_path": "tss-esapi/tests/integration_tests/structures_tests/buffers_tests/public_ecc_parameters_tests.rs", "rank": 23, "score": 315743.06441292295 }, { "content": "#[test]\n\nfn test_signing_with_default_symmetric() {\n\n assert!(PublicEccParametersBuilder::new()\n\n .with_restricted(false)\n\n .with_is_decryption_key(false)\n\n .with_is_signing_key(true)\n\n .with_ecc_scheme(\n\n EccScheme::create(\n\n EccSchemeAlgorithm::EcDsa,\n\n Some(HashingAlgorithm::Sha256),\n\n None\n\n )\n\n .unwrap()\n\n )\n\n .with_curve(EccCurve::NistP256)\n\n .with_key_derivation_function_scheme(KeyDerivationFunctionScheme::Null)\n\n .with_symmetric(SymmetricDefinitionObject::Null)\n\n .build()\n\n .is_ok());\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/structures_tests/buffers_tests/public_ecc_parameters_tests.rs", "rank": 24, "score": 315743.06441292295 }, { "content": "#[test]\n\nfn test_restricted_decryption_with_null_symmetric() {\n\n assert!(matches!(\n\n PublicEccParametersBuilder::new()\n\n .with_restricted(true)\n\n .with_is_decryption_key(true)\n\n .with_ecc_scheme(EccScheme::Null)\n\n .with_curve(EccCurve::NistP256)\n\n .with_key_derivation_function_scheme(KeyDerivationFunctionScheme::Null)\n\n .with_symmetric(SymmetricDefinitionObject::Null)\n\n .build(),\n\n Err(Error::WrapperError(WrapperErrorKind::InconsistentParams))\n\n ));\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/structures_tests/buffers_tests/public_ecc_parameters_tests.rs", 
"rank": 25, "score": 311511.6220535639 }, { "content": "#[test]\n\nfn test_restricted_decryption_with_default_symmetric() {\n\n assert!(matches!(\n\n PublicEccParametersBuilder::new()\n\n .with_restricted(true)\n\n .with_is_decryption_key(true)\n\n .with_ecc_scheme(EccScheme::Null)\n\n .with_curve(EccCurve::NistP256)\n\n .with_key_derivation_function_scheme(KeyDerivationFunctionScheme::Null)\n\n .build(),\n\n Err(Error::WrapperError(WrapperErrorKind::ParamsMissing))\n\n ));\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/structures_tests/buffers_tests/public_ecc_parameters_tests.rs", "rank": 26, "score": 311511.6220535639 }, { "content": "#[test]\n\nfn test_restricted_decryption_with_wrong_symmetric() {\n\n assert!(PublicEccParametersBuilder::new()\n\n .with_restricted(true)\n\n .with_is_decryption_key(true)\n\n .with_ecc_scheme(EccScheme::Null)\n\n .with_curve(EccCurve::NistP256)\n\n .with_key_derivation_function_scheme(KeyDerivationFunctionScheme::Null)\n\n .with_symmetric(SymmetricDefinitionObject::AES_128_CFB)\n\n .build()\n\n .is_ok());\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/structures_tests/buffers_tests/public_ecc_parameters_tests.rs", "rank": 27, "score": 311511.6220535639 }, { "content": "// Copyright 2021 Contributors to the Parsec project.\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\nuse tss_esapi::interface_types::key_bits::RsaKeyBits;\n\nuse tss_esapi::structures::*;\n\nuse tss_esapi::Error;\n\nuse tss_esapi::WrapperErrorKind;\n\n\n\n#[test]\n", "file_path": "tss-esapi/tests/integration_tests/structures_tests/buffers_tests/public_rsa_parameters_tests.rs", "rank": 28, "score": 306209.90687528753 }, { "content": "fn create_ctx() -> TransientKeyContext {\n\n TransientKeyContextBuilder::new()\n\n .with_tcti(create_tcti())\n\n .build()\n\n .unwrap()\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/transient_key_context_tests.rs", "rank": 29, "score": 293812.8061636648 }, { "content": "#[test]\n\nfn wrong_key_sizes() {\n\n assert_eq!(\n\n TransientKeyContextBuilder::new()\n\n .with_tcti(create_tcti())\n\n .with_root_key_size(1023)\n\n .build()\n\n .unwrap_err(),\n\n Error::WrapperError(ErrorKind::InvalidParam)\n\n );\n\n assert_eq!(\n\n TransientKeyContextBuilder::new()\n\n .with_tcti(create_tcti())\n\n .with_root_key_size(1025)\n\n .build()\n\n .unwrap_err(),\n\n Error::WrapperError(ErrorKind::InvalidParam)\n\n );\n\n assert_eq!(\n\n TransientKeyContextBuilder::new()\n\n .with_tcti(create_tcti())\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/transient_key_context_tests.rs", "rank": 30, "score": 292963.7511780276 }, { "content": "#[test]\n\nfn create_ecc_key() {\n\n let mut ctx = create_ctx();\n\n let _ = ctx\n\n .create_key(\n\n KeyParams::Ecc {\n\n curve: EccCurve::NistP256,\n\n scheme: EccScheme::create(\n\n EccSchemeAlgorithm::EcDsa,\n\n Some(HashingAlgorithm::Sha256),\n\n None,\n\n )\n\n .expect(\"Failed to create ecc scheme\"),\n\n },\n\n 16,\n\n )\n\n .unwrap();\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/transient_key_context_tests.rs", "rank": 31, "score": 292943.2673040526 }, { "content": "#[test]\n\nfn test_max_sized_data() {\n\n let _ = AttestBuffer::try_from(vec![0xffu8; ATTEST_BUFFER_MAX_SIZE])\n\n .expect(\"Failed to parse buffer if maximum size as AttestBuffer\");\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/structures_tests/buffers_tests/attest_buffer_tests.rs", "rank": 32, "score": 290081.6050908671 }, { "content": 
"#[test]\n\nfn load_bad_sized_key() {\n\n let mut ctx = create_ctx();\n\n let key_params = KeyParams::Rsa {\n\n size: RsaKeyBits::Rsa1024,\n\n scheme: RsaScheme::create(RsaSchemeAlgorithm::RsaSsa, Some(HashingAlgorithm::Sha256))\n\n .expect(\"Failed to create RSA scheme\"),\n\n pub_exponent: RsaExponent::default(),\n\n };\n\n let _ = ctx\n\n .load_external_public_key(PublicKey::Rsa(vec![0xDE, 0xAD, 0xBE, 0xEF]), key_params)\n\n .unwrap_err();\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/transient_key_context_tests.rs", "rank": 33, "score": 289135.57582741283 }, { "content": "/// Create the [Public] structure for an ECC unrestricted signing key.\n\n///\n\n/// * `scheme` - Asymmetric scheme to be used for signing; *must* be an RSA signing scheme\n\n/// * `curve` - identifier of the precise curve to be used with the key\n\npub fn create_unrestricted_signing_ecc_public(\n\n scheme: EccScheme,\n\n curve: EccCurve,\n\n) -> Result<Public> {\n\n let object_attributes = ObjectAttributesBuilder::new()\n\n .with_fixed_tpm(true)\n\n .with_fixed_parent(true)\n\n .with_sensitive_data_origin(true)\n\n .with_user_with_auth(true)\n\n .with_decrypt(false)\n\n .with_sign_encrypt(true)\n\n .with_restricted(false)\n\n .build()?;\n\n\n\n PublicBuilder::new()\n\n .with_public_algorithm(PublicAlgorithm::Ecc)\n\n .with_name_hashing_algorithm(HashingAlgorithm::Sha256)\n\n .with_object_attributes(object_attributes)\n\n .with_ecc_parameters(\n\n PublicEccParametersBuilder::new_unrestricted_signing_key(scheme, curve).build()?,\n", "file_path": "tss-esapi/src/utils/mod.rs", "rank": 34, "score": 287387.49164226826 }, { "content": "fn create_validated_test_parameters(\n\n expected_attest_info: AttestInfo,\n\n expected_attestation_type: AttestationType,\n\n) -> (Attest, TPMS_ATTEST) {\n\n let expected_qualified_signer =\n\n Name::try_from(vec![0x0eu8; 64]).expect(\"Failed to create qualified name\");\n\n let expected_extra_data =\n\n Data::try_from(vec![0x0du8; 64]).expect(\"Failed to create extra data\");\n\n let expected_clock_info = ClockInfo::try_from(TPMS_CLOCK_INFO {\n\n clock: 1u64,\n\n resetCount: 2u32,\n\n restartCount: 3u32,\n\n safe: YesNo::Yes.into(),\n\n })\n\n .expect(\"Failed to create clock info\");\n\n let expected_firmware_version = 1u64;\n\n\n\n let expected_tpms_attest = TPMS_ATTEST {\n\n magic: TPM2_GENERATED_VALUE,\n\n type_: expected_attestation_type.into(),\n", "file_path": "tss-esapi/tests/integration_tests/structures_tests/attest_tests.rs", "rank": 35, "score": 285620.06687228073 }, { "content": "#[allow(dead_code)]\n\npub fn create_tcti() -> TctiNameConf {\n\n setup_logging();\n\n\n\n match env::var(\"TEST_TCTI\") {\n\n Err(_) => TctiNameConf::Mssim(Default::default()),\n\n Ok(tctistr) => TctiNameConf::from_str(&tctistr).expect(\"Error parsing TEST_TCTI\"),\n\n }\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/common/mod.rs", "rank": 36, "score": 282358.7911503079 }, { "content": "#[test]\n\nfn test_max_sized_data() {\n\n let _ = Nonce::try_from([0xff; 64].to_vec()).unwrap();\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/structures_tests/buffers_tests/nonce_tests.rs", "rank": 37, "score": 279522.2510437502 }, { "content": "#[test]\n\nfn wrong_auth_size() {\n\n assert_eq!(\n\n TransientKeyContextBuilder::new()\n\n .with_tcti(create_tcti())\n\n .with_root_key_auth_size(33)\n\n .build()\n\n .unwrap_err(),\n\n Error::WrapperError(ErrorKind::WrongParamSize)\n\n );\n\n}\n\n\n", "file_path": 
"tss-esapi/tests/integration_tests/abstraction_tests/transient_key_context_tests.rs", "rank": 38, "score": 278934.81848946615 }, { "content": "/// Create the Endorsement Key object from the specification templates\n\npub fn create_ek_object<IKC: IntoKeyCustomization>(\n\n context: &mut Context,\n\n alg: AsymmetricAlgorithm,\n\n key_customization: IKC,\n\n) -> Result<KeyHandle> {\n\n let ek_public = create_ek_public_from_default_template(alg, key_customization)?;\n\n\n\n Ok(context\n\n .execute_with_nullauth_session(|ctx| {\n\n ctx.create_primary(Hierarchy::Endorsement, &ek_public, None, None, None, None)\n\n })?\n\n .key_handle)\n\n}\n\n\n", "file_path": "tss-esapi/src/abstraction/ek.rs", "rank": 39, "score": 269590.1755380005 }, { "content": "/// Get the [`Public`] representing a default Endorsement Key\n\n///\n\n/// Source: TCG EK Credential Profile for TPM Family 2.0; Level 0 Version 2.3 Revision 2\n\n/// Appendix B.3.3 and B.3.4\n\npub fn create_ek_public_from_default_template<IKC: IntoKeyCustomization>(\n\n alg: AsymmetricAlgorithm,\n\n key_customization: IKC,\n\n) -> Result<Public> {\n\n let key_customization = key_customization.into_key_customization();\n\n\n\n let obj_attrs_builder = ObjectAttributesBuilder::new()\n\n .with_fixed_tpm(true)\n\n .with_st_clear(false)\n\n .with_fixed_parent(true)\n\n .with_sensitive_data_origin(true)\n\n .with_user_with_auth(false)\n\n .with_admin_with_policy(true)\n\n .with_no_da(false)\n\n .with_encrypted_duplication(false)\n\n .with_restricted(true)\n\n .with_decrypt(true)\n\n .with_sign_encrypt(false);\n\n\n\n let obj_attrs = if let Some(ref k) = key_customization {\n", "file_path": "tss-esapi/src/abstraction/ek.rs", "rank": 40, "score": 260358.60920081811 }, { "content": "// Copyright 2021 Contributors to the Parsec project.\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\nuse tss_esapi::interface_types::algorithm::*;\n\nuse tss_esapi::interface_types::ecc::EccCurve;\n\nuse tss_esapi::structures::*;\n\nuse tss_esapi::Error;\n\nuse tss_esapi::WrapperErrorKind;\n\n\n\n#[test]\n", "file_path": "tss-esapi/tests/integration_tests/structures_tests/buffers_tests/public_ecc_parameters_tests.rs", "rank": 41, "score": 259214.71689233903 }, { "content": "// Copyright 2021 Contributors to the Parsec project.\n\n// SPDX-License-Identifier: Apache-2.0\n\nuse tss_esapi::{structures::RsaExponent, Error, WrapperErrorKind};\n\n#[test]\n", "file_path": "tss-esapi/tests/integration_tests/structures_tests/buffers_tests/public_rsa_exponent_tests.rs", "rank": 42, "score": 259189.44353267722 }, { "content": "#[allow(dead_code)]\n\npub fn setup_logging() {\n\n LOG_INIT.call_once(|| {\n\n env_logger::init();\n\n });\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/common/mod.rs", "rank": 43, "score": 254048.01996077283 }, { "content": "#[test]\n\nfn test_none_set() {\n\n let attributes = AlgorithmAttributes::from(0x0);\n\n assert!(!attributes.asymmetric(), \"'asymmetric' is unexpectedly set\");\n\n assert!(!attributes.symmetric(), \"'symmetric' is unexpectedly set\");\n\n assert!(!attributes.hash(), \"'hash' is unexpectedly set\");\n\n assert!(!attributes.object(), \"'object' is unexpectedly set\");\n\n assert!(!attributes.signing(), \"'signing' is unexpectedly set\");\n\n assert!(!attributes.encrypting(), \"'encrypting' is unexpectedly set\");\n\n assert!(!attributes.method(), \"'method' is unexpectedly set\");\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/attributes_tests/algorithm_attributes_tests.rs", "rank": 44, "score": 252982.35415261865 
}, { "content": "#[test]\n\nfn test_object_set() {\n\n let attributes = AlgorithmAttributes::from(1u32.shl(3));\n\n assert!(!attributes.asymmetric(), \"'asymmetric' is unexpectedly set\");\n\n assert!(!attributes.symmetric(), \"'symmetric' is unexpectedly set\");\n\n assert!(!attributes.hash(), \"'hash' is unexpectedly set\");\n\n assert!(attributes.object(), \"'object' is unexpectedly not set\");\n\n assert!(!attributes.signing(), \"'signing' is unexpectedly set\");\n\n assert!(!attributes.encrypting(), \"'encrypting' is unexpectedly set\");\n\n assert!(!attributes.method(), \"'method' is unexpectedly set\");\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/attributes_tests/algorithm_attributes_tests.rs", "rank": 45, "score": 252916.47431639957 }, { "content": "#[test]\n\nfn test_default() {\n\n {\n\n let attest_buffer: AttestBuffer = Default::default();\n\n let expected: TPM2B_ATTEST = Default::default();\n\n let actual = TPM2B_ATTEST::try_from(attest_buffer).unwrap();\n\n assert_eq!(expected.size, actual.size);\n\n assert_eq!(\n\n expected.attestationData.len(),\n\n actual.attestationData.len(),\n\n \"Native and TSS attest buffer don't have the same length\"\n\n );\n\n assert!(\n\n expected\n\n .attestationData\n\n .iter()\n\n .zip(actual.attestationData.iter())\n\n .all(|(a, b)| a == b),\n\n \"Native and TSS attest buffer is not equal\"\n\n );\n\n }\n\n {\n\n let tss_attest_buffer: TPM2B_ATTEST = Default::default();\n\n let expected: AttestBuffer = Default::default();\n\n let actual = AttestBuffer::try_from(tss_attest_buffer).unwrap();\n\n assert_eq!(expected, actual);\n\n }\n\n}\n", "file_path": "tss-esapi/tests/integration_tests/structures_tests/buffers_tests/attest_buffer_tests.rs", "rank": 46, "score": 251247.5355112754 }, { "content": "#[test]\n\nfn bindgen_test_layout_TPM2B_PUBLIC_KEY_RSA() {\n\n assert_eq!(\n\n ::std::mem::size_of::<TPM2B_PUBLIC_KEY_RSA>(),\n\n 514usize,\n\n concat!(\"Size of: \", stringify!(TPM2B_PUBLIC_KEY_RSA))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<TPM2B_PUBLIC_KEY_RSA>(),\n\n 2usize,\n\n concat!(\"Alignment of \", stringify!(TPM2B_PUBLIC_KEY_RSA))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<TPM2B_PUBLIC_KEY_RSA>())).size as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(TPM2B_PUBLIC_KEY_RSA),\n\n \"::\",\n\n stringify!(size)\n\n )\n", "file_path": "tss-esapi-sys/src/bindings/x86_64-unknown-darwin.rs", "rank": 47, "score": 250924.23427887633 }, { "content": "#[test]\n\nfn test_create_ak_rsa_rsa() {\n\n let mut context = create_ctx_without_session();\n\n\n\n let ek_rsa = ek::create_ek_object(&mut context, AsymmetricAlgorithm::Rsa, None).unwrap();\n\n ak::create_ak(\n\n &mut context,\n\n ek_rsa,\n\n HashingAlgorithm::Sha256,\n\n SignatureSchemeAlgorithm::RsaPss,\n\n None,\n\n None,\n\n )\n\n .unwrap();\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/ak_tests.rs", "rank": 48, "score": 249996.49458160903 }, { "content": "#[test]\n\nfn test_to_large_data() {\n\n assert_eq!(\n\n AttestBuffer::try_from(vec![0xffu8; ATTEST_BUFFER_MAX_SIZE + 1])\n\n .expect_err(\"Converting a buffer that is to large did not produce an error\"),\n\n Error::WrapperError(WrapperErrorKind::WrongParamSize),\n\n \"Wrong kind of error when converting a buffer with size {} to AttestBuffer\",\n\n ATTEST_BUFFER_MAX_SIZE + 1\n\n );\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/structures_tests/buffers_tests/attest_buffer_tests.rs", "rank": 49, "score": 248410.98311786613 
}, { "content": "#[test]\n\nfn verify_wrong_key() {\n\n let mut ctx = create_ctx();\n\n let key_params1 = KeyParams::Rsa {\n\n size: RsaKeyBits::Rsa2048,\n\n scheme: RsaScheme::create(RsaSchemeAlgorithm::RsaSsa, Some(HashingAlgorithm::Sha256))\n\n .expect(\"Failed to create RSA scheme\"),\n\n pub_exponent: RsaExponent::default(),\n\n };\n\n let (key1, auth1) = ctx.create_key(key_params1, 16).unwrap();\n\n\n\n let key_params2 = KeyParams::Rsa {\n\n size: RsaKeyBits::Rsa2048,\n\n scheme: RsaScheme::create(RsaSchemeAlgorithm::RsaSsa, Some(HashingAlgorithm::Sha256))\n\n .expect(\"Failed to create RSA scheme\"),\n\n pub_exponent: RsaExponent::default(),\n\n };\n\n let (key2, _) = ctx.create_key(key_params2, 16).unwrap();\n\n\n\n // Sign with the first key\n\n let signature = ctx\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/transient_key_context_tests.rs", "rank": 50, "score": 246836.06917135065 }, { "content": "#[test]\n\nfn bindgen_test_layout_TPM2B_PUBLIC_KEY_RSA() {\n\n assert_eq!(\n\n ::std::mem::size_of::<TPM2B_PUBLIC_KEY_RSA>(),\n\n 514usize,\n\n concat!(\"Size of: \", stringify!(TPM2B_PUBLIC_KEY_RSA))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<TPM2B_PUBLIC_KEY_RSA>(),\n\n 2usize,\n\n concat!(\"Alignment of \", stringify!(TPM2B_PUBLIC_KEY_RSA))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<TPM2B_PUBLIC_KEY_RSA>())).size as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(TPM2B_PUBLIC_KEY_RSA),\n\n \"::\",\n\n stringify!(size)\n\n )\n", "file_path": "tss-esapi-sys/src/bindings/x86_64-unknown-linux-gnu.rs", "rank": 51, "score": 246785.5598940894 }, { "content": "#[test]\n\nfn bindgen_test_layout_TPM2B_PUBLIC_KEY_RSA() {\n\n assert_eq!(\n\n ::std::mem::size_of::<TPM2B_PUBLIC_KEY_RSA>(),\n\n 514usize,\n\n concat!(\"Size of: \", stringify!(TPM2B_PUBLIC_KEY_RSA))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<TPM2B_PUBLIC_KEY_RSA>(),\n\n 2usize,\n\n concat!(\"Alignment of \", stringify!(TPM2B_PUBLIC_KEY_RSA))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<TPM2B_PUBLIC_KEY_RSA>())).size as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(TPM2B_PUBLIC_KEY_RSA),\n\n \"::\",\n\n stringify!(size)\n\n )\n", "file_path": "tss-esapi-sys/src/bindings/aarch64-unknown-linux-gnu.rs", "rank": 52, "score": 246785.5598940894 }, { "content": "#[test]\n\nfn bindgen_test_layout_TPM2B_PUBLIC_KEY_RSA() {\n\n assert_eq!(\n\n ::std::mem::size_of::<TPM2B_PUBLIC_KEY_RSA>(),\n\n 514usize,\n\n concat!(\"Size of: \", stringify!(TPM2B_PUBLIC_KEY_RSA))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<TPM2B_PUBLIC_KEY_RSA>(),\n\n 2usize,\n\n concat!(\"Alignment of \", stringify!(TPM2B_PUBLIC_KEY_RSA))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<TPM2B_PUBLIC_KEY_RSA>())).size as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(TPM2B_PUBLIC_KEY_RSA),\n\n \"::\",\n\n stringify!(size)\n\n )\n", "file_path": "tss-esapi-sys/src/bindings/arm-unknown-linux-gnueabi.rs", "rank": 53, "score": 246785.5598940894 }, { "content": "#[allow(dead_code)]\n\npub fn get_pcr_policy_digest(\n\n context: &mut Context,\n\n mangle: bool,\n\n do_trial: bool,\n\n) -> (Digest, PolicySession) {\n\n let old_ses = context.sessions();\n\n context.clear_sessions();\n\n\n\n // Read the pcr values using pcr_read\n\n let pcr_selection_list = PcrSelectionListBuilder::new()\n\n .with_selection(HashingAlgorithm::Sha256, &[PcrSlot::Slot0, PcrSlot::Slot1])\n\n 
.build();\n\n\n\n let (_update_counter, pcr_selection_list_out, pcr_data) = context\n\n .pcr_read(&pcr_selection_list)\n\n .map(|(update_counter, read_pcr_selections, read_pcr_digests)| {\n\n (\n\n update_counter,\n\n read_pcr_selections.clone(),\n\n PcrData::create(&read_pcr_selections, &read_pcr_digests)\n", "file_path": "tss-esapi/tests/integration_tests/common/mod.rs", "rank": 54, "score": 246526.77872425213 }, { "content": "#[test]\n\nfn activate_credential_wrong_key() {\n\n // create a Transient key context, generate two keys and\n\n // obtain the Make Credential parameters for the first one\n\n let mut ctx = create_ctx();\n\n let params = KeyParams::Ecc {\n\n curve: EccCurve::NistP256,\n\n scheme: EccScheme::create(\n\n EccSchemeAlgorithm::EcDsa,\n\n Some(HashingAlgorithm::Sha256),\n\n None,\n\n )\n\n .expect(\"Failed to create ecc scheme\"),\n\n };\n\n // \"Good\" key (for which the credential will be generated)\n\n let (material, auth) = ctx.create_key(params, 16).unwrap();\n\n let obj = ObjectWrapper {\n\n material,\n\n auth,\n\n params,\n\n };\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/transient_key_context_tests.rs", "rank": 55, "score": 243876.93261338162 }, { "content": "fn validate_tss_ecdaa_scheme(\n\n left: &TPMS_SCHEME_ECDAA,\n\n right: &TPMS_SCHEME_ECDAA,\n\n union_field_name: &str,\n\n) {\n\n assert_eq!(\n\n left.hashAlg, right.hashAlg,\n\n \"{} in details, hashAlg did not match\",\n\n union_field_name\n\n );\n\n assert_eq!(\n\n left.count, right.count,\n\n \"{} in details, count did not match\",\n\n union_field_name\n\n );\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/structures_tests/tagged_tests/tagged_signature_scheme_tests.rs", "rank": 56, "score": 241024.75810597115 }, { "content": "fn validate_tss_hash_scheme(\n\n left: &TPMS_SCHEME_HASH,\n\n right: &TPMS_SCHEME_HASH,\n\n union_field_name: &str,\n\n) {\n\n assert_eq!(\n\n left.hashAlg, right.hashAlg,\n\n \"{} in details, hashAlg did not match\",\n\n union_field_name\n\n );\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/structures_tests/tagged_tests/tagged_signature_scheme_tests.rs", "rank": 57, "score": 241024.75810597115 }, { "content": "fn validate_tss_hmac_scheme(\n\n left: &TPMS_SCHEME_HMAC,\n\n right: &TPMS_SCHEME_HMAC,\n\n union_field_name: &str,\n\n) {\n\n assert_eq!(\n\n left.hashAlg, right.hashAlg,\n\n \"{} in details, hashAlg did not match\",\n\n union_field_name\n\n );\n\n}\n\n\n\nmacro_rules! 
test_valid_conversions_generic {\n\n (SignatureScheme::$item:ident, $union_field_name:ident, $tss_details_field_validator:expr, $native_scheme_field:ident, $native_scheme:expr) => {\n\n let tss_actual: TPMT_SIG_SCHEME = SignatureScheme::$item { $native_scheme_field: $native_scheme }\n\n .try_into()\n\n .expect(&format!(\"Failed to convert {} signature scheem into TSS type\", stringify!($item)));\n\n\n\n\n\n let tss_expected = TPMT_SIG_SCHEME {\n", "file_path": "tss-esapi/tests/integration_tests/structures_tests/tagged_tests/tagged_signature_scheme_tests.rs", "rank": 58, "score": 241024.75810597115 }, { "content": "#[test]\n\nfn test_default() {\n\n {\n\n let nonce: Nonce = Default::default();\n\n let expected: TPM2B_NONCE = Default::default();\n\n let actual = TPM2B_NONCE::try_from(nonce).unwrap();\n\n assert_eq!(expected.size, actual.size);\n\n assert_eq!(\n\n expected.buffer.len(),\n\n actual.buffer.len(),\n\n \"Buffers don't have the same length\"\n\n );\n\n assert!(\n\n expected\n\n .buffer\n\n .iter()\n\n .zip(actual.buffer.iter())\n\n .all(|(a, b)| a == b),\n\n \"Buffers are not equal\"\n\n );\n\n }\n\n {\n\n let tss_nonce: TPM2B_NONCE = Default::default();\n\n let expected: Nonce = Default::default();\n\n let actual = Nonce::try_from(tss_nonce).unwrap();\n\n assert_eq!(expected, actual);\n\n }\n\n}\n", "file_path": "tss-esapi/tests/integration_tests/structures_tests/buffers_tests/nonce_tests.rs", "rank": 59, "score": 240290.12002071104 }, { "content": "#[test]\n\nfn verify() {\n\n let pub_key = vec![\n\n 0x96, 0xDC, 0x72, 0x77, 0x49, 0x82, 0xFD, 0x2D, 0x06, 0x65, 0x8C, 0xE5, 0x3A, 0xCD, 0xED,\n\n 0xBD, 0x50, 0xD7, 0x6F, 0x3B, 0xE5, 0x6A, 0x76, 0xED, 0x3E, 0xD8, 0xF9, 0x93, 0x40, 0x55,\n\n 0x86, 0x6F, 0xBE, 0x76, 0x60, 0xD2, 0x03, 0x23, 0x59, 0x19, 0x8D, 0xFC, 0x51, 0x6A, 0x95,\n\n 0xC8, 0x5D, 0x5A, 0x89, 0x4D, 0xE5, 0xEA, 0x44, 0x78, 0x29, 0x62, 0xDB, 0x3F, 0xF0, 0xF7,\n\n 0x49, 0x15, 0xA5, 0xAE, 0x6D, 0x81, 0x8F, 0x06, 0x7B, 0x0B, 0x50, 0x7A, 0x2F, 0xEB, 0x00,\n\n 0xB6, 0x12, 0xF3, 0x10, 0xAF, 0x4D, 0x4A, 0xA9, 0xD9, 0x81, 0xBB, 0x1E, 0x2B, 0xDF, 0xB9,\n\n 0x33, 0x3D, 0xD6, 0xB7, 0x8D, 0x23, 0x7C, 0x7F, 0xE7, 0x12, 0x48, 0x4F, 0x26, 0x73, 0xAF,\n\n 0x63, 0x51, 0xA9, 0xDB, 0xA4, 0xAB, 0xB7, 0x27, 0x00, 0xD7, 0x1C, 0xFC, 0x2F, 0x61, 0x2A,\n\n 0xB9, 0x5B, 0x66, 0xA0, 0xE0, 0xD8, 0xF3, 0xD9,\n\n ];\n\n\n\n // \"Les carottes sont cuites.\" hashed with SHA256\n\n let digest = Digest::try_from(vec![\n\n 0x02, 0x2b, 0x26, 0xb1, 0xc3, 0x18, 0xdb, 0x73, 0x36, 0xef, 0x6f, 0x50, 0x9c, 0x35, 0xdd,\n\n 0xaa, 0xe1, 0x3d, 0x21, 0xdf, 0x83, 0x68, 0x0f, 0x48, 0xae, 0x5d, 0x8a, 0x5d, 0x37, 0x3c,\n\n 0xc1, 0x05,\n\n ])\n\n .unwrap();\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/transient_key_context_tests.rs", "rank": 60, "score": 239305.56642889668 }, { "content": "#[test]\n\nfn test_create_and_use_ak() {\n\n let mut context = create_ctx_without_session();\n\n\n\n let ek_rsa = ek::create_ek_object(&mut context, AsymmetricAlgorithm::Rsa, None).unwrap();\n\n let ak_auth = Auth::try_from(vec![0x1, 0x2, 0x42]).unwrap();\n\n let att_key = ak::create_ak(\n\n &mut context,\n\n ek_rsa,\n\n HashingAlgorithm::Sha256,\n\n SignatureSchemeAlgorithm::RsaPss,\n\n Some(&ak_auth),\n\n None,\n\n )\n\n .unwrap();\n\n\n\n let loaded_ak = ak::load_ak(\n\n &mut context,\n\n ek_rsa,\n\n Some(&ak_auth),\n\n att_key.out_private,\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/ak_tests.rs", "rank": 61, "score": 239140.4398593869 }, { "content": "#[test]\n\nfn full_test() {\n\n 
let mut ctx = create_ctx();\n\n for _ in 0..4 {\n\n let key_params = KeyParams::Rsa {\n\n size: RsaKeyBits::Rsa2048,\n\n scheme: RsaScheme::create(RsaSchemeAlgorithm::RsaSsa, Some(HashingAlgorithm::Sha256))\n\n .expect(\"Failed to create RSA scheme\"),\n\n pub_exponent: RsaExponent::default(),\n\n };\n\n let (key, auth) = ctx.create_key(key_params, 16).unwrap();\n\n let signature = ctx\n\n .sign(\n\n key.clone(),\n\n key_params,\n\n auth,\n\n Digest::try_from(HASH.to_vec()).unwrap(),\n\n )\n\n .unwrap();\n\n let pub_key = ctx\n\n .load_external_public_key(key.public().clone(), key_params)\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/transient_key_context_tests.rs", "rank": 62, "score": 238493.81158824312 }, { "content": "#[test]\n\nfn test_to_large_data() {\n\n // Removed test_start_sess::test_long_nonce_sess\n\n // from context tests.\n\n\n\n let _ = Nonce::try_from(\n\n [\n\n 231, 97, 201, 180, 0, 1, 185, 150, 85, 90, 174, 188, 105, 133, 188, 3, 206, 5, 222, 71,\n\n 185, 1, 209, 243, 36, 130, 250, 116, 17, 0, 24, 4, 25, 225, 250, 198, 245, 210, 140,\n\n 23, 139, 169, 15, 193, 4, 145, 52, 138, 149, 155, 238, 36, 74, 152, 179, 108, 200, 248,\n\n 250, 100, 115, 214, 166, 165, 1, 27, 51, 11, 11, 244, 218, 157, 3, 174, 171, 142, 45,\n\n 8, 9, 36, 202, 171, 165, 43, 208, 186, 232, 15, 241, 95, 81, 174, 189, 30, 213, 47, 86,\n\n 115, 239, 49, 214, 235, 151, 9, 189, 174, 144, 238, 200, 201, 241, 157, 43, 37, 6, 96,\n\n 94, 152, 159, 205, 54, 9, 181, 14, 35, 246, 49, 150, 163, 118, 242, 59, 54, 42, 221,\n\n 215, 248, 23, 18, 223,\n\n ]\n\n .to_vec(),\n\n )\n\n .unwrap_err();\n\n}\n", "file_path": "tss-esapi/tests/integration_tests/structures_tests/buffers_tests/nonce_tests.rs", "rank": 63, "score": 237224.36131056698 }, { "content": "#[test]\n\nfn encrypt_decrypt() {\n\n let mut ctx = create_ctx();\n\n let key_params = KeyParams::Rsa {\n\n size: RsaKeyBits::Rsa2048,\n\n scheme: RsaScheme::create(RsaSchemeAlgorithm::Oaep, Some(HashingAlgorithm::Sha256))\n\n .expect(\"Failed to create RSA scheme\"),\n\n pub_exponent: RsaExponent::default(),\n\n };\n\n let (dec_key, auth) = ctx.create_key(key_params, 16).unwrap();\n\n let enc_key = ctx\n\n .load_external_public_key(dec_key.public().clone(), key_params)\n\n .unwrap();\n\n let message = vec![0x1, 0x2, 0x3];\n\n\n\n let ciphertext = ctx\n\n .rsa_encrypt(\n\n enc_key,\n\n key_params,\n\n None,\n\n PublicKeyRsa::try_from(message.clone()).unwrap(),\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/transient_key_context_tests.rs", "rank": 64, "score": 235999.003586045 }, { "content": "#[test]\n\nfn activate_credential() {\n\n // create a Transient key context, generate a key and\n\n // obtain the Make Credential parameters\n\n let mut ctx = create_ctx();\n\n let params = KeyParams::Ecc {\n\n curve: EccCurve::NistP256,\n\n scheme: EccScheme::create(\n\n EccSchemeAlgorithm::EcDsa,\n\n Some(HashingAlgorithm::Sha256),\n\n None,\n\n )\n\n .expect(\"Failed to create ecc scheme\"),\n\n };\n\n let (material, auth) = ctx.create_key(params, 16).unwrap();\n\n let obj = ObjectWrapper {\n\n material,\n\n auth,\n\n params,\n\n };\n\n let make_cred_params = ctx.get_make_cred_params(obj.clone(), None).unwrap();\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/transient_key_context_tests.rs", "rank": 65, "score": 235999.003586045 }, { "content": "#[test]\n\nfn sign_with_no_auth() {\n\n let mut ctx = create_ctx();\n\n let key_params = KeyParams::Rsa {\n\n size: RsaKeyBits::Rsa2048,\n\n scheme: 
RsaScheme::create(RsaSchemeAlgorithm::RsaSsa, Some(HashingAlgorithm::Sha256))\n\n .expect(\"Failed to create RSA scheme\"),\n\n pub_exponent: RsaExponent::default(),\n\n };\n\n let (key, _) = ctx.create_key(key_params, 16).unwrap();\n\n ctx.sign(\n\n key,\n\n key_params,\n\n None,\n\n Digest::try_from(HASH.to_vec()).unwrap(),\n\n )\n\n .unwrap_err();\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/transient_key_context_tests.rs", "rank": 66, "score": 235999.003586045 }, { "content": "#[test]\n\nfn test_create_ak_rsa_ecc() {\n\n let mut context = create_ctx_without_session();\n\n\n\n let ek_rsa = ek::create_ek_object(&mut context, AsymmetricAlgorithm::Rsa, None).unwrap();\n\n if ak::create_ak(\n\n &mut context,\n\n ek_rsa,\n\n HashingAlgorithm::Sha256,\n\n SignatureSchemeAlgorithm::Sm2,\n\n None,\n\n None,\n\n )\n\n .is_ok()\n\n {\n\n // We can't use unwrap_err because that requires Debug on the T\n\n panic!(\"Should have errored\");\n\n }\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/ak_tests.rs", "rank": 67, "score": 235954.67891995172 }, { "content": "#[test]\n\nfn ctx_migration_test() {\n\n // Create two key contexts using `Context`, one for an RSA keypair,\n\n // one for just the public part of the key\n\n let mut basic_ctx = crate::common::create_ctx_with_session();\n\n let random_digest = basic_ctx.get_random(16).unwrap();\n\n let key_auth = Auth::try_from(random_digest.value().to_vec()).unwrap();\n\n let prim_key_handle = basic_ctx\n\n .create_primary(\n\n Hierarchy::Owner,\n\n &create_restricted_decryption_rsa_public(\n\n SymmetricDefinitionObject::AES_256_CFB,\n\n RsaKeyBits::Rsa2048,\n\n RsaExponent::create(0).unwrap(),\n\n )\n\n .unwrap(),\n\n Some(&key_auth),\n\n None,\n\n None,\n\n None,\n\n )\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/transient_key_context_tests.rs", "rank": 68, "score": 235373.83972944377 }, { "content": "#[test]\n\nfn full_ecc_test() {\n\n let mut ctx = create_ctx();\n\n let key_params = KeyParams::Ecc {\n\n curve: EccCurve::NistP256,\n\n scheme: EccScheme::create(\n\n EccSchemeAlgorithm::EcDsa,\n\n Some(HashingAlgorithm::Sha256),\n\n None,\n\n )\n\n .expect(\"Failed to create ecc scheme\"),\n\n };\n\n for _ in 0..4 {\n\n let (key, auth) = ctx.create_key(key_params, 16).unwrap();\n\n let signature = ctx\n\n .sign(\n\n key.clone(),\n\n key_params,\n\n auth,\n\n Digest::try_from(HASH.to_vec()).unwrap(),\n\n )\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/transient_key_context_tests.rs", "rank": 69, "score": 235373.83972944377 }, { "content": "#[allow(dead_code)]\n\npub fn name_conf() -> TctiNameConf {\n\n match env::var(\"TEST_TCTI\") {\n\n Err(_) => TctiNameConf::Mssim(Default::default()),\n\n Ok(tctistr) => TctiNameConf::from_str(&tctistr).expect(\"Error parsing TEST_TCTI\"),\n\n }\n\n}\n\n\n\nmod tcti_context_tests;\n\nmod tcti_info_tests;\n", "file_path": "tss-esapi/tests/integration_tests/tcti_ldr_tests/mod.rs", "rank": 70, "score": 235329.2952323417 }, { "content": "#[test]\n\nfn test_conversions() {\n\n test_valid_conversions!(SignatureScheme::RsaSsa, rsassa);\n\n test_valid_conversions!(SignatureScheme::RsaPss, rsapss);\n\n test_valid_conversions!(SignatureScheme::EcDsa, ecdsa);\n\n test_valid_conversions!(SignatureScheme::Sm2, sm2);\n\n test_valid_conversions!(SignatureScheme::EcSchnorr, ecschnorr);\n\n test_valid_conversions!(SignatureScheme::EcDaa, ecdaa);\n\n test_valid_conversions!(SignatureScheme::Hmac, hmac);\n\n 
test_valid_conversions!(SignatureScheme::Null);\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/structures_tests/tagged_tests/tagged_signature_scheme_tests.rs", "rank": 71, "score": 234277.5204558498 }, { "content": "fn write_nv_index(context: &mut Context, nv_index: NvIndexTpmHandle) -> NvIndexHandle {\n\n // Create owner nv public.\n\n let owner_nv_index_attributes = NvIndexAttributesBuilder::new()\n\n .with_owner_write(true)\n\n .with_owner_read(true)\n\n .with_pp_read(true)\n\n .with_owner_read(true)\n\n .build()\n\n .expect(\"Failed to create owner nv index attributes\");\n\n\n\n let owner_nv_public = NvPublicBuilder::new()\n\n .with_nv_index(nv_index)\n\n .with_index_name_algorithm(HashingAlgorithm::Sha256)\n\n .with_index_attributes(owner_nv_index_attributes)\n\n .with_data_area_size(1540)\n\n .build()\n\n .unwrap();\n\n\n\n let owner_nv_index_handle = context\n\n .nv_define_space(Provision::Owner, None, &owner_nv_public)\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/nv_tests.rs", "rank": 72, "score": 234191.20932536968 }, { "content": "#[test]\n\nfn verify_wrong_digest() {\n\n let mut ctx = create_ctx();\n\n let key_params = KeyParams::Rsa {\n\n size: RsaKeyBits::Rsa2048,\n\n scheme: RsaScheme::create(RsaSchemeAlgorithm::RsaSsa, Some(HashingAlgorithm::Sha256))\n\n .expect(\"Failed to create RSA scheme\"),\n\n pub_exponent: RsaExponent::default(),\n\n };\n\n let (key, auth) = ctx.create_key(key_params, 16).unwrap();\n\n\n\n let signature = ctx\n\n .sign(\n\n key.clone(),\n\n key_params,\n\n auth,\n\n Digest::try_from(HASH.to_vec()).unwrap(),\n\n )\n\n .unwrap();\n\n let pub_key = ctx\n\n .load_external_public_key(key.public().clone(), key_params)\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/transient_key_context_tests.rs", "rank": 73, "score": 232807.1364827892 }, { "content": "#[test]\n\nfn sign_with_bad_auth() {\n\n let mut ctx = create_ctx();\n\n let key_params = KeyParams::Rsa {\n\n size: RsaKeyBits::Rsa2048,\n\n scheme: RsaScheme::create(RsaSchemeAlgorithm::RsaSsa, Some(HashingAlgorithm::Sha256))\n\n .expect(\"Failed to create RSA scheme\"),\n\n pub_exponent: RsaExponent::default(),\n\n };\n\n let (key, key_auth) = ctx.create_key(key_params, 16).unwrap();\n\n let auth_value = key_auth.unwrap();\n\n let mut bad_auth_values = auth_value.value().to_vec();\n\n bad_auth_values[6..10].copy_from_slice(&[0xDE, 0xAD, 0xBE, 0xEF]);\n\n ctx.sign(\n\n key,\n\n key_params,\n\n Some(Auth::try_from(bad_auth_values).unwrap()),\n\n Digest::try_from(HASH.to_vec()).unwrap(),\n\n )\n\n .unwrap_err();\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/transient_key_context_tests.rs", "rank": 74, "score": 232807.1364827892 }, { "content": "#[test]\n\nfn load_with_invalid_params() {\n\n let pub_key = vec![\n\n 0x96, 0xDC, 0x72, 0x77, 0x49, 0x82, 0xFD, 0x2D, 0x06, 0x65, 0x8C, 0xE5, 0x3A, 0xCD, 0xED,\n\n 0xBD, 0x50, 0xD7, 0x6F, 0x3B, 0xE5, 0x6A, 0x76, 0xED, 0x3E, 0xD8, 0xF9, 0x93, 0x40, 0x55,\n\n 0x86, 0x6F, 0xBE, 0x76, 0x60, 0xD2, 0x03, 0x23, 0x59, 0x19, 0x8D, 0xFC, 0x51, 0x6A, 0x95,\n\n 0xC8, 0x5D, 0x5A, 0x89, 0x4D, 0xE5, 0xEA, 0x44, 0x78, 0x29, 0x62, 0xDB, 0x3F, 0xF0, 0xF7,\n\n 0x49, 0x15, 0xA5, 0xAE, 0x6D, 0x81, 0x8F, 0x06, 0x7B, 0x0B, 0x50, 0x7A, 0x2F, 0xEB, 0x00,\n\n 0xB6, 0x12, 0xF3, 0x10, 0xAF, 0x4D, 0x4A, 0xA9, 0xD9, 0x81, 0xBB, 0x1E, 0x2B, 0xDF, 0xB9,\n\n 0x33, 0x3D, 0xD6, 0xB7, 0x8D, 0x23, 0x7C, 0x7F, 0xE7, 0x12, 0x48, 0x4F, 0x26, 0x73, 0xAF,\n\n 0x63, 0x51, 0xA9, 0xDB, 0xA4, 0xAB, 0xB7, 0x27, 0x00, 
0xD7, 0x1C, 0xFC, 0x2F, 0x61, 0x2A,\n\n 0xB9, 0x5B, 0x66, 0xA0, 0xE0, 0xD8, 0xF3, 0xD9,\n\n ];\n\n\n\n let key_params = KeyParams::Ecc {\n\n curve: EccCurve::NistP256,\n\n scheme: EccScheme::create(\n\n EccSchemeAlgorithm::EcDsa,\n\n Some(HashingAlgorithm::Sha256),\n\n None,\n\n )\n\n .expect(\"Failed to create ecc scheme\"),\n\n };\n\n let mut ctx = create_ctx();\n\n let _ = ctx\n\n .load_external_public_key(PublicKey::Rsa(pub_key), key_params)\n\n .unwrap_err();\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/transient_key_context_tests.rs", "rank": 75, "score": 232807.1364827892 }, { "content": "#[test]\n\nfn test_invalid_any_sig() {\n\n let mut signature_scheme = SignatureScheme::Null;\n\n assert_eq!(\n\n Err(Error::WrapperError(WrapperErrorKind::InvalidParam)),\n\n signature_scheme.signing_scheme(),\n\n \"Trying to get signing scheme digest from a non signing SignatureScheme did not produce the expected error\",\n\n );\n\n\n\n assert_eq!(\n\n Err(Error::WrapperError(WrapperErrorKind::InvalidParam)),\n\n signature_scheme.set_signing_scheme(HashingAlgorithm::Sha256),\n\n \"Trying to set signing scheme digest on a non signing SignatureScheme did not produce the expected error\",\n\n )\n\n}\n", "file_path": "tss-esapi/tests/integration_tests/structures_tests/tagged_tests/tagged_signature_scheme_tests.rs", "rank": 76, "score": 231415.85033575215 }, { "content": "#[test]\n\nfn test_valid_any_sig() {\n\n let mut signature_scheme = SignatureScheme::RsaPss {\n\n hash_scheme: HashScheme::new(HashingAlgorithm::Sha256),\n\n };\n\n assert_eq!(\n\n HashingAlgorithm::Sha256,\n\n signature_scheme\n\n .signing_scheme()\n\n .expect(\"Failed to get signing scheme digest\"),\n\n \"The signing scheme method did not return the correct value\"\n\n );\n\n\n\n signature_scheme\n\n .set_signing_scheme(HashingAlgorithm::Sha384)\n\n .expect(\"Failed to change signing scheme digest\");\n\n\n\n assert_eq!(\n\n HashingAlgorithm::Sha384,\n\n signature_scheme\n\n .signing_scheme()\n\n .expect(\"Failed to get signing key digest\"),\n\n \"The signing scheme method did not return the correct value after change.\"\n\n );\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/structures_tests/tagged_tests/tagged_signature_scheme_tests.rs", "rank": 77, "score": 231415.85033575215 }, { "content": "#[test]\n\nfn activate_credential_wrong_data() {\n\n let mut ctx = create_ctx();\n\n let params = KeyParams::Ecc {\n\n curve: EccCurve::NistP256,\n\n scheme: EccScheme::create(\n\n EccSchemeAlgorithm::EcDsa,\n\n Some(HashingAlgorithm::Sha256),\n\n None,\n\n )\n\n .expect(\"Failed to create ecc scheme\"),\n\n };\n\n // \"Good\" key (for which the credential will be generated)\n\n let (material, auth) = ctx.create_key(params, 16).unwrap();\n\n let obj = ObjectWrapper {\n\n material,\n\n auth,\n\n params,\n\n };\n\n\n\n // No data (essentially wrong size)\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/transient_key_context_tests.rs", "rank": 78, "score": 229723.7478353443 }, { "content": "#[test]\n\nfn two_signatures_different_digest() {\n\n let mut ctx = create_ctx();\n\n let key_params1 = KeyParams::Rsa {\n\n size: RsaKeyBits::Rsa2048,\n\n scheme: RsaScheme::create(RsaSchemeAlgorithm::RsaSsa, Some(HashingAlgorithm::Sha256))\n\n .expect(\"Failed to create RSA scheme\"),\n\n pub_exponent: RsaExponent::default(),\n\n };\n\n let (key1, auth1) = ctx.create_key(key_params1, 16).unwrap();\n\n let key_params2 = KeyParams::Rsa {\n\n size: RsaKeyBits::Rsa2048,\n\n scheme: 
RsaScheme::create(RsaSchemeAlgorithm::RsaSsa, Some(HashingAlgorithm::Sha256))\n\n .expect(\"Failed to create RSA scheme\"),\n\n pub_exponent: RsaExponent::default(),\n\n };\n\n let (key2, auth2) = ctx.create_key(key_params2, 16).unwrap();\n\n let signature1 = ctx\n\n .sign(\n\n key1,\n\n key_params1,\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/transient_key_context_tests.rs", "rank": 79, "score": 229723.7478353443 }, { "content": "#[test]\n\nfn make_cred_params_name() {\n\n // create a Transient key context, generate a key and\n\n // obtain the Make Credential parameters\n\n let mut ctx = create_ctx();\n\n let params = KeyParams::Ecc {\n\n curve: EccCurve::NistP256,\n\n scheme: EccScheme::create(\n\n EccSchemeAlgorithm::EcDsa,\n\n Some(HashingAlgorithm::Sha256),\n\n None,\n\n )\n\n .expect(\"Failed to create ecc scheme\"),\n\n };\n\n let (material, auth) = ctx.create_key(params, 16).unwrap();\n\n let obj = ObjectWrapper {\n\n material,\n\n auth,\n\n params,\n\n };\n\n let make_cred_params = ctx.get_make_cred_params(obj, None).unwrap();\n", "file_path": "tss-esapi/tests/integration_tests/abstraction_tests/transient_key_context_tests.rs", "rank": 80, "score": 229723.7478353443 }, { "content": "/// This creates an Attestation Key in the Endorsement hierarchy\n\npub fn create_ak<IKC: IntoKeyCustomization>(\n\n context: &mut Context,\n\n parent: KeyHandle,\n\n hash_alg: HashingAlgorithm,\n\n sign_alg: SignatureSchemeAlgorithm,\n\n ak_auth_value: Option<&Auth>,\n\n key_customization: IKC,\n\n) -> Result<CreateKeyResult> {\n\n let key_alg = AsymmetricAlgorithm::try_from(sign_alg).map_err(|e| {\n\n // sign_alg is either HMAC or Null.\n\n error!(\"Could not retrieve asymmetric algorithm for provided signature scheme\");\n\n e\n\n })?;\n\n\n\n let ak_pub = create_ak_public(key_alg, hash_alg, sign_alg, key_customization)?;\n\n\n\n let policy_auth_session = context\n\n .start_auth_session(\n\n None,\n\n None,\n", "file_path": "tss-esapi/src/abstraction/ak.rs", "rank": 81, "score": 225561.65365490608 }, { "content": "#[test]\n\nfn test_conversion_of_data_with_invalid_size_of_select() {\n\n let mut tpml_pcr_selection: TPML_PCR_SELECTION = PcrSelectionListBuilder::new()\n\n .with_selection(\n\n HashingAlgorithm::Sha256,\n\n &[PcrSlot::Slot0, PcrSlot::Slot8, PcrSlot::Slot16],\n\n )\n\n .build()\n\n .into();\n\n\n\n // 1,2,3,4 are theonly valid values for sizeofSelect.\n\n tpml_pcr_selection.pcrSelections[0].sizeofSelect = 20;\n\n\n\n // The try_from should then fail.\n\n PcrSelectionList::try_from(tpml_pcr_selection).unwrap_err();\n\n}\n\n\n", "file_path": "tss-esapi/tests/integration_tests/structures_tests/lists_tests/pcr_selection_list_builder_tests.rs", "rank": 82, "score": 220946.03812155352 }, { "content": "// Copyright 2021 Contributors to the Parsec project.\n\n// SPDX-License-Identifier: Apache-2.0\n\nmod attest_buffer_tests;\n\nmod auth_tests;\n\nmod data_tests;\n\nmod digest_tests;\n\nmod max_buffer_tests;\n\nmod nonce_tests;\n\nmod public_ecc_parameters_tests;\n\nmod public_rsa_exponent_tests;\n\nmod public_rsa_parameters_tests;\n", "file_path": "tss-esapi/tests/integration_tests/structures_tests/buffers_tests/mod.rs", "rank": 83, "score": 220079.28266702007 }, { "content": " restricted: bool,\n\n}\n\n\n\nimpl PublicRsaParametersBuilder {\n\n /// Creates a new [PublicRsaParametersBuilder]\n\n pub const fn new() -> Self {\n\n PublicRsaParametersBuilder {\n\n symmetric: None,\n\n rsa_scheme: None,\n\n key_bits: None,\n\n exponent: None,\n\n is_signing_key: false,\n\n 
is_decryption_key: false,\n\n restricted: false,\n\n }\n\n }\n\n\n\n /// Creates a [PublicRsaParametersBuilder] that is setup\n\n /// to build a restructed decryption key.\n\n pub const fn new_restricted_decryption_key(\n", "file_path": "tss-esapi/src/structures/buffers/public/rsa.rs", "rank": 84, "score": 212320.6216786956 }, { "content": " }\n\n}\n\n\n\n/// Structure holding the RSA specific parameters.\n\n///\n\n/// # Details\n\n/// This corresponds to TPMS_RSA_PARMS\n\n///\n\n/// These rsa parameters are specific to the [`crate::structures::Public`] type.\n\n#[derive(Clone, Debug, Copy)]\n\npub struct PublicRsaParameters {\n\n symmetric_definition_object: SymmetricDefinitionObject,\n\n rsa_scheme: RsaScheme,\n\n key_bits: RsaKeyBits,\n\n exponent: RsaExponent,\n\n}\n\n\n\nimpl PublicRsaParameters {\n\n /// Function for creating new [PublicRsaParameters] structure\n\n pub const fn new(\n", "file_path": "tss-esapi/src/structures/buffers/public/rsa.rs", "rank": 85, "score": 212318.52496971507 }, { "content": " exponent: RsaExponent,\n\n ) -> Self {\n\n PublicRsaParametersBuilder {\n\n symmetric: None,\n\n rsa_scheme: Some(rsa_scheme),\n\n key_bits: Some(key_bits),\n\n exponent: Some(exponent),\n\n is_signing_key: true,\n\n is_decryption_key: false,\n\n restricted: false,\n\n }\n\n }\n\n\n\n /// Adds a [SymmetricDefinitionObject] to the [PublicRsaParametersBuilder].\n\n pub const fn with_symmetric(mut self, symmetric: SymmetricDefinitionObject) -> Self {\n\n self.symmetric = Some(symmetric);\n\n self\n\n }\n\n\n\n /// Adds a [RsaScheme] to the [PublicRsaParametersBuilder].\n", "file_path": "tss-esapi/src/structures/buffers/public/rsa.rs", "rank": 86, "score": 212317.7298347278 }, { "content": " pub const fn with_scheme(mut self, rsa_scheme: RsaScheme) -> Self {\n\n self.rsa_scheme = Some(rsa_scheme);\n\n self\n\n }\n\n\n\n /// Adds [RsaKeyBits] to the [PublicRsaParametersBuilder].\n\n pub const fn with_key_bits(mut self, key_bits: RsaKeyBits) -> Self {\n\n self.key_bits = Some(key_bits);\n\n self\n\n }\n\n\n\n /// Adds [RsaExponent] to the [PublicRsaParametersBuilder].\n\n pub const fn with_exponent(mut self, exponent: RsaExponent) -> Self {\n\n self.exponent = Some(exponent);\n\n self\n\n }\n\n\n\n /// Adds a flag that indicates if the key is going to be used\n\n /// for signing to the [PublicRsaParametersBuilder].\n\n ///\n", "file_path": "tss-esapi/src/structures/buffers/public/rsa.rs", "rank": 87, "score": 212316.67345120132 }, { "content": " symmetric: SymmetricDefinitionObject,\n\n key_bits: RsaKeyBits,\n\n exponent: RsaExponent,\n\n ) -> Self {\n\n PublicRsaParametersBuilder {\n\n symmetric: Some(symmetric),\n\n rsa_scheme: Some(RsaScheme::Null),\n\n key_bits: Some(key_bits),\n\n exponent: Some(exponent),\n\n is_signing_key: false,\n\n is_decryption_key: true,\n\n restricted: true,\n\n }\n\n }\n\n\n\n /// Creates a [PublicRsaParametersBuilder] that is setup\n\n /// to build an unrestricted signing key.\n\n pub const fn new_unrestricted_signing_key(\n\n rsa_scheme: RsaScheme,\n\n key_bits: RsaKeyBits,\n", "file_path": "tss-esapi/src/structures/buffers/public/rsa.rs", "rank": 88, "score": 212314.99116585386 }, { "content": " /// the [PublicRsaParametersBuilder].\n\n ///\n\n /// # Arguments\n\n /// * `set` - `true` indicates that it is going to be a restricted key.\n\n /// `false` indicates that it is going to be a non restricted key.\n\n pub const fn with_restricted(mut self, set: bool) -> Self {\n\n self.restricted = set;\n\n self\n\n }\n\n\n\n /// Build an object given the 
previously provided parameters.\n\n ///\n\n /// The only mandatory parameter is the asymmetric scheme.\n\n ///\n\n /// # Errors\n\n /// * if no asymmetric scheme is set, `ParamsMissing` wrapper error is returned.\n\n /// * if the `for_signing`, `for_decryption` and `restricted` parameters are\n\n /// inconsistent with the rest of the parameters, `InconsistentParams` wrapper\n\n /// error is returned\n\n pub fn build(self) -> Result<PublicRsaParameters> {\n", "file_path": "tss-esapi/src/structures/buffers/public/rsa.rs", "rank": 89, "score": 212313.8837461389 }, { "content": "// Copyright 2021 Contributors to the Parsec project.\n\n// SPDX-License-Identifier: Apache-2.0\n\nuse crate::{\n\n interface_types::{algorithm::RsaSchemeAlgorithm, key_bits::RsaKeyBits},\n\n structures::{RsaScheme, SymmetricDefinitionObject},\n\n tss2_esys::{TPMS_RSA_PARMS, UINT32},\n\n Error, Result, WrapperErrorKind,\n\n};\n\nuse log::error;\n\nuse std::convert::{TryFrom, TryInto};\n\n\n\n/// Builder for `TPMS_RSA_PARMS` values.\n\n#[derive(Copy, Clone, Default, Debug)]\n\npub struct PublicRsaParametersBuilder {\n\n symmetric: Option<SymmetricDefinitionObject>,\n\n rsa_scheme: Option<RsaScheme>,\n\n key_bits: Option<RsaKeyBits>,\n\n exponent: Option<RsaExponent>,\n\n is_signing_key: bool,\n\n is_decryption_key: bool,\n", "file_path": "tss-esapi/src/structures/buffers/public/rsa.rs", "rank": 90, "score": 212311.30242666102 }, { "content": " self.rsa_scheme\n\n }\n\n\n\n /// Returns the [RsaKeyBits]\n\n pub const fn key_bits(&self) -> RsaKeyBits {\n\n self.key_bits\n\n }\n\n\n\n /// Returns the exponent in the form of a [RsaExponent]\n\n pub const fn exponent(&self) -> RsaExponent {\n\n self.exponent\n\n }\n\n}\n\n\n\nimpl From<PublicRsaParameters> for TPMS_RSA_PARMS {\n\n fn from(public_rsa_parameters: PublicRsaParameters) -> Self {\n\n TPMS_RSA_PARMS {\n\n symmetric: public_rsa_parameters.symmetric_definition_object.into(),\n\n scheme: public_rsa_parameters.rsa_scheme.into(),\n\n keyBits: public_rsa_parameters.key_bits.into(),\n", "file_path": "tss-esapi/src/structures/buffers/public/rsa.rs", "rank": 91, "score": 212310.30572540348 }, { "content": " symmetric_definition_object: SymmetricDefinitionObject,\n\n rsa_scheme: RsaScheme,\n\n key_bits: RsaKeyBits,\n\n exponent: RsaExponent,\n\n ) -> Self {\n\n PublicRsaParameters {\n\n symmetric_definition_object,\n\n rsa_scheme,\n\n key_bits,\n\n exponent,\n\n }\n\n }\n\n\n\n /// Returns the [SymmetricDefinitionObject].\n\n pub const fn symmetric_definition_object(&self) -> SymmetricDefinitionObject {\n\n self.symmetric_definition_object\n\n }\n\n\n\n /// Returns the [RsaScheme]\n\n pub const fn rsa_scheme(&self) -> RsaScheme {\n", "file_path": "tss-esapi/src/structures/buffers/public/rsa.rs", "rank": 92, "score": 212309.26240246918 }, { "content": " exponent: public_rsa_parameters.exponent.into(),\n\n }\n\n }\n\n}\n\n\n\nimpl TryFrom<TPMS_RSA_PARMS> for PublicRsaParameters {\n\n type Error = Error;\n\n\n\n fn try_from(tpms_rsa_parms: TPMS_RSA_PARMS) -> Result<Self> {\n\n Ok(PublicRsaParameters {\n\n symmetric_definition_object: tpms_rsa_parms.symmetric.try_into()?,\n\n rsa_scheme: tpms_rsa_parms.scheme.try_into()?,\n\n key_bits: tpms_rsa_parms.keyBits.try_into()?,\n\n exponent: tpms_rsa_parms.exponent.try_into()?,\n\n })\n\n }\n\n}\n", "file_path": "tss-esapi/src/structures/buffers/public/rsa.rs", "rank": 93, "score": 212306.60928798217 }, { "content": "fn create_ak_public<IKC: IntoKeyCustomization>(\n\n key_alg: AsymmetricAlgorithm,\n\n hash_alg: HashingAlgorithm,\n\n 
sign_alg: SignatureSchemeAlgorithm,\n\n key_customization: IKC,\n\n) -> Result<Public> {\n\n let key_customization = key_customization.into_key_customization();\n\n\n\n let obj_attrs_builder = ObjectAttributesBuilder::new()\n\n .with_restricted(true)\n\n .with_user_with_auth(true)\n\n .with_sign_encrypt(true)\n\n .with_decrypt(false)\n\n .with_fixed_tpm(true)\n\n .with_fixed_parent(true)\n\n .with_sensitive_data_origin(true);\n\n\n\n let obj_attrs = if let Some(ref k) = key_customization {\n\n k.attributes(obj_attrs_builder)\n\n } else {\n", "file_path": "tss-esapi/src/abstraction/ak.rs", "rank": 94, "score": 212304.45396298132 }, { "content": " /// # Arguments\n\n /// * `set` - `true` inidcates that the key is going to be used for signing operations.\n\n /// `false` indicates that the key is not going to be used for signing operations.\n\n pub const fn with_is_signing_key(mut self, set: bool) -> Self {\n\n self.is_signing_key = set;\n\n self\n\n }\n\n\n\n /// Adds a flag that indicates if the key is going to be used for\n\n /// decryption to the [PublicRsaParametersBuilder].\n\n ///\n\n /// # Arguments\n\n /// * `set` - `true` indicates that the key is going to be used for decryption operations.\n\n /// `false` indicates that the key is not going to be used for decryption operations.\n\n pub const fn with_is_decryption_key(mut self, set: bool) -> Self {\n\n self.is_decryption_key = set;\n\n self\n\n }\n\n\n\n /// Adds a flag that inidcates if the key is going to be restrictied to\n", "file_path": "tss-esapi/src/structures/buffers/public/rsa.rs", "rank": 95, "score": 212302.2947328136 }, { "content": " } else if let Some(symmetric) = self.symmetric {\n\n if !symmetric.is_null() {\n\n error!(\"Found symmetric parameter, expected it to be Null nor not set at all because 'restricted' and 'is_decrypt_key' are set to false\");\n\n return Err(Error::local_error(WrapperErrorKind::InconsistentParams));\n\n }\n\n }\n\n\n\n // TODO: Figure out if it actually should be allowed to not provide\n\n // these parameters.\n\n let symmetric_definition_object = self.symmetric.unwrap_or(SymmetricDefinitionObject::Null);\n\n let exponent = self.exponent.unwrap_or_default();\n\n\n\n if self.restricted {\n\n if self.is_signing_key\n\n && rsa_scheme.algorithm() != RsaSchemeAlgorithm::RsaPss\n\n && rsa_scheme.algorithm() != RsaSchemeAlgorithm::RsaSsa\n\n {\n\n error!(\"Invalid rsa scheme algorithm provided with 'restricted' and 'is_signing_key' set to true\");\n\n return Err(Error::local_error(WrapperErrorKind::InconsistentParams));\n\n }\n", "file_path": "tss-esapi/src/structures/buffers/public/rsa.rs", "rank": 96, "score": 212301.1546752676 }, { "content": " }\n\n\n\n if self.is_decryption_key\n\n && rsa_scheme.algorithm() != RsaSchemeAlgorithm::RsaEs\n\n && rsa_scheme.algorithm() != RsaSchemeAlgorithm::Oaep\n\n && rsa_scheme.algorithm() != RsaSchemeAlgorithm::Null\n\n {\n\n error!(\"Invalid rsa scheme algorithm provided with 'restricted' set to false and 'is_decryption_key' set to true\");\n\n return Err(Error::local_error(WrapperErrorKind::InconsistentParams));\n\n }\n\n }\n\n\n\n Ok(PublicRsaParameters {\n\n symmetric_definition_object,\n\n rsa_scheme,\n\n key_bits,\n\n exponent,\n\n })\n\n }\n\n}\n", "file_path": "tss-esapi/src/structures/buffers/public/rsa.rs", "rank": 97, "score": 212300.4411049849 }, { "content": "\n\n/// Structure used to hold the value of a RSA exponent\n\n#[derive(Default, Clone, Debug, Copy, PartialEq, Eq)]\n\npub struct RsaExponent {\n\n value: u32,\n\n}\n\n\n\nimpl RsaExponent {\n\n 
/// Empty exponent (internal value is 0), which is treated by TPMs\n\n /// as a shorthand for the default value (2^16 + 1).\n\n pub const ZERO_EXPONENT: Self = RsaExponent { value: 0 };\n\n\n\n /// Function for creating a new RsaExponent\n\n ///\n\n /// # Errors\n\n /// Will return an error if the value passed into the function\n\n /// is not a valid RSA exponent.\n\n pub fn create(value: u32) -> Result<Self> {\n\n if !RsaExponent::is_valid(value) {\n\n error!(\n", "file_path": "tss-esapi/src/structures/buffers/public/rsa.rs", "rank": 98, "score": 212296.89221522573 }, { "content": " let rsa_scheme = self.rsa_scheme.ok_or_else(|| {\n\n error!(\"Scheme parameter is required and has not been set in the PublicRsaParametersBuilder\");\n\n Error::local_error(WrapperErrorKind::ParamsMissing)\n\n })?;\n\n\n\n let key_bits = self.key_bits.ok_or_else(|| {\n\n error!(\"Key bits parameter is required and has not been set in the PublicRsaParametersBuilder\");\n\n Error::local_error(WrapperErrorKind::ParamsMissing)\n\n })?;\n\n\n\n if self.restricted && self.is_decryption_key {\n\n if let Some(symmetric) = self.symmetric {\n\n if symmetric.is_null() {\n\n error!(\"Found symmetric parameter but it was Null but 'restricted' and 'is_decrypt_key' are set to true\");\n\n return Err(Error::local_error(WrapperErrorKind::InconsistentParams));\n\n }\n\n } else {\n\n error!(\"Found symmetric parameter, expected it to be Null nor not set at all because 'restricted' and 'is_decrypt_key' are set to false\");\n\n return Err(Error::local_error(WrapperErrorKind::ParamsMissing));\n\n }\n", "file_path": "tss-esapi/src/structures/buffers/public/rsa.rs", "rank": 99, "score": 212296.71789081828 } ]
Rust
src/tool.rs
jacrgrady/farnans-farming-game
a9c168b13e3cfe5fa07c97aca02ee686206c4ba6
use crate::crop::CropType;
use crate::genes;
use crate::population::Population;
use crate::InventoryItemTrait;
use sdl2::rect::Rect;
use sdl2::render::Texture;

/// The kinds of tools the player can hold in the tool slots.
pub enum ToolType {
    Hand,
    Hoe,
    WateringCan,
}

pub struct Tool<'a> {
    src: Rect,
    texture: Texture<'a>,
    current_type: ToolType,
}

impl<'a> Tool<'a> {
    pub fn new(src: Rect, texture: Texture<'a>, t: ToolType) -> Tool<'a> {
        Tool {
            src,
            texture,
            current_type: t,
        }
    }
}

impl InventoryItemTrait for Tool<'_> {
    fn get_value(&self) -> i32 {
        1
    }

    fn texture(&self) -> &Texture {
        &self.texture
    }

    fn src(&self) -> Rect {
        self.src
    }

    fn to_save_string(&self) -> Option<String> {
        None
    }

    fn inventory_input(
        &self,
        square: (i32, i32),
        pop: &mut Population,
    ) -> Option<(Option<CropType>, Option<genes::Genes>, Option<genes::Genes>)> {
        let (x, y) = square;
        match self.current_type {
            // Hand: print the crop's genes, clear a rotten crop, or harvest
            // a fully grown (stage 3) crop and hand its type/genes back.
            ToolType::Hand => {
                if let Some(_i) = pop
                    .get_crop_with_index(x as u32, y as u32)
                    .get_gene(crate::genes::GeneType::GrowthRate)
                {
                    println!(
                        "{}",
                        pop.get_crop_with_index(x as u32, y as u32)
                            .get_all_genes()
                            .as_ref()
                            .unwrap()
                    );
                    if let Some(p) = pop
                        .get_crop_with_index(x as u32, y as u32)
                        .get_child()
                        .as_ref()
                    {
                        println!("{}", p);
                    } else {
                        println!("None");
                    }
                }
                if pop.get_crop_with_index(x as u32, y as u32).rotten() {
                    let mut _c = pop.get_crop_with_index_mut(x as u32, y as u32);
                    _c.set_stage(0);
                    _c.set_rotten(false);
                    _c.set_crop_type_enum(CropType::None);
                    _c.set_water(false);
                    _c.set_genes(None);
                    _c.set_child(None);
                    return None;
                }
                if pop.get_crop_with_index(x as u32, y as u32).get_stage() == 3 {
                    let _g = pop
                        .get_crop_with_index(x as u32, y as u32)
                        .get_all_genes()
                        .as_ref()
                        .unwrap()
                        .clone();
                    let mut _c = pop.get_crop_with_index_mut(x as u32, y as u32);
                    let return_crop_type = _c.get_crop_type_enum();
                    _c.set_crop_type_enum(CropType::None);
                    _c.set_stage(0);
                    _c.set_water(false);
                    _c.set_genes(None);
                    _c.set_child(None);
                    // Note: the child genes were cleared just above, so this
                    // clone is always None at this point.
                    let child = _c.get_child().clone();
                    return Some((Some(return_crop_type), Some(_g), child));
                }
            }
            // Hoe: till an empty, untilled tile.
            ToolType::Hoe => {
                if pop
                    .get_crop_with_index(x as u32, y as u32)
                    .get_crop_type()
                    .to_owned()
                    == "None"
                    && !pop.get_tile_with_index(x as u32, y as u32).tilled()
                {
                    let mut _tile = pop.get_tile_with_index_mut(x as u32, y as u32);
                    _tile.set_tilled(true);
                }
            }
            // Watering can: water the crop (if dry) and, if tilled, the tile beneath it.
            ToolType::WateringCan => {
                if !pop.get_crop_with_index(x as u32, y as u32).get_watered() {
                    pop.get_crop_with_index_mut(x as u32, y as u32)
                        .set_water(true);
                }
                if pop.get_tile_with_index(x as u32, y as u32).tilled() {
                    pop.get_tile_with_index_mut(x as u32, y as u32)
                        .set_water(true);
                }
            }
        }
        return None;
    }
}
function_block-function_prefixed
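The `Tool` above is only ever driven through the `InventoryItemTrait` object stored in an inventory slot. The sketch below is illustrative and not part of the recorded file: it mirrors the `Inventory::new` snippet from src/inventory.rs included in this record's context items, assumes the SDL2 `TextureCreator` and the game's `Population` are set up elsewhere in the game loop, and takes the texture path and 32x32 source rectangle from that snippet; the helper name itself is hypothetical.

// Hypothetical helper showing how a Tool enters an inventory slot; mirrors
// the Inventory::new snippet from src/inventory.rs in the context items below.
use sdl2::image::LoadTexture;
use sdl2::rect::Rect;
use sdl2::render::TextureCreator;
use sdl2::video::WindowContext;

fn register_hand_tool<'a>(
    texture_creator: &'a TextureCreator<WindowContext>,
) -> Box<dyn crate::InventoryItemTrait + 'a> {
    // The item-menu texture and 0..32 source rect are the ones Inventory::new
    // uses for slot 0; ToolType::Hand selects the inspect/harvest behaviour
    // implemented in Tool::inventory_input above.
    Box::new(crate::tool::Tool::new(
        Rect::new(0, 0, 32, 32),
        texture_creator
            .load_texture("src/images/itemMenu.png")
            .unwrap(),
        crate::tool::ToolType::Hand,
    ))
}

Calling `inventory_input((tile_x, tile_y), &mut pop)` on the boxed trait object returns `Some((crop_type, genes, child_genes))` only when the Hand harvests a stage-3 crop; the rotten-crop branch and the Hoe and WateringCan arms all return `None`.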
[ { "content": "pub fn background_to_draw(p: &Player) -> Rect {\n\n Rect::new(\n\n ((p.x() + ((p.width() / 2) as i32)) - ((CAM_W / 2) as i32)).clamp(0, (BG_W - CAM_W) as i32),\n\n ((p.y() + ((p.height() / 2) as i32)) - ((CAM_H / 2) as i32))\n\n .clamp(0, (BG_H - CAM_H) as i32),\n\n CAM_W,\n\n CAM_H,\n\n )\n\n}\n\n\n", "file_path": "src/market.rs", "rank": 0, "score": 109399.23609499133 }, { "content": "pub fn save_home(pop: population::Population, item_vec: Vec<item::Item>) {\n\n let mut file_to_save = match File::create(\"saves/home_data.txt\") {\n\n Err(why) => panic!(\"couldn't create home_data.txt: {}\", why),\n\n Ok(file_to_save) => file_to_save,\n\n };\n\n for item in item_vec {\n\n let output = \"item;\".to_owned()\n\n + &item.x().to_string()\n\n + \";\"\n\n + &item.y().to_string()\n\n + \";\"\n\n + &item.width().to_string()\n\n + \";\"\n\n + &item.height().to_string()\n\n + \";\"\n\n + &item.tex_path()\n\n + \";\"\n\n + &item.collision().to_string()\n\n + \"\\n\";\n\n match file_to_save.write_all(output.as_ref()) {\n", "file_path": "src/save_load.rs", "rank": 1, "score": 97245.12308780658 }, { "content": "pub fn save_pests<'a>(p: PestPopulation) {\n\n let mut file_to_save = match File::create(\"saves/pest_data.txt\") {\n\n Err(why) => panic!(\"Couldn't create inventory_data.txt: {}\", why),\n\n Ok(file_to_save) => file_to_save,\n\n };\n\n for pest in 0..p.get_length() {\n\n let output = p.get_pest(pest).to_save_string();\n\n match file_to_save.write_all(output.as_ref()) {\n\n Err(why) => {\n\n panic!(\"couldn't write to pest_data.txt: {}\", why)\n\n }\n\n Ok(_) => {}\n\n }\n\n }\n\n}\n", "file_path": "src/save_load.rs", "rank": 2, "score": 95240.04789216413 }, { "content": "pub fn load_pests<'a>() -> PestPopulation {\n\n let mut pest_pop = pest_population::PestPopulation::new();\n\n let mut pest_file = File::open(\"saves/pest_data.txt\").expect(\"Can't open save home_file\");\n\n let mut pest_contents = String::new();\n\n pest_file\n\n .read_to_string(&mut pest_contents)\n\n .expect(\"Can't read home_file\");\n\n for line in pest_contents.lines() {\n\n let results: Vec<&str> = line.split(\";\").collect();\n\n pest_pop.add_pest(pest::Pest::from_save_string(results));\n\n }\n\n if pest_pop.get_length() < pest_population::POP_SIZE {\n\n pest_pop.fill_pest_population();\n\n }\n\n pest_pop.find_avg_attack_chance();\n\n pest_pop\n\n}\n\n\n", "file_path": "src/save_load.rs", "rank": 3, "score": 86864.94234592527 }, { "content": "pub fn draw_market(\n\n mut wincan: WindowCanvas,\n\n m_pop: &population::Population,\n\n cur_bg: &Rect,\n\n m_item_vec: &Vec<item::Item>,\n\n) -> WindowCanvas {\n\n let texture_creator = wincan.texture_creator();\n\n\n\n let grass_texture = texture_creator\n\n .load_texture(\"src/images/Background_Tileset.png\")\n\n .unwrap();\n\n for crop_tile in m_pop.get_vec().iter().flatten() {\n\n let x_pos = crop_tile.tile.x() - cur_bg.x();\n\n let y_pos = crop_tile.tile.y() - cur_bg.y();\n\n //Don't bother drawing any tiles that are off screen\n\n if x_pos > -(TILE_SIZE as i32)\n\n && x_pos < (CAM_W as i32)\n\n && y_pos > -(TILE_SIZE as i32)\n\n && y_pos < (CAM_H as i32)\n\n {\n", "file_path": "src/market.rs", "rank": 4, "score": 79272.96997783113 }, { "content": "pub fn update_market_pos(\n\n p: &mut Player,\n\n m_item_vec: &Vec<item::Item>,\n\n player_vel: (i32, i32),\n\n in_menu: &mut Option<Menu>,\n\n) {\n\n p.update_pos_x(player_vel, (0, (BG_W - TILE_SIZE) as i32));\n\n for item in m_item_vec {\n\n if p.check_collision(&item.pos()) {\n\n p.stay_still_x(player_vel, (0, 
(BG_W - TILE_SIZE) as i32));\n\n if item.tex_path() == \"src/images/marketstall.png\" {\n\n *in_menu = Some(Menu::Shop);\n\n } else if item.tex_path() == \"src/images/go_home.png\" {\n\n *in_menu = Some(Menu::ToHome)\n\n }\n\n }\n\n }\n\n\n\n //Y\n\n p.update_pos_y(player_vel, (0, (BG_W - TILE_SIZE) as i32));\n", "file_path": "src/market.rs", "rank": 5, "score": 76269.42090940839 }, { "content": "pub fn load_inventory<'a>(\n\n inventory: &mut inventory::Inventory<'a>,\n\n crop_texture: &'a Texture<'a>,\n\n rotten_texture: &'a Texture<'a>,\n\n) {\n\n let mut inventory_file =\n\n File::open(\"saves/inventory_data.txt\").expect(\"Can't open inventory_data.txt\");\n\n let mut contents = String::new();\n\n inventory_file\n\n .read_to_string(&mut contents)\n\n .expect(\"Can't read inventory_data.txt\");\n\n for line in contents.lines() {\n\n let results: Vec<&str> = line.split(\";\").collect();\n\n if results[0] == \"crop\" {\n\n inventory.add_item(crop::Crop::from_save_string(\n\n &results,\n\n crop_texture,\n\n rotten_texture,\n\n ));\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/save_load.rs", "rank": 6, "score": 73224.4065650527 }, { "content": "pub fn load_home<'a>(\n\n texture_creator: &'a TextureCreator<WindowContext>,\n\n crop_texture: &'a Texture<'a>,\n\n rotten_texture: &'a Texture<'a>,\n\n tile_texture: &'a Texture<'a>,\n\n) -> (population::Population<'a>, Vec<item::Item<'a>>) {\n\n let mut tile_vec = Vec::new();\n\n for x in 0..((BG_W / TILE_SIZE) as i32) + 1 {\n\n let mut sub_vec = Vec::new();\n\n for y in 0..((BG_H / TILE_SIZE) as i32) + 1 {\n\n sub_vec.push(population::CropTile::new(\n\n tile::Tile::new(\n\n Rect::new(\n\n (TILE_SIZE as i32) * x,\n\n (TILE_SIZE as i32) * y,\n\n TILE_SIZE,\n\n TILE_SIZE,\n\n ),\n\n tile_texture,\n\n ),\n", "file_path": "src/save_load.rs", "rank": 7, "score": 73224.40656505272 }, { "content": "pub fn load_market<'a>(\n\n texture_creator: &'a TextureCreator<WindowContext>,\n\n crop_texture: &'a Texture<'a>,\n\n rotten_texture: &'a Texture<'a>,\n\n tile_texture: &'a Texture<'a>,\n\n) -> (population::Population<'a>, Vec<item::Item<'a>>) {\n\n let mut tile_vec = Vec::new();\n\n for x in 0..((BG_W / TILE_SIZE) as i32) + 1 {\n\n let mut sub_vec = Vec::new();\n\n for y in 0..((BG_H / TILE_SIZE) as i32) + 1 {\n\n sub_vec.push(population::CropTile::new(\n\n tile::Tile::new(\n\n Rect::new(\n\n (TILE_SIZE as i32) * x,\n\n (TILE_SIZE as i32) * y,\n\n TILE_SIZE,\n\n TILE_SIZE,\n\n ),\n\n tile_texture,\n\n ),\n", "file_path": "src/save_load.rs", "rank": 8, "score": 73224.40656505272 }, { "content": "pub fn start_sleep_menu<'a>(\n\n mut in_menu: Option<Menu>,\n\n wincan: &mut WindowCanvas,\n\n keystate: HashSet<Keycode>,\n\n player: &mut Player,\n\n pop: &mut Population<'a>,\n\n r: Rect,\n\n pest_pop: &mut PestPopulation,\n\n) -> Option<Menu> {\n\n let texture_creator = wincan.texture_creator();\n\n if keystate.contains(&Keycode::Y) {\n\n //Player has selected yes\n\n\n\n //Generate a random number between 0.0 and 1.0. 
If that number is lower than the pest populations\n\n //average chance to attack bugs will attack that night.\n\n let mut rng = rand::thread_rng();\n\n // let bug_night_result = rng.gen_range(0.0..1.0);\n\n let bug_night_result: f32 = rng.gen();\n\n println!(\"{}\", bug_night_result);\n\n println!(\"{}\", pest_pop.get_avg_attack_chance());\n", "file_path": "src/sleep_menu.rs", "rank": 9, "score": 70533.62589071819 }, { "content": "pub fn start_market_transition_menu<'a>(\n\n mut in_menu: Option<Menu>,\n\n wincan: &mut WindowCanvas,\n\n keystate: HashSet<Keycode>,\n\n r: Rect,\n\n in_area: Option<Area>,\n\n) -> (Option<Menu>, Area) {\n\n let texture_creator = wincan.texture_creator();\n\n\n\n if let Some(Area::Home) = in_area {\n\n if keystate.contains(&Keycode::Y) {\n\n // Go to market. First fade to white.\n\n let alphas: Vec<u8> = (0..=255).collect();\n\n let dt = Duration::from_secs_f64(2.0 / (alphas.len() as f64));\n\n let mut blank = Animation::new(alphas, dt, Instant::now());\n\n blank.set_freezing();\n\n while blank.current_index() < 255 {\n\n let tex = texture_creator\n\n .load_texture(\"src/images/traveling_screen.png\")\n\n .unwrap();\n", "file_path": "src/market.rs", "rank": 10, "score": 70533.62589071819 }, { "content": "pub fn save_inventory(inventory: &inventory::Inventory) {\n\n let mut file_to_save = match File::create(\"saves/inventory_data.txt\") {\n\n Err(why) => panic!(\"Couldn't create inventory_data.txt: {}\", why),\n\n Ok(file_to_save) => file_to_save,\n\n };\n\n // Save all crops in Inventory slots 3 through 10\n\n for i in 3..11 {\n\n if let Some(v) = inventory.get_inventory_slot(i) {\n\n for j in 0..v.get_len() {\n\n // Save each crop\n\n if let Some(crop) = v.get_item(j) {\n\n if let Some(output) = crop.to_save_string() {\n\n match file_to_save.write_all(output.as_ref()) {\n\n Err(why) => panic!(\"Couldn't write to inventory_data.txt: {}\", why),\n\n Ok(_) => println!(\"Successfully wrote crop to inventory_data.txt\"),\n\n }\n\n }\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/save_load.rs", "rank": 11, "score": 61636.69086136989 }, { "content": "/// Trait used for items that can exist inside of the inventory\n\npub trait InventoryItemTrait {\n\n /// Return some determined value to sort the inventory\n\n fn get_value(&self) -> i32;\n\n // Get the texture\n\n fn texture(&self) -> &Texture;\n\n /// Get the pos\n\n fn src(&self) -> Rect;\n\n /// Perform the correct action for the inventory slot item\n\n fn inventory_input(\n\n &self,\n\n square: (i32, i32),\n\n pop: &mut population::Population,\n\n ) -> Option<(\n\n Option<crop::CropType>,\n\n Option<genes::Genes>,\n\n Option<genes::Genes>,\n\n )>;\n\n /// Make save string for crops; return None for tools\n\n fn to_save_string(&self) -> Option<String>;\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 12, "score": 55325.03451796085 }, { "content": "#[derive(Debug)]\n\nstruct Gene {\n\n gene_type: GeneType,\n\n value: f32,\n\n}\n\n\n\n/// Genes struct\n\n#[derive(Debug)]\n\npub struct Genes {\n\n genes: Vec<Gene>,\n\n}\n\n\n\nimpl Genes {\n\n /// Generate new Genes using random values following a\n\n /// Normal Distribution\n\n pub fn new() -> Genes {\n\n let normal = Normal::new(MEAN, STD_DEV).unwrap();\n\n let growth_var = normal.sample(&mut rand::thread_rng()).clamp(0.0, 1.0);\n\n let water_ret_var = normal.sample(&mut rand::thread_rng()).clamp(0.0, 1.0);\n\n let pest_resist_var = normal.sample(&mut rand::thread_rng()).clamp(0.0, 1.0);\n\n let value_var = (growth_var + water_ret_var + pest_resist_var) / 
3.0;\n", "file_path": "src/genes.rs", "rank": 13, "score": 51063.889469111746 }, { "content": "struct PestGene {\n\n pest_gene_type: PestGeneType,\n\n value: f32,\n\n}\n\n\n\nimpl PestGene {\n\n fn new(t: PestGeneType, value: f32) -> PestGene {\n\n PestGene {\n\n pest_gene_type: t,\n\n value: value,\n\n }\n\n }\n\n}\n\n\n\npub struct Pest {\n\n pest_genes: Vec<PestGene>,\n\n fitness: f32,\n\n}\n\n\n\nimpl Pest {\n", "file_path": "src/pest.rs", "rank": 14, "score": 48964.74441255146 }, { "content": "fn main() {\n\n let sdl_cxt = sdl2::init().unwrap();\n\n let video_subsys = sdl_cxt.video().unwrap();\n\n\n\n let window = video_subsys\n\n .window(TITLE, CAM_W, CAM_H)\n\n .build()\n\n .map_err(|e| e.to_string())\n\n .unwrap();\n\n\n\n let wincan = window.into_canvas().accelerated();\n\n\n\n // Check if we should lock to vsync\n\n let wincan = if VSYNC {\n\n wincan.present_vsync()\n\n } else {\n\n wincan\n\n };\n\n\n\n let mut wincan = wincan.build().map_err(|e| e.to_string()).unwrap();\n", "file_path": "src/main.rs", "rank": 15, "score": 48094.1452793836 }, { "content": "\n\n pub fn get_tile(&self, x: i32, y: i32) -> &Tile {\n\n &self.crop_tile_vec[(x / TILE_SIZE as i32) as usize][(y / TILE_SIZE as i32) as usize].tile\n\n }\n\n\n\n //Lends out Tile struct at given x, y index\n\n pub fn get_tile_with_index(&self, x: u32, y: u32) -> &Tile {\n\n &self.crop_tile_vec[x as usize][y as usize].tile\n\n }\n\n\n\n pub fn get_tile_with_index_mut(&mut self, x: u32, y: u32) -> &mut Tile<'a> {\n\n &mut self.crop_tile_vec[x as usize][y as usize].tile\n\n }\n\n\n\n //Lends out Crop struct at given x, y map coordinates\n\n\n\n pub fn get_crop(&self, x: i32, y: i32) -> &Crop {\n\n &self.crop_tile_vec[(x / TILE_SIZE as i32) as usize][(y / TILE_SIZE as i32) as usize].crop\n\n }\n\n\n", "file_path": "src/population.rs", "rank": 23, "score": 36142.33204327636 }, { "content": " /// Returns an array of neighboring crops, sorted by distance from\n\n /// (x,y)\n\n pub fn get_neighbors(&self, x: i32, y: i32) -> Vec<(genes::Genes, f32)> {\n\n let mut v: Vec<&Crop> = Vec::new();\n\n // Loop through nearest rings\n\n for col in (x - 2).clamp(0, RIGHT_TILE_BOUND)..(x + 2).clamp(0, RIGHT_TILE_BOUND) {\n\n for row in (y - 2).clamp(0, BOTTOM_TILE_BOUND)..(y + 2).clamp(0, BOTTOM_TILE_BOUND) {\n\n // Don't let a plant pollinate itself\n\n if col == x && row == y {\n\n continue;\n\n }\n\n let c = self.get_crop_with_index(col as u32, row as u32);\n\n if c.get_crop_type_enum() != crate::crop::CropType::None && c.get_stage() == 3 {\n\n v.push(c);\n\n }\n\n }\n\n }\n\n // Sort vector\n\n v.sort_by_cached_key(|k| (k.distance(x, y) * 100.0) as i32);\n\n // Extract clones of genes and distances\n", "file_path": "src/population.rs", "rank": 24, "score": 36142.14511879444 }, { "content": "use crate::crop::Crop;\n\nuse crate::genes;\n\nuse crate::tile::Tile;\n\nuse crate::{BOTTOM_TILE_BOUND, RIGHT_TILE_BOUND, TILE_SIZE};\n\n\n\n//Struct used to combine tile and crop structs into one for easy storage into the vector\n\npub struct CropTile<'a> {\n\n pub tile: Tile<'a>,\n\n pub crop: Crop<'a>,\n\n}\n\n\n\nimpl<'a> CropTile<'a> {\n\n pub fn new(tile: Tile<'a>, crop: Crop<'a>) -> CropTile<'a> {\n\n CropTile { tile, crop }\n\n }\n\n\n\n pub fn set_crop(&mut self, c: Crop<'a>) {\n\n self.crop = c;\n\n }\n\n}\n", "file_path": "src/population.rs", "rank": 25, "score": 36141.75589391816 }, { "content": " let mut r: Vec<(genes::Genes, f32)> = Vec::new();\n\n for i in v {\n\n r.push((\n\n i.get_all_genes().as_ref().unwrap().clone(),\n\n i.distance(x, 
y),\n\n ));\n\n }\n\n r\n\n }\n\n\n\n // pub fn pollinate(&self, x: i32, y: i32) {\n\n // // let mut c =\n\n // }\n\n}\n", "file_path": "src/population.rs", "rank": 26, "score": 36141.43077599762 }, { "content": "\n\npub struct Population<'a> {\n\n crop_tile_vec: Vec<Vec<CropTile<'a>>>,\n\n}\n\n\n\nimpl<'a> Population<'a> {\n\n pub fn new(crop_tile_vec: Vec<Vec<CropTile<'a>>>) -> Population<'a> {\n\n Population { crop_tile_vec }\n\n }\n\n\n\n //Lends out the whole vector\n\n pub fn get_vec(&self) -> &Vec<Vec<CropTile>> {\n\n &self.crop_tile_vec\n\n }\n\n\n\n pub fn get_vec_mut(&mut self) -> &mut Vec<Vec<CropTile<'a>>> {\n\n &mut self.crop_tile_vec\n\n }\n\n\n\n //Lends out Tile struct at given x, y map coordinates\n", "file_path": "src/population.rs", "rank": 27, "score": 36141.148486075035 }, { "content": " //Lends out Crop struct at given x, y index\n\n pub fn get_crop_with_index(&self, x: u32, y: u32) -> &Crop {\n\n &self.crop_tile_vec[x as usize][y as usize].crop\n\n }\n\n\n\n pub fn get_crop_with_index_mut(&mut self, x: u32, y: u32) -> &mut Crop<'a> {\n\n &mut self.crop_tile_vec[x as usize][y as usize].crop\n\n }\n\n\n\n pub fn set_crop_with_index(&mut self, x: u32, y: u32, mut tar_crop: Crop<'a>) {\n\n tar_crop.set_pos(self.crop_tile_vec[x as usize][y as usize].crop.get_pos());\n\n self.crop_tile_vec[x as usize][y as usize].crop = tar_crop;\n\n }\n\n\n\n pub fn update_all_plants(&self) {}\n\n\n\n pub fn plant_seed(&self) {}\n\n\n\n pub fn destroy_plant(&self) {}\n\n\n", "file_path": "src/population.rs", "rank": 28, "score": 36139.72382065847 }, { "content": "use crate::pest;\n\nuse crate::pest::Pest;\n\nuse crate::pest::PestGeneType::AttackRate;\n\nuse rand;\n\nuse rand::Rng;\n\nuse rand_distr::{Distribution, Normal};\n\n\n\nconst MEAN: f32 = 0.5;\n\nconst STD_DEV: f32 = 0.1;\n\npub(crate) const POP_SIZE: usize = 500;\n\n\n\npub struct PestPopulation {\n\n pest_population: Vec<Pest>,\n\n avg_attack_chance: f32,\n\n}\n\n\n\nimpl PestPopulation {\n\n pub fn new() -> PestPopulation {\n\n let mut temp = Vec::new();\n\n let mut z = 0.0;\n", "file_path": "src/pest_population.rs", "rank": 29, "score": 33770.05494633189 }, { "content": " If the pest population is under the appropriate size it will fill it up with new pests\n\n */\n\n pub fn fill_pest_population(&mut self) {\n\n while self.pest_population.len() < POP_SIZE {\n\n self.pest_population.push(pest::Pest::new());\n\n }\n\n }\n\n\n\n /*\n\n Recalculates average attack chance for when pests die or when a new generation is added\n\n */\n\n pub fn find_avg_attack_chance(&mut self) {\n\n let mut sum = 0.0;\n\n for g in 0..POP_SIZE {\n\n sum = sum + self.pest_population[g].get_pest_gene(AttackRate);\n\n }\n\n self.avg_attack_chance = sum / POP_SIZE as f32;\n\n }\n\n\n\n pub fn add_pest(&mut self, p: Pest) {\n", "file_path": "src/pest_population.rs", "rank": 30, "score": 33766.13552611445 }, { "content": " self.pest_population.push(p);\n\n }\n\n\n\n /*\n\n Generates a new generation\n\n */\n\n pub fn next_generation(&mut self) {\n\n let mut rng = rand::thread_rng();\n\n let mut temp = Vec::new();\n\n //print!(\"outside loop\");\n\n for g in 0..POP_SIZE {\n\n let n = (self.pest_population[g].get_fitness() * 100.0).round();\n\n //print!(\"{}\", n);\n\n for i in 0..n as i32 {\n\n //print!(\"loop1\");\n\n let test_pest = self.pest_population[g].clone();\n\n temp.push(test_pest);\n\n }\n\n }\n\n //print!(\"{}\", temp.len());\n", "file_path": "src/pest_population.rs", "rank": 31, "score": 33765.93576494496 }, { "content": "\n\n for i in 
0..POP_SIZE {\n\n let pest0: &Pest = temp.get(rng.gen_range(0..temp.len())).unwrap();\n\n let pest1: &Pest = temp.get(rng.gen_range(0..temp.len())).unwrap();\n\n let mut pest3 = Pest::make_pest(pest0.breed_pests(pest1));\n\n pest3.recalc_fitness();\n\n pest3.mutate_pest();\n\n self.pest_population[i] = pest3;\n\n }\n\n\n\n self.find_avg_attack_chance();\n\n }\n\n\n\n pub fn kill_pest(&mut self, i: usize) {\n\n self.pest_population.remove(i);\n\n }\n\n}\n", "file_path": "src/pest_population.rs", "rank": 32, "score": 33765.32836565195 }, { "content": "\n\n PestPopulation {\n\n pest_population: temp,\n\n avg_attack_chance: z,\n\n }\n\n }\n\n\n\n pub fn get_length(&self) -> usize {\n\n self.pest_population.len()\n\n }\n\n\n\n pub fn get_pest(&self, x: usize) -> &Pest {\n\n &self.pest_population[x]\n\n }\n\n\n\n pub fn get_avg_attack_chance(&self) -> f32 {\n\n self.avg_attack_chance\n\n }\n\n\n\n /*\n", "file_path": "src/pest_population.rs", "rank": 33, "score": 33760.75268647846 }, { "content": " pub fn get_selected(&self) -> i32 {\n\n self.inventory.get_selected()\n\n }\n\n\n\n pub fn use_inventory(\n\n &mut self,\n\n square: (i32, i32),\n\n pop: &mut Population,\n\n ) -> Option<(Option<CropType>, Option<genes::Genes>, Option<genes::Genes>)> {\n\n self.inventory.use_inventory(square, pop)\n\n /*match return_crop{\n\n Some(x) => Some(x),\n\n None => (),\n\n }*/\n\n }\n\n pub fn add_item(&mut self, new_crop: Crop<'a>) {\n\n self.inventory.add_item(new_crop);\n\n }\n\n\n\n pub fn draw(&mut self, wincan: &mut WindowCanvas, player_cam_pos: Rect) {\n", "file_path": "src/player.rs", "rank": 34, "score": 6100.904408102372 }, { "content": "pub struct Inventory<'a> {\n\n inventory_slots: Vec<InventoryItem<'a>>,\n\n selected: i32,\n\n squares: Vec<Rect>,\n\n}\n\n\n\n/// Takes in texture_creator in order to load tools into the tool slots\n\nimpl<'a> Inventory<'a> {\n\n pub fn new(texture_creator: &'a TextureCreator<WindowContext>) -> Inventory<'a> {\n\n // Initializes inventory slots and sets tool slots to true\n\n let mut inventory_slots: Vec<InventoryItem> =\n\n (0..11).map(|x| InventoryItem::new(x < 3)).collect();\n\n\n\n // Add tool slots into the inventory\n\n inventory_slots[0].add_item(Box::new(Tool::new(\n\n Rect::new(0 * 32, 0, 32, 32),\n\n texture_creator\n\n .load_texture(\"src/images/itemMenu.png\")\n\n .unwrap(),\n\n crate::tool::ToolType::Hand,\n", "file_path": "src/inventory.rs", "rank": 35, "score": 6098.250290495654 }, { "content": " &self,\n\n square: (i32, i32),\n\n pop: &mut Population,\n\n ) -> Option<(Option<CropType>, Option<genes::Genes>, Option<genes::Genes>)> {\n\n if self.stage != 0 {\n\n return None;\n\n }\n\n let (x, y) = square;\n\n if pop.get_tile_with_index(x as u32, y as u32).tilled()\n\n && pop\n\n .get_crop_with_index(x as u32, y as u32)\n\n .get_crop_type()\n\n .to_owned()\n\n == \"None\"\n\n {\n\n let mut _c = pop.get_crop_with_index_mut(x as u32, y as u32);\n\n _c.set_crop_type_enum(self.t);\n\n _c.set_stage(0);\n\n _c.set_water(false);\n\n _c.set_genes(self.get_all_genes().clone());\n", "file_path": "src/crop.rs", "rank": 36, "score": 6097.808047359993 }, { "content": "use sdl2::rect::Rect;\n\nuse sdl2::render::Texture;\n\n\n\nuse crate::TILE_SIZE;\n\n\n\npub struct Tile<'a> {\n\n pos: Rect,\n\n src: Rect,\n\n texture: &'a Texture<'a>,\n\n tilled: bool,\n\n}\n\n\n\nimpl<'a> Tile<'a> {\n\n pub fn new(pos: Rect, texture: &'a Texture<'a>) -> Tile {\n\n let src = Rect::new(0, 0, TILE_SIZE, TILE_SIZE);\n\n Tile {\n\n pos,\n\n src,\n\n texture,\n\n tilled: false,\n", 
"file_path": "src/tile.rs", "rank": 37, "score": 6095.403551011098 }, { "content": "use sdl2::pixels::Color;\n\nuse sdl2::rect::Rect;\n\nuse sdl2::render::WindowCanvas;\n\n\n\nuse crate::crop::Crop;\n\nuse crate::crop::CropType;\n\nuse crate::genes;\n\nuse crate::population::Population;\n\nuse crate::tool::Tool;\n\nuse crate::InventoryItemTrait;\n\n\n\nuse sdl2::image::LoadTexture;\n\nuse sdl2::render::TextureCreator;\n\nuse sdl2::video::WindowContext;\n\n\n\n// use sdl2::render::TextureQuery;\n\n\n\nstatic INVENTORY_X_POS: i32 = 261;\n\nstatic INVENTORY_Y_POS: i32 = 640;\n\n\n", "file_path": "src/inventory.rs", "rank": 38, "score": 6094.850033754708 }, { "content": " }\n\n }\n\n }\n\n }\n\n Area::Market => wincan = market::draw_market(wincan, &m_pop, &cur_bg, &m_item_vec),\n\n }\n\n\n\n // Draw inventory\n\n p.draw(&mut wincan, player_cam_pos);\n\n //ui.draw(&mut wincan);\n\n\n\n match in_menu {\n\n None => {}\n\n Some(Menu::Sleep) => {\n\n let sleep_box = texture_creator\n\n .load_texture(\"src/images/sleep.png\")\n\n .unwrap();\n\n wincan\n\n .copy(&sleep_box, None, Rect::new(400, 400, 600, 180))\n\n .unwrap();\n", "file_path": "src/main.rs", "rank": 39, "score": 6094.839892889889 }, { "content": " self.pos\n\n }\n\n\n\n pub fn src(&self) -> Rect {\n\n self.src\n\n }\n\n\n\n pub fn texture(&self) -> &Texture {\n\n &self.texture\n\n }\n\n\n\n pub fn tilled(&self) -> bool {\n\n self.tilled\n\n }\n\n\n\n pub fn set_tilled(&mut self, till: bool) {\n\n self.tilled = till;\n\n if till {\n\n self.src = Rect::new(TILE_SIZE as i32, TILE_SIZE as i32, TILE_SIZE, TILE_SIZE);\n\n } else {\n", "file_path": "src/tile.rs", "rank": 40, "score": 6094.5373016157655 }, { "content": " pub fn new(\n\n pos: Rect,\n\n stage: u8,\n\n texture: &'a Texture<'a>,\n\n rotten_texture: &'a Texture<'a>,\n\n watered: bool,\n\n t: CropType,\n\n genes: Option<genes::Genes>,\n\n ) -> Crop<'a> {\n\n let (x, y) = match t {\n\n CropType::None => (0, 0),\n\n CropType::Carrot => (stage as u32 * TILE_SIZE, 0),\n\n CropType::Corn => (stage as u32 * TILE_SIZE, TILE_SIZE),\n\n CropType::Potato => (stage as u32 * TILE_SIZE, TILE_SIZE * 2),\n\n CropType::Lettuce => (stage as u32 * TILE_SIZE, TILE_SIZE * 3),\n\n };\n\n\n\n let src = Rect::new(x as i32, y as i32, TILE_SIZE, TILE_SIZE);\n\n\n\n Crop {\n", "file_path": "src/crop.rs", "rank": 41, "score": 6093.779314646395 }, { "content": " CropType::Potato => (self.stage as u32 * TILE_SIZE, TILE_SIZE * 2),\n\n CropType::Lettuce => (self.stage as u32 * TILE_SIZE, TILE_SIZE * 3),\n\n };\n\n\n\n self.src = Rect::new(x as i32, y as i32, TILE_SIZE, TILE_SIZE);\n\n }\n\n\n\n pub fn set_crop_type_enum(&mut self, new_crop_type: CropType) {\n\n self.t = new_crop_type;\n\n let (x, y) = match self.t {\n\n CropType::None => (0, 0),\n\n CropType::Carrot => (self.stage as u32 * TILE_SIZE, 0),\n\n CropType::Corn => (self.stage as u32 * TILE_SIZE, TILE_SIZE),\n\n CropType::Potato => (self.stage as u32 * TILE_SIZE, TILE_SIZE * 2),\n\n CropType::Lettuce => (self.stage as u32 * TILE_SIZE, TILE_SIZE * 3),\n\n };\n\n\n\n self.src = Rect::new(x as i32, y as i32, TILE_SIZE, TILE_SIZE);\n\n }\n\n\n", "file_path": "src/crop.rs", "rank": 42, "score": 6093.578974544891 }, { "content": " )));\n\n\n\n inventory_slots[1].add_item(Box::new(Tool::new(\n\n Rect::new(1 * 32, 0, 32, 32),\n\n texture_creator\n\n .load_texture(\"src/images/itemMenu.png\")\n\n .unwrap(),\n\n crate::tool::ToolType::Hoe,\n\n )));\n\n\n\n inventory_slots[2].add_item(Box::new(Tool::new(\n\n Rect::new(2 * 32, 0, 32, 32),\n\n texture_creator\n\n 
.load_texture(\"src/images/itemMenu.png\")\n\n .unwrap(),\n\n crate::tool::ToolType::WateringCan,\n\n )));\n\n\n\n let temp_select = 0;\n\n\n", "file_path": "src/inventory.rs", "rank": 43, "score": 6093.508035996727 }, { "content": " pop: &mut Population,\n\n ) -> Option<(Option<CropType>, Option<genes::Genes>, Option<genes::Genes>)> {\n\n let current_item = self.inventory_slots[self.selected as usize].get_item(0);\n\n match current_item {\n\n Some(x) => {\n\n let ret_val = x.inventory_input(square, pop);\n\n\n\n match ret_val {\n\n Some((t, g, child)) => {\n\n match (t, g) {\n\n (Some(_t), Some(_g)) => {\n\n // If crop harvested...\n\n Some((Some(_t), Some(_g), child))\n\n }\n\n (Some(_t), None) => {\n\n if matches!(_t, CropType::None) {\n\n // If seed planted...\n\n self.inventory_slots\n\n .get_mut(self.selected as usize)\n\n .unwrap()\n", "file_path": "src/inventory.rs", "rank": 44, "score": 6093.3590163752615 }, { "content": " // pub fn set_src_xy(&mut self, x: i32, y: i32) {\n\n // self.src.set_x(x);\n\n // self.src.set_y(y);\n\n // }\n\n\n\n /// Set a crop's texture pointer\n\n pub fn set_texture(&mut self, t: &'a Texture<'a>) {\n\n self.texture = t;\n\n }\n\n\n\n /// Set a crop's `rotten` variable\n\n pub fn set_rotten(&mut self, r: bool) {\n\n self.rotten = r;\n\n if r {\n\n self.src.set_x(0);\n\n }\n\n }\n\n\n\n pub fn rotten(&self) -> bool {\n\n self.rotten\n", "file_path": "src/crop.rs", "rank": 45, "score": 6092.950500068701 }, { "content": " }\n\n\n\n pub fn price_draw(\n\n wincan: &mut WindowCanvas,\n\n mut steps: i32,\n\n initx: i32,\n\n inity: i32,\n\n value: i32,\n\n ) {\n\n let texture_creator = wincan.texture_creator();\n\n let values_texture = texture_creator\n\n .load_texture(\"src/images/MoneySpriteSheet.png\")\n\n .unwrap();\n\n let initsteps = steps;\n\n let mut disp_price = value;\n\n let mut tailing_zero = false;\n\n while steps >= 0 {\n\n let temp = i32::pow(10, steps as u32);\n\n let mut modulo = (disp_price - (disp_price % temp)) / temp;\n\n disp_price = disp_price - (modulo * temp);\n", "file_path": "src/store.rs", "rank": 46, "score": 6092.705643268387 }, { "content": " .copy(\n\n current_item.texture(),\n\n current_item.src(),\n\n Rect::new(\n\n INVENTORY_X_POS + (x * (ITEM_BOX_SIZE + BORDER_SIZE)),\n\n INVENTORY_Y_POS,\n\n ITEM_BOX_SIZE as u32,\n\n ITEM_BOX_SIZE as u32,\n\n ),\n\n )\n\n .unwrap();\n\n\n\n // Dont draw tool slots\n\n // This is so that it isn't shown that there is (1) tool\n\n if !inventory.is_tool {\n\n self.draw_numbers(wincan, x, inventory.get_len());\n\n }\n\n\n\n x = x + 1;\n\n }\n", "file_path": "src/inventory.rs", "rank": 47, "score": 6092.639448082899 }, { "content": " }\n\n\n\n /// Draw length for inventory slot\n\n pub fn draw_numbers(&self, wincan: &mut WindowCanvas, inventory_slot: i32, mut value: i32) {\n\n // let NUMBER_SIZE = 20;\n\n\n\n let texture_creator = wincan.texture_creator();\n\n let values_texture = texture_creator\n\n .load_texture(\"src/images/outlined_numbers.png\")\n\n .unwrap();\n\n let mut digit_place = 1;\n\n // Do-While loop in rust\n\n loop {\n\n let digit = value % 10;\n\n value /= 10;\n\n\n\n wincan\n\n .copy(\n\n &values_texture,\n\n Rect::new(20 * digit, 0, 20, 20),\n", "file_path": "src/inventory.rs", "rank": 48, "score": 6092.566189129794 }, { "content": " }\n\n Some(Menu::ToMarket) => {\n\n let go_box = texture_creator\n\n .load_texture(\"src/images/market_menu.png\")\n\n .unwrap();\n\n wincan\n\n .copy(&go_box, None, Rect::new(400, 400, 600, 180))\n\n .unwrap()\n\n }\n\n Some(Menu::ToHome) => {\n\n 
let go_box = texture_creator\n\n .load_texture(\"src/images/go_home_menu.png\")\n\n .unwrap();\n\n wincan\n\n .copy(&go_box, None, Rect::new(400, 400, 600, 180))\n\n .unwrap()\n\n }\n\n Some(Menu::Shop) => {\n\n store.draw(&mut wincan);\n\n }\n\n }\n\n\n\n wincan.present();\n\n } // end gameloop\n\n}\n", "file_path": "src/main.rs", "rank": 49, "score": 6092.536155236344 }, { "content": " match in_area {\n\n Area::Home => {\n\n let result = p.use_inventory(coordinates, &mut pop);\n\n match result {\n\n Some((Some(t), Some(g), child)) => {\n\n //Return multiple seeds from harvesting a plant\n\n //This may want to be determined on a plant's genes later\n\n\n\n let mut grown_crop = crop::Crop::new(\n\n Rect::new(0, 0, 0, 0),\n\n 3,\n\n &crop_texture,\n\n &rotten_texture,\n\n false,\n\n t,\n\n Some(g.clone()),\n\n );\n\n grown_crop.set_stage(3);\n\n p.add_item(grown_crop);\n\n\n", "file_path": "src/main.rs", "rank": 50, "score": 6092.318289164226 }, { "content": "use sdl2::image::LoadTexture;\n\nuse sdl2::pixels::Color;\n\nuse sdl2::rect::Rect;\n\n\n\nuse sdl2::render::WindowCanvas;\n\n\n\nuse crate::market_item::MarketItem;\n\n\n\npub struct Store<'a> {\n\n item_selected: i32,\n\n amount_selected: i32,\n\n price: i32,\n\n sub_menu: i32,\n\n number_of_goods: i32,\n\n item_rect: Rect,\n\n money_rect: Rect,\n\n amount_rect: Rect,\n\n menu_rect: Rect,\n\n items_array: &'a mut Vec<MarketItem>,\n\n}\n", "file_path": "src/store.rs", "rank": 51, "score": 6092.166315739493 }, { "content": " }\n\n\n\n pub fn get_crop_type_enum(&self) -> CropType {\n\n self.t\n\n }\n\n\n\n pub fn set_crop_type(&mut self, string: &str) {\n\n match string {\n\n \"None\" => self.t = CropType::None,\n\n \"Carrot\" => self.t = CropType::Carrot,\n\n \"Corn\" => self.t = CropType::Corn,\n\n \"Lettuce\" => self.t = CropType::Lettuce,\n\n \"Potato\" => self.t = CropType::Potato,\n\n _ => println!(\"invalid CropType\"),\n\n };\n\n\n\n let (x, y) = match self.t {\n\n CropType::None => (0, 0),\n\n CropType::Carrot => (self.stage as u32 * TILE_SIZE, 0),\n\n CropType::Corn => (self.stage as u32 * TILE_SIZE, TILE_SIZE),\n", "file_path": "src/crop.rs", "rank": 52, "score": 6091.846130287865 }, { "content": " pos,\n\n stage,\n\n src,\n\n texture,\n\n rotten_texture,\n\n watered,\n\n t,\n\n genes,\n\n pollinated: false,\n\n rotten: false,\n\n child: None,\n\n }\n\n }\n\n\n\n /// Sets a crop's `watered` variable to `w`\n\n pub fn set_water(&mut self, w: bool) {\n\n self.watered = w;\n\n }\n\n\n\n /// Set the x and y of a crop's `src` Rect\n", "file_path": "src/crop.rs", "rank": 53, "score": 6091.734990225963 }, { "content": " pub fn texture(&self) -> &Texture {\n\n &self.texture\n\n }\n\n\n\n pub fn check_collision(&self, a: &Rect) -> bool {\n\n let b = self.get_pos();\n\n !(a.bottom() < b.top()\n\n || a.top() > b.bottom()\n\n || a.right() < b.left()\n\n || a.left() > b.right())\n\n }\n\n\n\n #[allow(dead_code)]\n\n pub fn get_inventory(&mut self) -> &mut Inventory<'a> {\n\n &mut self.inventory\n\n }\n\n\n\n /// Eat two or three randomly selected crops. 
Returns the number of\n\n /// crops that the PC wanted to eat but couldn't.\n\n pub fn dinner(&mut self) -> i32 {\n", "file_path": "src/player.rs", "rank": 54, "score": 6091.492557998238 }, { "content": "use sdl2::rect::Rect;\n\nuse sdl2::render::{Texture, WindowCanvas};\n\n\n\npub struct Item<'a> {\n\n pos: Rect,\n\n texture: Texture<'a>,\n\n tex_path: String,\n\n collision: bool,\n\n}\n\n\n\nimpl<'a> Item<'a> {\n\n pub fn new(pos: Rect, texture: Texture<'a>, tex_path: String, collision: bool) -> Item {\n\n Item {\n\n pos,\n\n texture,\n\n tex_path,\n\n collision,\n\n }\n\n }\n\n\n", "file_path": "src/item.rs", "rank": 55, "score": 6091.30488821634 }, { "content": " pub fn distance(&self, x: i32, y: i32) -> f32 {\n\n ((((self.get_x() / TILE_SIZE as i32) - x).abs() as f32).powi(2)\n\n + (((self.get_y() / TILE_SIZE as i32) - y).abs() as f32).powi(2))\n\n .sqrt()\n\n }\n\n\n\n // pub fn pollinate(&mut self, pop: &mut Population) {\n\n pub fn pollinate(&mut self, neighbors: Vec<(genes::Genes, f32)>) {\n\n // If self is already pollinated, return immediately\n\n if self.pollinated || self.stage != 3 {\n\n return;\n\n }\n\n // TODO tweak pollination prob\n\n let mut prob: f32 = 0.4; // Pollination probability\n\n // let x = self.get_x() / TILE_SIZE as i32;\n\n // let y = self.get_y() / TILE_SIZE as i32;\n\n // let neighbors = pop.get_neighbors(x, y);\n\n let mut rng = rand::thread_rng();\n\n let mut r: f32;\n\n // println!(\n", "file_path": "src/crop.rs", "rank": 56, "score": 6091.272125218548 }, { "content": "\n\n wincan.set_blend_mode(BlendMode::Blend);\n\n let texture_creator = wincan.texture_creator();\n\n let r = Rect::new((0) as i32, (0) as i32, CAM_W, CAM_H);\n\n wincan.set_draw_color(Color::RGBA(255, 255, 255, 255));\n\n wincan.clear();\n\n\n\n // Crop and tile textures; all use the same one, so\n\n // just reference it for efficiency\n\n let crop_texture = texture_creator\n\n .load_texture(\"src/images/Crop_Tileset.png\")\n\n .unwrap();\n\n\n\n let rotten_texture = texture_creator\n\n .load_texture(\"src/images/RottingCrops.png\")\n\n .unwrap();\n\n\n\n let tile_texture = texture_creator\n\n .load_texture(\"src/images/Background_Tileset.png\")\n\n .unwrap();\n", "file_path": "src/main.rs", "rank": 57, "score": 6091.263409458039 }, { "content": " Return:\n\n The updated WindowCanvas\n\n */\n\n pub fn print_item(\n\n &self,\n\n x: i32,\n\n y: i32,\n\n w: u32,\n\n h: u32,\n\n mut win: WindowCanvas,\n\n ) -> WindowCanvas {\n\n let testx = self.x() - x;\n\n let testy = self.y() - y;\n\n // Draw barn\n\n if testx > -(self.width() as i32)\n\n && testx < w as i32\n\n && testy > -(self.height() as i32)\n\n && testy < h as i32\n\n {\n\n let barn_sub_set = Rect::new(self.x() - x, self.y() - y, self.width(), self.height());\n", "file_path": "src/item.rs", "rank": 58, "score": 6091.205882265669 }, { "content": "\n\n // Roll group credits\n\n // let _ = roll_credits(&mut wincan, &texture_creator, r);\n\n // roll_credits(&mut wincan, &texture_creator, r).unwrap();\n\n\n\n let mut event_pump = sdl_cxt.event_pump().unwrap();\n\n let _x_vel = 0;\n\n let _y_vel = 0;\n\n\n\n let _menu_location = 0;\n\n\n\n let mut p = player::Player::new(\n\n Rect::new(\n\n (BG_W / 2 - PLAYER_WIDTH / 2) as i32,\n\n (BG_H / 2 - PLAYER_HEIGHT / 2) as i32,\n\n PLAYER_WIDTH,\n\n PLAYER_HEIGHT,\n\n ),\n\n texture_creator\n\n .load_texture(\"src/images/farmer.png\")\n", "file_path": "src/main.rs", "rank": 59, "score": 6091.142786209295 }, { "content": " pub fn set_child(&mut self, c: Option<genes::Genes>) {\n\n self.child = 
c;\n\n }\n\n\n\n pub fn get_child(&self) -> &Option<genes::Genes> {\n\n &self.child\n\n }\n\n\n\n /// Get a Crop's texture\n\n pub fn get_texture(&self) -> &Texture {\n\n if self.rotten {\n\n self.rotten_texture\n\n } else {\n\n self.texture\n\n }\n\n }\n\n\n\n /// Get a Crop's `src`\n\n pub fn get_src(&self) -> Rect {\n\n self.src\n", "file_path": "src/crop.rs", "rank": 60, "score": 6091.141150774479 }, { "content": " self.inventory.draw(wincan);\n\n let src = self.src();\n\n wincan.copy(self.texture(), src, player_cam_pos).unwrap();\n\n }\n\n\n\n /// Set a player's x position, clamping between given bounds\n\n pub fn update_pos_x(&mut self, vel: (i32, i32), x_bounds: (i32, i32)) {\n\n self.pos\n\n .set_x((self.pos.x() + vel.0).clamp(x_bounds.0, x_bounds.1));\n\n }\n\n\n\n /// Set a player's y position, clamping between given bounds\n\n pub fn update_pos_y(&mut self, vel: (i32, i32), y_bounds: (i32, i32)) {\n\n self.pos\n\n .set_y((self.pos.y() + vel.1).clamp(y_bounds.0, y_bounds.1));\n\n }\n\n\n\n /// Stop a player from moving in the x direction\n\n pub fn stay_still_x(&mut self, vel: (i32, i32), x_bounds: (i32, i32)) {\n\n self.pos\n", "file_path": "src/player.rs", "rank": 61, "score": 6090.998027384285 }, { "content": " Rect::new(\n\n INVENTORY_X_POS + ((inventory_slot + 1) * (ITEM_BOX_SIZE + BORDER_SIZE))\n\n - digit_place * NUMBER_SIZE,\n\n INVENTORY_Y_POS + ITEM_BOX_SIZE - NUMBER_SIZE,\n\n NUMBER_SIZE as u32,\n\n NUMBER_SIZE as u32,\n\n ),\n\n )\n\n .unwrap();\n\n digit_place += 1;\n\n\n\n // While\n\n if value == 0 {\n\n break;\n\n }\n\n }\n\n }\n\n\n\n pub fn set_selected(&mut self, _selected: i32) {\n\n self.selected = _selected\n", "file_path": "src/inventory.rs", "rank": 62, "score": 6090.741491907692 }, { "content": " // println!(\"Loading from {:?}, len = {:?}\", s, s.len());\n\n // TODO add to this as more genes are added or make from_save_string in Genes\n\n\n\n if s.len() > 8 {\n\n g = Some(genes::Genes::make_genes(vec![\n\n s[7].parse::<f32>().unwrap(),\n\n s[8].parse::<f32>().unwrap(),\n\n s[9].parse::<f32>().unwrap(),\n\n s[10].parse::<f32>().unwrap(),\n\n ]));\n\n } else {\n\n g = None;\n\n }\n\n let mut c = Crop::new(\n\n Rect::new(\n\n s[1].parse::<i32>().unwrap() * TILE_SIZE as i32,\n\n s[2].parse::<i32>().unwrap() * TILE_SIZE as i32,\n\n TILE_SIZE,\n\n TILE_SIZE,\n\n ),\n", "file_path": "src/crop.rs", "rank": 63, "score": 6090.67116867876 }, { "content": " win.copy(self.texture(), None, barn_sub_set).unwrap();\n\n return win;\n\n }\n\n win\n\n }\n\n\n\n // pub fn check_for_collision(&self, x: i32, y: i32, w: i32, h: i32) -> bool {\n\n // true\n\n // }\n\n}\n", "file_path": "src/item.rs", "rank": 64, "score": 6090.633091805957 }, { "content": "// Imports\n\nuse sdl2::rect::Rect;\n\nuse sdl2::render::{Texture, WindowCanvas};\n\nuse std::str::FromStr;\n\n\n\nuse crate::genes;\n\nuse crate::population::Population;\n\nuse crate::InventoryItemTrait;\n\n\n\n// Import constant from main\n\nuse crate::{CAM_H, CAM_W, TILE_SIZE};\n\n// use std::string::ParseError;\n\n\n\nuse rand::Rng;\n\n\n\n/// Crop type enum\n\n#[derive(Copy, Clone, PartialEq, Debug)]\n\n\n\npub enum CropType {\n\n None,\n", "file_path": "src/crop.rs", "rank": 65, "score": 6090.562157668424 }, { "content": " /// Get `src` of player\n\n pub fn src(&mut self) -> Rect {\n\n let k = match self.dir {\n\n Direction::Down => 0,\n\n Direction::Left => 1,\n\n Direction::Right => 2,\n\n Direction::Up => 3,\n\n };\n\n let src = &mut self.src[k];\n\n // Animate if the player is moving *or* if the animation hasn't 
looped\n\n // yet, so that the sprite doesn't jerk downward when stopping.\n\n if self.moving || src.current_index() != 0 {\n\n *src.tick()\n\n } else {\n\n src.reset(Instant::now());\n\n *src.current()\n\n }\n\n }\n\n\n\n /// Get texture of player\n", "file_path": "src/player.rs", "rank": 66, "score": 6090.504212965784 }, { "content": "\n\n let mut market_items = vec![store_item_0, store_item_1, store_item_2, store_item_3];\n\n\n\n let mut store = store::Store::new(4, &mut market_items);\n\n\n\n let mut in_area = Area::Home;\n\n // Things that might be used every frame but should only be loaded once:\n\n let _bg_tiles_tex = texture_creator\n\n .load_texture(\"src/images/Background_Tileset.png\")\n\n .unwrap();\n\n\n\n // enum used to pause the game while any menu is up.\n\n let mut in_menu: Option<Menu> = None;\n\n 'gameloop: loop {\n\n for event in event_pump.poll_iter() {\n\n match event {\n\n Event::Quit { .. }\n\n | Event::KeyDown {\n\n keycode: Some(Keycode::Escape),\n\n ..\n", "file_path": "src/main.rs", "rank": 67, "score": 6090.489117513387 }, { "content": " wincan\n\n .draw_rect(Rect::new(x_pos, y_pos, TILE_SIZE, TILE_SIZE))\n\n .unwrap();\n\n }\n\n }\n\n }\n\n // Drawing item\n\n for item in &item_vec {\n\n wincan = item.print_item(cur_bg.x(), cur_bg.y, CAM_W, CAM_H, wincan);\n\n }\n\n\n\n // Draw crops\n\n for _x in 0..((BG_W / TILE_SIZE) as i32 + 1) {\n\n for _y in 0..((BG_H / TILE_SIZE) as i32 + 1) {\n\n let _c = pop.get_crop_with_index(_x as u32, _y as u32);\n\n match _c.get_crop_type() {\n\n \"None\" => {}\n\n _ => {\n\n wincan = _c.print_crop(cur_bg.x(), cur_bg.y(), wincan);\n\n }\n", "file_path": "src/main.rs", "rank": 68, "score": 6090.465731224895 }, { "content": "}\n\n\n\nimpl InventoryItemTrait for Crop<'_> {\n\n /// Sort inventory so that you take the best item from the inventory\n\n /// This can be a combination of factors\n\n /// i.e. 2*speed + resistance\n\n fn get_value(&self) -> i32 {\n\n if let Some(g) = self.get_all_genes() {\n\n (g.average() * 100 as f32) as i32\n\n } else {\n\n 0\n\n }\n\n }\n\n fn texture(&self) -> &Texture {\n\n &self.texture\n\n }\n\n fn src(&self) -> Rect {\n\n self.src\n\n }\n\n fn inventory_input(\n", "file_path": "src/crop.rs", "rank": 69, "score": 6090.344257460479 }, { "content": " pub fn print_crop(&self, x: i32, y: i32, mut win: WindowCanvas) -> WindowCanvas {\n\n let testx = self.get_x() - x;\n\n let testy = self.get_y() - y;\n\n\n\n if testx > -(self.get_width() as i32)\n\n && testx < CAM_W as i32\n\n && testy > -(self.get_height() as i32)\n\n && testy < CAM_H as i32\n\n {\n\n let crop_sub_set = Rect::new(\n\n self.get_x() - x,\n\n self.get_y() - y,\n\n self.get_width(),\n\n self.get_height(),\n\n );\n\n win.copy(self.get_texture(), self.get_src(), crop_sub_set)\n\n .unwrap();\n\n return win;\n\n }\n\n win\n", "file_path": "src/crop.rs", "rank": 70, "score": 6090.197352177 }, { "content": " .pop_item();\n\n None\n\n } else {\n\n None\n\n }\n\n }\n\n _ => None,\n\n }\n\n }\n\n None => None,\n\n }\n\n }\n\n None => None,\n\n }\n\n }\n\n\n\n /// Eat a food yum. 
Or no food!\n\n pub fn eat(&mut self, kind: CropType) -> bool {\n\n let k = Inventory::crop_idx(kind, false);\n\n if self.inventory_slots[k].get_len() == 0 {\n\n false\n\n } else {\n\n self.inventory_slots.get_mut(k).unwrap().pop_item();\n\n true\n\n }\n\n }\n\n}\n", "file_path": "src/inventory.rs", "rank": 71, "score": 6089.977687687416 }, { "content": "mod pest;\n\nmod pest_population;\n\nmod player;\n\nmod population;\n\nmod save_load;\n\nmod sleep_menu;\n\nmod store;\n\nmod tile;\n\nmod tool;\n\n\n\nuse anim::Animation;\n\n\n\nuse sdl2::event::Event;\n\nuse sdl2::image::LoadTexture;\n\nuse sdl2::keyboard::Keycode;\n\n\n\nuse sdl2::pixels::Color;\n\nuse sdl2::rect::Rect;\n\nuse sdl2::render::BlendMode;\n\nuse sdl2::render::Texture;\n", "file_path": "src/main.rs", "rank": 72, "score": 6089.96650390776 }, { "content": " self.src = Rect::new(0, 0, TILE_SIZE, TILE_SIZE);\n\n }\n\n }\n\n\n\n pub fn set_water(&mut self, water: bool) {\n\n if self.tilled() {\n\n if water {\n\n self.src = Rect::new(2 * TILE_SIZE as i32, TILE_SIZE as i32, TILE_SIZE, TILE_SIZE);\n\n } else {\n\n self.src = Rect::new(TILE_SIZE as i32, TILE_SIZE as i32, TILE_SIZE, TILE_SIZE);\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/tile.rs", "rank": 73, "score": 6089.940695239245 }, { "content": " pub fn item_list_draw(wincan: &mut WindowCanvas, items_array: &[MarketItem]) {\n\n let texture_creator = wincan.texture_creator();\n\n let market_menu_items = texture_creator\n\n .load_texture(\"src/images/Market_menu_items.png\")\n\n .unwrap();\n\n let mut i = 0;\n\n for item in items_array {\n\n wincan\n\n .copy(\n\n &market_menu_items,\n\n Rect::new(0, item.item_label_offset, 100, 6),\n\n Rect::new(150, 30 + i * 50, 500, 50),\n\n )\n\n .unwrap();\n\n Store::price_draw(wincan, 3, 380, 45 + i * 50, item.amount);\n\n Store::price_draw(wincan, 3, 530, 45 + i * 50, item.price);\n\n i = i + 1;\n\n }\n\n }\n\n\n", "file_path": "src/store.rs", "rank": 74, "score": 6089.791382855936 }, { "content": " wincan\n\n .copy(\n\n &label_texture,\n\n Rect::new(0, 0, 16, 5),\n\n Rect::new(850, 578, 80, 25),\n\n )\n\n .unwrap();\n\n wincan\n\n .copy(\n\n &label_texture,\n\n Rect::new(18, 0, 20, 5),\n\n Rect::new(850, 518, 100, 25),\n\n )\n\n .unwrap();\n\n\n\n let item_textures = texture_creator\n\n .load_texture(\"src/images/Crop_Tileset.png\")\n\n .unwrap();\n\n wincan\n\n .copy(\n", "file_path": "src/store.rs", "rank": 75, "score": 6089.493786074338 }, { "content": " }\n\n }\n\n\n\n pub fn x(&self) -> i32 {\n\n self.pos.x()\n\n }\n\n\n\n pub fn y(&self) -> i32 {\n\n self.pos.y()\n\n }\n\n\n\n pub fn width(&self) -> u32 {\n\n self.pos.width()\n\n }\n\n\n\n pub fn height(&self) -> u32 {\n\n self.pos.height()\n\n }\n\n\n\n pub fn pos(&self) -> Rect {\n", "file_path": "src/tile.rs", "rank": 76, "score": 6089.40344778004 }, { "content": " r\n\n }\n\n }\n\n\n\n /// Add item into the correct inventory slot\n\n pub fn add_item(&mut self, new_crop: Crop<'a>) {\n\n let seedy = new_crop.get_stage() != 3;\n\n let k = Inventory::crop_idx(new_crop.get_crop_type_enum(), seedy);\n\n self.inventory_slots[k].add_item(Box::new(new_crop));\n\n }\n\n\n\n pub fn get_inventory_slot(&self, index: i32) -> Option<&InventoryItem> {\n\n self.inventory_slots.get(index as usize)\n\n }\n\n\n\n /// Use the inventory slot for the correct function\n\n /// For crops, this means planting the crop onto tilled soil\n\n pub fn use_inventory(\n\n &mut self,\n\n square: (i32, i32),\n", "file_path": "src/inventory.rs", "rank": 77, "score": 6089.391104160192 }, { "content": " pub fn 
x(&self) -> i32 {\n\n self.pos.x()\n\n }\n\n\n\n pub fn y(&self) -> i32 {\n\n self.pos.y()\n\n }\n\n\n\n pub fn width(&self) -> u32 {\n\n self.pos.width()\n\n }\n\n\n\n pub fn height(&self) -> u32 {\n\n self.pos.height()\n\n }\n\n\n\n pub fn pos(&self) -> Rect {\n\n self.pos\n\n }\n\n\n", "file_path": "src/item.rs", "rank": 78, "score": 6089.279525184277 }, { "content": "use crate::population::Population;\n\n\n\n// Import constants from main\n\nuse crate::{BG_H, BG_W, TILE_SIZE};\n\n\n\n// Player sprites are 54x90 px.\n\npub const PLAYER_WIDTH: u32 = 54;\n\npub const PLAYER_HEIGHT: u32 = 90;\n\n/// PLAYER_EFF_HEIGHT_SKIP is the number of pixels to skip when computing\n\n/// collision.\n\nconst PLAYER_EFF_HEIGHT_SKIP: i32 = 10;\n\n\n\nconst SPEED_LIMIT: f32 = 5.0;\n\npub const ACCEL_RATE: f32 = 1.0;\n\n\n\n/// Sprite directions.\n\npub enum Direction {\n\n Down,\n\n Left,\n\n Right,\n", "file_path": "src/player.rs", "rank": 79, "score": 6088.983295825468 }, { "content": " }\n\n self.items.insert(insert_pos, new_item);\n\n }\n\n\n\n /// This will pop the highest sorted item at index 0\n\n pub fn pop_item(&mut self) -> Box<dyn InventoryItemTrait + 'a> {\n\n self.items.remove(0 as usize)\n\n }\n\n\n\n pub fn get_item(&self, index: i32) -> Option<&Box<dyn InventoryItemTrait + 'a>> {\n\n if index >= self.get_len() {\n\n return None;\n\n }\n\n Some(&(self.items[index as usize]))\n\n }\n\n}\n\n\n\n/// Inventory class that has a vector of inventory slots\n\n/// Also keeps track of the current selected inventory slot\n\n/// squares is used to draw the inventory slot. It is kept here so that it doesn't have to be initialized each time you want to draw\n", "file_path": "src/inventory.rs", "rank": 80, "score": 6088.946104091217 }, { "content": " }\n\n if keystate.contains(&Keycode::P) {\n\n let _new_crop_texture = texture_creator\n\n .load_texture(\"src/images/Crop_Tileset.png\")\n\n .unwrap();\n\n let _p = store.confirm_purchase();\n\n if let Some((a, t)) = _p {\n\n for _ in 0..a {\n\n let _c = crop::Crop::new(\n\n Rect::new(0, 0, 0, 0),\n\n 0,\n\n &crop_texture,\n\n &rotten_texture,\n\n false,\n\n t,\n\n Some(genes::Genes::new()),\n\n );\n\n p.add_item(_c);\n\n }\n\n }\n", "file_path": "src/main.rs", "rank": 81, "score": 6088.810644236529 }, { "content": "use crate::COM_HASH;\n\nuse std::collections::HashMap;\n\n\n\npub struct ESList {\n\n avg: f32,\n\n eslist: Vec<f32>,\n\n}\n\nimpl ESList {\n\n pub fn new() -> ESList {\n\n ESList {\n\n avg: 0.0,\n\n eslist: Vec::new(),\n\n }\n\n }\n\n pub fn last_average(&mut self, history: i32) -> f32 {\n\n if self.eslist.len() == 0 {\n\n return 0.0;\n\n }\n\n let skip =\n\n ((self.eslist.len() as i32) - 1).min(0.max((self.eslist.len() as i32) - history));\n", "file_path": "src/commodities.rs", "rank": 82, "score": 6088.397003290048 }, { "content": " && x_pos < (CAM_W as i32)\n\n && y_pos > -(TILE_SIZE as i32)\n\n && y_pos < (CAM_H as i32)\n\n {\n\n let cur_tile = Rect::new(\n\n crop_tile.tile.x() - cur_bg.x(),\n\n crop_tile.tile.y() - cur_bg.y(),\n\n TILE_SIZE,\n\n TILE_SIZE,\n\n );\n\n wincan\n\n .copy(crop_tile.tile.texture(), crop_tile.tile.src(), cur_tile)\n\n .unwrap();\n\n if (\n\n crop_tile.tile.x() / TILE_SIZE as i32,\n\n crop_tile.tile.y() / TILE_SIZE as i32,\n\n ) == coordinates\n\n {\n\n // println!(\"Drawing rect!\");\n\n wincan.set_draw_color(Color::RED);\n", "file_path": "src/main.rs", "rank": 83, "score": 6088.352405655898 }, { "content": "use std::collections::HashSet;\n\nuse std::thread;\n\nuse std::time::Duration;\n\n\n\nuse 
crate::crop::CropType;\n\nuse crate::market_item::MarketItem;\n\nuse crate::player::{PLAYER_HEIGHT, PLAYER_WIDTH};\n\n\n\nconst VSYNC: bool = true;\n\n// Camera dimensions\n\npub const CAM_W: u32 = 1280;\n\npub const CAM_H: u32 = 720;\n\n// Background dimensions\n\nconst BG_W: u32 = 3000;\n\nconst BG_H: u32 = 3000;\n\nconst TITLE: &str = \"Farnan's Farmers\";\n\npub const TILE_SIZE: u32 = 80; // Make this public so we can import it elsewhere\n\n\n\n// Right and bottom tile bounds\n\npub const RIGHT_TILE_BOUND: i32 = (BG_W / TILE_SIZE) as i32;\n", "file_path": "src/main.rs", "rank": 84, "score": 6088.0128042376855 }, { "content": " }\n\n }\n\n\n\n /// Get player position `Rect`\n\n pub fn get_pos(&self) -> Rect {\n\n let mut pos = self.pos;\n\n pos.set_y(pos.y + PLAYER_EFF_HEIGHT_SKIP);\n\n pos.set_height((pos.height() as i32 - PLAYER_EFF_HEIGHT_SKIP) as u32);\n\n pos\n\n }\n\n\n\n /// Get left bound of player\n\n #[allow(dead_code)]\n\n pub fn left(&self) -> i32 {\n\n self.pos.left()\n\n }\n\n\n\n /// Get right bound of player\n\n #[allow(dead_code)]\n\n pub fn right(&self) -> i32 {\n", "file_path": "src/player.rs", "rank": 85, "score": 6087.975545555784 }, { "content": " &item_textures,\n\n self.items_array[self.item_selected as usize].pos,\n\n Rect::new(665, 35, 460, 460),\n\n )\n\n .unwrap();\n\n }\n\n\n\n pub fn navigate(&mut self, increment: i32) {\n\n if self.sub_menu == 0 {\n\n if increment == -1 && self.item_selected != 0 {\n\n self.item_selected = self.item_selected + increment;\n\n self.price = 1;\n\n self.amount_selected = 1;\n\n }\n\n if increment == 1 && self.item_selected != self.number_of_goods - 1 {\n\n self.item_selected = self.item_selected + increment;\n\n self.price = 1;\n\n self.amount_selected = 1;\n\n }\n\n }\n", "file_path": "src/store.rs", "rank": 86, "score": 6087.6311770001685 }, { "content": " Direction::Down\n\n };\n\n }\n\n\n\n /// Returns the grid coordinates of the\n\n /// tile the player is facing\n\n pub fn get_facing(&self) -> (i32, i32) {\n\n let offset: (i32, i32) = {\n\n match self.get_dir() {\n\n // Down\n\n 0 => (0, 1),\n\n // Left\n\n 1 => (-1, 0),\n\n // Right\n\n 2 => (1, 0),\n\n // Up\n\n 3 => (0, -1),\n\n // Other (shouldn't happen)\n\n _ => (0, 0),\n\n }\n", "file_path": "src/player.rs", "rank": 87, "score": 6087.559793545513 }, { "content": "\n\nimpl<'a> Player<'a> {\n\n /// Creates a new `Player` instance.\n\n ///\n\n /// # Arguments\n\n /// * `pos` - Position of the player.\n\n /// * `texture` - Sprite sheet texture\n\n pub fn new(\n\n pos: Rect,\n\n texture: Texture<'a>,\n\n texture_creator: &'a TextureCreator<WindowContext>,\n\n ) -> Player<'a> {\n\n // Derive the number of frames from the size of the texture.\n\n let sz = texture.query();\n\n let bounds = Rect::new(0, 0, sz.width, sz.height);\n\n let dur = Duration::from_secs_f64(1.0 / 30.0);\n\n let mut anims = Vec::with_capacity(4);\n\n let now = Instant::now();\n\n for i in 0..4 {\n\n let anim = Animation::from_sheet(\n", "file_path": "src/player.rs", "rank": 88, "score": 6087.418016715679 }, { "content": " // Initialize squares to be drawn\n\n let squares: Vec<Rect> = (0..11)\n\n .map(|x| {\n\n Rect::new(\n\n INVENTORY_X_POS + (x * (ITEM_BOX_SIZE + BORDER_SIZE)),\n\n INVENTORY_Y_POS,\n\n ITEM_BOX_SIZE as u32,\n\n ITEM_BOX_SIZE as u32,\n\n )\n\n })\n\n .collect();\n\n\n\n Inventory {\n\n inventory_slots,\n\n selected: temp_select,\n\n squares,\n\n }\n\n }\n\n\n\n /// Draw inventory slots onto the canvas\n", "file_path": "src/inventory.rs", "rank": 89, "score": 6087.392281265045 }, { 
"content": "\n\n /// Moves to the next frame regardless of time.\n\n #[allow(dead_code)]\n\n fn next(&mut self) -> &T {\n\n self.advance(1)\n\n }\n\n\n\n /// Borrows the current frame.\n\n pub fn current(&self) -> &T {\n\n &self.frames[self.k]\n\n }\n\n\n\n /// Returns the current frame index.\n\n pub fn current_index(&self) -> usize {\n\n self.k\n\n }\n\n}\n\n\n\nimpl Animation<Rect> {\n\n /// Creates an animation dividing a row on a spritesheet into\n", "file_path": "src/anim.rs", "rank": 90, "score": 6087.380694693569 }, { "content": " /// Get y position of player\n\n pub fn y(&self) -> i32 {\n\n self.pos.y()\n\n }\n\n\n\n /// Get width of player\n\n pub fn width(&self) -> u32 {\n\n self.pos.width()\n\n }\n\n\n\n /// Get height of player\n\n pub fn height(&self) -> u32 {\n\n self.pos.height()\n\n }\n\n\n\n pub fn set_selected(&mut self, _selected: i32) {\n\n self.inventory.set_selected(_selected);\n\n }\n\n\n\n #[allow(dead_code)]\n", "file_path": "src/player.rs", "rank": 91, "score": 6087.308898393306 }, { "content": "static ITEM_BOX_SIZE: i32 = 64;\n\nstatic BORDER_SIZE: i32 = 4;\n\nstatic SELECTED_SIZE: i32 = 2;\n\nstatic NUMBER_SIZE: i32 = 20;\n\n\n\n/// Individual inventory slot. This takes in an inventory trait object(crop or tool)\n\n/// Inventory slots are sorted, so you have the \"best\" seed at the bottom of the queue\n\n/// This is done so that seed can have different genetics, but still have one inventory slot\n\n/// The vectors are sorted by their value: a number that is determined in the crop class\n\npub struct InventoryItem<'a> {\n\n items: Vec<Box<dyn InventoryItemTrait + 'a>>,\n\n is_tool: bool,\n\n}\n\n\n\nimpl<'a> InventoryItem<'a> {\n\n /// Takes in is_tool: used to differentiate tools from crops\n\n /// and initializes vector of inventory_item_trait\n\n pub fn new(is_tool: bool) -> InventoryItem<'a> {\n\n InventoryItem {\n\n items: Vec::new(),\n", "file_path": "src/inventory.rs", "rank": 92, "score": 6087.083821009328 }, { "content": " }\n\n\n\n /// Get a Crop's position\n\n pub fn get_pos(&self) -> Rect {\n\n self.pos\n\n }\n\n\n\n pub fn set_pos(&mut self, new_pos: Rect) {\n\n self.pos = new_pos;\n\n }\n\n\n\n /// Get a Crop's width\n\n pub fn get_width(&self) -> u32 {\n\n self.get_pos().width()\n\n }\n\n\n\n /// Get a Crop's height\n\n pub fn get_height(&self) -> u32 {\n\n self.get_pos().height()\n\n }\n", "file_path": "src/crop.rs", "rank": 93, "score": 6087.05689395072 }, { "content": "\n\n // draw item labels\n\n Store::item_list_draw(wincan, self.items_array);\n\n\n\n // selection\n\n wincan.set_draw_color(Color::RGBA(255, 0, 0, 60));\n\n wincan\n\n .fill_rect(Rect::new(150, 30 + self.item_selected * 50, 500, 50))\n\n .unwrap();\n\n\n\n // submenu\n\n wincan.set_draw_color(Color::RGBA(0, 0, 0, 40));\n\n wincan.fill_rect(self.menu_rect).unwrap();\n\n Store::price_draw(wincan, 6, 665, 578, self.price);\n\n Store::price_draw(wincan, 6, 665, 518, self.amount_selected);\n\n\n\n let texture_creator = wincan.texture_creator();\n\n let label_texture = texture_creator\n\n .load_texture(\"src/images/MoneyLabels.png\")\n\n .unwrap();\n", "file_path": "src/store.rs", "rank": 94, "score": 6087.042288141488 }, { "content": "\n\nimpl<'a> Store<'a> {\n\n pub fn new(number_of_goods: i32, items_array: &'a mut Vec<MarketItem>) -> Store<'a> {\n\n let item_selected = 0;\n\n let amount_selected = 1;\n\n let price = 1;\n\n let sub_menu = 0;\n\n let item_rect = Rect::new(150, 30, 500, 580);\n\n let money_rect = Rect::new(660, 570, 470, 40);\n\n let amount_rect = Rect::new(660, 510, 
470, 40);\n\n let menu_rect = Rect::new(150, 30, 500, 580);\n\n Store {\n\n item_selected,\n\n amount_selected,\n\n price,\n\n sub_menu,\n\n number_of_goods,\n\n item_rect,\n\n money_rect,\n\n amount_rect,\n", "file_path": "src/store.rs", "rank": 95, "score": 6086.922795210705 }, { "content": " /// equal-size tiles.\n\n pub fn from_sheet(\n\n bounds: &Rect,\n\n row_top: i32,\n\n tile_width: u32,\n\n tile_height: u32,\n\n frame_length: Duration,\n\n base: Instant,\n\n ) -> Animation<Rect> {\n\n let n = bounds.width() / tile_width;\n\n Animation::n_from_sheet(row_top, tile_width, tile_height, n, frame_length, base)\n\n }\n\n\n\n /// Creates an animation with n equal-size tiles along a row in a\n\n /// spritesheet.\n\n pub fn n_from_sheet(\n\n row_top: i32,\n\n tile_width: u32,\n\n tile_height: u32,\n\n n: u32,\n", "file_path": "src/anim.rs", "rank": 96, "score": 6086.849102609626 }, { "content": " .unwrap(),\n\n &texture_creator,\n\n );\n\n\n\n let mut pest_pop = save_load::load_pests();\n\n\n\n let _crop_vec: Vec<crop::Crop> = Vec::new();\n\n\n\n // LOAD SAVE DATA\n\n // Load home area\n\n let home_tup = save_load::load_home(\n\n &texture_creator,\n\n &crop_texture,\n\n &rotten_texture,\n\n &tile_texture,\n\n );\n\n let mut pop = home_tup.0;\n\n let item_vec = home_tup.1;\n\n\n\n // Load market\n", "file_path": "src/main.rs", "rank": 97, "score": 6086.817831271996 }, { "content": " Carrot,\n\n Corn,\n\n Potato,\n\n Lettuce,\n\n}\n\n\n\n/// Crop struct\n\npub struct Crop<'a> {\n\n /// Rectangle to manage crop position.\n\n pos: Rect,\n\n /// The stage of growth the crop is in, from\n\n /// 0 to 3.\n\n stage: u8,\n\n /// Rectangle to crop the sprite sheet to the\n\n /// appropriate tile.\n\n src: Rect,\n\n /// Texture of sprite sheet.\n\n texture: &'a Texture<'a>,\n\n /// Texture of rotten crop sprite sheet\n\n rotten_texture: &'a Texture<'a>,\n", "file_path": "src/crop.rs", "rank": 98, "score": 6086.75005507446 }, { "content": " // Go home. First fade to white.\n\n let alphas: Vec<u8> = (0..=255).collect();\n\n let dt = Duration::from_secs_f64(2.0 / (alphas.len() as f64));\n\n let mut blank = Animation::new(alphas, dt, Instant::now());\n\n blank.set_freezing();\n\n while blank.current_index() < 255 {\n\n let tex = texture_creator\n\n .load_texture(\"src/images/traveling_screen.png\")\n\n .unwrap();\n\n wincan.copy(&tex, None, None).unwrap();\n\n wincan.set_draw_color(Color::RGBA(255, 255, 255, *blank.tick()));\n\n wincan.fill_rect(r).unwrap();\n\n wincan.present();\n\n thread::sleep(Duration::from_millis(15));\n\n }\n\n // Go home.\n\n in_menu = None;\n\n return (in_menu, Area::Home);\n\n //previously the actual changing of the area would occur here, but I have moved that outside of this file and placed it right below the function call.\n\n } else if keystate.contains(&Keycode::N) {\n\n in_menu = None;\n\n return (in_menu, Area::Market);\n\n } else {\n\n //allows menu to stay on screen until either y or n is chosen\n\n return (in_menu, Area::Market);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/market.rs", "rank": 99, "score": 6086.722772740088 } ]
Rust
src/sync.rs
naftulikay/phatnoise.rs
ed6d4217bcd29d58738224c1314f16020e73f228
use log::{debug, error, info};

use crate::dms;
use crate::library::get_dms_media_library;
use crate::library::get_local_media_library;
use crate::library::LibraryFile;
use crate::library::LibrarySource;
use crate::utils::crypto::sha256sum;
use crate::utils::fs::copy_mtime;

use rayon::prelude::*;

use std::collections::{BTreeSet, HashSet};
use std::env;
use std::fs;
use std::path::Path;
use std::process;

pub fn synchronize() {
    if !dms::is_dms_present() {
        error!("No DMS device detected.");
        process::exit(1);
    }

    if !dms::is_dms_mounted() {
        error!("DMS device is present but not mounted.");
        process::exit(1);
    }

    synchronize_media_files();
}

pub fn synchronize_media_files() {
    info!("Synchronizing media files with DMS...");

    let local_dir = Path::join(
        Path::new(&match env::var("HOME") {
            Ok(value) => value,
            Err(e) => {
                error!("Unable to detect home directory: {}", e);
                process::exit(1);
            }
        }),
        Path::new("Music"),
    );

    let dms_dir = dms::get_dms_mount_point().expect("DMS not present or not mounted.");

    debug!("Music directory: {}", local_dir.display());

    let (local, dms) = (get_local_media_library(&local_dir), get_dms_media_library());

    let (added, deleted, changed) = (
        added_files(&local, &dms),
        deleted_files(&local, &dms),
        changed_files(&local, &dms),
    );

    info!("Copying {} new files to the DMS...", added.len());
    copy_files(&added, &local, &dms_dir);

    info!("Copying {} changed files to the DMS...", changed.len());
    copy_files(&changed, &local, &dms_dir);

    info!("Deleting {} orphaned files from the DMS...", deleted.len());
    delete_files(&deleted);
}

fn copy_files(files: &Vec<&LibraryFile>, local: &BTreeSet<LibraryFile>, dms_dir: &Path) {
    for file in files {
        let (source, dest) = (
            &local.get(file).unwrap().path,
            Path::join(&dms_dir, Path::new(&file.debase())),
        );

        debug!(
            "Copying local file {} to DMS at {}...",
            source.display(),
            dest.display()
        );

        let dest_dir = &dest
            .parent()
            .expect(format!("Unable to get parent directory for {}", dest.display()).as_str());

        if !dest_dir.is_dir() {
            debug!("Creating parent directory {}", dest_dir.display());
            fs::create_dir_all(&dest_dir).expect(
                format!("Unable to create parent directory for {}", dest.display()).as_str(),
            );
        }

        fs::copy(&source, &dest)
            .expect(format!("Unable to copy file {} to DMS", source.display()).as_str());

        copy_mtime(&source, &dest).expect(
            format!(
                "Unable to copy modification time from source to destination {}",
                dest.display()
            )
            .as_str(),
        );
    }
}

fn delete_files(files: &Vec<&LibraryFile>) {
    for file in files
        .iter()
        .filter(|f| f.source == LibrarySource::DMS)
        .map(|f| &f.path)
    {
        debug!("Deleting orphaned file from DMS {}", file.display());
        fs::remove_file(file)
            .expect(format!("Unable to remove file from DMS: {}", file.display()).as_str());
    }
}

pub fn added_files<'a>(
    local: &'a BTreeSet<LibraryFile>,
    dms: &'a BTreeSet<LibraryFile>,
) -> Vec<&'a LibraryFile> {
    local.difference(dms).collect()
}

pub fn deleted_files<'a>(
    local: &'a BTreeSet<LibraryFile>,
    dms: &'a BTreeSet<LibraryFile>,
) -> Vec<&'a LibraryFile> {
    dms.difference(local).collect()
}

pub fn changed_files<'a>(
    local: &'a BTreeSet<LibraryFile>,
    dms: &'a BTreeSet<LibraryFile>,
) -> Vec<&'a LibraryFile> {
    local
        .into_par_iter()
        .filter(|p| {
            if !dms.contains(p) {
                return false;
            }

            let (local, remote) = (p, dms.get(*p).unwrap());

            let (lmeta, rmeta) = (
                fs::metadata(&local.path).unwrap(),
                fs::metadata(&remote.path).unwrap(),
            );

            let (llen, rlen) = (lmeta.len(), rmeta.len());
            let (lmod, rmod) = (lmeta.modified().unwrap(), rmeta.modified().unwrap());

            if llen != rlen {
                debug!("{}: changed - size not equal", local.debase());
                return true;
            }

            let (first, last) = (lmod.min(rmod), lmod.max(rmod));
            let diff = last.duration_since(first).unwrap();

            if diff.as_secs() <= 3 {
                return false;
            }

            let (source, destination) = (
                sha256sum(&local.path).expect("unable to compute checksum for local file"),
                sha256sum(&remote.path).expect("unable to compute checksum for remote file"),
            );

            if source == destination {
                debug!("{}: unchanged - checksums match", local.debase());
                copy_mtime(&local.path, &remote.path).ok();
                false
            } else {
                debug!("{}: changed - checksums differ", local.debase());
                true
            }
        })
        .collect()
}
use log::{debug, error, info};

use crate::dms;
use crate::library::get_dms_media_library;
use crate::library::get_local_media_library;
use crate::library::LibraryFile;
use crate::library::LibrarySource;
use crate::utils::crypto::sha256sum;
use crate::utils::fs::copy_mtime;

use rayon::prelude::*;

use std::collections::{BTreeSet, HashSet};
use std::env;
use std::fs;
use std::path::Path;
use std::process;

pub fn synchronize() {
    if !dms::is_dms_present() {
        error!("No DMS device detected.");
        process::exit(1);
    }

    if !dms::is_dms_mounted() {
        error!("DMS device is present but not mounted.");
        process::exit(1);
    }

    synchronize_media_files();
}
fn copy_files(files: &Vec<&LibraryFile>, local: &BTreeSet<LibraryFile>, dms_dir: &Path) {
    for file in files {
        let (source, dest) = (
            &local.get(file).unwrap().path,
            Path::join(&dms_dir, Path::new(&file.debase())),
        );

        debug!(
            "Copying local file {} to DMS at {}...",
            source.display(),
            dest.display()
        );

        let dest_dir = &dest
            .parent()
            .expect(format!("Unable to get parent directory for {}", dest.display()).as_str());

        if !dest_dir.is_dir() {
            debug!("Creating parent directory {}", dest_dir.display());
            fs::create_dir_all(&dest_dir).expect(
                format!("Unable to create parent directory for {}", dest.display()).as_str(),
            );
        }

        fs::copy(&source, &dest)
            .expect(format!("Unable to copy file {} to DMS", source.display()).as_str());

        copy_mtime(&source, &dest).expect(
            format!(
                "Unable to copy modification time from source to destination {}",
                dest.display()
            )
            .as_str(),
        );
    }
}

fn delete_files(files: &Vec<&LibraryFile>) {
    for file in files
        .iter()
        .filter(|f| f.source == LibrarySource::DMS)
        .map(|f| &f.path)
    {
        debug!("Deleting orphaned file from DMS {}", file.display());
        fs::remove_file(file)
            .expect(format!("Unable to remove file from DMS: {}", file.display()).as_str());
    }
}

pub fn added_files<'a>(
    local: &'a BTreeSet<LibraryFile>,
    dms: &'a BTreeSet<LibraryFile>,
) -> Vec<&'a LibraryFile> {
    local.difference(dms).collect()
}

pub fn deleted_files<'a>(
    local: &'a BTreeSet<LibraryFile>,
    dms: &'a BTreeSet<LibraryFile>,
) -> Vec<&'a LibraryFile> {
    dms.difference(local).collect()
}

pub fn changed_files<'a>(
    local: &'a BTreeSet<LibraryFile>,
    dms: &'a BTreeSet<LibraryFile>,
) -> Vec<&'a LibraryFile> {
    local
        .into_par_iter()
        .filter(|p| {
            if !dms.contains(p) {
                return false;
            }

            let (local, remote) = (p, dms.get(*p).unwrap());

            let (lmeta, rmeta) = (
                fs::metadata(&local.path).unwrap(),
                fs::metadata(&remote.path).unwrap(),
            );

            let (llen, rlen) = (lmeta.len(), rmeta.len());
            let (lmod, rmod) = (lmeta.modified().unwrap(), rmeta.modified().unwrap());

            if llen != rlen {
                debug!("{}: changed - size not equal", local.debase());
                return true;
            }

            let (first, last) = (lmod.min(rmod), lmod.max(rmod));
            let diff = last.duration_since(first).unwrap();

            if diff.as_secs() <= 3 {
                return false;
            }

            let (source, destination) = (
                sha256sum(&local.path).expect("unable to compute checksum for local file"),
                sha256sum(&remote.path).expect("unable to compute checksum for remote file"),
            );

            if source == destination {
                debug!("{}: unchanged - checksums match", local.debase());
                copy_mtime(&local.path, &remote.path).ok();
                false
            } else {
                debug!("{}: changed - checksums differ", local.debase());
                true
            }
        })
        .collect()
}
pub fn synchronize_media_files() {
    info!("Synchronizing media files with DMS...");

    let local_dir = Path::join(
        Path::new(&match env::var("HOME") {
            Ok(value) => value,
            Err(e) => {
                error!("Unable to detect home directory: {}", e);
                process::exit(1);
            }
        }),
        Path::new("Music"),
    );

    let dms_dir = dms::get_dms_mount_point().expect("DMS not present or not mounted.");

    debug!("Music directory: {}", local_dir.display());

    let (local, dms) = (get_local_media_library(&local_dir), get_dms_media_library());

    let (added, deleted, changed) = (
        added_files(&local, &dms),
        deleted_files(&local, &dms),
        changed_files(&local, &dms),
    );

    info!("Copying {} new files to the DMS...", added.len());
    copy_files(&added, &local, &dms_dir);

    info!("Copying {} changed files to the DMS...", changed.len());
    copy_files(&changed, &local, &dms_dir);

    info!("Deleting {} orphaned files from the DMS...", deleted.len());
    delete_files(&deleted);
}
function_block-full_function
[ { "content": "pub fn is_dms_present() -> bool {\n\n get_dms_device().is_some()\n\n}\n\n\n", "file_path": "src/dms.rs", "rank": 2, "score": 86054.32286933367 }, { "content": "pub fn is_dms_mounted() -> bool {\n\n get_dms_mount_point().is_some()\n\n}\n\n\n", "file_path": "src/dms.rs", "rank": 3, "score": 85997.69139710971 }, { "content": "pub fn sha256sum(path: &Path) -> Result<Sha256Digest,io::Error> {\n\n let mut f = File::open(path)?;\n\n let mut buf: [u8; 4096] = [0; 4096];\n\n let mut digest = Sha256::new();\n\n\n\n while let Ok(bytes_read) = f.read(&mut buf) {\n\n if bytes_read == 0 {\n\n break;\n\n }\n\n\n\n digest.input(&buf[..bytes_read]);\n\n }\n\n\n\n let mut result: [u8; 32] = [0; 32];\n\n digest.result(&mut result);\n\n\n\n Ok(Sha256Digest(result))\n\n}\n", "file_path": "src/utils/crypto.rs", "rank": 4, "score": 79344.46939843395 }, { "content": "#[cfg(target_os = \"linux\")]\n\npub fn get_dms_device() -> Option<PathBuf> {\n\n fs::canonicalize(Path::new(\"/dev/disk/by-label/PHTDTA\")).ok()\n\n}\n\n\n", "file_path": "src/dms.rs", "rank": 5, "score": 76849.21606454186 }, { "content": "#[cfg(target_os = \"linux\")]\n\npub fn get_dms_mount_point() -> Option<PathBuf> {\n\n if !is_dms_present() {\n\n return None;\n\n }\n\n\n\n let device = get_dms_device()?;\n\n let f = fs::File::open(Path::new(\"/proc/mounts\")).ok()?;\n\n let buffer = io::BufReader::new(f);\n\n\n\n for line in buffer\n\n .lines()\n\n .filter_map(|s| s.ok())\n\n .collect::<Vec<String>>()\n\n {\n\n if line.starts_with(device.to_str()?) {\n\n return PROC_MOUNT_LINE\n\n .captures(&line)?\n\n .name(\"mount\")\n\n .map(|s| PathBuf::from(s.as_str()));\n\n }\n\n }\n\n\n\n None\n\n}\n", "file_path": "src/dms.rs", "rank": 6, "score": 74700.22074468076 }, { "content": "#[cfg(target_os = \"linux\")]\n\npub fn copy_mtime(source: &Path, dest: &Path) -> Result<(), ModTimeUpdateError> {\n\n let smeta = fs::metadata(source).unwrap();\n\n let s_modified = smeta.modified().unwrap();\n\n\n\n let dmeta = fs::metadata(dest).unwrap();\n\n let d_accessed = dmeta.accessed().unwrap();\n\n\n\n let accessed_duration = d_accessed.duration_since(UNIX_EPOCH).unwrap();\n\n let modified_duration = s_modified.duration_since(UNIX_EPOCH).unwrap();\n\n\n\n let rc = unsafe {\n\n let file = File::open(dest).unwrap();\n\n\n\n let accessed = timespec {\n\n tv_sec: accessed_duration.as_secs() as time_t,\n\n tv_nsec: accessed_duration.subsec_nanos() as libc::c_long,\n\n };\n\n\n\n let modified = timespec {\n\n tv_sec: modified_duration.as_secs() as time_t,\n", "file_path": "src/utils/fs.rs", "rank": 10, "score": 61072.552878916744 }, { "content": "/// Fetch a list of all media files in the local media library.\n\npub fn get_local_media_library<P>(base: P) -> HashSet<MediaFile>\n\nwhere\n\n P: Into<PathBuf>,\n\n{\n\n // mask base with a reference counter\n\n let base: Arc<PathBuf> = Arc::new(base.into());\n\n\n\n // find all files and directories within the directory excluding the directory itself\n\n let walker = WalkDir::new(base.clone().as_path())\n\n .min_depth(1)\n\n .into_iter();\n\n\n\n // find all media files and collect them as MediaFile instances\n\n walker\n\n .filter_map(|e| e.ok())\n\n .filter(|e| e.file_type().is_file() && is_media_file(e.path()))\n\n .map(|e| MediaFile::new(e.path(), base.clone().as_path()))\n\n .collect()\n\n}\n\n\n", "file_path": "src/fsync.rs", "rank": 11, "score": 60974.498053908086 }, { "content": "/// Fetch a list of all media files in the DMS media library\n\npub fn get_dms_media_library<P>(base: P) -> 
HashSet<MediaFile>\n\nwhere\n\n P: Into<PathBuf>,\n\n{\n\n // mask base with a reference counter\n\n let base: Arc<PathBuf> = Arc::new(base.into());\n\n\n\n // find all files and directories within the directory excluding the directory itself\n\n let walker = WalkDir::new(base.clone().as_path())\n\n .min_depth(1)\n\n .into_iter();\n\n\n\n // find all media files not existing in profiles and tts due to these being data directories\n\n walker\n\n .filter_entry(|e| is_allowed_dms_path(base.clone().as_path(), e))\n\n .filter_map(|e| e.ok())\n\n .filter(|e| e.file_type().is_file() && is_media_file(e.path()))\n\n .map(|e| MediaFile::new(e.path(), base.clone().as_path()))\n\n .collect()\n\n}\n\n\n", "file_path": "src/fsync.rs", "rank": 12, "score": 60974.498053908086 }, { "content": "pub fn is_media_file(path: &Path) -> bool {\n\n lazy_static! {\n\n static ref MEDIA_FILE_EXTENSION: Regex = Regex::new(r\"(?i)(mp3)$\").unwrap();\n\n }\n\n\n\n MEDIA_FILE_EXTENSION.is_match(\n\n // get the extension OsStr, convert to an Option<&str>, and unwrap or return empty string\n\n path.extension().and_then(|v| v.to_str()).unwrap_or(\"\"),\n\n )\n\n}\n\n\n", "file_path": "src/fsync.rs", "rank": 13, "score": 56177.64733918282 }, { "content": "pub fn is_media_filename(path: &Path) -> bool {\n\n MEDIA_FILE_EXTENSION.is_match(\n\n // get the extension OsStr, convert to an Option<&str>, and unwrap or return empty string\n\n path.extension().and_then(|v| v.to_str()).unwrap_or(\"\")\n\n )\n\n}\n\n\n", "file_path": "src/utils/media.rs", "rank": 14, "score": 54728.234061037816 }, { "content": "pub fn get_dms_media_library() -> BTreeSet<LibraryFile> {\n\n match dms::get_dms_mount_point() {\n\n Some(base) => utils::media::get_media_library(&base)\n\n .iter()\n\n .map(|p| LibraryFile::new(p, &base, LibrarySource::DMS))\n\n .collect(),\n\n None => BTreeSet::new(),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\npub mod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_media_file_identity() {\n\n let base = Path::new(\"Music\");\n\n\n\n // test bounce to lowercase\n\n assert_eq!(\n", "file_path": "src/library.rs", "rank": 15, "score": 52509.873807669595 }, { "content": "fn get_genre_flac(path: &Path) -> Result<String, metaflac::Error> {\n\n let tag = metaflac::Tag::read_from_path(path)?;\n\n\n\n for tag_name in &[\"GENRE\", \"STYLE\"] {\n\n // FIXME this is garbage horse trash\n\n if let Some(entities) = tag.get_vorbis(tag_name) {\n\n for entity in entities {\n\n if entity.len() > 0 {\n\n return Ok(entity.to_string());\n\n }\n\n }\n\n }\n\n }\n\n\n\n Ok(DEFAULT_GENRE.to_string())\n\n}\n\n\n", "file_path": "src/metadata.rs", "rank": 16, "score": 51429.98417486888 }, { "content": "fn get_artist_flac(path: &Path) -> Result<String, metaflac::Error> {\n\n let tag = metaflac::Tag::read_from_path(path)?;\n\n\n\n for tag_name in &[\"ALBUMARTIST\", \"ARTIST\", \"COMPOSER\"] {\n\n // FIXME this is garbage horse trash\n\n if let Some(entities) = tag.get_vorbis(tag_name) {\n\n for entity in entities {\n\n if entity.len() > 0 {\n\n return Ok(entity.to_string());\n\n }\n\n }\n\n }\n\n }\n\n\n\n Ok(DEFAULT_ARTIST.to_string())\n\n}\n\n\n", "file_path": "src/metadata.rs", "rank": 17, "score": 51429.98417486888 }, { "content": "fn get_album_flac(path: &Path) -> Result<String, metaflac::Error> {\n\n let tag = metaflac::Tag::read_from_path(path)?;\n\n let metadata = tag.vorbis_comments().unwrap();\n\n\n\n // find the first non-empty tag, or return DEFAULT_ALBUM\n\n Ok(metadata\n\n .album()\n\n .iter()\n\n .flat_map(|&v| v)\n\n .map(|s| s.to_string())\n\n 
.filter(|f| f.len() > 0)\n\n .nth(0)\n\n .unwrap_or(DEFAULT_ALBUM.to_string()))\n\n}\n\n\n", "file_path": "src/metadata.rs", "rank": 18, "score": 51429.98417486888 }, { "content": "pub fn get_media_library(base: &Path) -> Vec<PathBuf> {\n\n let walker = WalkDir::new(base).min_depth(1).into_iter();\n\n // find all media files and collect them\n\n walker.filter_map(|e| e.ok())\n\n .filter(|e| e.file_type().is_file())\n\n .filter(|e| is_media_filename(e.path()))\n\n .map(|e| e.path().to_owned())\n\n .collect()\n\n}\n\n\n", "file_path": "src/utils/media.rs", "rank": 19, "score": 49663.553883619505 }, { "content": "pub fn is_allowed_dms_path(base: &Path, entry: &DirEntry) -> bool {\n\n // get the lowercase intersection of the entry and the base\n\n let intersection = entry\n\n .path()\n\n .strip_prefix(base)\n\n .unwrap()\n\n .to_string_lossy()\n\n .chars()\n\n .flat_map(|c| c.case_fold())\n\n .collect::<String>();\n\n\n\n match intersection.as_str() {\n\n \"profiles\" => false,\n\n \"tts\" => false,\n\n _ => true,\n\n }\n\n}\n\n\n", "file_path": "src/fsync.rs", "rank": 20, "score": 48526.758241365984 }, { "content": "pub fn get_local_media_library(base: &Path) -> BTreeSet<LibraryFile> {\n\n utils::media::get_media_library(base)\n\n .iter()\n\n .map(|p| LibraryFile::new(p, base, LibrarySource::Local))\n\n .collect()\n\n}\n\n\n", "file_path": "src/library.rs", "rank": 21, "score": 47563.72839343625 }, { "content": "#[test]\n\nfn test_is_media_file() {\n\n // test false cases\n\n assert!(!is_media_file(Path::new(\"/home/naftuli/Music/Directory\")));\n\n assert!(!is_media_file(Path::new(\n\n \"/home/naftuli/Music/Directory/Folder.jpg\"\n\n )));\n\n\n\n // test true cases\n\n assert!(is_media_file(Path::new(\n\n \"/home/naftuli/Music/01 - Track.mp3\"\n\n )));\n\n assert!(is_media_file(Path::new(\n\n \"/home/naftuli/Music/01 - Track.MP3\"\n\n )));\n\n}\n\n\n", "file_path": "src/fsync.rs", "rank": 22, "score": 23953.84925335766 }, { "content": "#[test]\n\nfn test_string_pool() {\n\n let pool = StringPool::new();\n\n let new = pool.get(\"brand new\");\n\n let old = pool.get(\"brand new\");\n\n\n\n assert!(Arc::ptr_eq(&new, &old));\n\n}\n", "file_path": "src/utils/test.rs", "rank": 23, "score": 23099.17378668335 }, { "content": "#[test]\n\nfn test_is_media_filename() {\n\n // test false cases\n\n assert!(!is_media_filename(Path::new(\"/home/naftuli/Music/Directory\")));\n\n assert!(!is_media_filename(Path::new(\"/home/naftuli/Music/Directory/Folder.jpg\")));\n\n\n\n // test true cases\n\n assert!(is_media_filename(Path::new(\"/home/naftuli/Music/01 - Track.mp3\")));\n\n assert!(is_media_filename(Path::new(\"/home/naftuli/Music/01 - Track.MP3\")));\n\n}\n", "file_path": "src/utils/media.rs", "rank": 24, "score": 23099.17378668335 }, { "content": "#[test]\n\nfn test_media_file_identity() {\n\n let base = Path::new(\"Music\");\n\n\n\n // test bounce to lowercase\n\n assert_eq!(\n\n \"andrew w. k./i get wet/02 - party hard.mp3\",\n\n MediaFile::new(\"Music/Andrew W. 
K./I Get Wet/02 - Party Hard.mp3\", base).id()\n\n );\n\n // test supported ascii characters\n\n assert_eq!(\n\n \"mêlée/everyday behavior/01 - got it all.mp3\",\n\n MediaFile::new(\"Music/Mêlée/Everyday Behavior/01 - Got It All.mp3\", base).id()\n\n );\n\n // test strip colons\n\n assert_eq!(\n\n \"apocalyptica/begin again/01 - track thing.mp3\",\n\n MediaFile::new(\n\n \"Music/Apocalyptica/Begin: Again/01 - Track: Thing.mp3\",\n\n base\n\n )\n\n .id()\n\n );\n\n // test strip tabs\n\n assert_eq!(\n\n \"the end/something.mp3\",\n\n MediaFile::new(\"Music/The\\tEnd/Something.mp3\", base).id()\n\n );\n\n}\n", "file_path": "src/fsync.rs", "rank": 25, "score": 23099.17378668335 }, { "content": "#[test]\n\nfn test_get_genre_flac() {\n\n // should return default genre on blank flac\n\n assert_eq!(\n\n DEFAULT_GENRE,\n\n get_genre_flac(Path::new(\"test/fixtures/flac/blank.flac\")).unwrap()\n\n );\n\n assert_eq!(\n\n \"Genre\",\n\n get_genre_flac(Path::new(\"test/fixtures/flac/genre.flac\")).unwrap()\n\n );\n\n assert_eq!(\n\n \"Style 1\",\n\n get_genre_flac(Path::new(\"test/fixtures/flac/style.flac\")).unwrap()\n\n );\n\n}\n\n\n", "file_path": "src/metadata/test.rs", "rank": 26, "score": 22315.251017901388 }, { "content": "#[test]\n\nfn test_id3_talb() {\n\n let tags = ID3Tag::read_from_path(\"test/fixtures/id3/talb.mp3\").unwrap();\n\n assert_eq!(Some(\"The Album\"), tags.get(\"TALB\").unwrap().content.text());\n\n}\n\n\n", "file_path": "src/metadata/test/fixtures.rs", "rank": 27, "score": 22315.251017901388 }, { "content": "#[test]\n\nfn test_flac_composer() {\n\n let tag = FLACTag::read_from_path(\"test/fixtures/flac/composer.flac\").unwrap();\n\n let metadata = tag.vorbis_comments().unwrap();\n\n assert!(metadata.album_artist().is_none());\n\n assert!(metadata.artist().is_none());\n\n assert_eq!(\"Composer\", tag.get_vorbis(\"COMPOSER\").iter().flat_map(|&v| v).nth(0).unwrap());\n\n}\n\n\n", "file_path": "src/metadata/test/fixtures.rs", "rank": 28, "score": 22315.251017901388 }, { "content": "#[test]\n\nfn test_flac_albumartist() {\n\n let tag = FLACTag::read_from_path(\"test/fixtures/flac/albumartist.flac\").unwrap();\n\n let metadata = tag.vorbis_comments().unwrap();\n\n assert_eq!(\"Album Artist\", metadata.album_artist().unwrap()[0]);\n\n assert_eq!(\"Artist\", metadata.artist().unwrap()[0]);\n\n // metaflac has a case-sensitivity bug as of 0.1.5: https://github.com/jameshurst/rust-metaflac/issues/2\n\n assert_eq!(\"Composer\", tag.get_vorbis(\"COMPOSER\").iter().flat_map(|&v| v).nth(0).unwrap());\n\n}\n\n\n", "file_path": "src/metadata/test/fixtures.rs", "rank": 29, "score": 22315.251017901388 }, { "content": "#[test]\n\nfn test_flac_artist() {\n\n let tag = FLACTag::read_from_path(\"test/fixtures/flac/artist.flac\").unwrap();\n\n let metadata = tag.vorbis_comments().unwrap();\n\n assert!(metadata.album_artist().is_none());\n\n assert_eq!(\"Artist\", metadata.artist().unwrap()[0]);\n\n assert_eq!(\"Composer\", tag.get_vorbis(\"COMPOSER\").iter().flat_map(|&v| v).nth(0).unwrap());\n\n}\n\n\n", "file_path": "src/metadata/test/fixtures.rs", "rank": 30, "score": 22315.251017901388 }, { "content": "#[test]\n\nfn test_id3_blank() {\n\n // blank.mp3 should have absolutely no tags present\n\n assert_eq!(0, ID3Tag::read_from_path(\"test/fixtures/id3/blank.mp3\").unwrap().frames().len());\n\n}\n\n\n", "file_path": "src/metadata/test/fixtures.rs", "rank": 31, "score": 22315.251017901388 }, { "content": "#[test]\n\nfn test_id3_tope() {\n\n let tags = 
ID3Tag::read_from_path(\"test/fixtures/id3/tope.mp3\").unwrap();\n\n assert!(tags.get(\"TPE1\").is_none());\n\n assert!(tags.get(\"TPE2\").is_none());\n\n assert_eq!(Some(\"Tope Rope\"), tags.get(\"TOPE\").unwrap().content.text());\n\n}\n\n\n", "file_path": "src/metadata/test/fixtures.rs", "rank": 32, "score": 22315.251017901388 }, { "content": "#[test]\n\n#[ignore]\n\nfn test_get_duration_mp3() {\n\n panic!(\"not implemented\")\n\n}\n", "file_path": "src/metadata/test.rs", "rank": 33, "score": 22315.251017901388 }, { "content": "#[test]\n\nfn test_flac_album() {\n\n let tag = FLACTag::read_from_path(\"test/fixtures/flac/album.flac\").unwrap();\n\n let metadata = tag.vorbis_comments().unwrap();\n\n assert_eq!(\"Album\", metadata.album().unwrap()[0]);\n\n}\n\n\n", "file_path": "src/metadata/test/fixtures.rs", "rank": 34, "score": 22315.251017901388 }, { "content": "#[test]\n\nfn test_get_artist_id3() {\n\n // test that the file without any tags returns DEFAULT_ARTIST\n\n // FIXME why did this ever work?\n\n // assert_eq!(\n\n // DEFAULT_ARTIST,\n\n // get_artist_id3(\n\n // &id3::Tag::read_from_path(Path::new(\"test/fixtures/id3/blank.mp3\")).unwrap()\n\n // )\n\n // );\n\n // test that it finds the value in TPE1\n\n assert_eq!(\n\n \"Tee-Pee 1\",\n\n get_artist_id3(&id3::Tag::read_from_path(Path::new(\"test/fixtures/id3/tpe1.mp3\")).unwrap())\n\n );\n\n // test that it finds the value in TPE2\n\n assert_eq!(\n\n \"Tee-Pee 2\",\n\n get_artist_id3(&id3::Tag::read_from_path(Path::new(\"test/fixtures/id3/tpe2.mp3\")).unwrap())\n\n );\n\n // test that it finds the value in TOPE\n\n\n\n assert_eq!(\n\n \"Tope Rope\",\n\n get_artist_id3(&id3::Tag::read_from_path(Path::new(\"test/fixtures/id3/tope.mp3\")).unwrap())\n\n );\n\n}\n\n\n", "file_path": "src/metadata/test.rs", "rank": 35, "score": 22315.251017901388 }, { "content": "#[test]\n\nfn test_get_genre_id3() {\n\n // test that the file without any tags returns DEFAULT_GENRE\n\n assert_eq!(\n\n DEFAULT_GENRE,\n\n get_genre_id3(&id3::Tag::read_from_path(Path::new(\"test/fixtures/id3/blank.mp3\")).unwrap())\n\n );\n\n // test that it finds the value in TCON\n\n assert_eq!(\n\n \"Metalstep\",\n\n get_genre_id3(&id3::Tag::read_from_path(Path::new(\"test/fixtures/id3/tcon.mp3\")).unwrap())\n\n );\n\n}\n\n\n", "file_path": "src/metadata/test.rs", "rank": 36, "score": 22315.251017901388 }, { "content": "#[test]\n\nfn test_id3_tcon() {\n\n let tags = ID3Tag::read_from_path(\"test/fixtures/id3/tcon.mp3\").unwrap();\n\n assert_eq!(Some(\"Metalstep\"), tags.get(\"TCON\").unwrap().content.text());\n\n}\n", "file_path": "src/metadata/test/fixtures.rs", "rank": 37, "score": 22315.251017901388 }, { "content": "#[test]\n\nfn test_get_album_id3() {\n\n // test that the file without any tags returns DEFAULT_ALBUM\n\n assert_eq!(\n\n DEFAULT_ALBUM,\n\n get_album_id3(&id3::Tag::read_from_path(Path::new(\"test/fixtures/id3/blank.mp3\")).unwrap())\n\n );\n\n // test that it finds the value in TALB\n\n assert_eq!(\n\n \"The Album\",\n\n get_album_id3(&id3::Tag::read_from_path(Path::new(\"test/fixtures/id3/talb.mp3\")).unwrap())\n\n );\n\n}\n\n\n", "file_path": "src/metadata/test.rs", "rank": 38, "score": 22315.251017901388 }, { "content": "#[test]\n\nfn test_id3_tpe2() {\n\n let tags = ID3Tag::read_from_path(\"test/fixtures/id3/tpe2.mp3\").unwrap();\n\n assert_eq!(Some(\"Tee-Pee 1\"), tags.get(\"TPE1\").unwrap().content.text());\n\n assert_eq!(Some(\"Tee-Pee 2\"), tags.get(\"TPE2\").unwrap().content.text());\n\n assert_eq!(Some(\"Tope Rope\"), 
tags.get(\"TOPE\").unwrap().content.text());\n\n}\n\n\n", "file_path": "src/metadata/test/fixtures.rs", "rank": 39, "score": 22315.251017901388 }, { "content": "#[test]\n\nfn test_flac_style() {\n\n let tag = FLACTag::read_from_path(\"test/fixtures/flac/style.flac\").unwrap();\n\n let metadata = tag.vorbis_comments().unwrap();\n\n assert!(metadata.genre().is_none());\n\n assert_eq!(\"Style 1\", tag.get_vorbis(\"STYLE\").iter().flat_map(|&v| v).nth(0).unwrap());\n\n assert_eq!(\"Style 2\", tag.get_vorbis(\"STYLE\").iter().flat_map(|&v| v).nth(1).unwrap());\n\n}\n\n\n", "file_path": "src/metadata/test/fixtures.rs", "rank": 40, "score": 22315.251017901388 }, { "content": "#[test]\n\nfn test_flac_genre() {\n\n let tag = FLACTag::read_from_path(\"test/fixtures/flac/genre.flac\").unwrap();\n\n let metadata = tag.vorbis_comments().unwrap();\n\n assert_eq!(\"Genre\", metadata.genre().unwrap()[0]);\n\n assert_eq!(\"Style 1\", tag.get_vorbis(\"STYLE\").iter().flat_map(|&v| v).nth(0).unwrap());\n\n assert_eq!(\"Style 2\", tag.get_vorbis(\"STYLE\").iter().flat_map(|&v| v).nth(1).unwrap());\n\n}\n\n\n", "file_path": "src/metadata/test/fixtures.rs", "rank": 41, "score": 22315.251017901388 }, { "content": "#[test]\n\nfn test_flac_blank() {\n\n // blank.flac should have no tags\n\n assert_eq!(0, FLACTag::read_from_path(\"test/fixtures/flac/blank.flac\").unwrap()\n\n .vorbis_comments().unwrap().comments.len());\n\n}\n\n\n", "file_path": "src/metadata/test/fixtures.rs", "rank": 42, "score": 22315.251017901388 }, { "content": "#[test]\n\nfn test_get_album_flac() {\n\n // should return default album on blank flac\n\n assert_eq!(\n\n DEFAULT_ALBUM,\n\n get_album_flac(Path::new(\"test/fixtures/flac/blank.flac\")).unwrap()\n\n );\n\n assert_eq!(\n\n \"Album\",\n\n get_album_flac(Path::new(\"test/fixtures/flac/album.flac\")).unwrap()\n\n );\n\n}\n\n\n", "file_path": "src/metadata/test.rs", "rank": 43, "score": 22315.251017901388 }, { "content": "#[test]\n\nfn test_get_artist_flac() {\n\n // should return default artist on blank flac\n\n assert_eq!(\n\n DEFAULT_ARTIST,\n\n get_artist_flac(Path::new(\"test/fixtures/flac/blank.flac\")).unwrap()\n\n );\n\n assert_eq!(\n\n \"Album Artist\",\n\n get_artist_flac(Path::new(\"test/fixtures/flac/albumartist.flac\")).unwrap()\n\n );\n\n assert_eq!(\n\n \"Artist\",\n\n get_artist_flac(Path::new(\"test/fixtures/flac/artist.flac\")).unwrap()\n\n );\n\n assert_eq!(\n\n \"Composer\",\n\n get_artist_flac(Path::new(\"test/fixtures/flac/composer.flac\")).unwrap()\n\n );\n\n}\n\n\n", "file_path": "src/metadata/test.rs", "rank": 44, "score": 22315.251017901388 }, { "content": "#[test]\n\nfn test_id3_tpe1() {\n\n let tags = ID3Tag::read_from_path(\"test/fixtures/id3/tpe1.mp3\").unwrap();\n\n assert!(tags.get(\"TPE2\").is_none());\n\n assert_eq!(Some(\"Tee-Pee 1\"), tags.get(\"TPE1\").unwrap().content.text());\n\n assert_eq!(Some(\"Tope Rope\"), tags.get(\"TOPE\").unwrap().content.text());\n\n}\n\n\n", "file_path": "src/metadata/test/fixtures.rs", "rank": 45, "score": 22315.251017901388 }, { "content": "#[test]\n\n#[should_panic(expected = \"does not contain an id3 tag\")]\n\nfn test_get_genre_id3_empty() {\n\n // this file has no id3 header; should fail\n\n get_genre_id3(&id3::Tag::read_from_path(Path::new(\"test/fixtures/id3/no-id3.mp3\")).unwrap());\n\n}\n\n\n", "file_path": "src/metadata/test.rs", "rank": 46, "score": 21593.64445347647 }, { "content": "#[test]\n\n#[ignore]\n\nfn test_get_track_number_id3() {\n\n panic!(\"not implemented\")\n\n}\n\n\n", "file_path": 
"src/metadata/test.rs", "rank": 47, "score": 21593.64445347647 }, { "content": "#[test]\n\n#[should_panic(expected = \"does not contain an id3 tag\")]\n\nfn test_get_album_id3_empty() {\n\n // this file has no id3 header; should fail\n\n get_album_id3(&id3::Tag::read_from_path(Path::new(\"test/fixtures/id3/no-id3.mp3\")).unwrap());\n\n}\n\n\n", "file_path": "src/metadata/test.rs", "rank": 48, "score": 21593.64445347647 }, { "content": "#[test]\n\n#[should_panic(expected = \"does not contain an id3 tag\")]\n\nfn test_get_artist_id3_empty() {\n\n // this file has no id3 header; should fail\n\n get_artist_id3(&id3::Tag::read_from_path(Path::new(\"test/fixtures/id3/no-id3.mp3\")).unwrap());\n\n}\n\n\n", "file_path": "src/metadata/test.rs", "rank": 49, "score": 21593.64445347647 }, { "content": "#[test]\n\n#[ignore]\n\nfn test_get_track_number_id3_empty() {\n\n panic!(\"not implemented\")\n\n}\n\n\n", "file_path": "src/metadata/test.rs", "rank": 50, "score": 20927.207607463788 }, { "content": "fn get_duration_mp3(path: &Path) -> Duration {\n\n // some of my tracks panic when trying to get the duration, so failover to 0\n\n mp3_duration::from_path(path).unwrap_or(Duration::new(0, 0))\n\n}\n", "file_path": "src/metadata.rs", "rank": 52, "score": 20545.725588268477 }, { "content": "fn get_genre_id3(tag: &id3::Tag) -> String {\n\n tag.genre().unwrap_or(DEFAULT_GENRE).to_string()\n\n}\n\n\n", "file_path": "src/metadata.rs", "rank": 53, "score": 19638.216793682386 }, { "content": "fn get_album_id3(tag: &id3::Tag) -> String {\n\n tag.album().unwrap_or(DEFAULT_ALBUM).to_string()\n\n}\n\n\n", "file_path": "src/metadata.rs", "rank": 54, "score": 19638.216793682386 }, { "content": "fn get_title_id3(tag: &id3::Tag) -> String {\n\n tag.title().unwrap_or(DEFAULT_TITLE).to_string()\n\n}\n\n\n", "file_path": "src/metadata.rs", "rank": 55, "score": 19638.216793682386 }, { "content": "fn get_artist_id3(tag: &id3::Tag) -> String {\n\n // leeched from here: id3.org/id3v2.4.0-frames\n\n /*\n\n TPE2: Used by players as \"album artist,\" the artist who created the album of this track.\n\n TPE1: Used by players as \"artist,\" the artist who performed the track on the given album\n\n TOPE: Original artist.\n\n */\n\n // FIXME yikes, could be better\n\n tag.album_artist()\n\n .unwrap_or(\n\n tag.artist().unwrap_or(\n\n tag.get(\"TOPE\")\n\n .map(|frame| frame.content().text().unwrap_or(DEFAULT_ARTIST))\n\n .unwrap_or(DEFAULT_ARTIST),\n\n ),\n\n )\n\n .to_string()\n\n}\n\n\n", "file_path": "src/metadata.rs", "rank": 56, "score": 19638.216793682386 }, { "content": "fn get_track_number_id3(tag: &id3::Tag) -> u16 {\n\n // find the first non-empty tag or return DEFAULT_TRACK_NUMBER\n\n let track = tag\n\n .track()\n\n .map(|i| format!(\"{}\", i))\n\n .unwrap_or(DEFAULT_TRACK_NUMBER.to_string());\n\n\n\n // the above value is a string of the format (\\d+)(?:/(\\d+))?, deconstruct and parse into an int\n\n TRACK_NUMBER\n\n .captures(&track)\n\n .unwrap()\n\n .name(\"track\")\n\n .map(|s| u16::from_str(s.as_str()).unwrap())\n\n .unwrap()\n\n}\n\n\n", "file_path": "src/metadata.rs", "rank": 57, "score": 19020.85570050716 }, { "content": "use libc::futimens;\n\nuse libc::time_t;\n\nuse libc::timespec;\n\n\n\nuse std::error::Error;\n\nuse std::fmt;\n\nuse std::fs;\n\nuse std::fs::File;\n\nuse std::os::unix::io::AsRawFd;\n\nuse std::path::Path;\n\nuse std::time::UNIX_EPOCH;\n\n\n\n#[derive(Debug)]\n\npub struct ModTimeUpdateError {\n\n rc: isize,\n\n}\n\n\n\nimpl Error for ModTimeUpdateError {\n\n fn description(&self) -> &str 
{\n\n \"Unable to call futimens without error\"\n", "file_path": "src/utils/fs.rs", "rank": 59, "score": 6.770658777227415 }, { "content": "use lazy_static;\n\n\n\nuse regex::Regex;\n\n\n\nuse std::fs;\n\nuse std::io;\n\nuse std::io::BufRead;\n\nuse std::path::{Path, PathBuf};\n\n\n\nlazy_static! {\n\n static ref PROC_MOUNT_LINE: Regex =\n\n Regex::new(r\"(?i)^(?P<device>[^\\s]+)\\s+(?P<mount>[^\\s]+)\").unwrap();\n\n}\n\n\n", "file_path": "src/dms.rs", "rank": 60, "score": 6.459259086581163 }, { "content": "#[cfg(test)]\n\nmod test;\n\n\n\nuse id3;\n\n\n\nuse log::debug;\n\n\n\nuse regex::Regex;\n\n\n\nuse std::cmp::Ordering;\n\nuse std::convert::From;\n\nuse std::error::Error;\n\nuse std::fmt;\n\nuse std::iter::Iterator;\n\nuse std::path::Path;\n\nuse std::path::PathBuf;\n\nuse std::str::FromStr;\n\nuse std::string::String;\n\nuse std::sync::Arc;\n\nuse std::time::Duration;\n", "file_path": "src/metadata.rs", "rank": 62, "score": 5.071488920799352 }, { "content": "#[macro_use]\n\nextern crate lazy_static;\n\n\n\npub mod data;\n\npub mod dms;\n\npub mod fsync;\n\npub mod library;\n\npub mod metadata;\n\npub mod sync;\n\npub mod utils;\n", "file_path": "src/lib.rs", "rank": 63, "score": 4.905195374692856 }, { "content": "use log::debug;\n\n\n\nuse std::collections::HashSet;\n\nuse std::sync::Arc;\n\nuse std::sync::RwLock;\n\n\n\npub struct StringPool {\n\n map: RwLock<HashSet<Arc<str>>>,\n\n}\n\n\n\nimpl StringPool {\n\n pub fn new() -> Self {\n\n StringPool {\n\n map: RwLock::new(HashSet::new()),\n\n }\n\n }\n\n\n\n pub fn get(&self, value: &str) -> Arc<str> {\n\n {\n\n debug!(\"Acquiring read lock...\");\n", "file_path": "src/utils/stringpool.rs", "rank": 64, "score": 4.491962304499614 }, { "content": "#[cfg(test)]\n\nmod test;\n\n\n\npub mod crypto;\n\npub mod fs;\n\npub mod media;\n\npub mod stringpool;\n\n\n\n// export\n\npub use crate::utils::stringpool::StringPool;\n\n\n\npub static FAT32_DELETE_CHARS: &'static [char] = &['\\t'];\n\npub static FAT32_HYPHENIZE_CHARS: &'static [char] = &[':'];\n", "file_path": "src/utils.rs", "rank": 65, "score": 4.474331675330182 }, { "content": "use regex::Regex;\n\n\n\nuse std::cmp::{Eq, PartialEq};\n\nuse std::collections::HashSet;\n\nuse std::hash::{Hash, Hasher};\n\nuse std::path::{Path, PathBuf};\n\nuse std::string::String;\n\nuse std::sync::Arc;\n\n\n\nuse unicode_casefold::UnicodeCaseFold;\n\n\n\nuse walkdir::{DirEntry, WalkDir};\n\n\n\npub struct MediaFile {\n\n pub path: PathBuf,\n\n base: Arc<PathBuf>,\n\n}\n\n\n\nstatic SPACE_CHARS: &'static [char] = &['\\t'];\n\nstatic DELETE_CHARS: &'static [char] = &[':'];\n", "file_path": "src/fsync.rs", "rank": 66, "score": 4.421163136123122 }, { "content": "\n\n#[derive(Debug)]\n\npub enum MediaParsingError {\n\n FLACError { err: metaflac::Error },\n\n ID3Error { err: id3::Error },\n\n UnrecognizedFormat,\n\n}\n\n\n\nimpl Error for MediaParsingError {\n\n fn description(&self) -> &str {\n\n \"Unable to load metadata from media file.\"\n\n }\n\n}\n\n\n\nimpl fmt::Display for MediaParsingError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n &MediaParsingError::FLACError { ref err } => {\n\n write!(f, \"Unable to load metadata from media file: {}\", err)\n\n }\n", "file_path": "src/metadata.rs", "rank": 67, "score": 4.23472843505755 }, { "content": "use crate::dms;\n\nuse crate::utils;\n\n\n\nuse std::cmp::Eq;\n\nuse std::cmp::Ord;\n\nuse std::cmp::Ordering;\n\nuse std::cmp::PartialEq;\n\nuse std::cmp::PartialOrd;\n\nuse std::collections::BTreeSet;\n\nuse 
std::hash::Hash;\n\nuse std::hash::Hasher;\n\nuse std::path::Path;\n\nuse std::path::PathBuf;\n\n\n\nuse unicode_casefold::UnicodeCaseFold;\n\n\n\n#[derive(Clone, Copy, Eq, Debug, PartialEq)]\n\npub enum LibrarySource {\n\n Local,\n\n DMS,\n", "file_path": "src/library.rs", "rank": 68, "score": 4.070351783612818 }, { "content": "use std::fmt;\n\nuse std::fs::File;\n\nuse std::io;\n\nuse std::io::Read;\n\nuse std::path::Path;\n\n\n\nuse crypto::digest::Digest;\n\nuse crypto::sha2::Sha256;\n\n\n\n#[derive(Eq,PartialEq)]\n\npub struct Sha256Digest([u8; 32]);\n\n\n\nimpl fmt::Display for Sha256Digest {\n\n\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n let mut result = String::new();\n\n for b in self.0.iter() {\n\n result += &format!(\"{:02x}\", b);\n\n }\n\n write!(f, \"{}\", result)\n\n }\n\n}\n\n\n", "file_path": "src/utils/crypto.rs", "rank": 69, "score": 3.9189400137883426 }, { "content": " pub path: PathBuf,\n\n pub base: PathBuf,\n\n pub artist: Arc<str>,\n\n pub album: Arc<str>,\n\n pub genre: Arc<str>,\n\n pub title: String,\n\n pub track_number: u16,\n\n pub duration: u64,\n\n}\n\n\n\nimpl MediaMetadata {\n\n pub fn load(path: &Path, base: &Path, pool: &StringPool) -> Result<Self, MediaParsingError> {\n\n debug!(\"Loading metadata from file {}...\", path.display());\n\n match path.extension() {\n\n Some(extension) if extension == \"mp3\" => {\n\n let tag = id3::Tag::read_from_path(path)?;\n\n\n\n Ok(MediaMetadata {\n\n path: path.to_path_buf(),\n\n base: base.to_path_buf(),\n", "file_path": "src/metadata.rs", "rank": 71, "score": 3.676579087631722 }, { "content": "use std::fs::File;\n\nuse std::io::BufWriter;\n\nuse std::io::Write;\n\nuse std::path::Path;\n\n\n\n/// The database index into the tracks CSV file, to be rendered to disk as `tracks.idx`.\n\npub struct TracksDbIndex {\n\n pub track_offsets: Vec<u32>,\n\n}\n\n\n\nimpl TracksDbIndex {\n\n /// Dump this database index to the given file.\n\n pub fn write<P: AsRef<Path>>(&self, dest: P) {\n\n let mut f = BufWriter::new(File::create(dest).expect(\"unable to create tracks.idx\"));\n\n\n\n // first, write the count as a u32 at the beginning of the file\n\n let count = self.track_offsets.len() as u32;\n\n\n\n f.write(&count.to_le_bytes())\n\n .expect(\"unable to write count\");\n", "file_path": "src/data.rs", "rank": 72, "score": 3.6373514908156985 }, { "content": " &MediaParsingError::ID3Error { ref err } => {\n\n write!(f, \"Unable to load metadata from media file: {}\", err)\n\n }\n\n &MediaParsingError::UnrecognizedFormat => write!(\n\n f,\n\n \"Unable to load metadata from media file, unrecognized format.\"\n\n ),\n\n }\n\n }\n\n}\n\n\n\nimpl From<id3::Error> for MediaParsingError {\n\n fn from(e: id3::Error) -> Self {\n\n MediaParsingError::ID3Error { err: e }\n\n }\n\n}\n\n\n", "file_path": "src/metadata.rs", "rank": 73, "score": 3.2767509239154378 }, { "content": "use std::path::Path;\n\n\n\nuse super::*;\n\n\n\nuse std::mem::size_of;\n\n\n\n#[test]\n", "file_path": "src/metadata/test.rs", "rank": 74, "score": 3.214342747152943 }, { "content": "use super::*;\n\n\n\nuse std::sync::Arc;\n\n\n\n#[test]\n", "file_path": "src/utils/test.rs", "rank": 75, "score": 3.113404936555172 }, { "content": "use id3::Tag as ID3Tag;\n\n\n\nuse metaflac::Tag as FLACTag;\n\n\n\n#[test]\n", "file_path": "src/metadata/test/fixtures.rs", "rank": 76, "score": 2.894173867710612 }, { "content": "use std::path::Path;\n\nuse std::path::PathBuf;\n\nuse regex::Regex;\n\n\n\nuse walkdir::WalkDir;\n\n\n\nlazy_static! 
{\n\n static ref MEDIA_FILE_EXTENSION: Regex = Regex::new(r\"(?i)(mp3)$\").unwrap();\n\n}\n\n\n", "file_path": "src/utils/media.rs", "rank": 77, "score": 2.8935290940879232 }, { "content": "}\n\n\n\n#[derive(Debug)]\n\npub struct LibraryFile {\n\n pub id: String,\n\n pub path: PathBuf,\n\n pub base: PathBuf,\n\n pub source: LibrarySource,\n\n}\n\n\n\nimpl LibraryFile {\n\n pub fn new(path: &Path, base: &Path, source: LibrarySource) -> Self {\n\n LibraryFile {\n\n id: LibraryFile::gen_id(&path, &base),\n\n path: path.to_path_buf(),\n\n base: base.to_path_buf(),\n\n source: source,\n\n }\n\n }\n\n\n", "file_path": "src/library.rs", "rank": 78, "score": 2.789976606688936 }, { "content": "\n\nuse metaflac;\n\n\n\nuse mp3_duration;\n\n\n\nuse crate::utils::StringPool;\n\n\n\nstatic DEFAULT_ARTIST: &'static str = \"Unknown Artist\";\n\nstatic DEFAULT_ALBUM: &'static str = \"Unknown Album\";\n\nstatic DEFAULT_GENRE: &'static str = \"Unknown Genre\";\n\nstatic DEFAULT_TITLE: &'static str = \"Unknown Title\";\n\nstatic DEFAULT_TRACK_NUMBER: &'static str = \"0\";\n\n\n\nlazy_static! {\n\n static ref TRACK_NUMBER: Regex =\n\n Regex::new(r\"(?i)^(?P<track>\\d+)(?:/(?P<total>\\d+))?$\").unwrap();\n\n static ref PRONOUN_START: Regex = Regex::new(r\"(?i)^(?:a|an|the)\\s+\").unwrap();\n\n}\n\n\n\npub struct MediaMetadata {\n", "file_path": "src/metadata.rs", "rank": 79, "score": 2.7001691424849708 }, { "content": "\n\n for offset in &self.track_offsets {\n\n // write each offset in order into the file\n\n f.write(&offset.to_le_bytes())\n\n .expect(\"unable to write offset\");\n\n }\n\n\n\n // critical to flush always\n\n f.flush().expect(\"unable to flush data to disk\");\n\n }\n\n}\n\n\n\npub struct ArtistsDbIndex {\n\n pub id: u32,\n\n pub track_offsets: Vec<u32>,\n\n}\n", "file_path": "src/data.rs", "rank": 80, "score": 2.4337650588463715 }, { "content": " let reader = &self.map.read().unwrap();\n\n\n\n debug!(\"Checking for presence in collection...\");\n\n if reader.contains(value) {\n\n debug!(\"Value is present in collection, cloning Arc...\");\n\n return reader.get(value).unwrap().clone();\n\n }\n\n }\n\n\n\n debug!(\"Value not present in collection, inserting and returning Arc...\");\n\n let mut writer = self.map.write().unwrap();\n\n let result: Arc<str> = Arc::from(value);\n\n writer.insert(result.clone());\n\n result\n\n }\n\n}\n", "file_path": "src/utils/stringpool.rs", "rank": 81, "score": 1.939829691525642 }, { "content": "\n\nimpl MediaFile {\n\n pub fn new<P1, P2>(path: P1, base: P2) -> MediaFile\n\n where\n\n P1: Into<PathBuf>,\n\n P2: Into<PathBuf>,\n\n {\n\n MediaFile {\n\n path: path.into(),\n\n base: Arc::new(base.into()),\n\n }\n\n }\n\n\n\n pub fn id(&self) -> String {\n\n self.path\n\n .strip_prefix(self.base.as_path())\n\n .unwrap()\n\n .to_string_lossy()\n\n .chars()\n\n // delete illegal characters\n", "file_path": "src/fsync.rs", "rank": 82, "score": 1.8796478628437718 }, { "content": " }\n\n}\n\n\n\nimpl fmt::Display for ModTimeUpdateError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"failed to call futimens, return code {}\", self.rc)\n\n }\n\n}\n\n\n\n// Copy the modified time of source to dest\n\n#[cfg(target_os = \"linux\")]\n", "file_path": "src/utils/fs.rs", "rank": 83, "score": 1.5755830419273158 }, { "content": " tv_nsec: modified_duration.subsec_nanos() as libc::c_long,\n\n };\n\n\n\n let times = [accessed, modified];\n\n\n\n futimens(file.as_raw_fd() as libc::c_int, times.as_ptr()) as isize\n\n };\n\n\n\n if rc == 0 {\n\n Ok(())\n\n } 
else {\n\n Err(ModTimeUpdateError { rc })\n\n }\n\n}\n", "file_path": "src/utils/fs.rs", "rank": 84, "score": 1.5755830419273158 }, { "content": " .flat_map(|c| c.case_fold())\n\n // collect into a string\n\n .collect()\n\n }\n\n\n\n pub fn debase(&self) -> &str {\n\n self.path\n\n .strip_prefix(&self.base)\n\n .unwrap()\n\n .to_str()\n\n .unwrap()\n\n }\n\n}\n\n\n\nimpl Eq for LibraryFile {}\n\n\n\nimpl PartialEq for LibraryFile {\n\n fn eq(&self, other: &LibraryFile) -> bool {\n\n self.id == other.id\n\n }\n", "file_path": "src/library.rs", "rank": 85, "score": 1.4254909630385892 }, { "content": " /// given order.\n\n pub fn by_genre(this: &Self, that: &Self) -> Ordering {\n\n this.genre_sortable()\n\n .cmp(that.genre_sortable())\n\n .then(this.artist_sortable().cmp(that.artist_sortable()))\n\n .then(this.album_sortable().cmp(that.album_sortable()))\n\n .then(this.track_number.cmp(&that.track_number))\n\n .then(this.title_sortable().cmp(that.title_sortable()))\n\n }\n\n\n\n /// Get the location of a file relative to the DMS root.\n\n fn dms_location(&self) -> PathBuf {\n\n Path::new(\"/dos/data\").join(self.path.strip_prefix(&self.base).unwrap())\n\n }\n\n\n\n /// Converts a MediaMetadata instance into the CSV format expected by PhatNoise for artists,\n\n /// albums, genres, and tracks databases.\n\n ///\n\n /// See: https://github.com/naftulikay/phatnoise.rs/wiki/Sync-Workflow\n\n pub fn to_csv(&self) -> String {\n", "file_path": "src/metadata.rs", "rank": 86, "score": 1.3330881339301648 }, { "content": "# phatnoise.rs [![Build Status][travis.svg]][travis]\n\n\n\nA PhatNoise synchronization tool written in Rust.\n\n\n\n## License\n\n\n\nLicensed at your discretion under either:\n\n\n\n - [Apache Software License, Version 2.0](./LICENSE-APACHE)\n\n - [MIT License](./LICENSE-MIT)\n\n\n\n [travis]: https://travis-ci.org/naftulikay/phatnoise.rs\n\n [travis.svg]: https://travis-ci.org/naftulikay/phatnoise.rs.svg?branch=master\n", "file_path": "README.md", "rank": 87, "score": 1.19500246467241 }, { "content": " artist: pool.get(&get_artist_id3(&tag)),\n\n album: pool.get(&get_album_id3(&tag)),\n\n genre: pool.get(&get_genre_id3(&tag)),\n\n title: get_title_id3(&tag),\n\n track_number: get_track_number_id3(&tag),\n\n duration: get_duration_mp3(&path).as_secs(),\n\n })\n\n }\n\n _ => Err(MediaParsingError::UnrecognizedFormat),\n\n }\n\n }\n\n\n\n /// Returns the track's artist for sorting\n\n fn artist_sortable(&self) -> &str {\n\n if PRONOUN_START.is_match(&self.artist) {\n\n &self.artist[PRONOUN_START.find(&self.artist).unwrap().end()..]\n\n } else {\n\n &self.artist\n\n }\n\n }\n", "file_path": "src/metadata.rs", "rank": 88, "score": 0.9552070352444901 }, { "content": " /// Generate an ID for the media file at the given path with the given base path.\n\n ///\n\n /// This is used for path uniqueness checks between two media libraries, ie between the local\n\n /// media library on disk and the remote media library on the DMS.\n\n fn gen_id(path: &Path, base: &Path) -> String {\n\n path.strip_prefix(base)\n\n .unwrap()\n\n .to_string_lossy()\n\n .chars()\n\n // delete illegal characters\n\n .filter(|c| !utils::FAT32_DELETE_CHARS.contains(&c))\n\n // replace certain characters with a single space character\n\n .map(|c| {\n\n if utils::FAT32_HYPHENIZE_CHARS.contains(&c) {\n\n '-'\n\n } else {\n\n c\n\n }\n\n })\n\n // bounce down to lowercase\n", "file_path": "src/library.rs", "rank": 89, "score": 0.9144823221223244 }, { "content": " fn title_sortable(&self) -> &str {\n\n if 
PRONOUN_START.is_match(&self.title) {\n\n &self.title[PRONOUN_START.find(&self.title).unwrap().end()..]\n\n } else {\n\n &self.title\n\n }\n\n }\n\n\n\n /// Sorting utility for sorting by artist, album, track number, track title, and by genre in the\n\n /// given order.\n\n pub fn by_artist(this: &Self, that: &Self) -> Ordering {\n\n this.artist_sortable()\n\n .cmp(that.artist_sortable())\n\n .then(this.album_sortable().cmp(that.album_sortable()))\n\n .then(this.track_number.cmp(&that.track_number))\n\n .then(this.title_sortable().cmp(that.title_sortable()))\n\n .then(this.genre_sortable().cmp(that.genre_sortable()))\n\n }\n\n\n\n /// Sorting utility for sorting by genre, artist, album, track number, and by track title in the\n", "file_path": "src/metadata.rs", "rank": 90, "score": 0.8980501429373424 } ]
Rust
crates/fefix/src/session/config.rs
ferrumfix/ferrumfix
36b40f3d34cc9af46acc7f60d2f3ae517fa8c0ee
use super::{Environment, MsgSeqNumCounter, SeqNumbers};
use std::marker::PhantomData;
use std::num::NonZeroU64;
use std::time::Duration;

pub trait Configure: Clone + Default {
    fn verify_test_indicator(&self) -> bool {
        true
    }

    fn max_allowed_latency(&self) -> Duration {
        Duration::from_secs(3)
    }

    fn begin_string(&self) -> &[u8] {
        b"FIX.4.4"
    }

    fn sender_comp_id(&self) -> &[u8] {
        b"SENDER_COMP"
    }

    fn target_comp_id(&self) -> &[u8] {
        b"TARGET_COMP"
    }

    fn environment(&self) -> Environment {
        Environment::Production { allow_test: true }
    }

    fn heartbeat(&self) -> Duration {
        Duration::from_secs(30)
    }
}

#[derive(Debug, Clone)]
#[allow(missing_docs)]
pub struct Config {
    phantom: PhantomData<()>,
    pub verify_test_indicator: bool,
    pub max_allowed_latency: Duration,
    pub begin_string: String,
    pub environment: Environment,
    pub heartbeat: Duration,
    pub seq_numbers: SeqNumbers,
    pub msg_seq_num_inbound: MsgSeqNumCounter,
    pub msg_seq_num_outbound: MsgSeqNumCounter,
    pub sender_comp_id: String,
    pub target_comp_id: String,
}

impl Configure for Config {
    fn verify_test_indicator(&self) -> bool {
        self.verify_test_indicator
    }

    fn max_allowed_latency(&self) -> Duration {
        self.max_allowed_latency
    }

    fn sender_comp_id(&self) -> &[u8] {
        self.sender_comp_id.as_bytes()
    }

    fn target_comp_id(&self) -> &[u8] {
        self.target_comp_id.as_bytes()
    }

    fn begin_string(&self) -> &[u8] {
        self.begin_string.as_bytes()
    }

    fn environment(&self) -> Environment {
        self.environment
    }

    fn heartbeat(&self) -> Duration {
        self.heartbeat
    }
}

impl Default for Config {
    fn default() -> Self {
        Self {
            phantom: PhantomData::default(),
            verify_test_indicator: true,
            max_allowed_latency: Duration::from_secs(3),
            begin_string: "FIX.4.4".to_string(),
            environment: Environment::Production { allow_test: true },
            heartbeat: Duration::from_secs(30),
            seq_numbers: SeqNumbers::new(NonZeroU64::new(1).unwrap(), NonZeroU64::new(1).unwrap()),
            msg_seq_num_inbound: MsgSeqNumCounter::START,
            msg_seq_num_outbound: MsgSeqNumCounter::START,
            sender_comp_id: "SENDER_COMP".to_string(),
            target_comp_id: "TARGET_COMP".to_string(),
        }
    }
}

#[cfg(test)]
mod test {
    use super::*;
    use quickcheck_macros::quickcheck;

    #[derive(Default, Clone)]
    struct ConfigDefault;

    impl Configure for ConfigDefault {}

    #[test]
    fn config_defaults() {
        let config = Config::default();
        assert_eq!(
            config.max_allowed_latency(),
            ConfigDefault.max_allowed_latency()
        );
        assert_eq!(
            config.verify_test_indicator(),
            ConfigDefault.verify_test_indicator()
        );
    }

    #[quickcheck]
    fn config_set_max_allowed_latency(latency: Duration) -> bool {
        let mut config = Config::default();
        config.max_allowed_latency = latency;
        config.max_allowed_latency() == latency
    }

    #[quickcheck]
    fn config_set_verify_test_indicator(verify: bool) -> bool {
        let mut config = Config::default();
        config.verify_test_indicator = verify;
        config.verify_test_indicator() == verify
    }
}
use super::{Environment, MsgSeqNumCounter, SeqNumbers}; use std::marker::PhantomData; use std::num::NonZeroU64; use std::time::Duration; pub trait Configure: Clone + Default { fn verify_test_indicator(&self) -> bool { true } fn max_allowed_latency(&self) -> Duration { Duration::from_secs(3) } fn begin_string(&self) -> &[u8] { b"FIX.4.4" } fn sender_comp_id(&self) -> &[u8] { b"SENDER_COMP" } fn target_comp_id(&self) -> &[u8] { b"TARGET_COMP" } fn environment(&self) -> Environment { Environment::Production { allow_test: true } } fn heartbeat(&self) -> Duration { Duration::from_secs(30) } } #[derive(Debug, Clone)] #[allow(missing_docs)] pub struct Config { phantom: PhantomData<()>, pub verify_test_indicato
wrap()), msg_seq_num_inbound: MsgSeqNumCounter::START, msg_seq_num_outbound: MsgSeqNumCounter::START, sender_comp_id: "SENDER_COMP".to_string(), target_comp_id: "TARGET_COMP".to_string(), } } } #[cfg(test)] mod test { use super::*; use quickcheck_macros::quickcheck; #[derive(Default, Clone)] struct ConfigDefault; impl Configure for ConfigDefault {} #[test] fn config_defaults() { let config = Config::default(); assert_eq!( config.max_allowed_latency(), ConfigDefault.max_allowed_latency() ); assert_eq!( config.verify_test_indicator(), ConfigDefault.verify_test_indicator() ); } #[quickcheck] fn config_set_max_allowed_latency(latency: Duration) -> bool { let mut config = Config::default(); config.max_allowed_latency = latency; config.max_allowed_latency() == latency } #[quickcheck] fn config_set_verify_test_indicator(verify: bool) -> bool { let mut config = Config::default(); config.verify_test_indicator = verify; config.verify_test_indicator() == verify } }
r: bool, pub max_allowed_latency: Duration, pub begin_string: String, pub environment: Environment, pub heartbeat: Duration, pub seq_numbers: SeqNumbers, pub msg_seq_num_inbound: MsgSeqNumCounter, pub msg_seq_num_outbound: MsgSeqNumCounter, pub sender_comp_id: String, pub target_comp_id: String, } impl Configure for Config { fn verify_test_indicator(&self) -> bool { self.verify_test_indicator } fn max_allowed_latency(&self) -> Duration { self.max_allowed_latency } fn sender_comp_id(&self) -> &[u8] { self.sender_comp_id.as_bytes() } fn target_comp_id(&self) -> &[u8] { self.target_comp_id.as_bytes() } fn begin_string(&self) -> &[u8] { self.begin_string.as_bytes() } fn environment(&self) -> Environment { self.environment } fn heartbeat(&self) -> Duration { self.heartbeat } } impl Default for Config { fn default() -> Self { Self { phantom: PhantomData::default(), verify_test_indicator: true, max_allowed_latency: Duration::from_secs(3), begin_string: "FIX.4.4".to_string(), environment: Environment::Production { allow_test: true }, heartbeat: Duration::from_secs(30), seq_numbers: SeqNumbers::new(NonZeroU64::new(1).unwrap(), NonZeroU64::new(1).un
random
[ { "content": "/// A provider of configuration options related to FIX encoding and decoding.\n\n///\n\n/// # Implementing this trait\n\n///\n\n/// Before implementing this trait, you should look into [`Config`], which is\n\n/// adequate for most uses. The only benefit of writing your own [`Configure`]\n\n/// implementor rather than using [`Config`] is the possibility of relying on\n\n/// constants in code rather than accessing `struct` members, which results in\n\n/// better inlining by LLVM. E.g.\n\n///\n\n/// ```\n\n/// use fefix::tagvalue::Configure;\n\n///\n\n/// #[derive(Default, Copy, Clone)]\n\n/// struct FixInlineConfig {}\n\n///\n\n/// impl Configure for FixInlineConfig {\n\n/// #[inline]\n\n/// fn max_message_size(&self) -> Option<usize> {\n\n/// None\n\n/// }\n\n///\n\n/// #[inline]\n\n/// fn verify_checksum(&self) -> bool {\n\n/// true\n\n/// }\n\n/// }\n\n/// ```\n\n///\n\n/// Needless to say, **think twice** before polluting your codebase with such\n\n/// micro-optimizations.\n\npub trait Configure: Clone + Default {\n\n /// The delimiter character, which terminates every tag-value pair including\n\n /// the last one.\n\n ///\n\n /// ASCII 0x1 (SOH) is the default separator character.\n\n ///\n\n /// This setting is relevant for both encoding and decoding operations.\n\n #[inline]\n\n fn separator(&self) -> u8 {\n\n SOH\n\n }\n\n\n\n /// The maximum allowed size for any single FIX message. No restrictions are\n\n /// imposed when it is `None`.\n\n #[inline]\n\n fn max_message_size(&self) -> Option<usize> {\n\n Some(DEFAULT_MAX_MESSAGE_SIZE)\n\n }\n\n\n\n /// Determines wheather or not `CheckSum(10)` should be verified.\n", "file_path": "crates/fefix/src/tagvalue/config.rs", "rank": 0, "score": 333181.1783437958 }, { "content": "/// Configuration interface for the FIX JSON encoding format.\n\npub trait Configure: Clone + Default {\n\n /// This setting indicates that all encoded messages should be \"prettified\"\n\n /// if possible, i.e. the JSON code will not be compressed and instead it\n\n /// will have indentation and other whitespace that favors human\n\n /// readability. Some performance loss and increased payload size is\n\n /// expected.\n\n ///\n\n /// This is turned **off** be default.\n\n ///\n\n /// This setting has no effect when decoding messages.\n\n ///\n\n /// # Output examples\n\n ///\n\n /// With \"pretty print\":\n\n ///\n\n /// ```json\n\n /// {\n\n /// \"Header\": {\n\n /// \"...\": \"...\"\n\n /// },\n", "file_path": "crates/fefix/src/json/config.rs", "rank": 2, "score": 333148.2873694841 }, { "content": "/// The owner of a [`FixConnection`]. 
It can react to events, store incoming\n\n/// messages, send messages, etc..\n\npub trait Backend: Clone {\n\n /// The type of errors that can arise during a [`FixConnection`].\n\n type Error: for<'a> FixValue<'a>;\n\n\n\n fn sender_comp_id(&self) -> &[u8];\n\n fn target_comp_id(&self) -> &[u8];\n\n\n\n fn message_encoding(&self) -> Option<&[u8]> {\n\n None\n\n }\n\n\n\n fn set_sender_and_target<'a>(&'a self, msg: &mut impl SetField<u32>) {\n\n msg.set(49, self.sender_comp_id());\n\n msg.set(56, self.target_comp_id());\n\n }\n\n\n\n fn environment(&self) -> Environment {\n\n Environment::Production { allow_test: false }\n\n }\n\n\n", "file_path": "crates/fefix/src/session/mod.rs", "rank": 3, "score": 197090.54257307533 }, { "content": "/// Returns a copy of the `CheckSum <10>` digits of `message`.\n\npub fn checksum_digits(message: &[u8]) -> [u8; 3] {\n\n debug_assert!(message.len() >= MIN_FIX_MESSAGE_LEN_IN_BYTES);\n\n message[message.len() - 4..message.len() - 1]\n\n .try_into()\n\n .unwrap()\n\n}\n\n\n", "file_path": "crates/fefix/src/tagvalue/utils.rs", "rank": 4, "score": 191363.99860562233 }, { "content": "fn is_digit(byte: u8, min_digit: u8, max_digit: u8) -> bool {\n\n byte >= (min_digit + b'0') && byte <= (max_digit + b'0')\n\n}\n\n\n", "file_path": "crates/fefix/src/fix_value/monthyear.rs", "rank": 5, "score": 187603.10022288022 }, { "content": "fn validate(data: &[u8]) -> bool {\n\n if data.len() != 8 {\n\n return false;\n\n }\n\n if !validate_year(data) || !validate_month(data) {\n\n return false;\n\n }\n\n validate_week(data) || validate_day(data)\n\n}\n\n\n", "file_path": "crates/fefix/src/fix_value/monthyear.rs", "rank": 6, "score": 185597.07939188596 }, { "content": "fn validate_week(data: &[u8]) -> bool {\n\n data[6] == b'w' && is_digit(data[7], 1, 5)\n\n}\n\n\n", "file_path": "crates/fefix/src/fix_value/monthyear.rs", "rank": 7, "score": 183414.65386672225 }, { "content": "fn validate_year(data: &[u8]) -> bool {\n\n is_digit(data[0], 0, 9)\n\n && is_digit(data[1], 0, 9)\n\n && is_digit(data[2], 0, 9)\n\n && is_digit(data[3], 0, 9)\n\n}\n\n\n", "file_path": "crates/fefix/src/fix_value/monthyear.rs", "rank": 8, "score": 183414.65386672225 }, { "content": "fn validate_month(data: &[u8]) -> bool {\n\n ((data[4] == b'0' && data[5] <= b'9') || (data[4] == b'1' && data[5] <= b'2'))\n\n && data[5] >= b'0'\n\n}\n\n\n", "file_path": "crates/fefix/src/fix_value/monthyear.rs", "rank": 9, "score": 183414.65386672225 }, { "content": "fn is_ascii_digit(byte: u8) -> bool {\n\n byte >= b'0' && byte <= b'9'\n\n}\n\n\n", "file_path": "crates/fefix/src/fix_value/checksum.rs", "rank": 10, "score": 183414.65386672225 }, { "content": "fn validate_day(data: &[u8]) -> bool {\n\n (data[6] == b'0' && data[7] >= b'0' && data[7] <= b'9')\n\n || (data[6] == b'1' && data[7] >= b'0' && data[7] <= b'9')\n\n || (data[6] == b'2' && data[7] >= b'0' && data[7] <= b'9')\n\n || (data[6] == b'3' && data[7] >= b'0' && data[7] <= b'1')\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use quickcheck::{Arbitrary, Gen};\n\n use quickcheck_macros::quickcheck;\n\n\n\n impl Arbitrary for MonthYear {\n\n fn arbitrary(g: &mut Gen) -> Self {\n\n let year = u32::arbitrary(g) % 10000;\n\n let month = (u32::arbitrary(g) % 12) + 1;\n\n let day_or_week = if bool::arbitrary(g) {\n\n format!(\"{:02}\", (u32::arbitrary(g) % 31) + 1)\n\n } else {\n", "file_path": "crates/fefix/src/fix_value/monthyear.rs", "rank": 11, "score": 183414.65386672225 }, { "content": "/// Allows to get mutable and immutable references to 
configuration options.\n\npub trait GetConfig {\n\n /// The configuration options type.\n\n type Config;\n\n\n\n /// Returns an immutable reference to the configuration options used by\n\n /// `self`.\n\n fn config(&self) -> &Self::Config;\n\n\n\n /// Returns a mutable reference to the configuration options used by\n\n /// `self`.\n\n fn config_mut(&mut self) -> &mut Self::Config;\n\n}\n", "file_path": "crates/fefix/src/lib.rs", "rank": 12, "score": 175960.90392597305 }, { "content": "pub fn verify_checksum(headerless_msg: &[u8]) -> Result<(), DecodeError> {\n\n let msg_contents = &headerless_msg[..headerless_msg.len() - FIELD_CHECKSUM_LEN_IN_BYTES];\n\n let nominal_checksum = CheckSum::deserialize_lossy(&checksum_digits(headerless_msg)[..])\n\n .map_err(|_| DecodeError::CheckSum)?;\n\n let actual_checksum = CheckSum::compute(msg_contents);\n\n if nominal_checksum == actual_checksum {\n\n Ok(())\n\n } else {\n\n Err(DecodeError::CheckSum)\n\n }\n\n}\n\n\n", "file_path": "crates/fefix/src/tagvalue/utils.rs", "rank": 13, "score": 167489.20118165144 }, { "content": "fn decode_fix_message(fix_decoder: &mut Decoder<Config>, msg: &[u8]) {\n\n fix_decoder.decode(msg).expect(\"Invalid FIX message\");\n\n}\n\n\n", "file_path": "crates/fefix/benches/fix_decode.rs", "rank": 14, "score": 164576.87933957443 }, { "content": "/// Tries to [`FixValue::serialize`] an `item`, then to\n\n/// [`FixValue::deserialize`] it, and finally checks for equality with the\n\n/// initial data. [`FixValue::deserialize_lossy`] is then tested in the same\n\n/// manner.\n\npub fn test_utility_verify_serialization_behavior<T>(item: T) -> bool\n\nwhere\n\n T: for<'a> FixValue<'a> + PartialEq,\n\n{\n\n let serialized = item.to_bytes();\n\n let bytes = &serialized[..];\n\n let deserialized = T::deserialize(bytes).ok().unwrap();\n\n let deserialized_lossy = T::deserialize_lossy(bytes).ok().unwrap();\n\n deserialized == item && deserialized_lossy == item\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use quickcheck_macros::quickcheck;\n\n\n\n #[test]\n\n fn serialize_bools() {\n\n let mut buffer = Vec::new();\n\n assert_eq!(true.serialize(&mut buffer), 1);\n", "file_path": "crates/fefix/src/fix_value/mod.rs", "rank": 15, "score": 164335.09336168756 }, { "content": "pub fn decode_stop_bit_entity(input: &mut impl io::Read) -> io::Result<Vec<u8>> {\n\n let mut bytes = Vec::new();\n\n loop {\n\n let mut byte = [0u8; 1];\n\n input.read_exact(&mut byte[..])?;\n\n if byte[0] >= STOP_BYTE {\n\n byte[0] ^= STOP_BYTE;\n\n bytes.push(byte[0]);\n\n break;\n\n } else {\n\n bytes.push(byte[0]);\n\n }\n\n }\n\n Ok(bytes)\n\n}\n\n\n", "file_path": "crates/fefast/src/codec.rs", "rank": 16, "score": 147254.87148005932 }, { "content": "pub fn template_struct(template: &Template, custom_derive_line: &str) -> String {\n\n let identifier = template.name().to_camel_case();\n\n let fields = template\n\n .iter_items()\n\n .map(|field_instruction| match field_instruction.kind() {\n\n FieldType::Primitive(pt) => optional_rust_type(\n\n primitive_fast_type_to_rust_type(*pt),\n\n !field_instruction.is_mandatory(),\n\n ),\n\n FieldType::Group(_group) => String::new(),\n\n })\n\n .collect::<Vec<String>>();\n\n format!(\n\n indoc!(\n\n r#\"\n\n {notice}\n\n\n\n #[derive(Debug)]\n\n {custom_derive_line}\n\n pub struct {identifier}<'a> {{\n", "file_path": "crates/fefast/src/codegen.rs", "rank": 17, "score": 143624.78185774913 }, { "content": "fn ascii_digit_to_u8(digit: u8, multiplier: u8) -> u8 {\n\n 
digit.wrapping_sub(b'0').wrapping_mul(multiplier)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use quickcheck::{Arbitrary, Gen};\n\n use quickcheck_macros::quickcheck;\n\n\n\n impl Arbitrary for CheckSum {\n\n fn arbitrary(g: &mut Gen) -> Self {\n\n Self(u8::arbitrary(g))\n\n }\n\n }\n\n\n\n #[test]\n\n fn edges_cases() {\n\n assert_eq!(CheckSum::compute(&[]).0, 0);\n\n assert_eq!(CheckSum::compute(&[1]).0, 1);\n", "file_path": "crates/fefix/src/fix_value/checksum.rs", "rank": 18, "score": 141612.85946992287 }, { "content": "fn to_digit(byte: u8) -> u8 {\n\n byte + b'0'\n\n}\n\n\n\nimpl<'a, B, C> SetField<u32> for EncoderHandle<'a, B, C>\n\nwhere\n\n B: Buffer,\n\n C: Configure,\n\n{\n\n fn set_with<'s, V>(&'s mut self, tag: u32, value: V, settings: V::SerializeSettings)\n\n where\n\n V: FixValue<'s>,\n\n {\n\n write!(BufferWriter(self.buffer), \"{}=\", tag).unwrap();\n\n value.serialize_with(self.buffer, settings);\n\n self.buffer\n\n .extend_from_slice(&[self.encoder.config().separator()]);\n\n }\n\n}\n\n\n", "file_path": "crates/fefix/src/tagvalue/encoder.rs", "rank": 19, "score": 140744.45316680282 }, { "content": "fn from_digit(digit: u8) -> u8 {\n\n digit.wrapping_sub(b'0')\n\n}\n\n\n", "file_path": "crates/fefix/src/fix_value/monthyear.rs", "rank": 20, "score": 139399.40445288538 }, { "content": "fn digit_to_ascii(byte: u8) -> u8 {\n\n byte + b'0'\n\n}\n\n\n", "file_path": "crates/fefix/src/fix_value/checksum.rs", "rank": 21, "score": 138097.10923899972 }, { "content": "/// Operations on a growable in-memory buffer.\n\n///\n\n/// This trait is intented to be used as a thin compatibility layer between\n\n/// [`Vec<u8>`] and\n\n/// [`bytes::BytesMut`](https://docs.rs/bytes/1.1.0/bytes/struct.BytesMut.html).\n\n/// By writing generic code that operates on [`Buffer`], FerrumFIX users can\n\n/// decide for themselves if they want to use `bytes` and still use most of the\n\n/// features.\n\n///\n\n/// It's important to note that, unlike [`std::io::Write`] which only allows\n\n/// sequential write operations, [`Buffer`] allows arbitrary data manipulation\n\n/// over the whole buffer.\n\npub trait Buffer {\n\n /// Returns an immutable reference to the whole contents of the buffer.\n\n fn as_slice(&self) -> &[u8];\n\n\n\n /// Returns a mutable reference to the whole contents of the buffer.\n\n fn as_mut_slice(&mut self) -> &mut [u8];\n\n\n\n /// Returns the length of the whole contents of the buffer.\n\n #[inline]\n\n fn len(&self) -> usize {\n\n self.as_slice().len()\n\n }\n\n\n\n /// Returns the number of bytes that `self` can hold without reallocating.\n\n fn capacity(&self) -> usize;\n\n\n\n /// Completely erases the contents of `self`.\n\n fn clear(&mut self);\n\n\n\n /// Appends the contents of `extend` onto `self`, growing the buffer if\n", "file_path": "crates/fefix/src/buffer.rs", "rank": 22, "score": 136925.54676196177 }, { "content": "/// A trait to (de)serialize on-the-wire representations of entities.\n\npub trait Codec {\n\n fn deserialize(&mut self, input: &mut impl io::Read) -> io::Result<usize>;\n\n fn serialize(&self, output: &mut impl io::Write) -> io::Result<usize>;\n\n}\n\n\n\nimpl Codec for u32 {\n\n fn serialize(&self, output: &mut impl io::Write) -> io::Result<usize> {\n\n let num_ignored_bytes = self.leading_zeros() / 7;\n\n let bytes = [\n\n (self >> 28) as u8 & SIGNIFICANT_BYTE,\n\n (self >> 21) as u8 & SIGNIFICANT_BYTE,\n\n (self >> 14) as u8 & SIGNIFICANT_BYTE,\n\n (self >> 7) as u8 & SIGNIFICANT_BYTE,\n\n *self as u8 | STOP_BYTE,\n\n ];\n\n\n\n 
output.write_all(&bytes[num_ignored_bytes as usize..])?;\n\n Ok(bytes.len() - num_ignored_bytes as usize)\n\n }\n\n\n", "file_path": "crates/fefast/src/codec.rs", "rank": 23, "score": 136916.11097808968 }, { "content": "pub trait Application {}\n", "file_path": "crates/fefix/src/session/backends.rs", "rank": 24, "score": 135205.71200563805 }, { "content": "pub trait Verify {\n\n type Error;\n\n\n\n fn verify_begin_string(&self, begin_string: &[u8]) -> Result<(), Self::Error>;\n\n\n\n fn verify_test_message_indicator(\n\n &self,\n\n msg: &impl RandomFieldAccess<u32>,\n\n ) -> Result<(), Self::Error>;\n\n\n\n fn verify_sending_time(&self, msg: &impl RandomFieldAccess<u32>) -> Result<(), Self::Error>;\n\n}\n\n\n\nimpl<'a, B, C, V> FixConnector<'a, B, C, V> for FixConnection<B, C>\n\nwhere\n\n B: Backend,\n\n C: Configure,\n\n V: Verify,\n\n{\n\n type Error = &'a [u8];\n", "file_path": "crates/fefix/src/session/connection.rs", "rank": 25, "score": 135205.71200563805 }, { "content": "pub trait DataType {}\n", "file_path": "crates/fefast/src/dtf.rs", "rank": 26, "score": 135205.71200563805 }, { "content": "/// *Field encoding operator* in FAST terminology.\n\npub trait FieldOperator {\n\n /// The type of the (de)serializable item.\n\n type Item: Codec;\n\n\n\n /// See section 6.3.1 of FAST 1.1 documentation.\n\n fn previous_value(&self) -> Option<&Self::Item>;\n\n /// Replace the previous value (or set it if unset) with a new one.\n\n fn replace(&mut self, new_value: Self::Item);\n\n /// Determine whether the specified value can be omitted from the final\n\n /// payload. This behavior is custom to every field operator.\n\n fn can_omit(&self, value: &Self::Item) -> bool;\n\n /// Unset previous state.\n\n fn reset(&mut self);\n\n}\n\n\n\n/// The constant operator specifies that the value of a field will always be the\n\n/// same, as initialized with `new`.\n\n#[derive(Debug)]\n\npub struct Constant<T> {\n\n value: T,\n", "file_path": "crates/fefast/src/field_operators.rs", "rank": 27, "score": 133568.61395715713 }, { "content": "pub trait IsFieldDefinition {\n\n /// Returns the FIX tag associated with `self`.\n\n fn tag(&self) -> TagU16;\n\n\n\n /// Returns the official, ASCII, human-readable name associated with `self`.\n\n fn name(&self) -> &str;\n\n\n\n /// Returns the field location of `self`.\n\n fn location(&self) -> FieldLocation;\n\n}\n\n\n", "file_path": "crates/fefix/src/fefix_core/dict.rs", "rank": 28, "score": 131996.08607438952 }, { "content": " pub trait SymbolTableIndex {\n\n fn to_key(&self) -> KeyRef;\n\n }\n\n\n\n impl SymbolTableIndex for Key {\n\n fn to_key(&self) -> KeyRef {\n\n self.as_ref()\n\n }\n\n }\n\n\n\n impl<'a> SymbolTableIndex for KeyRef<'a> {\n\n fn to_key(&self) -> KeyRef {\n\n *self\n\n }\n\n }\n\n\n\n impl<'a> Hash for dyn SymbolTableIndex + 'a {\n\n fn hash<H: std::hash::Hasher>(&self, state: &mut H) {\n\n self.to_key().hash(state);\n\n }\n", "file_path": "crates/fefix/src/fefix_core/dict.rs", "rank": 29, "score": 130484.38208763057 }, { "content": "/// Provides access to entries within a FIX repeating group.\n\npub trait RepeatingGroup: Sized {\n\n /// The type of entries in this FIX repeating group. 
Must implement\n\n /// [`RandomFieldAccess`].\n\n type Entry;\n\n\n\n /// Returns the number of FIX group entries in `self`.\n\n fn len(&self) -> usize;\n\n\n\n /// Returns the `i` -th entry in `self`, if present.\n\n fn entry_opt(&self, i: usize) -> Option<Self::Entry>;\n\n\n\n /// Returns the `i` -th entry in `self`.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics if `i` is outside the legal range of `self`.\n\n fn entry(&self, i: usize) -> Self::Entry {\n\n self.entry_opt(i)\n\n .expect(\"Index outside bounds of FIX repeating group.\")\n\n }\n", "file_path": "crates/fefix/src/field_access.rs", "rank": 30, "score": 128474.12044724487 }, { "content": "/// Allows to write FIX fields.\n\npub trait SetField<F> {\n\n fn set<'a, V>(&'a mut self, field: F, value: V)\n\n where\n\n V: FixValue<'a>,\n\n {\n\n self.set_with(field, value, <V::SerializeSettings as Default>::default())\n\n }\n\n\n\n fn set_with<'a, V>(&'a mut self, field: F, value: V, setting: V::SerializeSettings)\n\n where\n\n V: FixValue<'a>;\n\n}\n", "file_path": "crates/fefix/src/set_field.rs", "rank": 31, "score": 128474.12044724487 }, { "content": "/// Provides (de)serialization logic for a Rust type as FIX field values.\n\n///\n\n/// See the [`fix_value`](crate::fix_value) module for more information.\n\npub trait FixValue<'a>\n\nwhere\n\n Self: Sized,\n\n{\n\n /// The error type that can arise during deserialization.\n\n type Error;\n\n /// A type with values that customize the serialization algorithm, e.g.\n\n /// padding information.\n\n type SerializeSettings: Default;\n\n\n\n /// Writes `self` to `buffer` using default settings.\n\n #[inline]\n\n fn serialize<B>(&self, buffer: &mut B) -> usize\n\n where\n\n B: Buffer,\n\n {\n\n self.serialize_with(buffer, Self::SerializeSettings::default())\n\n }\n\n\n\n /// Writes `self` to `buffer` using custom serialization `settings`.\n", "file_path": "crates/fefix/src/fix_value/mod.rs", "rank": 32, "score": 128474.12044724487 }, { "content": "/// Verifies the `BodyLength(9)` field of the FIX message in `data`.\n\npub fn verify_body_length(\n\n data: &[u8],\n\n start_of_body: usize,\n\n nominal_body_length: usize,\n\n) -> Result<(), DecodeError> {\n\n let body_length = data\n\n .len()\n\n .wrapping_sub(FIELD_CHECKSUM_LEN_IN_BYTES)\n\n .wrapping_sub(start_of_body);\n\n let end_of_body = data.len() - FIELD_CHECKSUM_LEN_IN_BYTES;\n\n if start_of_body > end_of_body || nominal_body_length != body_length {\n\n Err(DecodeError::Invalid)\n\n } else {\n\n debug_assert!(body_length < data.len());\n\n Ok(())\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n", "file_path": "crates/fefix/src/tagvalue/utils.rs", "rank": 33, "score": 127932.65885603051 }, { "content": "fn fix_decoder() -> Decoder<Config> {\n\n let fix_dictionary = Dictionary::from_quickfix_spec(QUICKFIX_SPEC).unwrap();\n\n Decoder::<Config>::new(fix_dictionary)\n\n}\n", "file_path": "examples/05_coinbase_codegen/src/main.rs", "rank": 34, "score": 127316.54439721804 }, { "content": "/// A trait to retrieve field values in a FIX message.\n\n///\n\n/// # Type parameters\n\n///\n\n/// This trait is generic over a lifetime `'a`, which\n\n///\n\n/// # Field getters naming scheme\n\n///\n\n/// All getters start with `fv`, which stands for Field Value.\n\n/// - `l` stands for *lossy*, i.e. 
invalid field values might not be detected to\n\n/// improve performance.\n\n/// - `_opt` stands for *optional*, for better error reporting.\n\npub trait RandomFieldAccess<F> {\n\n /// The type returned by [`RandomFieldAccess::group()`] and [`RandomFieldAccess::group_opt()`].\n\n type Group: RepeatingGroup<Entry = Self>;\n\n\n\n /// Queries `self` for a group tagged with `key`. An unsuccessful query\n\n /// results in [`Err(None)`].\n\n fn group(&self, field: F) -> Result<Self::Group, Option<<usize as FixValue>::Error>> {\n\n match self.group_opt(field) {\n\n Some(Ok(group)) => Ok(group),\n\n Some(Err(e)) => Err(Some(e)),\n\n None => Err(None),\n\n }\n\n }\n\n\n\n /// Queries `self` for a group tagged with `key` which may or may not be\n\n /// present in `self`. This differs from\n\n /// [`RandomFieldAccess::group()`] as missing groups result in [`None`] rather than\n\n /// [`Err`].\n\n fn group_opt(&self, field: F) -> Option<Result<Self::Group, <usize as FixValue>::Error>>;\n\n\n", "file_path": "crates/fefix/src/field_access.rs", "rank": 35, "score": 126967.11103555466 }, { "content": "/// Provides access to entries within a FIX repeating group.\n\npub trait RepeatingGroup: Sized {\n\n /// The type of entries in this FIX repeating group. Must implement\n\n /// [`RandomFieldAccess`].\n\n type Entry;\n\n\n\n /// Returns the number of FIX group entries in `self`.\n\n fn len(&self) -> usize;\n\n\n\n /// Returns the `i` -th entry in `self`, if present.\n\n fn entry_opt(&self, i: usize) -> Option<Self::Entry>;\n\n\n\n /// Returns the `i` -th entry in `self`.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics if `i` is outside the legal range of `self`.\n\n fn entry(&self, i: usize) -> Self::Entry {\n\n self.entry_opt(i)\n\n .expect(\"Index outside bounds of FIX repeating group.\")\n\n }\n", "file_path": "crates/fefix/src/random_field_access.rs", "rank": 36, "score": 126962.41646048594 }, { "content": "/// Provides random (i.e. non-sequential) access to FIX fields and groups within\n\n/// messages.\n\n///\n\n/// # Methods\n\n///\n\n/// [`RandomFieldAccess`] provides two kinds of methods:\n\n///\n\n/// 1. Group getters: [`RandomFieldAccess::group`] and\n\n/// [`RandomFieldAccess::group_opt`].\n\n///\n\n/// 2. Field getters: [`RandomFieldAccess::fv_raw`], [`RandomFieldAccess::fv`],\n\n/// etc..\n\n///\n\n/// The most basic form of field access is done via\n\n/// [`RandomFieldAccess::fv_raw`], which performs no deserialization at all: it\n\n/// simply returns the bytes contents associated with a FIX field, if found.\n\n///\n\n/// Building upon [`RandomFieldAccess::fv_raw`] and [`FixValue`], the other\n\n/// field access methods all provide some utility deserialization logic. These\n\n/// methods all have the `fv` prefix, with the following considerations:\n\n///\n\n/// - `fvl` methods perform \"lossy\" deserialization via\n\n/// [`FixValue::deserialize_lossy`]. 
Unlike lossless deserialization, these\n\n/// methods may skip some error checking logic and thus prove to be faster.\n\n/// Memory-safety is still guaranteed, but malformed FIX fields won't be\n\n/// detected 100% of the time.\n\n/// - `_opt` methods work exactly like their non-`_opt` counterparties, but they\n\n/// have a different return type: instead of returning [`Err(None)`] for missing\n\n/// fields, these methods return [`None`] for missing fields and\n\n/// [`Some(Ok(field))`] for existing fields.\n\n///\n\n/// # Type parameters\n\n///\n\n/// This trait is generic over a type `F`, which must univocally identify FIX\n\n/// fields (besides FIX repeating groups, which allow repetitions).\n\npub trait RandomFieldAccess<F> {\n\n /// The type returned by [`RandomFieldAccess::group`] and\n\n /// [`RandomFieldAccess::group_opt`].\n\n type Group: RepeatingGroup<Entry = Self>;\n\n\n\n /// Looks for a `field` within `self` and then returns its raw byte\n\n /// contents, if it exists.\n\n fn fv_raw(&self, field: F) -> Option<&[u8]>;\n\n\n\n /// Like [`RandomFieldAccess::group`], but doesn't return an [`Err`] if the\n\n /// group is missing.\n\n fn group_opt(&self, field: F) -> Option<Result<Self::Group, <usize as FixValue>::Error>>;\n\n\n\n /// Looks for a group that starts with `field` within `self`.\n\n #[inline]\n\n fn group(&self, field: F) -> Result<Self::Group, Option<<usize as FixValue>::Error>> {\n\n match self.group_opt(field) {\n\n Some(Ok(group)) => Ok(group),\n\n Some(Err(e)) => Err(Some(e)),\n\n None => Err(None),\n", "file_path": "crates/fefix/src/random_field_access.rs", "rank": 37, "score": 125510.08740630023 }, { "content": "pub trait DataFieldLookup<F> {\n\n fn field_is_data(&self, field: F) -> bool;\n\n}\n\n\n", "file_path": "crates/fefix/src/fefix_core/dict.rs", "rank": 38, "score": 125508.07440604751 }, { "content": "pub trait NumInGroupLookup<F> {\n\n fn field_is_num_in_group(&self, field: F) -> bool;\n\n}\n\n\n\nimpl DataFieldLookup<u32> for Dictionary {\n\n fn field_is_data(&self, tag: u32) -> bool {\n\n if let Some(field) = self.field_by_tag(tag) {\n\n field.data_type().basetype() == FixDatatype::Data\n\n } else {\n\n false\n\n }\n\n }\n\n}\n\n\n\nimpl NumInGroupLookup<u32> for Dictionary {\n\n fn field_is_num_in_group(&self, tag: u32) -> bool {\n\n if let Some(field) = self.field_by_tag(tag) {\n\n field.data_type().basetype() == FixDatatype::NumInGroup\n\n } else {\n\n false\n", "file_path": "crates/fefix/src/fefix_core/dict.rs", "rank": 39, "score": 125508.07440604751 }, { "content": "fn fix_stream() -> Vec<u8> {\n\n FIX_MESSAGES.iter().copied().flatten().copied().collect()\n\n}\n\n\n", "file_path": "examples/03_decode_fix_stream/src/main.rs", "rank": 40, "score": 125023.99581978278 }, { "content": "pub fn production_env() -> String {\n\n \"TestMessageIndicator(464) was set to 'Y' but the environment is a production environment\"\n\n .to_string()\n\n}\n\n\n", "file_path": "crates/fefix/src/session/errs.rs", "rank": 41, "score": 124650.32482427562 }, { "content": "pub fn heartbeat_gt_0() -> String {\n\n \"Invalid HeartBtInt(108), expected value greater than 0 seconds\".to_string()\n\n}\n\n\n", "file_path": "crates/fefix/src/session/errs.rs", "rank": 42, "score": 124650.32482427562 }, { "content": "pub fn inbound_seqnum() -> String {\n\n \"NextExpectedMsgSeqNum(789) > than last message sent\".to_string()\n\n}\n\n\n", "file_path": "crates/fefix/src/session/errs.rs", "rank": 43, "score": 124650.32482427562 }, { "content": "fn fix_encoder() -> Encoder<Config> {\n\n 
Encoder::default()\n\n}\n", "file_path": "examples/10_encode_new_order_single/src/main.rs", "rank": 44, "score": 124359.7805838768 }, { "content": "/// Creates a [`String`] that contains a multiline Rust \"Doc\" comment explaining\n\n/// that the subsequent code was automatically generated.\n\n///\n\n/// The following example is for illustrative purposes only and the actual\n\n/// contents might change. The string is guaranteed not to have any trailing or\n\n/// leading whitespace.\n\n///\n\n/// ```text\n\n/// // Generated automatically by FerrumFIX. Do not modify manually.\n\n/// ```\n\npub fn generated_code_notice() -> String {\n\n use chrono::prelude::*;\n\n\n\n format!(\n\n indoc!(\n\n r#\"\n\n // Generated automatically by FerrumFIX {} on {}.\n\n //\n\n // DO NOT MODIFY MANUALLY.\n\n // DO NOT COMMIT TO VERSION CONTROL.\n\n // ALL CHANGES WILL BE OVERWRITTEN.\"#\n\n ),\n\n FEFIX_VERSION,\n\n Utc::now().to_rfc2822(),\n\n )\n\n}\n\n\n", "file_path": "crates/fefix/src/fefix_core/codegen.rs", "rank": 45, "score": 121699.49138953925 }, { "content": "pub trait FixConnector<'a, B, C, Z>\n\nwhere\n\n B: Backend,\n\n C: Configure,\n\n Z: Verify,\n\n{\n\n type Error: FixValue<'a>;\n\n type Msg: FvWrite<'a>;\n\n\n\n fn target_comp_id(&self) -> &[u8];\n\n\n\n fn sender_comp_id(&self) -> &[u8];\n\n\n\n fn verifier(&self) -> &Z;\n\n\n\n fn dispatch_by_msg_type(&self, msg_type: &[u8], msg: Message<&[u8]>) -> Response {\n\n match msg_type {\n\n b\"A\" => {\n\n self.on_logon(msg);\n\n return Response::None;\n", "file_path": "crates/fefix/src/session/connection.rs", "rank": 46, "score": 117253.69041427237 }, { "content": "pub fn heartbeat_exact(secs: u64) -> String {\n\n format!(\"Invalid HeartBtInt(108), expected value {} seconds\", secs)\n\n}\n\n\n", "file_path": "crates/fefix/src/session/errs.rs", "rank": 47, "score": 116274.26213991008 }, { "content": "fn optional_rust_type(t: &str, optional: bool) -> String {\n\n if optional {\n\n format!(\"Option<{}>\", t)\n\n } else {\n\n t.to_string()\n\n }\n\n}\n\n\n", "file_path": "crates/fefast/src/codegen.rs", "rank": 48, "score": 116152.50114408747 }, { "content": "fn checksum_from_digits(data: [u8; LEN_IN_BYTES]) -> CheckSum {\n\n let digit1 = ascii_digit_to_u8(data[0], 100);\n\n let digit2 = ascii_digit_to_u8(data[1], 10);\n\n let digit3 = ascii_digit_to_u8(data[2], 1);\n\n\n\n CheckSum(digit1.wrapping_add(digit2).wrapping_add(digit3))\n\n}\n\n\n", "file_path": "crates/fefix/src/fix_value/checksum.rs", "rank": 49, "score": 113915.4806521539 }, { "content": "pub fn msg_seq_num(seq_number: u64) -> String {\n\n format!(\"Invalid MsgSeqNum <34>, expected value {}\", seq_number)\n\n}\n\n\n", "file_path": "crates/fefix/src/session/errs.rs", "rank": 50, "score": 113539.18191950858 }, { "content": "#[proc_macro_derive(FixValue, attributes(fefix))]\n\npub fn derive_fix_value(input: TokenStream) -> TokenStream {\n\n derive_fix_value::derive_fix_value(input)\n\n}\n", "file_path": "crates/fefix_derive/src/lib.rs", "rank": 51, "score": 112245.28746367109 }, { "content": "pub fn heartbeat_range(a: u64, b: u64) -> String {\n\n format!(\n\n \"Invalid HeartBtInt(108), expected value between {} and {} seconds\",\n\n a, b,\n\n )\n\n}\n\n\n", "file_path": "crates/fefix/src/session/errs.rs", "rank": 52, "score": 111974.3685348903 }, { "content": "pub fn derive_fix_value(input: TokenStream) -> TokenStream {\n\n let ast: syn::DeriveInput = syn::parse(input).unwrap();\n\n let darling_context = DataFieldWithVariants::from_derive_input(&ast).unwrap();\n\n let identifier = 
darling_context.ident;\n\n let matching_cases = darling_context\n\n .data\n\n .clone()\n\n .map_enum_variants(|enum_variant| {\n\n let enum_discriminant = enum_variant.variant.as_str();\n\n let enum_discriminant_len = enum_variant.variant.as_bytes().len();\n\n let enum_variant = enum_variant.ident;\n\n quote! {\n\n Self::#enum_variant => {\n\n buffer.extend_from_slice(#enum_discriminant.as_bytes());\n\n #enum_discriminant_len\n\n },\n\n }\n\n })\n\n .take_enum()\n\n .expect(\"Invalid enum\");\n", "file_path": "crates/fefix_derive/src/derive_fix_value.rs", "rank": 53, "score": 109792.0279897312 }, { "content": "pub fn missing_field(name: &str, tag: u32) -> String {\n\n format!(\"Missing mandatory field {}({})\", name, tag)\n\n}\n", "file_path": "crates/fefix/src/session/errs.rs", "rank": 54, "score": 109239.2883144888 }, { "content": "fn deserialize(data: [u8; LEN_IN_BYTES]) -> Result<Date, &'static str> {\n\n let year = ascii_digit_to_u32(data[0], 1000)\n\n + ascii_digit_to_u32(data[1], 100)\n\n + ascii_digit_to_u32(data[2], 10)\n\n + ascii_digit_to_u32(data[3], 1);\n\n let month = ascii_digit_to_u32(data[4], 10) + ascii_digit_to_u32(data[5], 1);\n\n let day = ascii_digit_to_u32(data[6], 10) + ascii_digit_to_u32(data[7], 1);\n\n Date::new(year, month, day).ok_or(ERR_BOUNDS)\n\n}\n\n\n\nconst fn is_digit(byte: u8) -> bool {\n\n byte >= b'0' && byte <= b'9'\n\n}\n\n\n\nconst fn ascii_digit_to_u32(digit: u8, multiplier: u32) -> u32 {\n\n (digit as u32).wrapping_sub(b'0' as u32) * multiplier\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n", "file_path": "crates/fefix/src/fix_value/date.rs", "rank": 55, "score": 106315.42864350122 }, { "content": "#[doc(hidden)]\n\npub fn indent_string<S>(s: S, prefix: &str) -> String\n\nwhere\n\n S: AsRef<str>,\n\n{\n\n indent_lines(s.as_ref().lines(), prefix)\n\n}\n\n\n", "file_path": "crates/fefix/src/fefix_core/codegen.rs", "rank": 56, "score": 106113.42458079747 }, { "content": "/// Generates `const` implementors of\n\n/// [`IsFieldDefinition`](super::dict::IsFieldDefinition).\n\n///\n\n/// The generated module will contain:\n\n///\n\n/// - A generated code notice ([generated_code_notice]).\n\n/// - `enum` definitions for FIX field types.\n\n/// - A constant implementor of\n\n/// [`IsFieldDefinition`](super::dict::IsFieldDefinition)\n\n/// for each FIX field.\n\n///\n\n/// The Rust code will be free of any leading and trailing whitespace.\n\n/// An effort is made to provide good formatting, but users shouldn't rely on it\n\n/// and assume that formatting might be bad.\n\npub fn gen_definitions(fix_dictionary: dict::Dictionary, settings: &Settings) -> String {\n\n let enums = fix_dictionary\n\n .iter_fields()\n\n .filter_map(|field| gen_enum_of_allowed_values(field, settings))\n\n .collect::<Vec<String>>()\n\n .join(\"\\n\\n\");\n\n let field_defs = fix_dictionary\n\n .iter_fields()\n\n .map(|field| gen_field_definition(fix_dictionary.clone(), field))\n\n .collect::<Vec<String>>()\n\n .join(\"\\n\");\n\n let top_comment =\n\n onixs_link_to_dictionary(fix_dictionary.get_version()).unwrap_or(String::new());\n\n let code = format!(\n\n indoc!(\n\n r#\"\n\n {notice}\n\n\n\n // {top_comment}\n\n\n", "file_path": "crates/fefix/src/fefix_core/codegen.rs", "rank": 57, "score": 103660.16510685757 }, { "content": "/// Generates the Rust code for a FIX field definition.\n\npub fn gen_field_definition(fix_dictionary: dict::Dictionary, field: dict::Field) -> String {\n\n let mut header = FnvHashSet::default();\n\n let mut trailer = FnvHashSet::default();\n\n for item in 
fix_dictionary\n\n .component_by_name(\"StandardHeader\")\n\n .unwrap()\n\n .items()\n\n {\n\n if let dict::LayoutItemKind::Field(f) = item.kind() {\n\n header.insert(f.tag());\n\n }\n\n }\n\n for item in fix_dictionary\n\n .component_by_name(\"StandardTrailer\")\n\n .unwrap()\n\n .items()\n\n {\n\n if let dict::LayoutItemKind::Field(f) = item.kind() {\n\n trailer.insert(f.tag());\n\n }\n\n }\n\n gen_field_definition_with_hashsets(fix_dictionary, &header, &trailer, field)\n\n}\n\n\n", "file_path": "crates/fefix/src/fefix_core/codegen.rs", "rank": 58, "score": 99711.43361852545 }, { "content": "/// Generates the Rust code for an `enum` that has variants that map 1:1 the\n\n/// available values for `field`.\n\npub fn gen_enum_of_allowed_values(field: dict::Field, settings: &Settings) -> Option<String> {\n\n let derives = settings.derives_for_allowed_values.join(\", \");\n\n let attributes = settings.attributes_for_allowed_values.join(\"\\n\");\n\n let variants = field\n\n .enums()?\n\n .map(|v| gen_enum_variant_of_allowed_value(v, settings))\n\n .collect::<Vec<String>>()\n\n .join(\"\\n\");\n\n Some(format!(\n\n indoc!(\n\n r#\"\n\n /// Field type variants for [`{field_name}`].\n\n #[derive({derives})]\n\n {attributes}\n\n pub enum {identifier} {{\n\n {variants}\n\n }}\"#\n\n ),\n\n field_name = field.name().to_camel_case(),\n\n derives = derives,\n\n attributes = attributes,\n\n identifier = field.name().to_camel_case(),\n\n variants = variants,\n\n ))\n\n}\n\n\n", "file_path": "crates/fefix/src/fefix_core/codegen.rs", "rank": 59, "score": 99711.43361852545 }, { "content": "#[allow(dead_code)]\n\npub fn decode_stop_bit_bitvec(input: &mut impl io::Read) -> io::Result<BitVec> {\n\n let mut bits = BitVec::new();\n\n let mut stop_bit = false;\n\n while !stop_bit {\n\n let mut buffer = [0u8; 1];\n\n input.read_exact(&mut buffer[..])?;\n\n let byte = buffer[0];\n\n stop_bit = byte >= STOP_BYTE;\n\n if !stop_bit {\n\n bits.push(byte >> 7 == 1);\n\n }\n\n bits.push((byte >> 6) & 1 == 1);\n\n bits.push((byte >> 5) & 1 == 1);\n\n bits.push((byte >> 4) & 1 == 1);\n\n bits.push((byte >> 4) & 1 == 1);\n\n bits.push((byte >> 3) & 1 == 1);\n\n bits.push((byte >> 2) & 1 == 1);\n\n bits.push((byte >> 1) & 1 == 1);\n\n bits.push((byte >> 0) & 1 == 1);\n\n }\n", "file_path": "crates/fefast/src/codec.rs", "rank": 60, "score": 98200.58026976857 }, { "content": "#[doc(hidden)]\n\npub fn indent_lines<'a>(lines: impl Iterator<Item = &'a str>, prefix: &str) -> String {\n\n lines.fold(String::new(), |mut s, line| {\n\n if line.contains(char::is_whitespace) {\n\n s.push_str(prefix);\n\n }\n\n s.push_str(line);\n\n s.push_str(\"\\n\");\n\n s\n\n })\n\n}\n\n\n", "file_path": "crates/fefix/src/fefix_core/codegen.rs", "rank": 61, "score": 96677.13009433044 }, { "content": "fn _serialize_bitvec(bits: &BitSlice<Msb0, u8>, output: &mut impl io::Write) -> io::Result<usize> {\n\n let significant_data_bits_per_byte = bits.chunks_exact(7);\n\n let mut i = 0;\n\n let remaineder = significant_data_bits_per_byte.remainder().load::<u8>();\n\n for significant_data_bits in significant_data_bits_per_byte {\n\n let byte = significant_data_bits.load::<u8>();\n\n if byte != 0 {\n\n output.write_all(&[byte])?;\n\n i += 1;\n\n }\n\n }\n\n if remaineder != 0 {\n\n output.write_all(&[STOP_BYTE | remaineder])?;\n\n }\n\n Ok(i)\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct PresenceMap {\n\n bits: BitVec,\n", "file_path": "crates/fefast/src/codec.rs", "rank": 62, "score": 94487.59842230569 }, { "content": "#[derive(Debug, Copy, Clone, 
PartialEq, Eq)]\n\nstruct Header {\n\n /// The length of the message payload, in bytes. This does *not* include the\n\n /// length of the header itself.\n\n pub nominal_message_length_in_bytes: usize,\n\n /// The \"encoding type\" of the message payload.\n\n pub encoding_type: u16,\n\n}\n\n\n\nimpl Header {\n\n const LENGTH_IN_BYTES: usize = 6;\n\n\n\n fn to_bytes(&self) -> [u8; Self::LENGTH_IN_BYTES] {\n\n let mut bytes = [0u8; Self::LENGTH_IN_BYTES];\n\n let (a, b) = bytes.split_at_mut(4);\n\n a.copy_from_slice(&(self.nominal_message_length_in_bytes as u32).to_be_bytes());\n\n b.copy_from_slice(&self.encoding_type.to_be_bytes());\n\n bytes\n\n }\n\n\n\n fn from_bytes(bytes: &[u8]) -> Result<Self, Error> {\n", "file_path": "crates/fesofh/src/lib.rs", "rank": 63, "score": 72519.49365722103 }, { "content": "#[derive(Debug, Clone)]\n\nstruct DecoderState {\n\n group_information: Vec<DecoderGroupState>,\n\n new_group: Option<DecoderStateNewGroup>,\n\n data_field_length: Option<usize>,\n\n}\n\n\n\nimpl DecoderState {\n\n fn current_field_locator(&self, tag: TagU16) -> FieldLocator {\n\n FieldLocator {\n\n tag,\n\n context: match self.group_information.last() {\n\n Some(group_info) => FieldLocatorContext::WithinGroup {\n\n index_of_group_tag: group_info.index_of_group_tag as u32,\n\n entry_index: group_info.current_entry_i as u32,\n\n },\n\n None => FieldLocatorContext::TopLevel,\n\n },\n\n }\n\n }\n\n\n", "file_path": "crates/fefix/src/tagvalue/decoder.rs", "rank": 64, "score": 70772.31277686602 }, { "content": "#[derive(Debug, Clone)]\n\nstruct HeaderInfo {\n\n field_0: Range<usize>,\n\n field_1: Range<usize>,\n\n nominal_body_len: usize,\n\n}\n\n\n\nimpl HeaderInfo {\n\n fn parse(data: &[u8], separator: u8) -> Option<Self> {\n\n let mut info = Self {\n\n field_0: 0..1,\n\n field_1: 0..1,\n\n nominal_body_len: 0,\n\n };\n\n\n\n let mut iterator = data.iter();\n\n let mut find_byte = |byte| iterator.position(|b| *b == byte);\n\n let mut i = 0;\n\n\n\n i += find_byte(b'=')? + 1;\n\n info.field_0.start = i;\n", "file_path": "crates/fefix/src/tagvalue/raw_decoder.rs", "rank": 65, "score": 69951.37048331942 }, { "content": "#[derive(Clone, Debug)]\n\nstruct MessageData {\n\n /// The unique integer identifier of this message type.\n\n component_id: u32,\n\n /// **Primary key**. 
The unique character identifier of this message\n\n /// type; used literally in FIX messages.\n\n msg_type: String,\n\n /// The name of this message type.\n\n name: String,\n\n /// Identifier of the category to which this message belongs.\n\n category_iid: InternalId,\n\n /// Identifier of the section to which this message belongs.\n\n section_id: String,\n\n layout_items: LayoutItems,\n\n /// The abbreviated name of this message, when used in an XML context.\n\n abbr_name: Option<String>,\n\n /// A boolean used to indicate if the message is to be generated as part\n\n /// of FIXML.\n\n required: bool,\n\n description: String,\n\n elaboration: Option<String>,\n", "file_path": "crates/fefix/src/fefix_core/dict.rs", "rank": 66, "score": 69951.37048331942 }, { "content": "#[derive(Clone, Debug)]\n\nstruct DictionaryData {\n\n version: String,\n\n symbol_table: SymbolTable,\n\n abbreviations: Vec<AbbreviationData>,\n\n data_types: Vec<DatatypeData>,\n\n fields: Vec<FieldData>,\n\n components: Vec<ComponentData>,\n\n messages: Vec<MessageData>,\n\n //layout_items: Vec<LayoutItemData>,\n\n categories: Vec<CategoryData>,\n\n header: Vec<FieldData>,\n\n}\n\n\n\nimpl fmt::Display for Dictionary {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n writeln!(f, \"<fix type='FIX' version='{}'>\", self.inner.version)?;\n\n {\n\n writeln!(f, \" <header>\")?;\n\n let std_header = self.component_by_name(\"StandardHeader\").unwrap();\n\n for item in std_header.items() {\n", "file_path": "crates/fefix/src/fefix_core/dict.rs", "rank": 67, "score": 69951.37048331942 }, { "content": "#[derive(Clone, Debug)]\n\nstruct ComponentData {\n\n /// **Primary key.** The unique integer identifier of this component\n\n /// type.\n\n id: usize,\n\n component_type: FixmlComponentAttributes,\n\n layout_items: Vec<LayoutItemData>,\n\n category_iid: InternalId,\n\n /// The human readable name of the component.\n\n name: String,\n\n /// The name for this component when used in an XML context.\n\n abbr_name: Option<String>,\n\n}\n\n\n\n/// A [`Component`] is an ordered collection of fields and/or other components.\n\n/// There are two kinds of components: (1) common blocks and (2) repeating\n\n/// groups. Common blocks are merely commonly reused sequences of the same\n\n/// fields/components\n\n/// which are given names for simplicity, i.e. they serve as \"macros\". Repeating\n\n/// groups, on the other hand, are components which can appear zero or more times\n\n/// inside FIX messages (or other components, for that matter).\n", "file_path": "crates/fefix/src/fefix_core/dict.rs", "rank": 68, "score": 69951.37048331942 }, { "content": "#[derive(Clone, Debug)]\n\nstruct FieldData {\n\n /// A human readable string representing the name of the field.\n\n name: String,\n\n /// **Primary key.** A positive integer representing the unique\n\n /// identifier for this field type.\n\n tag: u32,\n\n /// The datatype of the field.\n\n data_type_iid: InternalId,\n\n /// The associated data field. If given, this field represents the length of\n\n /// the referenced data field\n\n associated_data_tag: Option<usize>,\n\n value_restrictions: Option<Vec<FieldEnumData>>,\n\n /// Abbreviated form of the Name, typically to specify the element name when\n\n /// the field is used in an XML message. 
Can be overridden by BaseCategory /\n\n /// BaseCategoryAbbrName.\n\n abbr_name: Option<String>,\n\n /// Specifies the base message category when field is used in an XML message.\n\n base_category_id: Option<usize>,\n\n /// If BaseCategory is specified, this is the XML element identifier to use\n\n /// for this field, overriding AbbrName.\n\n base_category_abbr_name: Option<String>,\n\n /// Indicates whether the field is required in an XML message.\n\n required: bool,\n\n description: Option<String>,\n\n}\n\n\n", "file_path": "crates/fefix/src/fefix_core/dict.rs", "rank": 69, "score": 69951.37048331942 }, { "content": "#[derive(Clone, Debug)]\n\nstruct AbbreviationData {\n\n abbreviation: String,\n\n is_last: bool,\n\n}\n\n\n\n/// An [`Abbreviation`] is a standardized abbreviated form for a specific word,\n\n/// pattern, or name. Abbreviation data is mostly meant for documentation\n\n/// purposes, but in general it can have other uses as well, e.g. FIXML field\n\n/// naming.\n\n#[derive(Debug)]\n\npub struct Abbreviation<'a>(&'a Dictionary, &'a AbbreviationData);\n\n\n\nimpl<'a> Abbreviation<'a> {\n\n /// Returns the full term (non-abbreviated) associated with `self`.\n\n pub fn term(&self) -> &str {\n\n self.1.abbreviation.as_str()\n\n }\n\n}\n\n\n", "file_path": "crates/fefix/src/fefix_core/dict.rs", "rank": 70, "score": 69951.37048331942 }, { "content": "#[derive(Clone, Debug)]\n\nstruct CategoryData {\n\n /// **Primary key**. A string uniquely identifying this category.\n\n name: String,\n\n /// The FIXML file name for a Category.\n\n fixml_filename: String,\n\n}\n\n\n\n/// A [`Category`] is a collection of loosely related FIX messages or components\n\n/// all belonging to the same [`Section`].\n\n#[derive(Clone, Debug)]\n\npub struct Category<'a>(&'a Dictionary, &'a CategoryData);\n\n\n", "file_path": "crates/fefix/src/fefix_core/dict.rs", "rank": 71, "score": 69951.37048331942 }, { "content": "#[derive(Debug, Copy, Clone)]\n\nstruct DecoderGroupState {\n\n first_tag_of_every_group_entry: TagU16,\n\n num_entries: usize,\n\n current_entry_i: usize,\n\n index_of_group_tag: usize,\n\n}\n\n\n", "file_path": "crates/fefix/src/tagvalue/decoder.rs", "rank": 72, "score": 69951.32916596996 }, { "content": "#[derive(Clone, Debug, PartialEq)]\n\nstruct DatatypeData {\n\n /// **Primary key.** Identifier of the datatype.\n\n datatype: FixDatatype,\n\n /// Human readable description of this Datatype.\n\n description: String,\n\n /// A string that contains examples values for a datatype\n\n examples: Vec<String>,\n\n // TODO: 'XML'.\n\n}\n\n\n\n/// A FIX data type defined as part of a [`Dictionary`].\n\n#[derive(Debug)]\n\npub struct Datatype<'a>(&'a Dictionary, &'a DatatypeData);\n\n\n\nimpl<'a> Datatype<'a> {\n\n /// Returns the name of `self`. 
This is also guaranteed to be a valid Rust\n\n /// identifier.\n\n pub fn name(&self) -> &str {\n\n self.1.datatype.name()\n\n }\n", "file_path": "crates/fefix/src/fefix_core/dict.rs", "rank": 73, "score": 69951.28857258605 }, { "content": "struct DictionaryBuilder {\n\n version: String,\n\n symbol_table: FnvHashMap<Key, InternalId>,\n\n abbreviations: Vec<AbbreviationData>,\n\n data_types: Vec<DatatypeData>,\n\n fields: Vec<FieldData>,\n\n components: Vec<ComponentData>,\n\n messages: Vec<MessageData>,\n\n //layout_items: Vec<LayoutItemData>,\n\n categories: Vec<CategoryData>,\n\n header: Vec<FieldData>,\n\n}\n\n\n\nimpl DictionaryBuilder {\n\n pub fn new(version: String) -> Self {\n\n Self {\n\n version,\n\n symbol_table: FnvHashMap::default(),\n\n abbreviations: Vec::new(),\n\n data_types: Vec::new(),\n", "file_path": "crates/fefix/src/fefix_core/dict.rs", "rank": 74, "score": 69946.69576583819 }, { "content": "#[derive(Clone, Debug)]\n\nstruct LayoutItemData {\n\n required: bool,\n\n kind: LayoutItemKindData,\n\n}\n\n\n", "file_path": "crates/fefix/src/fefix_core/dict.rs", "rank": 75, "score": 69162.8076675839 }, { "content": "#[derive(Clone, Debug)]\n\nstruct FieldEnumData {\n\n value: String,\n\n description: String,\n\n}\n\n\n\n/// A limitation imposed on the value of a specific FIX [`Field`]. Also known as\n\n/// \"code set\".\n\n#[derive(Debug)]\n\npub struct FieldEnum<'a>(&'a Dictionary, &'a FieldEnumData);\n\n\n\nimpl<'a> FieldEnum<'a> {\n\n /// Returns the string representation of this field variant.\n\n pub fn value(&self) -> &str {\n\n &self.1.value[..]\n\n }\n\n\n\n /// Returns the documentation description for `self`.\n\n pub fn description(&self) -> &str {\n\n &self.1.description[..]\n\n }\n", "file_path": "crates/fefix/src/fefix_core/dict.rs", "rank": 76, "score": 69162.8076675839 }, { "content": "#[derive(Debug, Copy, Clone)]\n\nstruct DecoderStateNewGroup {\n\n tag: TagU16,\n\n index_of_group_tag: usize,\n\n num_entries: usize,\n\n}\n\n\n", "file_path": "crates/fefix/src/tagvalue/decoder.rs", "rank": 77, "score": 69162.76635023445 }, { "content": " trait EncoderStateAtTopLevel\n\n where\n\n Self: Sized,\n\n {\n\n fn encoder_mut(&mut self) -> &mut Encoder;\n\n\n\n /// Adds a `field` with a `value` to the current message.\n\n fn set<'a, T, F>(mut self, field: &F, value: T) -> Self\n\n where\n\n T: FixValue<'a>,\n\n F: IsFieldDefinition,\n\n {\n\n debug_assert!(field.name().is_ascii());\n\n let encoder = self.encoder_mut();\n\n encoder.buffer.extend_from_slice(br#\"\"\"#);\n\n field.name().as_bytes().serialize(&mut encoder.buffer);\n\n encoder.buffer.extend_from_slice(br#\"\":\"\"#);\n\n value.serialize(&mut encoder.buffer);\n\n encoder.buffer.extend_from_slice(br#\"\"\"#);\n\n self\n", "file_path": "crates/fefix/src/json/encoder.rs", "rank": 78, "score": 68936.79388613254 }, { "content": "#[derive(Deserialize, Serialize, Debug, Clone, Default)]\n\nstruct MessageInternal<'a> {\n\n #[serde(borrow, rename = \"Header\")]\n\n std_header: FieldMap<'a>,\n\n #[serde(borrow, rename = \"Body\")]\n\n body: FieldMap<'a>,\n\n #[serde(borrow, rename = \"Trailer\")]\n\n std_trailer: FieldMap<'a>,\n\n}\n\n\n\nimpl<'a> std::ops::Drop for MessageInternal<'a> {\n\n fn drop(&mut self) {\n\n self.clear();\n\n }\n\n}\n\n\n\nimpl<'a> MessageInternal<'a> {\n\n fn clear(&mut self) {\n\n self.std_header.clear();\n\n self.body.clear();\n\n self.std_trailer.clear();\n", "file_path": "crates/fefix/src/json/decoder.rs", "rank": 79, "score": 68679.34874950496 }, { "content": "#[derive(Debug, 
Clone)]\n\nstruct MessageBuilder<'a> {\n\n state: DecoderState,\n\n raw: &'a [u8],\n\n fields: HashMap<FieldLocator, (TagU16, &'a [u8], usize)>,\n\n field_locators: Vec<FieldLocator>,\n\n i_first_cell: usize,\n\n i_last_cell: usize,\n\n len_end_header: usize,\n\n len_end_body: usize,\n\n len_end_trailer: usize,\n\n bytes: &'a [u8],\n\n}\n\n\n\nimpl<'a> Default for MessageBuilder<'a> {\n\n fn default() -> Self {\n\n Self {\n\n state: DecoderState {\n\n group_information: Vec::new(),\n\n new_group: None,\n\n data_field_length: None,\n", "file_path": "crates/fefix/src/tagvalue/decoder.rs", "rank": 80, "score": 68674.81232884484 }, { "content": "struct ResponseData<'a> {\n\n pub begin_stringt: &'a [u8],\n\n pub msg_type: &'a [u8],\n\n pub msg_seq_num: u32,\n\n}\n\n\n", "file_path": "crates/fefix/src/session/connection.rs", "rank": 81, "score": 68670.1376113636 }, { "content": "#[derive(Debug, Clone, FromVariant)]\n\n#[darling(attributes(fefix))]\n\nstruct EnumVariantInfo {\n\n ident: syn::Ident,\n\n variant: String,\n\n}\n\n\n", "file_path": "crates/fefix_derive/src/derive_fix_value.rs", "rank": 82, "score": 68404.5847177393 }, { "content": "#[derive(Debug, Clone, FromDeriveInput)]\n\n#[darling(attributes(fefix))]\n\nstruct DataFieldWithVariants {\n\n ident: syn::Ident,\n\n data: darling::ast::Data<EnumVariantInfo, darling::util::Ignored>,\n\n}\n", "file_path": "crates/fefix_derive/src/derive_fix_value.rs", "rank": 83, "score": 68404.54618564606 }, { "content": "fn main() {\n\n let fix_dictionary = Dictionary::fix42();\n\n // Let's create a FIX decoder. This is an expensive operation, and it should\n\n // only be done once at the beginning of your program and/or FIX session.\n\n let mut fix_decoder = Decoder::<Config>::new(fix_dictionary);\n\n // In this case, the FIX message is specified using \"|\" rather than SOH\n\n // (ASCII 0x1) bytes. FerrumFIX supports this.\n\n fix_decoder.config_mut().set_separator(b'|');\n\n let msg = fix_decoder\n\n .decode(FIX_MESSAGE)\n\n .expect(\"Invalid FIX message\");\n\n\n\n // Read the FIX message! You get nice type inference out of the box. 
You\n\n // have fine-grained control over how to decode each field, even down to raw\n\n // bytes if you want full control.\n\n assert_eq!(msg.fv(fix42::BEGIN_STRING), Ok(b\"FIX.4.2\"));\n\n assert_eq!(msg.fv(fix42::MSG_TYPE), Ok(b\"X\"));\n\n assert_eq!(\n\n msg.fv(fix42::MSG_TYPE),\n\n Ok(fix42::MsgType::MarketDataIncrementalRefresh)\n", "file_path": "examples/00_decode_fix/src/main.rs", "rank": 84, "score": 67036.17579165811 }, { "content": "fn main() {\n\n let dictionary = fefix::Dictionary::fix42();\n\n let mut decoder = fefix::json::Decoder::<fefix::json::Config>::new(dictionary.clone());\n\n let mut encoder = fefix::tagvalue::Encoder::new(fefix::tagvalue::Config::default());\n\n let mut buffer = Vec::new();\n\n\n\n let json_msg = decoder.decode(JSON_FIX_MESSAGE.as_bytes()).unwrap();\n\n let msg_type = json_msg.fv(fix42::MSG_TYPE).unwrap();\n\n let begin_string = json_msg.fv(fix42::BEGIN_STRING).unwrap();\n\n\n\n let mut fix_msg_builder = encoder.start_message(begin_string, &mut buffer, msg_type);\n\n\n\n for (field_name, field_value) in json_msg.iter_fields() {\n\n let field = dictionary\n\n .field_by_name(field_name)\n\n .expect(\"Invalid FIX.4.2 field!\");\n\n\n\n match field_value {\n\n FieldOrGroup::Field(s) => {\n\n fix_msg_builder.set(field.tag(), s.as_ref());\n", "file_path": "examples/70_json_to_tagvalue/src/main.rs", "rank": 85, "score": 67036.17579165811 }, { "content": "fn main() {\n\n let mut decoder = fix_decoder();\n\n decoder.config_mut().set_separator(b'|');\n\n let msg = decoder\n\n .decode(FIX_MESSAGE_EXEC_REPORT)\n\n .expect(\"Invalid FIX message\");\n\n assert_eq!(msg.fv(gdax::BEGIN_STRING), Ok(gdax::BeginString::Fix42));\n\n assert_eq!(msg.fv(gdax::MSG_TYPE), Ok(gdax::MsgType::ExecutionReport));\n\n assert_eq!(msg.fv(gdax::TRADE_ID), Ok(\"123\"));\n\n assert_eq!(msg.fv(gdax::AGGRESSOR_INDICATOR), Ok(true));\n\n}\n\n\n", "file_path": "examples/05_coinbase_codegen/src/main.rs", "rank": 86, "score": 67036.17579165811 }, { "content": "fn main() {\n\n let fix_dictionary = Dictionary::fix42();\n\n // Let's create a FIX decoder. This is an expensive operation, and it should\n\n // only be done once at the beginning of your program and/or FIX session.\n\n let mut fix_decoder = Decoder::<Config>::new(fix_dictionary).buffered();\n\n // In this case, the FIX message is specified using \"|\" rather than SOH\n\n // (ASCII 0x1) bytes. 
FerrumFIX supports this.\n\n fix_decoder.config_mut().set_separator(b'|');\n\n let mut stream = Cursor::new(fix_stream());\n\n loop {\n\n let mut buffer = fix_decoder.supply_buffer();\n\n // You *must* use `std::io::Read::read_exact`.\n\n stream.read_exact(&mut buffer).unwrap();\n\n if let Ok(Some(())) = fix_decoder.parse() {\n\n let msg = fix_decoder.message();\n\n assert_eq!(msg.fv(fix42::BEGIN_STRING), Ok(\"FIX.4.2\"));\n\n }\n\n }\n\n}\n", "file_path": "examples/03_decode_fix_stream/src/main.rs", "rank": 87, "score": 66189.95977765997 }, { "content": "fn main() {}\n\n\n\n// FIXME\n\n//\n\n//use fefix::prelude::*;\n\n//use fefix::tagvalue::Decoder;\n\n//use slog::{debug, info, o, Logger};\n\n//use std::io;\n\n//use std::net::{Ipv4Addr, SocketAddrV4};\n\n//use std::ops::Range;\n\n//use tokio::net::TcpSocket;\n\n//use tokio_util::compat::{TokioAsyncReadCompatExt, TokioAsyncWriteCompatExt};\n\n//\n\n//const PORT: u16 = 0xF13;\n\n//\n\n//#[tokio::main]\n\n//async fn main() -> io::Result<()> {\n\n// let tcp_socket = TcpSocket::new_v4()?;\n\n// let socket_address = SocketAddrV4::new(Ipv4Addr::LOCALHOST, PORT);\n\n// let tcp_stream = tcp_socket.connect(socket_address.into()).await?;\n", "file_path": "examples/20_tokio_fix_initiator/src/main.rs", "rank": 88, "score": 66189.95977765997 }, { "content": "fn main() {\n\n // TODO\n\n}\n", "file_path": "examples/tls_fixua_acceptor/src/main.rs", "rank": 89, "score": 66189.95977765997 }, { "content": "fn main() {\n\n println!(\"Hello, world!\");\n\n}\n", "file_path": "tests/compile_full_features/src/main.rs", "rank": 90, "score": 66189.95977765997 }, { "content": "fn main() {\n\n let mut encoder = fix_encoder();\n\n let mut buffer = Vec::new();\n\n let mut msg = encoder.start_message(b\"FIX.4.4\", &mut buffer, b\"ExecutionReport\");\n\n msg.set(fix44::MSG_SEQ_NUM, 215);\n\n msg.set(fix44::SENDER_COMP_ID, \"CLIENT12\");\n\n msg.set(fix44::TARGET_COMP_ID, \"B\");\n\n msg.set(fix44::ACCOUNT, \"Marcel\");\n\n msg.set(fix44::CL_ORD_ID, \"13346\");\n\n msg.set(\n\n fix44::HANDL_INST,\n\n fix44::HandlInst::AutomatedExecutionOrderPrivateNoBrokerIntervention,\n\n );\n\n msg.set(fix44::ORD_TYPE, fix44::OrdType::Limit);\n\n msg.set(fix44::PRICE, dec!(150.08));\n\n msg.set(fix44::PRICE_DELTA, d128!(32.99));\n\n msg.set(fix44::SIDE, fix44::Side::Buy);\n\n msg.set(fix44::TIME_IN_FORCE, fix44::TimeInForce::Day);\n\n}\n\n\n", "file_path": "examples/10_encode_new_order_single/src/main.rs", "rank": 91, "score": 65377.79153709313 }, { "content": "fn gen_field_definition_with_hashsets(\n\n fix_dictionary: dict::Dictionary,\n\n header_tags: &FnvHashSet<TagU16>,\n\n trailer_tags: &FnvHashSet<TagU16>,\n\n field: dict::Field,\n\n) -> String {\n\n let name = field.name().to_shouty_snake_case();\n\n let tag = field.tag().to_string();\n\n let field_location = if header_tags.contains(&field.tag()) {\n\n \"Header\"\n\n } else if trailer_tags.contains(&field.tag()) {\n\n \"Trailer\"\n\n } else {\n\n \"Body\"\n\n };\n\n let doc_link = onixs_link_to_field(fix_dictionary.get_version(), field);\n\n let doc = if let Some(doc_link) = doc_link {\n\n format!(\n\n \"/// Field attributes for [`{} <{}>`]({}).\",\n\n name, tag, doc_link\n", "file_path": "crates/fefix/src/fefix_core/codegen.rs", "rank": 92, "score": 63847.696761586 }, { "content": "fn main() -> io::Result<()> {\n\n println!(\"cargo:rerun-if-changed=src/fefix_core\");\n\n #[cfg(feature = \"fix40\")]\n\n codegen(Dictionary::fix40(), \"fix40.rs\")?;\n\n #[cfg(feature = \"fix41\")]\n\n codegen(Dictionary::fix41(), 
\"fix41.rs\")?;\n\n #[cfg(feature = \"fix42\")]\n\n codegen(Dictionary::fix42(), \"fix42.rs\")?;\n\n #[cfg(feature = \"fix43\")]\n\n codegen(Dictionary::fix43(), \"fix43.rs\")?;\n\n // FIX 4.4 is always available.\n\n codegen(Dictionary::fix44(), \"fix44.rs\")?;\n\n #[cfg(feature = \"fix50\")]\n\n codegen(Dictionary::fix50(), \"fix50.rs\")?;\n\n #[cfg(feature = \"fix50sp1\")]\n\n codegen(Dictionary::fix50sp1(), \"fix50sp1.rs\")?;\n\n #[cfg(feature = \"fix50sp2\")]\n\n codegen(Dictionary::fix50sp2(), \"fix50sp2.rs\")?;\n\n #[cfg(feature = \"fixt11\")]\n\n codegen(Dictionary::fixt11(), \"fixt11.rs\")?;\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/fefix/build.rs", "rank": 93, "score": 63657.592332050146 }, { "content": "fn project_root() -> PathBuf {\n\n PathBuf::from(env!(\"CARGO_MANIFEST_DIR\"))\n\n}\n", "file_path": "tests/codegen_fix44/build.rs", "rank": 94, "score": 63570.234785582914 }, { "content": "fn project_root() -> PathBuf {\n\n PathBuf::from(env!(\"CARGO_MANIFEST_DIR\"))\n\n}\n", "file_path": "examples/05_coinbase_codegen/build.rs", "rank": 95, "score": 63570.234785582914 }, { "content": "fn gen_enum_variant_of_allowed_value(\n\n allowed_value: dict::FieldEnum,\n\n settings: &Settings,\n\n) -> String {\n\n let mut identifier = allowed_value.description().to_camel_case();\n\n let identifier_needs_prefix = !allowed_value\n\n .description()\n\n .chars()\n\n .next()\n\n .unwrap_or('_')\n\n .is_ascii_alphabetic();\n\n if identifier_needs_prefix {\n\n identifier = format!(\"_{}\", identifier);\n\n }\n\n let value_literal = allowed_value.value();\n\n indent_string(\n\n format!(\n\n indoc!(\n\n r#\"\n\n /// {doc}\n", "file_path": "crates/fefix/src/fefix_core/codegen.rs", "rank": 96, "score": 63126.19420489328 }, { "content": "fn main() -> io::Result<()> {\n\n let path = project_root().join(\"src\").join(\"generated_fix44.rs\");\n\n let mut file = File::create(path)?;\n\n let fix_dictionary = Dictionary::fix44();\n\n let rust_code = {\n\n let settings = fefix::codegen::Settings::default();\n\n fefix::codegen::gen_definitions(fix_dictionary, &settings)\n\n };\n\n file.write_all(rust_code.as_bytes())?;\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/codegen_fix44/build.rs", "rank": 97, "score": 62811.37631805202 }, { "content": "fn main() -> io::Result<()> {\n\n let fix_dictionary = coinbase_fix_dictionary()?;\n\n let rust_code = {\n\n let settings = codegen::Settings::default();\n\n codegen::gen_definitions(fix_dictionary, &settings)\n\n };\n\n let mut file = {\n\n let path = project_root().join(\"src\").join(\"gdax.rs\");\n\n File::create(path)?\n\n };\n\n file.write_all(rust_code.as_bytes())?;\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/05_coinbase_codegen/build.rs", "rank": 98, "score": 62811.37631805202 }, { "content": "fn main() -> io::Result<()> {\n\n let listener = net::TcpListener::bind(\"127.0.0.1:8080\")?;\n\n let reader = listener.accept()?.0;\n\n let decoder = SeqDecoder::default();\n\n let mut frames = decoder.read_frames(reader);\n\n while let Ok(frame) = frames.next() {\n\n if let Some(frame) = frame {\n\n let payload_clone = &frame.payload().to_vec()[..];\n\n let payload_utf8 = String::from_utf8_lossy(payload_clone);\n\n println!(\"Received message '{}'\", payload_utf8);\n\n } else {\n\n break;\n\n }\n\n }\n\n Ok(())\n\n}\n", "file_path": "examples/30_tcp_sofh/src/main.rs", "rank": 99, "score": 61999.20807748517 } ]
Rust
ivy-graphics/build.rs
ten3roberts/ivy
fb5a7645c9f699c2aebf3d1b90c1d1f9e78355fa
use anyhow::{Context, Result};
use shaderc::ShaderKind;
use std::{
    env,
    error::Error,
    ffi::OsString,
    fs,
    path::{Path, PathBuf},
    slice,
};

#[derive(Debug)]
struct CompilationFailure(PathBuf);

impl Error for CompilationFailure {}

impl std::fmt::Display for CompilationFailure {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "Failed to compile resource: {}", self.0.display())
    }
}

fn rerun_if_changed<P: AsRef<Path>>(path: P) {
    let path = path.as_ref();
    println!(
        "cargo:rerun-if-changed={}",
        path.canonicalize().unwrap().display()
    );
}

fn compile_dir<A, B, F, C>(src: A, dst: B, rename_func: F, compile_func: C) -> Result<()>
where
    A: AsRef<Path>,
    B: AsRef<Path>,
    F: Fn(&mut OsString),
    C: Fn(&Path, &Path) -> Result<()>,
{
    let src = src.as_ref();
    let dst = dst.as_ref();
    walkdir::WalkDir::new(src)
        .follow_links(true)
        .into_iter()
        .flat_map(Result::ok)
        .map(|entry| -> Result<Option<_>> {
            let path = entry.path();
            rerun_if_changed(path);
            let metadata = entry.metadata()?;
            if metadata.is_dir() {
                return Ok(None);
            }
            let mut fname = entry.file_name().to_os_string();
            rename_func(&mut fname);
            let base = path
                .strip_prefix(src)?
                .parent()
                .context("No parent for path")?;
            let mut dst_path = PathBuf::new();
            dst_path.push(dst);
            dst_path.push(base);
            fs::create_dir_all(&dst_path)?;
            dst_path.push(fname);
            let dst_metadata = dst_path.metadata().ok();
            if let Some(dst_metadata) = dst_metadata {
                if dst_metadata.modified()? > metadata.modified()? {
                    return Ok(None);
                }
            }
            eprintln!("{:?} => {:?}", path, dst_path);
            compile_func(path, &dst_path)
                .with_context(|| format!("Failed to compile {:?}", path))?;
            Ok(Some(()))
        })
        .flat_map(|val| val.transpose())
        .collect()
}

fn glslc(src: &Path, dst: &Path) -> Result<()> {
    let mut compiler = shaderc::Compiler::new().unwrap();
    let mut options = shaderc::CompileOptions::new().unwrap();
    let source = fs::read_to_string(src)?;
    let ext = src.extension().unwrap_or_default();
    let kind = match ext.to_string_lossy().as_ref() {
        "vert" => ShaderKind::Vertex,
        "frag" => ShaderKind::Fragment,
        "geom" => ShaderKind::Geometry,
        "comp" => ShaderKind::Compute,
        _ => ShaderKind::InferFromSource,
    };
    options.add_macro_definition("EP", Some("main"));
    let binary_result = compiler.compile_into_spirv(
        &source,
        kind,
        &src.to_string_lossy(),
        "main",
        Some(&options),
    )?;
    assert_eq!(Some(&0x07230203), binary_result.as_binary().first());
    let bin = binary_result.as_binary();
    let data = bin.as_ptr() as *const u8;
    let bin = unsafe { slice::from_raw_parts(data, bin.len() * 4) };
    fs::write(dst, bin)?;
    Ok(())
}

fn main() -> Result<()> {
    let out_dir = env::var("OUT_DIR")?;
    let mut dst = PathBuf::new();
    dst.push(out_dir);
    dst.push("shaders");
    compile_dir(
        "./shaders/",
        &dst,
        |path| path.push(".spv"),
        |src, dst| glslc(src, dst),
    )?;
    Ok(())
}
use anyhow::{Context, Result}; use shaderc::ShaderKind; use std::{ env, error::Error, ffi::OsString, fs, path::{Path, PathBuf}, slice, }; #[derive(Debug)] struct CompilationFailure(PathBuf); impl Error for CompilationFailure {} impl std::fmt::Display for CompilationFailure { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "Failed to compile resource: {}", self.0.display()) } } fn rerun_if_changed<P: AsRef<Path>>(path: P) { let path = path.as_ref(); println!( "cargo:rerun-if-changed={}", path.canonicalize().unwrap().display() ); } fn compile_dir<A, B, F, C>(src: A, dst: B, rename_func: F, compile_func: C) -> Result<()> where A: AsRef<Path>, B: AsRef<Path>, F: Fn(&mut OsString), C: Fn(&Path, &Path) -> Result<()>, { let src = src.as_ref(); let dst = dst.as_ref(); walkdir::WalkDir::new(src) .follow_links(true) .into_iter() .flat_map(Result::ok) .map(|entry| -> Result<Option<_>> { let path = entry.path(); rerun_if_changed(path); let metadata = entry.metadata()?; if metadata.is_dir() { return Ok(None); } let mut fname = entry.file_name().to_os_string(); rename_func(&mut fname); let base = path .strip_prefix(src)? .parent() .context("No parent for path")?; let mut dst_path = PathBuf::new(); dst_path.push(dst); dst_path.push(base); fs::create_dir_all(&dst_path)?; dst_path.push(fname); let dst_metadata = dst_path.metadata().ok(); if let Some(dst_metadata) = dst_metadata { if dst_metadata.modified()? > metadata.modified()? { return Ok(None); } } eprintln!("{:?} => {:?}", path, dst_path); compile_func(path, &dst_path) .with_context(|| format!("Failed to compile {:?}", path))?; Ok(Some(())) }) .flat_map(|val| val.transpose()) .collect() } fn glslc(src: &Path, dst: &Path) -> Result<()> { let mut compiler = shaderc::Compiler::new().unwrap(); let mut options = shaderc::CompileOptions::new().unwrap(); let source = fs::read_to_string(src)?; let ext = src.extension().unwrap_or_default(); let kind = match ext.to_string_lossy().as_ref() { "vert" => ShaderKind::Vertex, "frag" => ShaderKind::Fragment, "geom" => ShaderKind::Geometry, "comp" => ShaderKind::Compute, _ => ShaderKind::InferFromSource, }; options.add_macro_definition("EP", Some("main")); let binary_result = compiler.compile_into_spirv( &source, kind, &src.to_str
; let bin = binary_result.as_binary(); let data = bin.as_ptr() as *const u8; let bin = unsafe { slice::from_raw_parts(data, bin.len() * 4) }; fs::write(dst, bin)?; Ok(()) } fn main() -> Result<()> { let out_dir = env::var("OUT_DIR")?; let mut dst = PathBuf::new(); dst.push(out_dir); dst.push("shaders"); compile_dir( "./shaders/", &dst, |path| path.push(".spv"), |src, dst| glslc(src, dst), )?; Ok(()) }
ing_lossy(), "main", Some(&options), )?; assert_eq!(Some(&0x07230203), binary_result.as_binary().first())
random
[ { "content": "fn read_to_end<P>(path: P) -> Result<Vec<u8>>\n\nwhere\n\n P: AsRef<Path>,\n\n{\n\n let file = fs::File::open(path.as_ref()).map_err(Error::Io)?;\n\n // Allocate one extra byte so the buffer doesn't need to grow before the\n\n // final `read` call at the end of the file. Don't worry about `usize`\n\n // overflow because reading will fail regardless in that case.\n\n let length = file.metadata().map(|x| x.len() + 1).unwrap_or(0);\n\n let mut reader = io::BufReader::new(file);\n\n let mut data = Vec::with_capacity(length as usize);\n\n reader.read_to_end(&mut data).map_err(Error::Io)?;\n\n Ok(data)\n\n}\n", "file_path": "ivy-graphics/src/document/scheme.rs", "rank": 2, "score": 306350.41336657805 }, { "content": "/// Recursively draw the connection tree using gizmos\n\npub fn draw_connections(world: &impl GenericWorld, gizmos: &mut Gizmos) -> Result<()> {\n\n world\n\n .roots::<Connection>()?\n\n .into_iter()\n\n .try_for_each(|root| draw_subtree(world, root.0, gizmos))\n\n}\n\n\n", "file_path": "ivy-physics/src/connections/systems.rs", "rank": 3, "score": 265705.59674573527 }, { "content": "fn draw_subtree(world: &impl GenericWorld, root: Entity, gizmos: &mut Gizmos) -> Result<()> {\n\n let parent_pos = world.try_get::<Position>(root)?;\n\n\n\n world\n\n .children::<Connection>(root)\n\n .try_for_each(|child| -> Result<()> {\n\n let mut query = world.try_query_one::<(&Position, &ConnectionKind)>(child)?;\n\n let (pos, kind) = query\n\n .get()\n\n .expect(\"Failed to execute query in draw_connections\");\n\n\n\n let color = match kind {\n\n ConnectionKind::Rigid => Color::green(),\n\n ConnectionKind::Spring {\n\n strength: _,\n\n dampening: _,\n\n } => Color::red(),\n\n };\n\n\n\n gizmos.draw(ivy_base::Gizmo::Line {\n", "file_path": "ivy-physics/src/connections/systems.rs", "rank": 4, "score": 258632.00851810916 }, { "content": "fn setup_graphics(world: &mut World, resources: &Resources) -> anyhow::Result<Assets> {\n\n let pbr = presets::PBRRendering::setup(\n\n world,\n\n resources,\n\n PBRInfo {\n\n max_lights: 5,\n\n env_data: DefaultEnvData {\n\n ambient_radiance: Vec3::ONE * 0.01,\n\n fog_density: 0.05,\n\n fog_color: Vec3::new(0.0, 0.0, 0.0),\n\n fog_gradient: 2.0,\n\n },\n\n },\n\n FRAMES_IN_FLIGHT,\n\n )?;\n\n\n\n pbr.setup_pipelines(resources, presets::PipelinesInfo::default())?;\n\n\n\n Ok(Assets {\n\n geometry_pass: resources.default()?,\n\n text_pass: resources.default()?,\n\n ui_pass: resources.default()?,\n\n })\n\n}\n\n\n", "file_path": "examples/vulkan/main.rs", "rank": 6, "score": 239631.9058250912 }, { "content": "fn setup_ui(world: &mut World, resources: &Resources, assets: &Assets) -> anyhow::Result<()> {\n\n let canvas = world\n\n .query::<&Canvas>()\n\n .iter()\n\n .next()\n\n .ok_or(anyhow!(\"Missing canvas\"))?\n\n .0;\n\n\n\n let heart: Handle<Image> = resources.load(ImageInfo {\n\n texture: \"./res/textures/heart.png\".into(),\n\n sampler: SamplerInfo::pixelated(),\n\n })??;\n\n\n\n let input_field: Handle<Image> = resources.load(ImageInfo {\n\n texture: \"./res/textures/field.png\".into(),\n\n sampler: SamplerInfo::pixelated(),\n\n })??;\n\n\n\n let font: Handle<Font> = resources.load(FontInfo {\n\n size: 48.0,\n", "file_path": "examples/vulkan/main.rs", "rank": 7, "score": 233874.33574166326 }, { "content": "fn update_subtree(world: &impl GenericWorld, root: Entity) -> Result<()> {\n\n let mut query = world.try_query_one::<(TransformQuery, Option<RbQuery>)>(root)?;\n\n\n\n if let Ok((parent_trans, rb)) = query.get() {\n\n let parent_trans = 
parent_trans.into_owned();\n\n let mut parent_rb = rb.map(|val| RbBundle {\n\n vel: *val.vel,\n\n mass: *val.mass,\n\n ang_mass: *val.ang_mass,\n\n ang_vel: *val.ang_vel,\n\n resitution: *val.resitution,\n\n effector: Effector::new(),\n\n });\n\n\n\n drop(query);\n\n\n\n world\n\n .children::<Connection>(root)\n\n .try_for_each(|child| -> Result<_> {\n\n let mut fixed = world\n", "file_path": "ivy-physics/src/connections/systems.rs", "rank": 8, "score": 216919.78891827876 }, { "content": "/// Returns the first widget that intersects the postiion\n\nfn intersect_widget(world: &impl GenericWorld, point: Position2D) -> Option<Entity> {\n\n world\n\n .try_query::<(&Position2D, &Size2D, &WidgetDepth, &Visible)>()\n\n .unwrap()\n\n .with::<Interactive>()\n\n .iter()\n\n .filter_map(|(e, (pos, size, depth, visible))| {\n\n if visible.is_visible() && box_intersection(*pos, *size, *point) {\n\n Some((e, depth))\n\n } else {\n\n None\n\n }\n\n })\n\n .max_by_key(|(_, depth)| *depth)\n\n .map(|(a, _)| a)\n\n}\n\n\n", "file_path": "ivy-ui/src/systems.rs", "rank": 9, "score": 211078.31628235395 }, { "content": "// Set directory to nth parent of current executable\n\npub fn normalize_dir(nth: usize) -> anyhow::Result<()> {\n\n let current_exe = env::current_exe()?\n\n .canonicalize()\n\n .context(\"Failed to canonicalize current exe\")?;\n\n\n\n let dir = (0..nth + 1)\n\n .fold(Some(current_exe.as_path()), |acc, _| {\n\n acc.and_then(|val| val.parent())\n\n })\n\n .context(\"Failed to get parent dir of executable\")?;\n\n\n\n env::set_current_dir(dir).context(\"Failed to set current directory\")?;\n\n\n\n Ok(())\n\n}\n", "file_path": "ivy-base/src/dir.rs", "rank": 11, "score": 203453.44332357473 }, { "content": "/// Returns the root of the rigid system, along with its mass\n\npub fn get_rigid_root(world: &impl GenericWorld, child: Entity) -> Result<(Entity, Mass)> {\n\n let mut system_mass = match world.try_get::<Mass>(child) {\n\n Ok(mass) => *mass,\n\n Err(_) => {\n\n panic!(\"No mass in leaf\");\n\n }\n\n };\n\n\n\n let mut root = child;\n\n\n\n for val in world.ancestors::<Connection>(child) {\n\n root = val;\n\n system_mass += match world.try_get::<Mass>(val) {\n\n Ok(mass) => *mass,\n\n Err(_) => break,\n\n };\n\n\n\n match *world.try_get::<ConnectionKind>(child)? 
{\n\n ConnectionKind::Rigid => {}\n\n ConnectionKind::Spring {\n\n strength: _,\n\n dampening: _,\n\n } => break,\n\n };\n\n }\n\n\n\n Ok((root, system_mass))\n\n}\n\n\n", "file_path": "ivy-physics/src/systems.rs", "rank": 12, "score": 199475.20139891744 }, { "content": "/// Returns a vector of missing layers\n\nfn get_missing_layers(entry: &Entry, layers: &[CString]) -> Result<Vec<CString>> {\n\n let available = entry.enumerate_instance_layer_properties()?;\n\n\n\n Ok(layers\n\n .iter()\n\n .filter(|ext| {\n\n available\n\n .iter()\n\n .all(|avail| unsafe { CStr::from_ptr(avail.layer_name.as_ptr()) == ext.as_c_str() })\n\n })\n\n .cloned()\n\n .collect())\n\n}\n", "file_path": "ivy-vulkan/src/instance.rs", "rank": 13, "score": 197836.84815531422 }, { "content": "/// Returns a vector of missing extensions\n\nfn get_missing_extensions(entry: &Entry, extensions: &[CString]) -> Result<Vec<CString>> {\n\n let available = entry.enumerate_instance_extension_properties()?;\n\n\n\n Ok(extensions\n\n .iter()\n\n .filter(|ext| {\n\n available.iter().all(|avail| unsafe {\n\n CStr::from_ptr(avail.extension_name.as_ptr()) == ext.as_c_str()\n\n })\n\n })\n\n .cloned()\n\n .collect())\n\n}\n\n\n", "file_path": "ivy-vulkan/src/instance.rs", "rank": 14, "score": 197836.84815531422 }, { "content": "fn main() -> anyhow::Result<()> {\n\n // Setup logging\n\n Logger {\n\n show_location: false,\n\n max_level: LevelFilter::Debug,\n\n }\n\n .install();\n\n\n\n let mut app = App::builder().push_layer(SandboxLayer::new).build();\n\n\n\n app.run()\n\n}\n\n\n", "file_path": "examples/sandbox/src/main.rs", "rank": 15, "score": 180367.08446715915 }, { "content": "pub fn epa<F: Fn(Vec3) -> SupportPoint>(support_func: F, simplex: Simplex) -> Contact {\n\n assert_eq!(simplex.points().len(), 4);\n\n let mut polytype = Polytype::new(\n\n simplex.points(),\n\n &[0, 1, 2, 0, 3, 1, 0, 2, 3, 1, 3, 2],\n\n Face::new,\n\n );\n\n\n\n let mut iterations = 0;\n\n loop {\n\n let (_, min) = match polytype.find_closest_face() {\n\n Some(val) => val,\n\n None => {\n\n // eprintln!(\"The two shapes are the same\");\n\n let p = support_func(Vec3::X);\n\n return Contact {\n\n points: ContactPoints::double(p.a, p.b),\n\n depth: p.support.length(),\n\n normal: p.support.normalize(),\n\n };\n", "file_path": "ivy-collision/src/epa/epa3d.rs", "rank": 16, "score": 179575.42812336076 }, { "content": "pub fn create(entry: &Entry, instance: &Instance) -> Result<(DebugUtils, DebugUtilsMessengerEXT)> {\n\n let debug_utils = DebugUtils::new(entry, instance);\n\n\n\n let create_info = vk::DebugUtilsMessengerCreateInfoEXT::builder()\n\n .message_severity(\n\n vk::DebugUtilsMessageSeverityFlagsEXT::ERROR\n\n | vk::DebugUtilsMessageSeverityFlagsEXT::INFO\n\n | vk::DebugUtilsMessageSeverityFlagsEXT::WARNING\n\n | vk::DebugUtilsMessageSeverityFlagsEXT::VERBOSE,\n\n )\n\n .message_type(\n\n vk::DebugUtilsMessageTypeFlagsEXT::GENERAL\n\n | vk::DebugUtilsMessageTypeFlagsEXT::VALIDATION\n\n | vk::DebugUtilsMessageTypeFlagsEXT::PERFORMANCE,\n\n )\n\n .pfn_user_callback(Some(debug_callback));\n\n\n\n let messenger = unsafe { debug_utils.create_debug_utils_messenger(&create_info, None)? 
};\n\n Ok((debug_utils, messenger))\n\n}\n\n\n", "file_path": "ivy-vulkan/src/debug_utils.rs", "rank": 17, "score": 179223.70513534747 }, { "content": "pub fn write<B>(\n\n device: &Device,\n\n descriptor_set: vk::DescriptorSet,\n\n buffer: B,\n\n texture: &Texture,\n\n sampler: &Sampler,\n\n) where\n\n B: AsRef<vk::Buffer>,\n\n{\n\n let buffer_info = vk::DescriptorBufferInfo {\n\n buffer: *buffer.as_ref(),\n\n offset: 0,\n\n range: vk::WHOLE_SIZE,\n\n };\n\n\n\n let image_info = vk::DescriptorImageInfo {\n\n sampler: sampler.sampler(),\n\n image_view: texture.image_view(),\n\n image_layout: vk::ImageLayout::SHADER_READ_ONLY_OPTIMAL,\n\n };\n", "file_path": "ivy-vulkan/src/descriptors/mod.rs", "rank": 18, "score": 178595.39423116297 }, { "content": "pub fn epa_ray<F: Fn(Vec3) -> SupportPoint>(\n\n support_func: F,\n\n simplex: Simplex,\n\n ray: &Ray,\n\n) -> Contact {\n\n let mut polytype =\n\n Polytype::from_simplex(&simplex, |a, b| Face::new_ray(a, b, ray, Vec3::ZERO));\n\n\n\n let mut iterations = 0;\n\n loop {\n\n // Find the face closest to the ray\n\n let (_index, max_face) = match polytype.find_furthest_face() {\n\n Some(val) => val,\n\n None => {\n\n unreachable!(\"No intersecting faces\");\n\n }\n\n };\n\n\n\n // Search in the normal of the face pointing against the ray\n\n\n", "file_path": "ivy-collision/src/epa/epa_ray.rs", "rank": 19, "score": 176079.09121762819 }, { "content": "pub fn create() -> Result<Entry> {\n\n unsafe { Entry::load().map_err(|_| Error::LibLoading) }\n\n}\n", "file_path": "ivy-vulkan/src/entry.rs", "rank": 20, "score": 173874.3356882327 }, { "content": "/// Updates all UI trees and applies constraints.\n\n/// Also updates canvas cameras.\n\npub fn update(world: &World) -> Result<()> {\n\n world.roots::<Widget>()?.iter().try_for_each(|(root, _)| {\n\n apply_constraints(\n\n world,\n\n root,\n\n Position2D::default(),\n\n Size2D::new(1.0, 1.0),\n\n true,\n\n )?;\n\n\n\n if world.get::<Canvas>(root).is_ok() {\n\n update_canvas(world, root)?;\n\n }\n\n\n\n update_from(world, root, 1)\n\n })\n\n}\n\n\n\npub(crate) fn update_from(world: &impl GenericWorld, parent: Entity, depth: u32) -> Result<()> {\n\n let mut query =\n", "file_path": "ivy-ui/src/systems.rs", "rank": 21, "score": 168026.8843241603 }, { "content": "pub fn wait_idle(device: &Device) -> Result<()> {\n\n // log::debug!(\"Device wait idle\");\n\n unsafe { device.device_wait_idle()? 
}\n\n Ok(())\n\n}\n\n\n", "file_path": "ivy-vulkan/src/device.rs", "rank": 22, "score": 165906.59526853036 }, { "content": "/// Performs a gjk intersection test.\n\n/// Returns true if the shapes intersect.\n\npub fn gjk<A: CollisionPrimitive, B: CollisionPrimitive>(\n\n a_transform: &Mat4,\n\n b_transform: &Mat4,\n\n a_transform_inv: &Mat4,\n\n b_transform_inv: &Mat4,\n\n a_coll: &A,\n\n b_coll: &B,\n\n) -> (bool, Simplex) {\n\n // Get first support function in direction of separation\n\n // let dir = (a_pos - b_pos).normalized();\n\n let dir = Vec3::X;\n\n let a = minkowski_diff(\n\n a_transform,\n\n b_transform,\n\n a_transform_inv,\n\n b_transform_inv,\n\n a_coll,\n\n b_coll,\n\n dir,\n\n );\n", "file_path": "ivy-collision/src/gjk.rs", "rank": 23, "score": 161105.61126664357 }, { "content": "pub fn intersect<A: CollisionPrimitive, B: CollisionPrimitive>(\n\n a_transform: &Mat4,\n\n b_transform: &Mat4,\n\n a: &A,\n\n b: &B,\n\n) -> Option<Contact> {\n\n let a_transform_inv = a_transform.inverse();\n\n let b_transform_inv = b_transform.inverse();\n\n\n\n let (intersect, simplex) = gjk(\n\n a_transform,\n\n b_transform,\n\n &a_transform_inv,\n\n &b_transform_inv,\n\n a,\n\n b,\n\n );\n\n\n\n if intersect {\n\n Some(epa(\n", "file_path": "ivy-collision/src/collision.rs", "rank": 24, "score": 161100.50330231444 }, { "content": "pub fn init() -> Result<Arc<RwLock<Glfw>>> {\n\n Ok(Arc::new(RwLock::new(glfw::init(glfw::FAIL_ON_ERRORS)?)))\n\n}\n\n\n\nimpl Window {\n\n pub fn new(\n\n glfw: Arc<RwLock<Glfw>>,\n\n info: WindowInfo,\n\n ) -> Result<(Window, Receiver<(f64, WindowEvent)>)> {\n\n let mut glfw_mut = glfw.write();\n\n glfw_mut.window_hint(WindowHint::ClientApi(ClientApiHint::NoApi));\n\n\n\n glfw_mut.window_hint(WindowHint::Resizable(info.resizable));\n\n\n\n let (mut window, events) = match info.mode {\n\n WindowMode::Windowed(extent) => glfw_mut\n\n .create_window(\n\n extent.width,\n\n extent.height,\n\n info.title.as_ref(),\n", "file_path": "ivy-window/src/lib.rs", "rank": 25, "score": 160670.22337010817 }, { "content": "#[inline]\n\npub fn minkowski_diff<A: CollisionPrimitive, B: CollisionPrimitive>(\n\n a_transform: &Mat4,\n\n b_transform: &Mat4,\n\n a_transform_inv: &Mat4,\n\n b_transform_inv: &Mat4,\n\n a_coll: &A,\n\n b_coll: &B,\n\n dir: Vec3,\n\n) -> SupportPoint {\n\n let a = support(a_transform, a_transform_inv, a_coll, dir);\n\n let b = support(b_transform, b_transform_inv, b_coll, -dir);\n\n\n\n SupportPoint {\n\n support: *a - *b,\n\n a,\n\n b,\n\n }\n\n}\n\n\n", "file_path": "ivy-collision/src/util.rs", "rank": 27, "score": 159125.59291156137 }, { "content": "pub fn reset(device: &Device, fences: &[Fence]) -> Result<()> {\n\n unsafe { device.reset_fences(fences)? }\n\n Ok(())\n\n}\n\n\n", "file_path": "ivy-vulkan/src/fence.rs", "rank": 28, "score": 157854.00364901917 }, { "content": "pub fn create(device: &Device) -> Result<vk::Semaphore> {\n\n let create_info = vk::SemaphoreCreateInfo {\n\n s_type: vk::StructureType::SEMAPHORE_CREATE_INFO,\n\n p_next: std::ptr::null(),\n\n flags: vk::SemaphoreCreateFlags::default(),\n\n };\n\n\n\n let semaphore = unsafe { device.create_semaphore(&create_info, None)? 
};\n\n Ok(semaphore)\n\n}\n\n\n", "file_path": "ivy-vulkan/src/semaphore.rs", "rank": 29, "score": 157854.00364901917 }, { "content": "pub fn wrap_around_system(world: SubWorld<&mut Position>) {\n\n world.native_query().iter().for_each(|(_, pos)| {\n\n if pos.y < -100.0 {\n\n pos.y = 100.0\n\n }\n\n });\n\n}\n\n\n", "file_path": "ivy-physics/src/systems.rs", "rank": 30, "score": 157111.1950155741 }, { "content": "/// Updates the canvas view and projection\n\npub fn update_canvas(world: &World, canvas: Entity) -> Result<()> {\n\n let mut camera_query = world.try_query_one::<(&mut Camera, &Size2D, &Position2D)>(canvas)?;\n\n\n\n let (camera, size, position) = camera_query.get()?;\n\n\n\n camera.set_orthographic(size.x * 2.0, size.y * 2.0, 0.0, 100.0);\n\n camera.set_view(Mat4::from_translation(-position.extend(0.0)));\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "ivy-ui/src/systems.rs", "rank": 31, "score": 155880.78001634617 }, { "content": "fn main() -> anyhow::Result<()> {\n\n // Install a default logger\n\n Logger::default().install();\n\n\n\n // Create a simple app\n\n let result = App::builder()\n\n .push_layer(|_, _, _| GameLayer::new())\n\n .build()\n\n .run();\n\n\n\n // Pretty print results\n\n match &result {\n\n Ok(()) => {}\n\n Err(val) => error!(\"Error: {}\", val),\n\n }\n\n\n\n result\n\n}\n", "file_path": "examples/layer.rs", "rank": 32, "score": 155574.91730506532 }, { "content": "fn main() -> anyhow::Result<()> {\n\n let window = WindowLayerInfo {\n\n swapchain: SwapchainInfo {\n\n present_mode: PresentModeKHR::FIFO,\n\n ..Default::default()\n\n },\n\n ..Default::default()\n\n };\n\n\n\n App::builder()\n\n .try_push_layer(|_, r, _| WindowLayer::new(r, window))?\n\n .try_push_layer(GameLayer::new)?\n\n .try_push_layer(|w, r, e| GraphicsLayer::new(w, r, e, FRAMES_IN_FLIGHT))?\n\n .try_push_layer(|w, r, e| {\n\n PhysicsLayer::new(\n\n w,\n\n r,\n\n e,\n\n physics::PhysicsLayerInfo {\n\n gravity: Vec3::ZERO.into(),\n\n tree_root: (), // Disable collisions\n\n debug: false,\n\n },\n\n )\n\n })?\n\n .build()\n\n .run()\n\n}\n", "file_path": "examples/rendergraph.rs", "rank": 33, "score": 155574.91730506532 }, { "content": "struct Subscriber<T> {\n\n sender: Box<dyn EventSender<T> + Send>,\n\n filter: fn(&T) -> bool,\n\n}\n\n\n\nimpl<T: Event> Subscriber<T> {\n\n pub fn new<S>(sender: S, filter: fn(&T) -> bool) -> Self\n\n where\n\n S: 'static + EventSender<T> + Send,\n\n {\n\n Self {\n\n sender: Box::new(sender),\n\n filter,\n\n }\n\n }\n\n pub fn send(&self, event: T) -> bool {\n\n self.sender.send(event)\n\n }\n\n}\n\n\n", "file_path": "ivy-base/src/events/dispatcher.rs", "rank": 34, "score": 153737.63810418182 }, { "content": "fn main() -> anyhow::Result<()> {\n\n // Open a simple window for input events\n\n let window = WindowLayerInfo {\n\n window: WindowInfo {\n\n title: \"Layer\".into(),\n\n ..Default::default()\n\n },\n\n swapchain: SwapchainInfo::default(),\n\n };\n\n\n\n Logger::default().install();\n\n\n\n let result = App::builder()\n\n .try_push_layer(|_, r, _| WindowLayer::new(r, window))?\n\n .push_layer(GameLayer::new)\n\n .build()\n\n .run();\n\n\n\n // Pretty pritn result\n\n match &result {\n\n Ok(()) => {}\n\n Err(val) => error!(\"Error: {}\", val),\n\n }\n\n\n\n result\n\n}\n", "file_path": "examples/layers_and_events.rs", "rank": 35, "score": 153685.81445336965 }, { "content": "fn main() -> anyhow::Result<()> {\n\n Logger {\n\n show_location: true,\n\n max_level: LevelFilter::Debug,\n\n }\n\n .install();\n\n\n\n // Go up three levels\n\n 
ivy_base::normalize_dir(3)?;\n\n\n\n let window = WindowInfo {\n\n resizable: false,\n\n mode: WindowMode::Fullscreen,\n\n ..Default::default()\n\n };\n\n\n\n let swapchain = SwapchainInfo {\n\n present_mode: PresentModeKHR::IMMEDIATE,\n\n image_count: FRAMES_IN_FLIGHT as u32 + 1,\n\n ..Default::default()\n", "file_path": "examples/vulkan/main.rs", "rank": 36, "score": 153685.81445336965 }, { "content": "// Generates a random scalar between -1 and 1\n\npub fn one<R: Rng>(rng: &mut R) -> f32 {\n\n rng.gen_range(-1.0..=1.0)\n\n}\n", "file_path": "ivy-random/src/scalar.rs", "rank": 37, "score": 153670.07667708118 }, { "content": "// Generates a random scalar between 0 and 1\n\npub fn normalized<R: Rng>(rng: &mut R) -> f32 {\n\n rng.gen_range(0.0..=1.0)\n\n}\n\n\n", "file_path": "ivy-random/src/scalar.rs", "rank": 38, "score": 153670.07667708118 }, { "content": "pub fn create(device: &Device, signaled: bool) -> Result<Fence> {\n\n let create_info = vk::FenceCreateInfo {\n\n s_type: vk::StructureType::FENCE_CREATE_INFO,\n\n p_next: std::ptr::null(),\n\n flags: if signaled {\n\n vk::FenceCreateFlags::SIGNALED\n\n } else {\n\n vk::FenceCreateFlags::default()\n\n },\n\n };\n\n\n\n let fence = unsafe { device.create_fence(&create_info, None)? };\n\n Ok(fence)\n\n}\n\n\n", "file_path": "ivy-vulkan/src/fence.rs", "rank": 39, "score": 153377.3914249895 }, { "content": "/// Returns an optional intersection between a triangle and a ray\n\npub fn triangle_ray(points: &[Vec3], ray: &Ray) -> Option<Vec3> {\n\n let [a, b, c] = [points[0], points[1], points[2]];\n\n\n\n let ab = b - a;\n\n let ac = c - a;\n\n let a0 = -a;\n\n\n\n let ab = project_plane(ab, ray.dir());\n\n let ac = project_plane(ac, ray.dir());\n\n let a0 = project_plane(a0, ray.dir());\n\n\n\n let perp = triple_prod(ac, ab, ab);\n\n\n\n if perp.dot(a0) > 0.0 {\n\n return None;\n\n }\n\n let perp = triple_prod(ab, ac, ac);\n\n\n\n if perp.dot(a0) > 0.0 {\n\n return None;\n\n }\n\n\n\n let normal = (b - a).cross(c - a).normalize();\n\n Some(plane_ray(a, normal, ray))\n\n}\n\n\n", "file_path": "ivy-collision/src/util.rs", "rank": 40, "score": 151916.16909963408 }, { "content": "/// Returns an optional intersection between a triangle and a ray\n\n/// Assumes the points are relative to the ray origin\n\npub fn triangle_intersect(points: &[Vec3], dir: Vec3) -> Option<Vec3> {\n\n let [a, b, c] = [points[0], points[1], points[2]];\n\n\n\n let ab = b - a;\n\n let ac = c - a;\n\n let a0 = -a;\n\n\n\n let ab = project_plane(ab, dir);\n\n let ac = project_plane(ac, dir);\n\n let a0 = project_plane(a0, dir);\n\n\n\n let perp = triple_prod(ac, ab, ab);\n\n\n\n if perp.dot(a0) > 0.0 {\n\n return None;\n\n }\n\n let perp = triple_prod(ab, ac, ac);\n\n\n\n if perp.dot(a0) > 0.0 {\n\n return None;\n\n }\n\n\n\n let normal = (b - a).cross(c - a).normalize();\n\n Some(plane_intersect(a, normal, dir))\n\n}\n\n\n", "file_path": "ivy-collision/src/util.rs", "rank": 41, "score": 151915.73312822648 }, { "content": "pub fn queue_wait_idle(device: &Device, queue: vk::Queue) -> Result<()> {\n\n // log::debug!(\"Queue wait idle\");\n\n unsafe { device.queue_wait_idle(queue)? }\n\n Ok(())\n\n}\n\n\n", "file_path": "ivy-vulkan/src/device.rs", "rank": 42, "score": 149631.08247347013 }, { "content": "pub fn wait(device: &Device, fences: &[Fence], wait_all: bool) -> Result<()> {\n\n unsafe { device.wait_for_fences(fences, wait_all, std::u64::MAX)? 
}\n\n Ok(())\n\n}\n\n\n", "file_path": "ivy-vulkan/src/fence.rs", "rank": 43, "score": 149232.08814052134 }, { "content": "#[derive(Debug, Clone, Copy, PartialEq)]\n\nstruct ObjectBufferMarker<K> {\n\n /// Index into the object buffer\n\n id: ObjectId,\n\n marker: PhantomData<K>,\n\n}\n\n\n\n/// Marker is send + sync\n\nunsafe impl<K> Send for ObjectBufferMarker<K> {}\n\nunsafe impl<K> Sync for ObjectBufferMarker<K> {}\n\n\n\n/// Marks the entity as already being batched for this shaderpasss with the batch index and object buffer index.\n\npub struct BatchMarker<Obj, Pass> {\n\n pub(crate) batch_id: BatchId,\n\n pub(crate) marker: PhantomData<(Obj, Pass)>,\n\n}\n\n\n\n/// Marker is send + sync\n\nunsafe impl<Obj, Pass> Sync for BatchMarker<Obj, Pass> {}\n\nunsafe impl<Obj, Pass> Send for BatchMarker<Obj, Pass> {}\n\n\n", "file_path": "ivy-graphics/src/base_renderer/batch.rs", "rank": 44, "score": 147148.50833040892 }, { "content": "pub trait RendererKey: std::hash::Hash + std::cmp::Eq + Copy {}\n\n\n\nimpl<T> RendererKey for T where T: std::hash::Hash + std::cmp::Eq + Copy {}\n\n\n", "file_path": "ivy-graphics/src/base_renderer/mod.rs", "rank": 45, "score": 146242.53242774363 }, { "content": "pub fn create(device: &Device, info: &DescriptorLayoutInfo) -> Result<DescriptorSetLayout> {\n\n let create_info = DescriptorSetLayoutCreateInfo {\n\n binding_count: info.bindings.len() as u32,\n\n p_bindings: info.bindings.as_ptr(),\n\n ..Default::default()\n\n };\n\n\n\n let layout = unsafe { device.create_descriptor_set_layout(&create_info, None)? };\n\n Ok(layout)\n\n}\n\n\n", "file_path": "ivy-vulkan/src/descriptors/layout.rs", "rank": 46, "score": 144463.66564110317 }, { "content": "pub fn gravity_system(world: SubWorld<&mut Velocity>, dt: Read<DeltaTime>) {\n\n world\n\n .native_query()\n\n .iter()\n\n .for_each(|(_, vel)| vel.y -= 1.0 * **dt)\n\n}\n\n\n", "file_path": "ivy-physics/src/systems.rs", "rank": 47, "score": 144166.27963740163 }, { "content": "pub fn integrate_velocity(world: SubWorld<(&mut Position, &Velocity)>, dt: Read<DeltaTime>) {\n\n world\n\n .native_query()\n\n .iter()\n\n .for_each(|(_, (pos, vel))| *pos += Position(**vel * **dt));\n\n}\n\n\n", "file_path": "ivy-physics/src/systems.rs", "rank": 48, "score": 140472.48416282554 }, { "content": "fn nearest_power_2(val: u32) -> u32 {\n\n let mut result = 1;\n\n while result < val {\n\n result *= 2;\n\n }\n\n result\n\n}\n", "file_path": "ivy-graphics/src/base_renderer/pass.rs", "rank": 49, "score": 138745.87790825136 }, { "content": "pub fn gravity(world: SubWorld<(&GravityInfluence, &Mass, &mut Effector)>, gravity: Read<Gravity>) {\n\n if gravity.length_squared() < TOLERANCE {\n\n return;\n\n }\n\n\n\n world\n\n .native_query()\n\n .iter()\n\n .for_each(|(_, (influence, mass, effector))| {\n\n effector.apply_force(**gravity * **influence * **mass)\n\n })\n\n}\n\n\n", "file_path": "ivy-physics/src/systems.rs", "rank": 50, "score": 138638.82515547637 }, { "content": "fn loglevel_ansi_color(level: Level) -> &'static str {\n\n match level {\n\n Level::Error => \"\\x1B[1;31m\", // Red\n\n Level::Warn => \"\\x1B[1;33m\", // Yellow\n\n Level::Info => \"\\x1B[1;34m\", // Blue\n\n Level::Debug => \"\\x1B[1;35m\", // Magenta\n\n Level::Trace => \"\\x1B[1;36m\", // Cyan\n\n }\n\n}\n\n\n\npub struct Logger {\n\n /// include the file:line of the log call\n\n pub show_location: bool,\n\n //// Maximum log level visible. 
Useful for removing debug and trace calls in release builds\n\n pub max_level: LevelFilter,\n\n}\n\n\n\nimpl Default for Logger {\n\n fn default() -> Self {\n\n Self {\n", "file_path": "ivy-base/src/logger.rs", "rank": 51, "score": 136978.6379085432 }, { "content": "struct InfoCache<I, T>(HashMap<I, Handle<T>>);\n", "file_path": "ivy-resources/src/manager.rs", "rank": 52, "score": 135895.71272647963 }, { "content": "fn remove_or_add_edge<T: Array<Item = Edge>>(edges: &mut SmallVec<T>, edge: Edge) {\n\n if let Some((index, _)) = edges.iter().enumerate().find(|(_, val)| {\n\n // assert_ne!(**val, edge);\n\n (val.0, val.1) == (edge.1, edge.0)\n\n }) {\n\n edges.remove(index);\n\n } else {\n\n edges.push(edge);\n\n }\n\n}\n", "file_path": "ivy-collision/src/epa/polytype.rs", "rank": 53, "score": 135460.40707442467 }, { "content": "pub fn move_system(world: &mut World, input: &Input) {\n\n world\n\n .query::<(&Mover, &mut Velocity, &mut AngularVelocity, &Rotation)>()\n\n .iter()\n\n .for_each(|(_, (m, v, a, r))| {\n\n let movement = m.translate.get(&input);\n\n if m.local {\n\n *v = Velocity(**r * movement) * m.speed;\n\n } else {\n\n *v = Velocity(movement) * m.speed;\n\n }\n\n\n\n let ang = m.rotate.get(&input);\n\n *a = ang.into();\n\n })\n\n}\n", "file_path": "examples/vulkan/movement.rs", "rank": 54, "score": 131693.0900166965 }, { "content": "/// Gets the normal of a direction vector with a reference point. Normal will\n\n/// face the same direciton as reference\n\npub fn triple_prod(a: Vec3, b: Vec3, c: Vec3) -> Vec3 {\n\n a.cross(b).cross(c).normalize()\n\n}\n\n\n", "file_path": "ivy-collision/src/util.rs", "rank": 55, "score": 131177.81890932162 }, { "content": "use thiserror::Error;\n\n\n\npub type Result<T> = std::result::Result<T, Error>;\n\n\n\n#[derive(Debug, Error, Clone)]\n\npub enum Error {\n\n #[error(\"Invalid handle for {0:?}\")]\n\n InvalidHandle(&'static str),\n\n\n\n #[error(\"attempt to use null handle for {0:?}\")]\n\n NullHandle(&'static str),\n\n\n\n #[error(\"Missing default resource for {0:?}\")]\n\n MissingDefault(&'static str),\n\n\n\n #[error(\"Resource cache for {0:?} cannot be immutably borrowed while it is mutably borrowed\")]\n\n Borrow(&'static str),\n\n #[error(\"Resource cache for {0:?} cannot be mutably borrowed while it is immutably borrowed\")]\n\n BorrowMut(&'static str),\n\n}\n", "file_path": "ivy-resources/src/error.rs", "rank": 56, "score": 129121.50650681627 }, { "content": "pub fn destroy(debug_utils: &DebugUtils, messenger: DebugUtilsMessengerEXT) {\n\n unsafe { debug_utils.destroy_debug_utils_messenger(messenger, None) };\n\n}\n\n\n\n// Debug callback\n\nunsafe extern \"system\" fn debug_callback(\n\n message_severity: vk::DebugUtilsMessageSeverityFlagsEXT,\n\n _message_types: vk::DebugUtilsMessageTypeFlagsEXT,\n\n p_callback_data: *const vk::DebugUtilsMessengerCallbackDataEXT,\n\n _p_user_data: *mut c_void,\n\n) -> vk::Bool32 {\n\n let msg = CStr::from_ptr((*p_callback_data).p_message)\n\n .to_str()\n\n .unwrap_or(\"Invalid UTF-8\");\n\n match message_severity {\n\n vk::DebugUtilsMessageSeverityFlagsEXT::ERROR => log::error!(\"{}\", msg),\n\n vk::DebugUtilsMessageSeverityFlagsEXT::WARNING => log::warn!(\"{}\", msg),\n\n vk::DebugUtilsMessageSeverityFlagsEXT::INFO => log::info!(\"{}\", msg),\n\n vk::DebugUtilsMessageSeverityFlagsEXT::VERBOSE => log::trace!(\"{}\", msg),\n\n _ => log::trace!(\"{}\", msg),\n\n };\n\n vk::FALSE\n\n}\n", "file_path": "ivy-vulkan/src/debug_utils.rs", "rank": 57, "score": 126852.2062068661 }, { "content": "pub fn 
new_event_dispatcher<T: Event>() -> Box<dyn AnyEventDispatcher> {\n\n let dispatcher: EventDispatcher<T> = EventDispatcher::new();\n\n Box::new(dispatcher)\n\n}\n\n\n\npub struct ConcreteSender<T> {\n\n inner: Mutex<Box<dyn EventSender<T>>>,\n\n}\n\n\n\nimpl<T> ConcreteSender<T> {\n\n pub fn new<S: EventSender<T>>(sender: S) -> Self {\n\n Self {\n\n inner: Mutex::new(Box::new(sender)),\n\n }\n\n }\n\n}\n\n\n\nimpl<T: Event> EventSender<T> for ConcreteSender<T> {\n\n fn send(&self, event: T) -> bool {\n\n self.inner.lock().send(event)\n\n }\n\n}\n\n\n\nimpl<T: Event> AnyEventSender for ConcreteSender<T> {}\n", "file_path": "ivy-base/src/events/dispatcher.rs", "rank": 58, "score": 126605.16569085576 }, { "content": "/// Installs PBR rendering for the specified camera. Returns a list of nodes suitable for\n\n/// rendergraph insertions. Configures gpu camera data, light management and\n\n/// environment manager and attaches them to the camera.\n\npub fn create_pbr_pipeline<GeometryPass, PostProcessingPass, EnvData, R>(\n\n context: SharedVulkanContext,\n\n world: &mut World,\n\n resources: &Resources,\n\n camera: Entity,\n\n renderer: R,\n\n extent: Extent,\n\n frames_in_flight: usize,\n\n read_attachments: &[Handle<Texture>],\n\n color_attachments: &[AttachmentInfo],\n\n bindables: &[&dyn MultiDescriptorBindable],\n\n info: PBRInfo<EnvData>,\n\n) -> ivy_rendergraph::Result<[Box<dyn Node>; 2]>\n\nwhere\n\n GeometryPass: ShaderPass,\n\n PostProcessingPass: ShaderPass,\n\n R: Renderer + Storage,\n\n R::Error: Storage + Into<anyhow::Error>,\n\n EnvData: Copy + Component,\n\n{\n", "file_path": "ivy-postprocessing/src/pbr/mod.rs", "rank": 59, "score": 125203.83991268903 }, { "content": "pub trait BuilderExt {\n\n /// Helper function for spawning entity builders\n\n fn spawn(&mut self, world: &mut World) -> Entity;\n\n}\n\n\n\nimpl BuilderExt for EntityBuilder {\n\n fn spawn(&mut self, world: &mut World) -> Entity {\n\n world.spawn(self.build())\n\n }\n\n}\n\n\n\npub struct WorldNameIterator<'a, 'w> {\n\n name: &'a Name,\n\n query: hecs::QueryIter<'w, &'static Name>,\n\n}\n\n\n\nimpl<'a, 'w> Iterator for WorldNameIterator<'a, 'w> {\n\n type Item = Entity;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n", "file_path": "ivy-base/src/extensions.rs", "rank": 60, "score": 121313.86227583198 }, { "content": "pub trait WorldExt {\n\n /// Finds an entity by name\n\n fn by_name(&self, name: Name) -> Option<EntityRef>;\n\n /// Finds an entity by tag\n\n fn by_tag<T: Component>(&self) -> Option<EntityRef>;\n\n}\n\n\n\nimpl WorldExt for World {\n\n fn by_name(&self, name: Name) -> Option<EntityRef> {\n\n self.query::<&Name>()\n\n .iter()\n\n .find(|(_, val)| **val == name)\n\n .map(|(e, _)| self.entity(e).unwrap())\n\n }\n\n\n\n fn by_tag<T: Component>(&self) -> Option<EntityRef> {\n\n self.query::<&T>()\n\n .iter()\n\n .next()\n\n .map(|(e, _)| self.entity(e).unwrap())\n\n }\n\n}\n", "file_path": "ivy-base/src/extensions.rs", "rank": 61, "score": 121313.86227583198 }, { "content": "/// Calculates the perpendicular velocity of a point rotating around origin.\n\npub fn point_vel(p: Position, w: AngularVelocity) -> Velocity {\n\n if w.length_squared() < std::f32::EPSILON {\n\n Velocity::default()\n\n } else {\n\n Velocity(-p.cross(*w))\n\n }\n\n}\n", "file_path": "ivy-physics/src/util.rs", "rank": 62, "score": 114825.29510128018 }, { "content": "#[derive(Debug, Clone, Copy, PartialEq)]\n\nstruct Position {\n\n x: i32,\n\n y: i32,\n\n}\n\n\n", "file_path": "examples/sandbox/src/main.rs", "rank": 63, "score": 
113634.79697168554 }, { "content": "#[derive(Debug, Clone, Copy, PartialEq)]\n\nstruct Velocity {\n\n x: i32,\n\n y: i32,\n\n}\n\n\n", "file_path": "examples/sandbox/src/main.rs", "rank": 64, "score": 113634.79697168554 }, { "content": "#[repr(C)]\n\nstruct MaterialData {\n\n roughness: f32,\n\n metallic: f32,\n\n normal: i32,\n\n}\n\n\n\nimpl LoadResource for Material {\n\n type Info = MaterialInfo;\n\n\n\n type Error = Error;\n\n\n\n fn load(resources: &Resources, info: &Self::Info) -> Result<Self> {\n\n let context = resources.get_default::<SharedVulkanContext>()?;\n\n let sampler: Handle<Sampler> = resources.load(info.sampler)??;\n\n let albedo = resources.load(info.albedo.clone())??;\n\n let normal = if let Some(normal) = info.normal.clone() {\n\n Some(resources.load(normal)??)\n\n } else {\n\n None\n\n };\n", "file_path": "ivy-graphics/src/material.rs", "rank": 65, "score": 112113.93559586469 }, { "content": "#[repr(C, align(16))]\n\n#[derive(Default, PartialEq, Debug)]\n\nstruct LightData {\n\n position: Position,\n\n reference_illuminance: f32,\n\n radiance: Vec3,\n\n radius: f32,\n\n}\n\n\n\nimpl std::cmp::Eq for LightData {}\n\n\n", "file_path": "ivy-graphics/src/light.rs", "rank": 66, "score": 112113.66061789845 }, { "content": "#[derive(Default, Debug, Clone, Copy, PartialEq, Eq)]\n\nstruct Marker;\n", "file_path": "ivy-ui/src/text_renderer.rs", "rank": 67, "score": 112108.51487825785 }, { "content": "#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\nstruct Key {\n\n font: Handle<Font>,\n\n}\n\n\n\nimpl<'a> ivy_graphics::KeyQuery for KeyQuery<'a> {\n\n type K = Key;\n\n\n\n fn into_key(&self) -> Self::K {\n\n Self::K { font: *self.font }\n\n }\n\n}\n\n\n\npub struct TextUpdateNode {\n\n text_renderer: Handle<TextRenderer>,\n\n buffer: vk::Buffer,\n\n}\n\n\n\nimpl TextUpdateNode {\n\n pub fn new(resources: &Resources, text_renderer: Handle<TextRenderer>) -> Result<Self> {\n\n let buffer = resources\n", "file_path": "ivy-ui/src/text_renderer.rs", "rank": 68, "score": 112108.51487825785 }, { "content": "#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\nstruct Key {\n\n mesh: Handle<Mesh>,\n\n material: Handle<Material>,\n\n}\n\n\n\nimpl<'a> crate::KeyQuery for KeyQuery<'a> {\n\n type K = Key;\n\n\n\n fn into_key(&self) -> Self::K {\n\n Self::K {\n\n mesh: *self.mesh,\n\n material: self.material.cloned().unwrap_or_default(),\n\n }\n\n }\n\n}\n", "file_path": "ivy-graphics/src/mesh_renderer.rs", "rank": 69, "score": 112108.51487825785 }, { "content": "struct Pool {\n\n pool: vk::DescriptorPool,\n\n set_count: u32,\n\n allocated: u32,\n\n}\n\n\n\nimpl Pool {\n\n /// Creates a new fresh pool\n\n fn new(device: &Device, set_count: u32, sizes: &[vk::DescriptorPoolSize]) -> Result<Self> {\n\n let create_info = vk::DescriptorPoolCreateInfo {\n\n max_sets: set_count,\n\n pool_size_count: sizes.len() as u32,\n\n p_pool_sizes: sizes.as_ptr(),\n\n ..Default::default()\n\n };\n\n\n\n let pool = unsafe { device.create_descriptor_pool(&create_info, None)? 
};\n\n Ok(Self {\n\n pool,\n\n set_count,\n", "file_path": "ivy-vulkan/src/descriptors/allocator.rs", "rank": 70, "score": 112108.51487825785 }, { "content": "struct SandboxLayer {\n\n frame: usize,\n\n elapsed: Clock,\n\n last_status: Clock,\n\n\n\n rx: Receiver<SandboxEvent>,\n\n}\n\n\n\nimpl SandboxLayer {\n\n fn new(world: &mut World, events: &mut Events) -> Self {\n\n info!(\"Attached sandbox layer\");\n\n\n\n let mut rng = StdRng::seed_from_u64(0);\n\n // Spawn some with velocities\n\n world.spawn_batch((0..10).map(|_| {\n\n (\n\n Position {\n\n x: rng.gen_range(-5..5),\n\n y: rng.gen_range(-5..5),\n\n },\n", "file_path": "examples/sandbox/src/main.rs", "rank": 71, "score": 112108.51487825785 }, { "content": "struct FrameData {\n\n context: SharedVulkanContext,\n\n fence: Fence,\n\n commandpool: CommandPool,\n\n commandbuffer: CommandBuffer,\n\n wait_semaphore: Semaphore,\n\n signal_semaphore: Semaphore,\n\n}\n\n\n\nimpl FrameData {\n\n pub fn new(context: SharedVulkanContext) -> Result<Self> {\n\n let commandpool = CommandPool::new(\n\n context.device().clone(),\n\n context.queue_families().graphics().unwrap(),\n\n true,\n\n false,\n\n )?;\n\n\n\n let commandbuffer = commandpool.allocate_one()?;\n\n let fence = fence::create(context.device(), true)?;\n", "file_path": "ivy-rendergraph/src/rendergraph.rs", "rank": 72, "score": 112108.51487825785 }, { "content": "#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\nstruct Key {\n\n depth: WidgetDepth,\n\n image: Handle<Image>,\n\n}\n\n\n\nimpl PartialOrd for Key {\n\n fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {\n\n self.depth.partial_cmp(&other.depth)\n\n }\n\n}\n\n\n\nimpl Ord for Key {\n\n fn cmp(&self, other: &Self) -> std::cmp::Ordering {\n\n self.depth.cmp(&other.depth)\n\n }\n\n}\n\n\n\nimpl<'a> ivy_graphics::KeyQuery for KeyQuery<'a> {\n\n type K = Key;\n\n\n\n fn into_key(&self) -> Self::K {\n\n Self::K {\n\n depth: *self.depth,\n\n image: *self.image,\n\n }\n\n }\n\n}\n", "file_path": "ivy-ui/src/image_renderer.rs", "rank": 73, "score": 112108.51487825785 }, { "content": "#[repr(C)]\n\nstruct PushConstantData {\n\n model: Mat4,\n\n color: Vec4,\n\n billboard_axis: Vec3,\n\n corner_radius: f32,\n\n}\n", "file_path": "ivy-graphics/src/gizmos.rs", "rank": 74, "score": 110646.18130309443 }, { "content": "#[repr(C)]\n\nstruct LightSceneData {\n\n num_lights: u32,\n\n}\n", "file_path": "ivy-graphics/src/light.rs", "rank": 75, "score": 110646.18130309443 }, { "content": "#[repr(C, align(16))]\n\n#[derive(Debug, Clone, Copy, PartialEq)]\n\nstruct UIData {\n\n color: Vec4,\n\n viewproj: Mat4,\n\n}\n", "file_path": "ivy-ui/src/ui_renderer.rs", "rank": 76, "score": 110645.8631850085 }, { "content": "#[repr(C, align(16))]\n\n#[derive(Default, Debug, Clone, Copy, PartialEq)]\n\nstruct ObjectData {\n\n mvp: Mat4,\n\n color: Vec4,\n\n}\n\n\n", "file_path": "ivy-ui/src/image_renderer.rs", "rank": 77, "score": 110645.82076221894 }, { "content": "#[repr(C, align(16))]\n\n#[derive(Default, Debug, Clone, Copy, PartialEq)]\n\nstruct ObjectData {\n\n mvp: Mat4,\n\n color: Vec4,\n\n offset: u32,\n\n len: u32,\n\n}\n\n\n", "file_path": "ivy-ui/src/text_renderer.rs", "rank": 78, "score": 110645.82076221894 }, { "content": "#[repr(C, align(16))]\n\n#[derive(Default, Debug, Clone, Copy, PartialEq)]\n\nstruct ObjectData {\n\n model: Mat4,\n\n color: Vec4,\n\n}\n\n\n", "file_path": "ivy-graphics/src/mesh_renderer.rs", "rank": 79, "score": 110645.82076221894 }, { "content": "#[derive(Default, Debug, Clone, PartialEq)]\n\nstruct 
AnimationState {\n\n animation: Handle<Animation>,\n\n states: BTreeMap<ChannelIndex, Frame>,\n\n repeat: bool,\n\n time: f32,\n\n playing: bool,\n\n influence: f32,\n\n}\n\n\n\nimpl AnimationState {\n\n pub fn new(animation: Handle<Animation>, repeat: bool, influence: f32) -> Self {\n\n Self {\n\n animation,\n\n states: BTreeMap::new(),\n\n repeat,\n\n playing: true,\n\n time: 0.0,\n\n influence,\n\n }\n\n }\n", "file_path": "ivy-graphics/src/animation/animator.rs", "rank": 80, "score": 110640.76058548759 }, { "content": "#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\nstruct Key {\n\n mesh: Handle<SkinnedMesh>,\n\n material: Handle<Material>,\n\n}\n\n\n\nimpl<'a> crate::KeyQuery for KeyQuery<'a> {\n\n type K = Key;\n\n\n\n fn into_key(&self) -> Self::K {\n\n Self::K {\n\n mesh: *self.mesh,\n\n material: self.material.cloned().unwrap_or_default(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "ivy-graphics/src/skinned_mesh_renderer.rs", "rank": 81, "score": 110640.76058548759 }, { "content": "#[derive(Default, Debug, Clone, Eq, PartialEq)]\n\nstruct Marker;\n", "file_path": "ivy-graphics/src/skinned_mesh_renderer.rs", "rank": 82, "score": 110640.76058548759 }, { "content": "struct FrameData {\n\n set: DescriptorSet,\n\n uniformbuffer: Buffer,\n\n}\n\n\n\nimpl FrameData {\n\n pub fn new(\n\n context: SharedVulkanContext,\n\n ) -> Result<Self> {\n\n let uniformbuffer = Buffer::new(\n\n context.clone(),\n\n ivy_vulkan::BufferType::Uniform,\n\n ivy_vulkan::BufferAccess::MappedPersistent,\n\n &[UIData {\n\n color: Vec4::new(0.0, 0.0, 0.0, 1.0),\n\n viewproj: Mat4::identity(),\n\n }],\n\n );\n\n\n\n todo!()\n\n }\n\n}\n\n\n", "file_path": "ivy-ui/src/ui_renderer.rs", "rank": 83, "score": 110640.76058548759 }, { "content": "// Transitions image layout from one layout to another using a pipeline barrier\n\nfn transition_layout(\n\n commandpool: &CommandPool,\n\n queue: vk::Queue,\n\n image: vk::Image,\n\n mip_levels: u32,\n\n old_layout: vk::ImageLayout,\n\n new_layout: vk::ImageLayout,\n\n) -> Result<()> {\n\n let (src_access_mask, dst_access_mask, src_stage_mask, dst_stage_mask) =\n\n match (old_layout, new_layout) {\n\n (vk::ImageLayout::UNDEFINED, vk::ImageLayout::TRANSFER_DST_OPTIMAL) => (\n\n vk::AccessFlags::default(),\n\n vk::AccessFlags::TRANSFER_WRITE,\n\n vk::PipelineStageFlags::TOP_OF_PIPE,\n\n vk::PipelineStageFlags::TRANSFER,\n\n ),\n\n\n\n (vk::ImageLayout::TRANSFER_DST_OPTIMAL, vk::ImageLayout::SHADER_READ_ONLY_OPTIMAL) => (\n\n vk::AccessFlags::TRANSFER_WRITE,\n\n vk::AccessFlags::SHADER_READ,\n", "file_path": "ivy-vulkan/src/texture.rs", "rank": 84, "score": 109989.8264258686 }, { "content": "fn generate_tangents(\n\n positions: &Vec<Vec3>,\n\n uvs: &Vec<Vec2>,\n\n indices: impl Iterator<Item = u32>,\n\n) -> Vec<Vec3> {\n\n let mut tangents = vec![Vec3::X; positions.len()];\n\n let chunks = indices.chunks(3);\n\n chunks.into_iter().for_each(|mut chunk| {\n\n let (a, b, c) = chunk.next_tuple::<(u32, u32, u32)>().unwrap();\n\n let [v0, v1, v2] = [\n\n positions[a as usize],\n\n positions[b as usize],\n\n positions[c as usize],\n\n ];\n\n\n\n let [t0, t1, t2] = [uvs[a as usize], uvs[b as usize], uvs[c as usize]];\n\n\n\n let d1 = v1 - v0;\n\n let d2 = v2 - v0;\n\n let dt1 = t1 - t0;\n", "file_path": "ivy-graphics/src/mesh.rs", "rank": 85, "score": 109984.86891072075 }, { "content": "fn pick_format(\n\n formats: &[vk::SurfaceFormatKHR],\n\n preferred_format: vk::SurfaceFormatKHR,\n\n) -> vk::SurfaceFormatKHR {\n\n for surface_format in formats {\n\n // Preferred surface_format\n\n if 
*surface_format == preferred_format {\n\n return *surface_format;\n\n }\n\n }\n\n\n\n formats[0]\n\n}\n\n\n", "file_path": "ivy-vulkan/src/swapchain.rs", "rank": 86, "score": 109984.86891072075 }, { "content": "fn generate_mipmaps(\n\n commandpool: &CommandPool,\n\n queue: vk::Queue,\n\n image: vk::Image,\n\n extent: Extent,\n\n mip_levels: u32,\n\n) -> Result<()> {\n\n let mut barrier = vk::ImageMemoryBarrier {\n\n s_type: vk::StructureType::IMAGE_MEMORY_BARRIER,\n\n p_next: std::ptr::null(),\n\n src_queue_family_index: vk::QUEUE_FAMILY_IGNORED,\n\n dst_queue_family_index: vk::QUEUE_FAMILY_IGNORED,\n\n image,\n\n subresource_range: vk::ImageSubresourceRange {\n\n aspect_mask: vk::ImageAspectFlags::COLOR,\n\n base_mip_level: 0,\n\n level_count: 1,\n\n base_array_layer: 0,\n\n layer_count: 1,\n\n },\n", "file_path": "ivy-vulkan/src/texture.rs", "rank": 87, "score": 109984.86891072075 }, { "content": "// Resolves a static collision\n\nfn resolve_static(\n\n world: &impl GenericWorld,\n\n a: Entity,\n\n b: Entity,\n\n contact: Position,\n\n normal: Vec3,\n\n depth: f32,\n\n dt: DeltaTime,\n\n) -> Result<()> {\n\n let mut a_query = world.try_query_one::<Option<&Resitution>>(a)?;\n\n let a_res = a_query\n\n .get()\n\n .expect(\"Static collider did not satisfy query\");\n\n\n\n let mut b_query = world.try_query_one::<(RbQuery, &Position, &mut Effector)>(b)?;\n\n\n\n if let Ok((rb, pos, effector)) = b_query.get() {\n\n let b = ResolveObject {\n\n pos: *pos,\n\n vel: *rb.vel + effector.net_velocity_change(*rb.mass, *dt),\n", "file_path": "ivy-physics/src/systems.rs", "rank": 88, "score": 109984.86891072075 }, { "content": "#[repr(C)]\n\n#[derive(Default, Debug, Clone, Copy, PartialEq)]\n\nstruct ObjectData {\n\n model: Mat4,\n\n color: Vec4,\n\n offset: u32,\n\n len: u32,\n\n pad: [f32; 2],\n\n}\n\n\n", "file_path": "ivy-graphics/src/skinned_mesh_renderer.rs", "rank": 89, "score": 109233.33350855054 }, { "content": "/// Manages allocation for a single descriptor set layout\n\nstruct DescriptorSubAllocator {\n\n device: Arc<Device>,\n\n layout: DescriptorSetLayout,\n\n set_count: u32,\n\n /// A list of pools with atleast 1 descriptor remaining.\n\n pools: Vec<Pool>,\n\n /// A list of completely full pools.\n\n full_pools: Vec<Pool>,\n\n sizes: Vec<vk::DescriptorPoolSize>,\n\n}\n\n\n\nimpl DescriptorSubAllocator {\n\n /// Creates a new descriptor allocator. Stores several pools contains `set_count` available\n\n /// descriptors each. `sizes` describes the relative\n\n pub fn new(\n\n device: Arc<Device>,\n\n layout: DescriptorSetLayout,\n\n layout_info: &DescriptorLayoutInfo,\n\n set_count: u32,\n\n ) -> Self {\n", "file_path": "ivy-vulkan/src/descriptors/allocator.rs", "rank": 90, "score": 109228.23090902963 }, { "content": "/// Picks a present mode\n\n/// If `preferred` is available, it is used\n\n/// Otherwise, FIFO is returned\n\nfn pick_present_mode(\n\n modes: &[vk::PresentModeKHR],\n\n preferred: vk::PresentModeKHR,\n\n) -> vk::PresentModeKHR {\n\n for mode in modes {\n\n // Preferred surface_format\n\n if *mode == preferred {\n\n return *mode;\n\n }\n\n }\n\n\n\n vk::PresentModeKHR::FIFO\n\n}\n\n\n", "file_path": "ivy-vulkan/src/swapchain.rs", "rank": 91, "score": 108533.13419237673 }, { "content": "// Picks an appropriate physical device\n\nfn pick_physical_device(\n\n instance: &Instance,\n\n surface: Option<(&Surface, SurfaceKHR)>,\n\n extensions: &[CString],\n\n) -> Result<PhysicalDeviceInfo> {\n\n let devices = unsafe { instance.enumerate_physical_devices()? 
};\n\n\n\n devices\n\n .into_iter()\n\n .filter_map(|d| rate_physical_device(instance, d, surface, extensions))\n\n .max_by_key(|v| v.score)\n\n .ok_or(Error::UnsuitableDevice)\n\n}\n\n\n", "file_path": "ivy-vulkan/src/device.rs", "rank": 92, "score": 108523.11153632418 }, { "content": "fn get_missing_extensions(\n\n instance: &Instance,\n\n device: vk::PhysicalDevice,\n\n extensions: &[CString],\n\n) -> Result<Vec<CString>> {\n\n let available = unsafe { instance.enumerate_device_extension_properties(device)? };\n\n\n\n Ok(extensions\n\n .iter()\n\n .filter(|ext| {\n\n available.iter().all(|avail| unsafe {\n\n CStr::from_ptr(avail.extension_name.as_ptr()) == ext.as_c_str()\n\n })\n\n })\n\n .cloned()\n\n .collect())\n\n}\n\n\n", "file_path": "ivy-vulkan/src/device.rs", "rank": 93, "score": 108523.11153632418 }, { "content": "// Maps descriptor type from spir-v reflect to ash::vk types\n\nfn map_descriptortype(\n\n ty: spirv_reflect::types::descriptor::ReflectDescriptorType,\n\n) -> vk::DescriptorType {\n\n match ty {\n\n spirv_reflect::types::ReflectDescriptorType::Undefined => unreachable!(),\n\n spirv_reflect::types::ReflectDescriptorType::Sampler => vk::DescriptorType::SAMPLER,\n\n spirv_reflect::types::ReflectDescriptorType::CombinedImageSampler => {\n\n vk::DescriptorType::COMBINED_IMAGE_SAMPLER\n\n }\n\n spirv_reflect::types::ReflectDescriptorType::SampledImage => {\n\n vk::DescriptorType::SAMPLED_IMAGE\n\n }\n\n spirv_reflect::types::ReflectDescriptorType::StorageImage => {\n\n vk::DescriptorType::STORAGE_IMAGE\n\n }\n\n spirv_reflect::types::ReflectDescriptorType::UniformTexelBuffer => {\n\n vk::DescriptorType::UNIFORM_TEXEL_BUFFER\n\n }\n\n spirv_reflect::types::ReflectDescriptorType::StorageTexelBuffer => {\n\n vk::DescriptorType::STORAGE_TEXEL_BUFFER\n", "file_path": "ivy-vulkan/src/pipeline/shader.rs", "rank": 94, "score": 108523.11153632418 }, { "content": "fn check_collision(\n\n colliders: &View<&Collider>,\n\n a: Object,\n\n a_obj: &ObjectData,\n\n b: Object,\n\n b_obj: &ObjectData,\n\n) -> Option<Collision> {\n\n if !a_obj.bounds.overlaps(b_obj.bounds) {\n\n return None;\n\n }\n\n\n\n let a_coll = colliders.get(a.entity).expect(\"Collider\");\n\n let b_coll = colliders.get(b.entity).expect(\"Collider\");\n\n\n\n if let Some(contact) = intersect(&a_obj.transform, &b_obj.transform, a_coll, b_coll) {\n\n let collision = Collision {\n\n a: crate::EntityPayload {\n\n entity: a.entity,\n\n is_trigger: a_obj.is_trigger,\n\n is_static: a_obj.is_static,\n", "file_path": "ivy-collision/src/tree/bvh.rs", "rank": 95, "score": 108523.11153632418 }, { "content": "// Rates physical device suitability\n\nfn rate_physical_device(\n\n instance: &Instance,\n\n physical_device: vk::PhysicalDevice,\n\n surface: Option<(&Surface, SurfaceKHR)>,\n\n extensions: &[CString],\n\n) -> Option<PhysicalDeviceInfo> {\n\n let properties = unsafe { instance.get_physical_device_properties(physical_device) };\n\n let features = unsafe { instance.get_physical_device_features(physical_device) };\n\n\n\n // Save the device name\n\n let name = unsafe {\n\n CStr::from_ptr(properties.device_name.as_ptr())\n\n .to_string_lossy()\n\n .to_string()\n\n };\n\n\n\n // Current device does not support one or more extensions\n\n if !get_missing_extensions(instance, physical_device, extensions)\n\n .ok()?\n\n .is_empty()\n", "file_path": "ivy-vulkan/src/device.rs", "rank": 96, "score": 108523.11153632418 }, { "content": "#[derive(Query)]\n\nstruct TextQuery<'a> {\n\n text: &'a mut Text,\n\n font: &'a Handle<Font>,\n\n 
block: &'a mut BufferAllocation<Marker>,\n\n bounds: &'a Size2D,\n\n alignment: &'a Alignment,\n\n wrap: &'a WrapStyle,\n\n margin: &'a Margin,\n\n}\n\n\n\n/// Renders arbitrary text using associated font and text objects attached to\n\n/// entity. TextUpdateNode needs to be added to rendergraph before as the text\n\n/// vertex data needs to be updated with a transfer.\n\npub struct TextRenderer {\n\n mesh: Mesh<UIVertex>,\n\n staging_buffers: Vec<Buffer>,\n\n allocator: Allocator<Marker>,\n\n base_renderer: BaseRenderer<Key, ObjectData, UIVertex>,\n\n /// The total number of glyphs\n\n glyph_count: u32,\n", "file_path": "ivy-ui/src/text_renderer.rs", "rank": 97, "score": 107678.8761416059 }, { "content": "#[derive(Query, PartialEq)]\n\nstruct KeyQuery<'a> {\n\n depth: &'a WidgetDepth,\n\n image: &'a Handle<Image>,\n\n}\n\n\n", "file_path": "ivy-ui/src/image_renderer.rs", "rank": 98, "score": 107678.8761416059 }, { "content": "#[derive(Query, PartialEq, Eq)]\n\nstruct KeyQuery<'a> {\n\n mesh: &'a Handle<Mesh>,\n\n material: Option<&'a Handle<Material>>,\n\n}\n\n\n", "file_path": "ivy-graphics/src/mesh_renderer.rs", "rank": 99, "score": 107678.8761416059 } ]
Rust
crates/sim1h/src/dht/bbdht/dynamodb/api/aspect/write.rs
jamesray1/sim1h
a3c7f43bd1fd45647d23667d451fecd07e144ce3
use crate::dht::bbdht::dynamodb::api::item::write::should_put_item_retry; use crate::dht::bbdht::dynamodb::client::Client; use crate::dht::bbdht::dynamodb::schema::blob_attribute_value; use crate::dht::bbdht::dynamodb::schema::cas::ADDRESS_KEY; use crate::dht::bbdht::dynamodb::schema::cas::ASPECT_ADDRESS_KEY; use crate::dht::bbdht::dynamodb::schema::cas::ASPECT_KEY; use crate::dht::bbdht::dynamodb::schema::cas::ASPECT_LIST_KEY; use crate::dht::bbdht::dynamodb::schema::cas::ASPECT_PUBLISH_TS_KEY; use crate::dht::bbdht::dynamodb::schema::cas::ASPECT_TYPE_HINT_KEY; use crate::dht::bbdht::dynamodb::schema::number_attribute_value; use crate::dht::bbdht::dynamodb::schema::string_attribute_value; use crate::dht::bbdht::dynamodb::schema::string_set_attribute_value; use crate::dht::bbdht::dynamodb::schema::TableName; use crate::dht::bbdht::error::BbDhtResult; use crate::trace::tracer; use crate::trace::LogContext; use holochain_persistence_api::cas::content::Address; use lib3h_protocol::data_types::EntryAspectData; use rusoto_dynamodb::AttributeValue; use rusoto_dynamodb::DynamoDb; use rusoto_dynamodb::PutItemInput; use rusoto_dynamodb::UpdateItemInput; use std::collections::HashMap; pub fn aspect_list_to_attribute(aspect_list: &Vec<EntryAspectData>) -> AttributeValue { string_set_attribute_value( aspect_list .iter() .map(|aspect| aspect.aspect_address.to_string()) .collect(), ) } pub fn put_aspect( log_context: &LogContext, client: &Client, table_name: &TableName, aspect: &EntryAspectData, ) -> BbDhtResult<()> { tracer(&log_context, "put_aspect"); let mut aspect_item = HashMap::new(); aspect_item.insert( String::from(ADDRESS_KEY), string_attribute_value(&aspect.aspect_address.to_string()), ); aspect_item.insert( String::from(ASPECT_ADDRESS_KEY), string_attribute_value(&aspect.aspect_address.to_string()), ); aspect_item.insert( String::from(ASPECT_TYPE_HINT_KEY), string_attribute_value(&aspect.type_hint), ); aspect_item.insert( String::from(ASPECT_KEY), blob_attribute_value(&aspect.aspect), ); aspect_item.insert( String::from(ASPECT_PUBLISH_TS_KEY), number_attribute_value(&aspect.publish_ts), ); if should_put_item_retry( log_context, client .put_item(PutItemInput { table_name: table_name.to_string(), item: aspect_item, ..Default::default() }) .sync(), )? 
{ put_aspect(log_context, client, table_name, aspect) } else { Ok(()) } } pub fn append_aspect_list_to_entry( log_context: &LogContext, client: &Client, table_name: &TableName, entry_address: &Address, aspect_list: &Vec<EntryAspectData>, ) -> BbDhtResult<()> { tracer(&log_context, "append_aspects"); for aspect in aspect_list { put_aspect(&log_context, &client, &table_name, &aspect)?; } let mut aspect_addresses_key = HashMap::new(); aspect_addresses_key.insert( String::from(ADDRESS_KEY), string_attribute_value(&String::from(entry_address.to_owned())), ); let mut expression_attribute_values = HashMap::new(); expression_attribute_values.insert( ":aspects".to_string(), aspect_list_to_attribute(&aspect_list), ); let mut expression_attribute_names = HashMap::new(); expression_attribute_names.insert("#aspect_list".to_string(), ASPECT_LIST_KEY.to_string()); let update_expression = "ADD #aspect_list :aspects"; let aspect_list_update = UpdateItemInput { table_name: table_name.to_string(), key: aspect_addresses_key, update_expression: Some(update_expression.to_string()), expression_attribute_names: Some(expression_attribute_names), expression_attribute_values: Some(expression_attribute_values), ..Default::default() }; client.update_item(aspect_list_update).sync()?; Ok(()) } #[cfg(test)] pub mod tests { use crate::aspect::fixture::aspect_list_fresh; use crate::aspect::fixture::entry_aspect_data_fresh; use crate::dht::bbdht::dynamodb::api::aspect::write::append_aspect_list_to_entry; use crate::dht::bbdht::dynamodb::api::aspect::write::aspect_list_to_attribute; use crate::dht::bbdht::dynamodb::api::aspect::write::put_aspect; use crate::dht::bbdht::dynamodb::api::item::read::get_item_by_address; use crate::dht::bbdht::dynamodb::api::table::create::ensure_cas_table; use crate::dht::bbdht::dynamodb::api::table::exist::table_exists; use crate::dht::bbdht::dynamodb::api::table::fixture::table_name_fresh; use crate::dht::bbdht::dynamodb::client::local::local_client; use crate::dht::bbdht::dynamodb::schema::cas::ADDRESS_KEY; use crate::dht::bbdht::dynamodb::schema::cas::ASPECT_LIST_KEY; use crate::dht::bbdht::dynamodb::schema::string_attribute_value; use crate::entry::fixture::entry_hash_fresh; use crate::trace::tracer; use std::collections::HashMap; #[test] fn put_aspect_test() { let log_context = "put_aspect_test"; tracer(&log_context, "fixtures"); let local_client = local_client(); let table_name = table_name_fresh(); let entry_aspect = entry_aspect_data_fresh(); assert!(ensure_cas_table(&log_context, &local_client, &table_name).is_ok()); assert!(table_exists(&log_context, &local_client, &table_name).is_ok()); assert!(put_aspect(&log_context, &local_client, &table_name, &entry_aspect).is_ok()); } #[test] fn append_aspects_to_entry_test() { let log_context = "append_aspects_to_entry_test"; tracer(&log_context, "fixtures"); let local_client = local_client(); let table_name = table_name_fresh(); let entry_address = entry_hash_fresh(); let aspect_list = aspect_list_fresh(); let mut expected = HashMap::new(); expected.insert( ASPECT_LIST_KEY.to_string(), aspect_list_to_attribute(&aspect_list), ); expected.insert( ADDRESS_KEY.to_string(), string_attribute_value(&String::from(entry_address.clone())), ); assert!(ensure_cas_table(&log_context, &local_client, &table_name).is_ok()); assert!(table_exists(&log_context, &local_client, &table_name).is_ok()); for _ in 0..3 { assert!(append_aspect_list_to_entry( &log_context, &local_client, &table_name, &entry_address, &aspect_list ) .is_ok()); match 
get_item_by_address(&log_context, &local_client, &table_name, &entry_address) { Ok(get_item_output) => match get_item_output { Some(item) => { assert_eq!(expected["address"], item["address"],); assert_eq!( expected["aspect_list"].ss.iter().count(), item["aspect_list"].ss.iter().count(), ); } None => { tracer(&log_context, "get matches None"); panic!("None"); } }, Err(err) => { tracer(&log_context, "get matches err"); panic!("{:?}", err); } } } } }
use crate::dht::bbdht::dynamodb::api::item::write::should_put_item_retry; use crate::dht::bbdht::dynamodb::client::Client; use crate::dht::bbdht::dynamodb::schema::blob_attribute_value; use crate::dht::bbdht::dynamodb::schema::cas::ADDRESS_KEY; use crate::dht::bbdht::dynamodb::schema::cas::ASPECT_ADDRESS_KEY; use crate::dht::bbdht::dynamodb::schema::cas::ASPECT_KEY; use crate::dht::bbdht::dynamodb::schema::cas::ASPECT_LIST_KEY; use crate::dht::bbdht::dynamodb::schema::cas::ASPECT_PUBLISH_TS_KEY; use crate::dht::bbdht::dynamodb::schema::cas::ASPECT_TYPE_HINT_KEY; use crate::dht::bbdht::dynamodb::schema::number_attribute_value; use crate::dht::bbdht::dynamodb::schema::string_attribute_value; use crate::dht::bbdht::dynamodb::schema::string_set_attribute_value; use crate::dht::bbdht::dynamodb::schema::TableName; use crate::dht::bbdht::error::BbDhtResult; use crate::trace::tracer; use crate::trace::LogContext; use holochain_persistence_api::cas::content::Address; use lib3h_protocol::data_types::EntryAspectData; use rusoto_dynamodb::AttributeValue; use rusoto_dynamodb::DynamoDb; use rusoto_dynamodb::PutItemInput; use rusoto_dynamodb::UpdateItemInput; use std::collections::HashMap; pub fn aspect_list_to_attribute(aspect_list: &Vec<EntryAspectData>) -> AttributeValue { string_set_attribute_value( aspect_list .iter() .map(|aspect| aspect.aspect_address.to_string()) .collect(), ) } pub fn put_aspect( log_context: &LogContext, client: &Client, table_name: &TableName, aspect: &EntryAspectData, ) -> BbDhtResult<()> { tracer(&log_context, "put_aspect"); let mut aspect_item = HashMap::new(); aspect_item.insert( String::from(ADDRESS_KEY), string_attribute_value(&aspect.aspect_address.to_string()), ); aspect_item.insert( String::from(ASPECT_ADDRESS_KEY), string_attribute_value(&aspect.aspect_address.to_string()), ); aspect_item.insert( String::from(ASPECT_TYPE_HINT_KEY), string_attribute_value(&aspect.type_hint), ); aspect_item.insert( String::from(ASPECT_KEY), blob_attribute_value(&aspect.aspect), ); aspect_item.insert( String::from(ASPECT_PUBLISH_TS_KEY), number_attribute_value(&aspect.publish_ts), ); if should_put_item_retry( log_context, client .put_item(PutItemInput { table_name: table_name.to_string(), item: aspect_item, ..Default::default() }) .sync(), )? 
{ put_aspect(log_context, client, table_name, aspect) } else { Ok(()) } } pub fn append_aspect_list_to_entry( log_context: &LogContext, client: &Client, table_name: &TableName, entry_address: &Address, aspect_list: &Vec<EntryAspectData>, ) -> BbDhtResult<()> { tracer(&log_context, "append_aspects"); for aspect in aspect_list { put_aspect(&log_context, &client, &table_name, &aspect)?; } let mut aspect_addresses_key = HashMap::new(); aspect_addresses_key.insert( String::from(ADDRESS_KEY), string_attribute_value(&String::from(entry_address.to_owned())), ); let mut expression_attribute_values = HashMap::new(); expression_attribute_values.insert( ":aspects".to_string(), aspect_list_to_attribute(&aspect_list), ); let mut expression_attribute_names = HashMap::new(); expression_attribute_names.insert("#aspect_list".to_string(), ASPECT_LIST_KEY.to_string()); let update_expression = "ADD #aspect_list :aspects"; let aspect_list_update = UpdateItemInput { table_name: table_name.to_string(), key: aspect_addresses_key, update_expression: Some(update_expression.to_string()), expression_attribute_names: Some(expression_attribute_names), expression_attribute_values: Some(expression_attribute_values), ..Default::default() }; client.update_item(aspect_list_update).sync()?; Ok(()) } #[cfg(test)] pub mod tests { use crate::aspect::fixture::aspect_list_fresh; use crate::aspect::fixture::entry_aspect_data_fresh; use crate::dht::bbdht::dynamodb::api::aspect::write::append_aspect_list_to_entry; use crate::dht::bbdht::dynamodb::api::aspect::write::aspect_list_to_attribute; use crate::dht::bbdht::dynamodb::api::aspect::write::put_aspect; use crate::dht::bbdht::dynamodb::api::item::read::get_item_by_address; use crate::dht::bbdht::dynamodb::api::table::create::ensure_cas_table; use crate::dht::bbdht::dynamodb::api::table::exist::table_exists; use crate::dht::bbdht::dynamodb::api::table::fixture::table_name_fresh; use crate::dht::bbdht::dynamodb::client::local::local_client; use crate::dht::bbdht::dynamodb::schema::cas::ADDRESS_KEY; use crate::dht::bbdht::dynamodb::schema::cas::ASPECT_LIST_KEY; use crate::dht::bbdht::dynamodb::schema::string_attribute_value; use crate::entry::fixture::entry_hash_fresh; use crate::trace::tracer; use std::collections::HashMap; #[test] fn put_aspect_test() { let log_context = "put_aspect_test"; tracer(&log_context, "fixtures"); let local_client = local_client(); let table_name = table_name_fresh(); let entry_aspect = entry_aspect_data_fresh(); assert!(ensure_cas_table(&log_context, &local_client, &table_name).is_ok()); assert!(table_exists(&log_context, &local_client, &table_name).is_ok()); assert!(put_aspect(&log_context, &local_client, &table_name, &entry_aspect).is_ok()); } #[test] fn append_aspects_to_entry_test() { let log_context = "append_aspects_to_entry_test"; tracer(&log_context, "fixtures"); let local_client = local_client(); let table_name = table_name_fresh(); let entry_address = entry_hash_fresh(); let aspect_list = aspect_list_fresh(); let mut expected = HashMap::new(); expected.insert( ASPECT_LIST_KEY.to_string(), aspect_list_to_attribute(&aspect_list), ); expected.insert( ADDRESS_KEY.to_string(), string_attribute_value(&String::from(entry_address.clone())),
&log_context, &local_client, &table_name, &entry_address, &aspect_list ) .is_ok()); match get_item_by_address(&log_context, &local_client, &table_name, &entry_address) { Ok(get_item_output) => match get_item_output { Some(item) => { assert_eq!(expected["address"], item["address"],); assert_eq!( expected["aspect_list"].ss.iter().count(), item["aspect_list"].ss.iter().count(), ); } None => { tracer(&log_context, "get matches None"); panic!("None"); } }, Err(err) => { tracer(&log_context, "get matches err"); panic!("{:?}", err); } } } } }
); assert!(ensure_cas_table(&log_context, &local_client, &table_name).is_ok()); assert!(table_exists(&log_context, &local_client, &table_name).is_ok()); for _ in 0..3 { assert!(append_aspect_list_to_entry(
function_block-random_span
[ { "content": "pub fn try_aspect_list_from_item(item: Item) -> BbDhtResult<Vec<Address>> {\n\n let addresses = match get_or_err(&item, ASPECT_LIST_KEY)?.ss.clone() {\n\n Some(addresses) => addresses.iter().map(|s| Address::from(s.clone())).collect(),\n\n None => {\n\n return Err(BbDhtError::MissingData(format!(\n\n \"Missing aspect_list: {:?}\",\n\n item\n\n )))\n\n }\n\n };\n\n\n\n Ok(addresses)\n\n}\n\n\n", "file_path": "crates/sim1h/src/dht/bbdht/dynamodb/api/aspect/read.rs", "rank": 0, "score": 256479.27454539263 }, { "content": "pub fn entry_aspect_data_fresh() -> EntryAspectData {\n\n EntryAspectData {\n\n aspect_address: aspect_hash_fresh(),\n\n type_hint: type_hint_fresh(),\n\n aspect: opaque_aspect_fresh(),\n\n publish_ts: publish_ts_fresh(),\n\n }\n\n}\n\n\n", "file_path": "crates/sim1h/src/aspect/fixture.rs", "rank": 1, "score": 214134.17882640823 }, { "content": "pub fn aspect_list_fresh() -> Vec<EntryAspectData> {\n\n let mut aspects = Vec::new();\n\n\n\n for _ in 0..10 {\n\n aspects.push(entry_aspect_data_fresh())\n\n }\n\n\n\n aspects.into()\n\n}\n\n\n", "file_path": "crates/sim1h/src/aspect/fixture.rs", "rank": 2, "score": 210156.6580966066 }, { "content": "/// check database connection\n\n/// optional\n\npub fn bootstrap(log_context: &LogContext, client: &Client) -> BbDhtResult<ClientToLib3hResponse> {\n\n tracer(&log_context, \"bootstrap\");\n\n // touch the database to check our connection is good\n\n describe_limits(&log_context, &client)?;\n\n Ok(ClientToLib3hResponse::BootstrapSuccess)\n\n}\n\n\n\n#[cfg(test)]\n\npub mod tests {\n\n\n\n use super::*;\n\n use crate::dht::bbdht::dynamodb::client::fixture::bad_client;\n\n use crate::dht::bbdht::dynamodb::client::local::local_client;\n\n use crate::trace::tracer;\n\n use crate::workflow::from_client::bootstrap::bootstrap;\n\n\n\n #[test]\n\n fn bootstrap_test() {\n\n let log_context = \"bootstrap_test\";\n\n\n", "file_path": "crates/sim1h/src/workflow/from_client/bootstrap.rs", "rank": 3, "score": 209812.74412907186 }, { "content": "pub fn query_fresh(_entry_address: &Address) -> Opaque {\n\n let query = NetworkQuery::GetEntry;\n\n let json: JsonString = query.into();\n\n json.to_bytes().into()\n\n}\n\n\n", "file_path": "crates/sim1h/src/workflow/from_client/fixture.rs", "rank": 4, "score": 209221.68096078376 }, { "content": "pub fn get_entry_aspect_filter_fn(aspect: &EntryAspectData) -> bool {\n\n let keep = vec![\"content\".to_string(), \"header\".to_string()];\n\n keep.contains(&aspect.type_hint)\n\n}\n\n\n", "file_path": "crates/sim1h/src/workflow/from_client/query_entry.rs", "rank": 5, "score": 205323.6854274614 }, { "content": "fn try_aspect_from_item(item: Item) -> BbDhtResult<EntryAspectData> {\n\n let aspect_hash = match get_or_err(&item, ASPECT_ADDRESS_KEY)?.s.clone() {\n\n Some(address) => AspectHash::from(address),\n\n None => {\n\n return Err(BbDhtError::MissingData(format!(\n\n \"Missing aspect_hash: {:?}\",\n\n item\n\n )))\n\n }\n\n };\n\n\n\n let aspect = match get_or_err(&item, ASPECT_KEY)?.b.clone() {\n\n Some(binary_data) => binary_data.to_vec().into(),\n\n None => {\n\n return Err(BbDhtError::MissingData(format!(\n\n \"Missing aspect: {:?}\",\n\n item\n\n )))\n\n }\n\n };\n", "file_path": "crates/sim1h/src/dht/bbdht/dynamodb/api/aspect/read.rs", "rank": 6, "score": 204674.67641593475 }, { "content": "pub fn entry_aspect_to_entry_aspect_data(entry_aspect: EntryAspect) -> EntryAspectData {\n\n EntryAspectData {\n\n aspect_address: AspectHash::from(entry_aspect.address()),\n\n type_hint: 
entry_aspect.type_hint(),\n\n aspect: JsonString::from(entry_aspect).to_bytes().into(),\n\n publish_ts: SystemTime::now()\n\n .duration_since(UNIX_EPOCH)\n\n .expect(\"Time went backwards\")\n\n .as_millis() as u64,\n\n }\n\n}\n", "file_path": "crates/sim1h/src/aspect/mod.rs", "rank": 7, "score": 202568.12076028372 }, { "content": "// result of no-op is no-op\n\npub fn handle_store_entry_aspect_result(log_context: &LogContext) {\n\n tracer(&log_context, &format!(\"handle_store_entry_aspect_result\"));\n\n // TODO: update held_aspects. But, need the protocol message to tell us which aspect was held!\n\n}\n", "file_path": "crates/sim1h/src/workflow/to_client_response/handle_store_entry_aspect_result.rs", "rank": 8, "score": 189858.15925491706 }, { "content": "pub fn header_address_fresh() -> Address {\n\n Uuid::new_v4().to_string().into()\n\n}\n\n\n", "file_path": "crates/sim1h/src/entry/fixture.rs", "rank": 9, "score": 189081.7015473979 }, { "content": "pub fn client(region: Region) -> Client {\n\n DynamoDbClient::new(region).with_retries(Policy::Exponential(10, Duration::from_millis(100)))\n\n}\n\n\n", "file_path": "crates/sim1h/src/dht/bbdht/dynamodb/client/mod.rs", "rank": 10, "score": 186776.1923699332 }, { "content": "pub fn bad_client() -> Client {\n\n client(bad_region())\n\n}\n\n\n\n#[cfg(test)]\n\npub mod tests {\n\n use crate::dht::bbdht::dynamodb::client::fixture::bad_client;\n\n use crate::dht::bbdht::dynamodb::client::fixture::bad_region;\n\n use crate::dht::bbdht::dynamodb::client::fixture::BAD_ENDPOINT;\n\n use crate::dht::bbdht::dynamodb::client::fixture::BAD_REGION;\n\n\n\n use crate::trace::tracer;\n\n use rusoto_core::region::Region;\n\n\n\n #[test]\n\n /// check the value is what we want\n\n fn bad_region_test() {\n\n let log_context = \"bad_region_test\";\n\n\n\n tracer(&log_context, \"compare values\");\n", "file_path": "crates/sim1h/src/dht/bbdht/dynamodb/client/fixture.rs", "rank": 11, "score": 186196.35362213035 }, { "content": "pub fn link_add_aspect_fresh(entry: &Entry) -> EntryAspect {\n\n let link_data = LinkData::new_add(\n\n &entry.address(),\n\n &entry_hash_fresh(),\n\n &link_tag_fresh(),\n\n &link_type_fresh(),\n\n chain_header_fresh(&entry_fresh()),\n\n core_agent_id_fresh(),\n\n );\n\n EntryAspect::LinkAdd(link_data, chain_header_fresh(entry))\n\n}\n\n\n", "file_path": "crates/sim1h/src/aspect/fixture.rs", "rank": 12, "score": 185957.94998403246 }, { "content": "/// -- Connection -- //\n\n/// Notification of successful connection to a network\n\n/// no-op\n\npub fn connected(log_context: &LogContext, _client: &Client, connected_data: &ConnectedData) {\n\n tracer(&log_context, &format!(\"connected {:?}\", connected_data));\n\n}\n", "file_path": "crates/sim1h/src/workflow/to_client/connected.rs", "rank": 13, "score": 179243.1136965312 }, { "content": "pub fn until_table_exists(log_context: &LogContext, client: &Client, table_name: &str) {\n\n until_table_exists_or_not(log_context, client, table_name, true);\n\n}\n\n\n", "file_path": "crates/sim1h/src/dht/bbdht/dynamodb/api/table/exist.rs", "rank": 14, "score": 179026.79422242835 }, { "content": "pub fn until_table_not_exists(log_context: &LogContext, client: &Client, table_name: &str) {\n\n until_table_exists_or_not(log_context, client, table_name, false);\n\n}\n\n\n\n#[cfg(test)]\n\npub mod tests {\n\n\n\n use crate::dht::bbdht::dynamodb::api::table::create::ensure_table;\n\n use crate::dht::bbdht::dynamodb::api::table::delete::delete_table;\n\n use 
crate::dht::bbdht::dynamodb::api::table::exist::table_exists;\n\n use crate::dht::bbdht::dynamodb::api::table::fixture::table_name_fresh;\n\n use crate::dht::bbdht::dynamodb::client::local::local_client;\n\n use crate::dht::bbdht::dynamodb::schema::fixture::attribute_definitions_a;\n\n use crate::dht::bbdht::dynamodb::schema::fixture::key_schema_a;\n\n use crate::trace::tracer;\n\n\n\n #[test]\n\n fn table_exists_test() {\n\n let log_context = \"table_exists_test\";\n\n\n", "file_path": "crates/sim1h/src/dht/bbdht/dynamodb/api/table/exist.rs", "rank": 15, "score": 179026.79422242835 }, { "content": "pub fn content_to_item(content: &dyn AddressableContent) -> Item {\n\n let mut item = HashMap::new();\n\n item.insert(\n\n String::from(ADDRESS_KEY),\n\n string_attribute_value(&String::from(content.address())),\n\n );\n\n item.insert(\n\n String::from(CONTENT_KEY),\n\n string_attribute_value(&String::from(content.content())),\n\n );\n\n item\n\n}\n\n\n", "file_path": "crates/sim1h/src/dht/bbdht/dynamodb/api/item/write.rs", "rank": 16, "score": 172685.34512872438 }, { "content": "pub fn aspects_to_opaque(aspects: &Vec<EntryAspectData>) -> Opaque {\n\n let json = JsonString::from(aspects.clone());\n\n json.to_bytes().into()\n\n}\n\n\n\nimpl Sim1hState {\n\n /// 90% (need query logic to be finalised)\n\n /// fetch all entry aspects from entry address\n\n /// do some kind of filter based on the non-opaque query struct\n\n /// familiar to rehydrate the opaque query struct\n\n pub fn query_entry(\n\n &mut self,\n\n log_context: &LogContext,\n\n _client: &Client,\n\n query_entry_data: &QueryEntryData,\n\n ) -> BbDhtResult<()> {\n\n tracer(&log_context, \"query_entry\");\n\n\n\n // Just mirror the request back, since we are a full-sync bbDHT\n\n self.client_request_outbox\n", "file_path": "crates/sim1h/src/workflow/from_client/query_entry.rs", "rank": 17, "score": 172162.26293834954 }, { "content": "pub fn client_from_endpoint(endpoint: String, region: String) -> Client {\n\n client(Region::Custom {\n\n name: region,\n\n endpoint,\n\n })\n\n}\n\n\n\n#[cfg(test)]\n\npub mod test {\n\n use crate::dht::bbdht::dynamodb::client::client;\n\n use crate::trace::tracer;\n\n use rusoto_core::region::Region;\n\n\n\n #[test]\n\n fn client_smoke_test() {\n\n let log_context = \"client_smoke_test\";\n\n\n\n tracer(&log_context, \"smoke test\");\n\n client(Region::SaEast1);\n\n }\n\n\n\n}\n", "file_path": "crates/sim1h/src/dht/bbdht/dynamodb/client/mod.rs", "rank": 18, "score": 169520.10913910734 }, { "content": "pub fn inbox_key(agent_id: &Address) -> String {\n\n format!(\"{}{}\", INBOX_KEY_PREFIX, agent_id)\n\n}\n\n\n", "file_path": "crates/sim1h/src/dht/bbdht/dynamodb/schema/cas.rs", "rank": 19, "score": 164908.98338351113 }, { "content": "pub fn item_to_direct_message_data(item: &Item) -> BbDhtResult<(DirectMessageData, bool)> {\n\n let content = match item[MESSAGE_CONTENT_KEY].b.clone() {\n\n Some(v) => v.to_vec(),\n\n None => {\n\n return Err(BbDhtError::MissingData(format!(\n\n \"message item missing content {:?}\",\n\n &item\n\n )))\n\n }\n\n };\n\n\n\n let from_agent_id = match item[MESSAGE_FROM_KEY].s.clone() {\n\n Some(v) => v,\n\n None => {\n\n return Err(BbDhtError::MissingData(format!(\n\n \"message item missing from {:?}\",\n\n &item\n\n )))\n\n }\n\n };\n", "file_path": "crates/sim1h/src/dht/bbdht/dynamodb/api/agent/inbox.rs", "rank": 20, "score": 163944.34347365765 }, { "content": "fn get_or_err<'a, V: Debug>(item: &'a HashMap<String, V>, key: &'a str) -> BbDhtResult<&'a V> {\n\n 
item.get(&key.to_string()).ok_or_else(|| {\n\n BbDhtError::MissingData(format!(\n\n \"Key not present in hashmap! key={}, hashmap={:?}\",\n\n key, item\n\n ))\n\n })\n\n}\n\n\n", "file_path": "crates/sim1h/src/dht/bbdht/dynamodb/api/aspect/read.rs", "rank": 22, "score": 163275.67177990827 }, { "content": "pub fn content_aspect_fresh() -> EntryAspect {\n\n let entry = entry_fresh();\n\n EntryAspect::Content(entry.clone(), chain_header_fresh(&entry))\n\n}\n\n\n", "file_path": "crates/sim1h/src/aspect/fixture.rs", "rank": 23, "score": 161227.65714497826 }, { "content": "pub fn aspect_hash_fresh() -> AspectHash {\n\n AspectHash::from(Uuid::new_v4().to_string())\n\n}\n\n\n", "file_path": "crates/sim1h/src/aspect/fixture.rs", "rank": 24, "score": 161227.65714497826 }, { "content": "pub fn opaque_aspect_fresh() -> Opaque {\n\n JsonString::from(content_aspect_fresh()).to_bytes().into()\n\n}\n\n\n", "file_path": "crates/sim1h/src/aspect/fixture.rs", "rank": 25, "score": 159516.17458974247 }, { "content": "pub fn tracer(log_context: &LogContext, msg: &str) {\n\n setup();\n\n trace!(\"{}: {}\", log_context, msg);\n\n}\n\n\n\npub type LogContext = &'static str;\n", "file_path": "crates/sim1h/src/trace/mod.rs", "rank": 26, "score": 157111.60784804792 }, { "content": "pub fn list_tables(client: &Client) -> BbDhtResult<Option<Vec<TableName>>> {\n\n Ok(client\n\n .list_tables(ListTablesInput {\n\n ..Default::default()\n\n })\n\n .sync()?\n\n .table_names)\n\n}\n\n\n\n#[cfg(test)]\n\npub mod test {\n\n use crate::dht::bbdht::dynamodb::api::table::list::list_tables;\n\n use crate::dht::bbdht::dynamodb::client::local::local_client;\n\n use crate::trace::tracer;\n\n\n\n #[test]\n\n pub fn list_tables_test() {\n\n let log_context = \"list_tables_test\";\n\n\n\n tracer(&log_context, \"fixtures\");\n\n let local_client = local_client();\n\n\n\n // list\n\n assert!(list_tables(&local_client).is_ok());\n\n }\n\n}\n", "file_path": "crates/sim1h/src/dht/bbdht/dynamodb/api/table/list.rs", "rank": 27, "score": 156923.99876213804 }, { "content": "pub fn agent_id_fresh() -> AgentPubKey {\n\n AgentPubKey::from(Uuid::new_v4().to_string())\n\n}\n\n\n", "file_path": "crates/sim1h/src/agent/fixture.rs", "rank": 28, "score": 155377.08995662283 }, { "content": "pub fn get_item_by_address(\n\n log_context: &LogContext,\n\n client: &Client,\n\n table_name: &str,\n\n address: &Address,\n\n) -> BbDhtResult<Option<Item>> {\n\n tracer(&log_context, \"get_item_by_address\");\n\n\n\n let mut key = HashMap::new();\n\n key.insert(\n\n String::from(ADDRESS_KEY),\n\n string_attribute_value(&String::from(address.to_owned())),\n\n );\n\n Ok(client\n\n .get_item(GetItemInput {\n\n consistent_read: Some(true),\n\n table_name: table_name.into(),\n\n key: key,\n\n ..Default::default()\n\n })\n", "file_path": "crates/sim1h/src/dht/bbdht/dynamodb/api/item/read.rs", "rank": 29, "score": 155298.82905736013 }, { "content": "pub fn header_aspect_fresh(entry: &Entry) -> EntryAspect {\n\n EntryAspect::Header(chain_header_fresh(entry))\n\n}\n\n\n", "file_path": "crates/sim1h/src/aspect/fixture.rs", "rank": 30, "score": 155273.73920874659 }, { "content": "pub fn deletion_aspect_fresh(entry: &Entry) -> EntryAspect {\n\n EntryAspect::Deletion(chain_header_fresh(&entry))\n\n}\n\n\n", "file_path": "crates/sim1h/src/aspect/fixture.rs", "rank": 31, "score": 155273.73920874659 }, { "content": "pub fn update_aspect_fresh(entry: &Entry) -> EntryAspect {\n\n EntryAspect::Update(entry.clone(), chain_header_fresh(&entry))\n\n}\n\n\n", "file_path": 
"crates/sim1h/src/aspect/fixture.rs", "rank": 32, "score": 155273.73920874659 }, { "content": "// result of no-op is no-op\n\npub fn handle_drop_entry_result(log_context: &LogContext) {\n\n tracer(&log_context, &format!(\"handle_drop_entry_result\"));\n\n}\n", "file_path": "crates/sim1h/src/workflow/to_client_response/handle_drop_entry_result.rs", "rank": 33, "score": 154544.57223011536 }, { "content": "pub fn link_remove_aspect_fresh(entry: &Entry) -> EntryAspect {\n\n let link_data = LinkData::new_delete(\n\n &entry.address(),\n\n &entry_hash_fresh(),\n\n &link_tag_fresh(),\n\n &link_type_fresh(),\n\n chain_header_fresh(&entry_fresh()),\n\n core_agent_id_fresh(),\n\n );\n\n EntryAspect::LinkRemove((link_data, Vec::new()), chain_header_fresh(entry))\n\n}\n\n\n", "file_path": "crates/sim1h/src/aspect/fixture.rs", "rank": 35, "score": 153270.00550681367 }, { "content": "pub fn local_client() -> Client {\n\n client(local_region())\n\n}\n\n\n\n#[cfg(test)]\n\npub mod tests {\n\n use crate::dht::bbdht::dynamodb::client::local::local_client;\n\n use crate::dht::bbdht::dynamodb::client::local::local_region;\n\n use crate::dht::bbdht::dynamodb::client::local::LOCAL_ENDPOINT;\n\n use crate::dht::bbdht::dynamodb::client::local::LOCAL_REGION;\n\n\n\n use crate::trace::tracer;\n\n use rusoto_core::region::Region;\n\n\n\n #[test]\n\n /// check the value is what we want\n\n fn local_region_test() {\n\n let log_context = \"local_region_test\";\n\n\n\n tracer(&log_context, \"compare values\");\n", "file_path": "crates/sim1h/src/dht/bbdht/dynamodb/client/local.rs", "rank": 36, "score": 153233.07503672715 }, { "content": "/// Store data on a node's dht arc.\n\n/// all entry aspects are in the database\n\n/// no-op\n\npub fn handle_store_entry_aspect(\n\n log_context: &LogContext,\n\n _client: &Client,\n\n store_entry_aspect_data: &StoreEntryAspectData,\n\n) {\n\n tracer(\n\n &log_context,\n\n &format!(\"handle_store_entry_aspect {:?}\", store_entry_aspect_data),\n\n );\n\n}\n", "file_path": "crates/sim1h/src/workflow/to_client/handle_store_entry_aspect.rs", "rank": 37, "score": 150917.11205318075 }, { "content": "pub fn publish_ts_fresh() -> u64 {\n\n 1568858140\n\n}\n", "file_path": "crates/sim1h/src/aspect/fixture.rs", "rank": 38, "score": 150911.5609188232 }, { "content": "pub fn type_hint_fresh() -> String {\n\n \"content\".to_string()\n\n}\n\n\n", "file_path": "crates/sim1h/src/aspect/fixture.rs", "rank": 39, "score": 150911.5609188232 }, { "content": "pub fn address_key_schema() -> KeySchemaElement {\n\n hash_key(ADDRESS_KEY)\n\n}\n\n\n", "file_path": "crates/sim1h/src/dht/bbdht/dynamodb/schema/cas.rs", "rank": 40, "score": 150380.98388322332 }, { "content": "pub fn space_address_fresh() -> SpaceHash {\n\n Address::from(Uuid::new_v4().to_string()).into()\n\n}\n\n\n", "file_path": "crates/sim1h/src/space/fixture.rs", "rank": 41, "score": 149265.91625603376 }, { "content": "pub fn key_schema_a() -> Vec<KeySchemaElement> {\n\n vec![hash_key(&primary_key_attribute_name_a())]\n\n}\n\n\n", "file_path": "crates/sim1h/src/dht/bbdht/dynamodb/schema/fixture.rs", "rank": 42, "score": 148737.72591989418 }, { "content": "pub fn provided_entry_data_fresh(\n\n space_data: &SpaceData,\n\n entry_hash: &EntryHash,\n\n) -> ProvidedEntryData {\n\n ProvidedEntryData {\n\n space_address: space_data.space_address.clone(),\n\n provider_agent_id: agent_id_fresh(),\n\n entry: entry_data_fresh(entry_hash),\n\n }\n\n}\n", "file_path": "crates/sim1h/src/workflow/from_client/fixture.rs", "rank": 43, "score": 148631.36410345748 }, 
{ "content": "pub fn query_entry_aspects(\n\n log_context: &LogContext,\n\n client: &Client,\n\n query_entry_data: &QueryEntryData,\n\n) -> BbDhtResult<Vec<EntryAspectData>> {\n\n tracer(&log_context, \"publish_entry\");\n\n\n\n let table_name = query_entry_data.space_address.to_string();\n\n let entry_address = query_entry_data.entry_address.clone();\n\n\n\n let query_raw = query_entry_data.query.as_slice();\n\n let utf8_result = std::str::from_utf8(&query_raw.clone());\n\n let query_str = match utf8_result {\n\n Ok(v) => v,\n\n Err(err) => Err(BbDhtError::CorruptData(err.to_string()))?,\n\n };\n\n let query_json = JsonString::from_json(&query_str.to_string());\n\n let _query = match NetworkQuery::try_from(query_json.clone()) {\n\n Ok(v) => v,\n\n Err(err) => Err(BbDhtError::CorruptData(err.to_string()))?,\n\n };\n\n\n\n let entry_aspects = get_entry_aspects(log_context, client, &table_name, &entry_address)?;\n\n\n\n Ok(entry_aspects)\n\n}\n\n\n", "file_path": "crates/sim1h/src/workflow/from_client/query_entry.rs", "rank": 44, "score": 148612.0915886058 }, { "content": "pub fn bad_region() -> Region {\n\n Region::Custom {\n\n name: BAD_REGION.into(),\n\n endpoint: BAD_ENDPOINT.into(),\n\n }\n\n}\n\n\n", "file_path": "crates/sim1h/src/dht/bbdht/dynamodb/client/fixture.rs", "rank": 45, "score": 145263.13903637073 }, { "content": "pub fn content_fresh() -> Content {\n\n Content::from(RawString::from(Uuid::new_v4().to_string()))\n\n}\n", "file_path": "crates/sim1h/src/dht/bbdht/dynamodb/api/item/fixture.rs", "rank": 46, "score": 144400.62627126262 }, { "content": "pub fn entry_hash_fresh() -> EntryHash {\n\n EntryHash::from(Uuid::new_v4().to_string())\n\n}\n\n\n", "file_path": "crates/sim1h/src/entry/fixture.rs", "rank": 47, "score": 143636.1568163531 }, { "content": "pub fn hash_key(attribute_name: &str) -> KeySchemaElement {\n\n KeySchemaElement {\n\n attribute_name: attribute_name.into(),\n\n key_type: \"HASH\".into(),\n\n }\n\n}\n\n\n", "file_path": "crates/sim1h/src/dht/bbdht/dynamodb/schema/mod.rs", "rank": 48, "score": 143609.7304783519 }, { "content": "pub fn primary_key_attribute_name_a() -> String {\n\n \"key_a\".into()\n\n}\n\n\n", "file_path": "crates/sim1h/src/dht/bbdht/dynamodb/schema/fixture.rs", "rank": 49, "score": 142412.63972425292 }, { "content": "pub fn table_name_fresh() -> String {\n\n format!(\"table_{}\", Uuid::new_v4())\n\n}\n", "file_path": "crates/sim1h/src/dht/bbdht/dynamodb/api/table/fixture.rs", "rank": 50, "score": 138565.0856997306 }, { "content": "pub fn unordered_vec_compare<T: Hash + Eq>(a: Vec<T>, b: Vec<T>) -> bool {\n\n let mut set_a = HashSet::new();\n\n for i in a {\n\n set_a.insert(i);\n\n }\n\n let mut set_b = HashSet::new();\n\n for j in b {\n\n set_b.insert(j);\n\n }\n\n set_a == set_b\n\n}\n", "file_path": "crates/sim1h/src/test/mod.rs", "rank": 51, "score": 130121.95384609129 }, { "content": "pub fn setup() {\n\n let _ = env_logger::builder().is_test(true).try_init();\n\n}\n\n\n", "file_path": "crates/sim1h/src/trace/mod.rs", "rank": 52, "score": 123155.0916939526 }, { "content": "pub fn scan_aspects(\n\n _log_context: LogContext,\n\n client: &Client,\n\n table_name: &TableName,\n\n exclusive_start_key: Option<Item>,\n\n) -> BbDhtResult<(AspectAddressMap, Option<Item>)> {\n\n client\n\n .scan(ScanInput {\n\n consistent_read: Some(true),\n\n table_name: table_name.to_string(),\n\n projection_expression: projection_expression(vec![ADDRESS_KEY, ASPECT_LIST_KEY]),\n\n exclusive_start_key,\n\n ..Default::default()\n\n })\n\n .sync()\n\n .map_err(|err| 
err.into())\n\n .map(|result| {\n\n let items = result\n\n .items\n\n .unwrap_or(Vec::new())\n", "file_path": "crates/sim1h/src/dht/bbdht/dynamodb/api/aspect/read.rs", "rank": 53, "score": 123045.15293765759 }, { "content": "pub fn get_aspect(\n\n log_context: &LogContext,\n\n client: &Client,\n\n table_name: &TableName,\n\n aspect_address: &Address,\n\n) -> BbDhtResult<Option<EntryAspectData>> {\n\n tracer(&log_context, \"read_aspect\");\n\n\n\n match get_item_by_address(&log_context, &client, &table_name, &aspect_address) {\n\n Ok(get_output) => match get_output {\n\n Some(aspect_item) => Ok(Some(try_aspect_from_item(aspect_item)?)),\n\n None => Ok(None),\n\n },\n\n Err(err) => Err(err.into()),\n\n }\n\n}\n\n\n", "file_path": "crates/sim1h/src/dht/bbdht/dynamodb/api/aspect/read.rs", "rank": 54, "score": 123045.15293765759 }, { "content": "pub fn query_entry_data_fresh(space_data: &SpaceData, entry_hash: &EntryHash) -> QueryEntryData {\n\n QueryEntryData {\n\n space_address: space_data.space_address.clone(),\n\n entry_address: entry_hash.clone(),\n\n request_id: request_id_fresh(),\n\n requester_agent_id: agent_id_fresh(),\n\n query: query_fresh(&entry_hash),\n\n }\n\n}\n\n\n", "file_path": "crates/sim1h/src/workflow/from_client/fixture.rs", "rank": 55, "score": 122628.80307114634 }, { "content": "pub fn should_put_item_retry(\n\n log_context: &LogContext,\n\n put_item_result: Result<PutItemOutput, RusotoError<PutItemError>>,\n\n) -> BbDhtResult<bool> {\n\n match put_item_result {\n\n // no need to retry any success\n\n Ok(_) => Ok(false),\n\n Err(RusotoError::Service(err)) => match err {\n\n PutItemError::InternalServerError(err) => {\n\n // retry InternalServerErrors as these often seem to be temporary\n\n tracer(\n\n &log_context,\n\n &format!(\"retry Service InternalServerError {:?}\", err),\n\n );\n\n Ok(true)\n\n }\n\n PutItemError::ProvisionedThroughputExceeded(err) => {\n\n // retry throughput issues as these will hopefully recover\n\n tracer(\n\n &log_context,\n", "file_path": "crates/sim1h/src/dht/bbdht/dynamodb/api/item/write.rs", "rank": 56, "score": 122168.53699050249 }, { "content": "pub fn get_entry_aspects(\n\n log_context: &LogContext,\n\n client: &Client,\n\n table_name: &TableName,\n\n entry_address: &Address,\n\n) -> BbDhtResult<Vec<EntryAspectData>> {\n\n match get_item_by_address(log_context, client, table_name, entry_address) {\n\n Ok(get_item_output) => match get_item_output {\n\n Some(item) => {\n\n let aspect_list = try_aspect_list_from_item(item)?;\n\n let mut aspects = Vec::new();\n\n for aspect_address in aspect_list {\n\n aspects.push(\n\n match get_aspect(log_context, client, table_name, &aspect_address) {\n\n Ok(Some(aspect)) => aspect,\n\n Ok(None) => {\n\n return Err(BbDhtError::MissingData(format!(\n\n \"Missing entry aspect data: {:?}\",\n\n &aspect_address\n\n )))\n", "file_path": "crates/sim1h/src/dht/bbdht/dynamodb/api/aspect/read.rs", "rank": 57, "score": 121466.28062295569 }, { "content": "// Notification of disconnection from a network\n\n// no-op\n\npub fn disconnected(\n\n log_context: &LogContext,\n\n _client: &Client,\n\n disconnected_data: &DisconnectedData,\n\n) {\n\n tracer(\n\n &log_context,\n\n &format!(\"disconnected {:?}\", disconnected_data),\n\n );\n\n}\n", "file_path": "crates/sim1h/src/workflow/to_client/disconnected.rs", "rank": 58, "score": 120673.3762275921 }, { "content": "/// mimic lib3h::engine::real_engine::serve_Lib3hClientProtocol\n\npub fn serve_Lib3hClientProtocol(client_msg: Lib3hClientProtocol) {\n\n debug!(\"serving: 
{:?}\", client_msg);\n\n\n\n /// docs for all sequences at:\n\n /// https://hackmd.io/Rag5au4dQfm1CtcjOK7y5w\n\n match protocol {\n\n Lib3hClientProtocol::Shutdown => {\n\n // ** do nothing **\n\n // this is a hangover from n3h\n\n },\n\n\n\n // this doesn't do anything standalone\n\n Lib3hClientProtocol::SuccessResult(generic_result_data) => { generic_result_data; },\n\n\n\n // this doesn't do anything standalone\n\n Lib3hClientProtocol::FailureResult(generic_result_data) => { generic_result_data; },\n\n\n\n // https://hackmd.io/Rag5au4dQfm1CtcjOK7y5w#Connect\n\n Lib3hClientProtocol::Connect(connect_data) => {\n\n // ??CHECK??\n", "file_path": "crates/sim1h/src/protocol_map/client_protocol.rs", "rank": 60, "score": 119654.96628672948 }, { "content": "pub fn opaque_fresh() -> Opaque {\n\n vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9].into()\n\n}\n\n\n", "file_path": "crates/sim1h/src/network/fixture.rs", "rank": 61, "score": 118627.40582532212 }, { "content": "pub fn timestamp_fresh() -> Iso8601 {\n\n test_iso_8601()\n\n}\n", "file_path": "crates/sim1h/src/network/fixture.rs", "rank": 62, "score": 118627.40582532212 }, { "content": "pub fn provenance_fresh() -> Provenance {\n\n Provenance(agent_id_fresh().hash_string().to_owned(), Signature::fake())\n\n}\n\n\n", "file_path": "crates/sim1h/src/agent/fixture.rs", "rank": 63, "score": 118627.40582532212 }, { "content": "pub fn entry_fresh() -> Entry {\n\n Entry::AgentId(core_agent_id_fresh())\n\n}\n\n\n", "file_path": "crates/sim1h/src/entry/fixture.rs", "rank": 64, "score": 118627.40582532212 }, { "content": "pub fn content_key_schema() -> KeySchemaElement {\n\n hash_key(CONTENT_KEY)\n\n}\n\n\n", "file_path": "crates/sim1h/src/dht/bbdht/dynamodb/schema/cas.rs", "rank": 65, "score": 118377.50192195848 }, { "content": "/// MVP\n\n/// append list of aspect addresses to entry address\n\n/// drop all aspects into database under each of their addresses\n\n/// later:\n\n/// make all this in a transaction\n\npub fn publish_entry(\n\n log_context: &LogContext,\n\n client: &Client,\n\n provided_entry_data: &ProvidedEntryData,\n\n) -> BbDhtResult<()> {\n\n tracer(&log_context, \"publish_entry\");\n\n\n\n append_aspect_list_to_entry(\n\n &log_context,\n\n &client,\n\n &provided_entry_data.space_address.to_string(),\n\n &provided_entry_data.entry.entry_address,\n\n &provided_entry_data.entry.aspect_list,\n\n )?;\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\npub mod tests {\n\n\n", "file_path": "crates/sim1h/src/workflow/from_client/publish_entry.rs", "rank": 66, "score": 116874.63068257418 }, { "content": "/// MVP (needs tests, wrapping query atm)\n\n/// query entry but hardcoded to entry query right?\n\npub fn fetch_entry(\n\n log_context: &LogContext,\n\n client: &Client,\n\n fetch_entry_data: &FetchEntryData,\n\n) -> BbDhtResult<ClientToLib3hResponse> {\n\n let query_entry_data = QueryEntryData {\n\n request_id: fetch_entry_data.request_id.clone(),\n\n // seems weird but the two structs don't line up 1:1\n\n requester_agent_id: fetch_entry_data.provider_agent_id.clone(),\n\n space_address: fetch_entry_data.space_address.clone(),\n\n entry_address: fetch_entry_data.entry_address.clone(),\n\n query: JsonString::from(NetworkQuery::GetEntry).to_bytes().into(),\n\n };\n\n let query_aspect_list = query_entry_aspects(log_context, client, &query_entry_data)?;\n\n let fetch_entry_result_data = FetchEntryResultData {\n\n // i think this works??\n\n entry: EntryData {\n\n aspect_list: query_aspect_list,\n\n entry_address: query_entry_data.entry_address,\n\n },\n\n 
provider_agent_id: query_entry_data.requester_agent_id,\n\n request_id: query_entry_data.request_id,\n\n space_address: query_entry_data.space_address,\n\n };\n\n Ok(ClientToLib3hResponse::FetchEntryResult(\n\n fetch_entry_result_data,\n\n ))\n\n}\n", "file_path": "crates/sim1h/src/workflow/from_client/fetch_entry.rs", "rank": 67, "score": 116869.49492633437 }, { "content": "/// no-op\n\npub fn leave_space(\n\n log_context: &LogContext,\n\n _client: &Client,\n\n _leave_space_data: &SpaceData,\n\n) -> BbDhtResult<ClientToLib3hResponse> {\n\n tracer(&log_context, \"leave_space\");\n\n // leave space is a no-op in a simulation\n\n Ok(ClientToLib3hResponse::LeaveSpaceResult)\n\n}\n\n\n\n#[cfg(test)]\n\npub mod tests {\n\n\n\n use crate::dht::bbdht::dynamodb::client::local::local_client;\n\n use crate::space::fixture::space_data_fresh;\n\n use crate::trace::tracer;\n\n use crate::workflow::from_client::leave_space::leave_space;\n\n use lib3h_protocol::protocol::ClientToLib3hResponse;\n\n\n\n #[test]\n", "file_path": "crates/sim1h/src/workflow/from_client/leave_space.rs", "rank": 68, "score": 116866.05763409381 }, { "content": "pub fn link_tag_fresh() -> String {\n\n Uuid::new_v4().to_string()\n\n}\n\n\n", "file_path": "crates/sim1h/src/entry/fixture.rs", "rank": 69, "score": 116742.59623748505 }, { "content": "pub fn core_nick_fresh() -> String {\n\n Uuid::new_v4().to_string()\n\n}\n\n\n", "file_path": "crates/sim1h/src/agent/fixture.rs", "rank": 70, "score": 116742.59623748505 }, { "content": "pub fn request_id_fresh() -> String {\n\n Uuid::new_v4().to_string()\n\n}\n\n\n", "file_path": "crates/sim1h/src/network/fixture.rs", "rank": 71, "score": 116742.59623748505 }, { "content": "pub fn link_type_fresh() -> String {\n\n Uuid::new_v4().to_string()\n\n}\n", "file_path": "crates/sim1h/src/entry/fixture.rs", "rank": 72, "score": 116742.59623748505 }, { "content": "pub fn key_schema_cas() -> Vec<KeySchemaElement> {\n\n vec![address_key_schema()]\n\n}\n\n\n", "file_path": "crates/sim1h/src/dht/bbdht/dynamodb/schema/cas.rs", "rank": 73, "score": 116006.55883538752 }, { "content": "pub fn provenances_fresh() -> Vec<Provenance> {\n\n vec![provenance_fresh(), provenance_fresh()]\n\n}\n\n\n", "file_path": "crates/sim1h/src/agent/fixture.rs", "rank": 74, "score": 115821.23624609355 }, { "content": "pub fn describe_limits(\n\n log_context: &LogContext,\n\n client: &Client,\n\n) -> BbDhtResult<DescribeLimitsOutput> {\n\n tracer(&log_context, \"describe_limits\");\n\n Ok(client.describe_limits().sync()?)\n\n}\n\n\n\n#[cfg(test)]\n\npub mod tests {\n\n\n\n use crate::dht::bbdht::dynamodb::account::describe_limits;\n\n use crate::dht::bbdht::dynamodb::client::fixture::bad_client;\n\n use crate::dht::bbdht::dynamodb::client::local::local_client;\n\n use crate::trace::tracer;\n\n\n\n #[test]\n\n fn describe_limits_ok_test() {\n\n let log_context = \"describe_limits_ok_test\";\n\n\n", "file_path": "crates/sim1h/src/dht/bbdht/dynamodb/account/mod.rs", "rank": 75, "score": 115498.26346145973 }, { "content": "pub fn space_data_fresh() -> SpaceData {\n\n SpaceData {\n\n request_id: request_id_fresh(),\n\n space_address: space_address_fresh(),\n\n agent_id: agent_id_fresh(),\n\n }\n\n}\n", "file_path": "crates/sim1h/src/space/fixture.rs", "rank": 76, "score": 114926.57069802667 }, { "content": "pub fn ensure_content(\n\n log_context: &LogContext,\n\n client: &Client,\n\n table_name: &TableName,\n\n content: &dyn AddressableContent,\n\n) -> BbDhtResult<()> {\n\n tracer(&log_context, \"ensure_content\");\n\n\n\n if 
should_put_item_retry(\n\n log_context,\n\n client\n\n .put_item(PutItemInput {\n\n item: content_to_item(content),\n\n table_name: table_name.to_string(),\n\n ..Default::default()\n\n })\n\n .sync(),\n\n )? {\n\n ensure_content(log_context, client, table_name, content)\n\n } else {\n", "file_path": "crates/sim1h/src/dht/bbdht/dynamodb/api/item/write.rs", "rank": 77, "score": 114748.24766211197 }, { "content": "pub fn message_content_fresh() -> Vec<u8> {\n\n JsonString::from(RawString::from(\"foo\")).to_bytes()\n\n}\n", "file_path": "crates/sim1h/src/agent/fixture.rs", "rank": 78, "score": 114005.21070663517 }, { "content": "/// Local client does not need to hold that entry anymore.\n\n/// Local client doesn't 'have to' comply.\n\n/// all entries are in the database\n\n/// no-op\n\npub fn handle_drop_entry(\n\n log_context: &LogContext,\n\n _client: &Client,\n\n drop_entry_data: &DropEntryData,\n\n) {\n\n tracer(\n\n &log_context,\n\n &format!(\"handle_drop_entry {:?}\", drop_entry_data),\n\n );\n\n}\n", "file_path": "crates/sim1h/src/workflow/to_client/handle_drop_entry.rs", "rank": 79, "score": 113335.26087598632 }, { "content": "/// A: append message to inbox in database\n\npub fn send_direct_message(\n\n log_context: &LogContext,\n\n client: &Client,\n\n direct_message_data: &DirectMessageData,\n\n) -> BbDhtResult<ClientToLib3hResponse> {\n\n tracer(&log_context, \"send_direct_message\");\n\n send_to_agent_inbox(\n\n &log_context,\n\n &client,\n\n &direct_message_data.space_address.to_string(),\n\n &direct_message_data.request_id,\n\n &direct_message_data.from_agent_id,\n\n &direct_message_data.to_agent_id,\n\n &direct_message_data.content,\n\n false,\n\n )?;\n\n Ok(ClientToLib3hResponse::SendDirectMessageResult(\n\n direct_message_data.clone(),\n\n ))\n\n}\n", "file_path": "crates/sim1h/src/workflow/from_client/send_direct_message.rs", "rank": 80, "score": 113331.64709224281 }, { "content": "// -- Entry -- //\n\n// Another node, or the network module itself is requesting data from us\n\n// all entries are in the database\n\n// no-op\n\npub fn handle_fetch_entry(\n\n log_context: &LogContext,\n\n _client: &Client,\n\n fetch_entry_data: &FetchEntryResultData,\n\n) {\n\n tracer(\n\n &log_context,\n\n &format!(\"handle_fetch_entry {:?}\", fetch_entry_data),\n\n );\n\n}\n", "file_path": "crates/sim1h/src/workflow/to_client/handle_fetch_entry.rs", "rank": 81, "score": 113331.64709224281 }, { "content": "// Request a node to handle a QueryEntry request\n\n// queries are simulated on the outgoing side\n\n// no-op\n\npub fn handle_query_entry(\n\n log_context: &LogContext,\n\n _client: &Client,\n\n query_entry_data: &QueryEntryResultData,\n\n) {\n\n tracer(\n\n &log_context,\n\n &format!(\"handle_query_entry {:?}\", query_entry_data),\n\n );\n\n}\n", "file_path": "crates/sim1h/src/workflow/to_client/handle_query_entry.rs", "rank": 82, "score": 113331.64709224281 }, { "content": "pub fn core_agent_id_fresh() -> AgentId {\n\n AgentId {\n\n nick: core_nick_fresh(),\n\n pub_sign_key: agent_id_fresh().into(),\n\n }\n\n}\n\n\n", "file_path": "crates/sim1h/src/agent/fixture.rs", "rank": 83, "score": 113175.63138389253 }, { "content": "/// mimic lib3h::engine::real_engine::serve_Lib3hClientProtocol\n\npub fn serve_Lib3hServerProtocol(client_msg: Lib3hClientProtocol) {\n\n debug!(\"serving: {:?}\", client_msg);\n\n\n\n /// docs for all sequences at:\n\n /// https://hackmd.io/Rag5au4dQfm1CtcjOK7y5w\n\n match protocol {\n\n pub enum Lib3hServer(InFromNetwork)Protocol {\n\n\n\n // this doesn't do anything 
standalone\n\n SuccessResult(GenericResultData),\n\n\n\n // this doesn't do anything standalone\n\n FailureResult(GenericResultData),\n\n\n\n Connected(ConnectedData) {\n\n\n\n // short term:\n\n // this never happens! it's just returned to A if B in db\n\n\n\n // ???CHECK???\n", "file_path": "crates/sim1h/src/protocol_map/server_protocol.rs", "rank": 84, "score": 112637.92420215592 }, { "content": "pub fn local_region() -> Region {\n\n Region::Custom {\n\n name: LOCAL_REGION.into(),\n\n endpoint: LOCAL_ENDPOINT.into(),\n\n }\n\n}\n\n\n", "file_path": "crates/sim1h/src/dht/bbdht/dynamodb/client/local.rs", "rank": 85, "score": 112299.86045096752 }, { "content": "pub fn attribute_name_fresh() -> String {\n\n format!(\"key_{}\", Uuid::new_v4())\n\n}\n\n\n", "file_path": "crates/sim1h/src/dht/bbdht/dynamodb/schema/fixture.rs", "rank": 86, "score": 111486.34086521866 }, { "content": "pub fn address_attribute_definition() -> AttributeDefinition {\n\n string_attribute_definition(ADDRESS_KEY)\n\n}\n\n\n", "file_path": "crates/sim1h/src/dht/bbdht/dynamodb/schema/cas.rs", "rank": 87, "score": 110589.88634187162 }, { "content": "// -- Direct Messaging -- //\n\n// the response received from a previous `SendDirectMessage`\n\n// B puts a message back to A\n\n// works exactly the same as the original send\n\npub fn send_direct_message_result(\n\n log_context: &LogContext,\n\n client: &Client,\n\n direct_message_data: &DirectMessageData,\n\n) -> BbDhtResult<()> {\n\n tracer(\n\n &log_context,\n\n &format!(\"send_direct_message_result {:?}\", direct_message_data),\n\n );\n\n send_to_agent_inbox(\n\n &log_context,\n\n &client,\n\n &direct_message_data.space_address.to_string(),\n\n &direct_message_data.request_id,\n\n &direct_message_data.from_agent_id,\n\n &direct_message_data.to_agent_id,\n\n &direct_message_data.content,\n\n true,\n\n )?;\n\n Ok(())\n\n}\n", "file_path": "crates/sim1h/src/workflow/to_client/send_direct_message_result.rs", "rank": 88, "score": 110041.81685149038 }, { "content": "/// Request to handle a direct message another agent has sent us.\n\n/// A has put something in inbox for B\n\n/// B needs to query to find it and pass to core\n\npub fn handle_send_direct_message(\n\n log_context: &LogContext,\n\n _client: &Client,\n\n direct_message_data: &DirectMessageData,\n\n) {\n\n tracer(\n\n &log_context,\n\n &format!(\"handle_send_direct_message {:?}\", direct_message_data),\n\n );\n\n}\n", "file_path": "crates/sim1h/src/workflow/to_client/handle_send_direct_message.rs", "rank": 89, "score": 110041.8168514904 }, { "content": "pub fn chain_header_fresh(entry: &Entry) -> ChainHeader {\n\n ChainHeader::new(\n\n &entry.entry_type(),\n\n &entry.address(),\n\n &provenances_fresh(),\n\n &Some(header_address_fresh()),\n\n &Some(header_address_fresh()),\n\n &Some(header_address_fresh()),\n\n &timestamp_fresh(),\n\n )\n\n}\n\n\n", "file_path": "crates/sim1h/src/entry/fixture.rs", "rank": 90, "score": 109737.22042277457 }, { "content": "pub fn attribute_definitions_a() -> Vec<AttributeDefinition> {\n\n vec![string_attribute_definition(&primary_key_attribute_name_a())]\n\n}\n", "file_path": "crates/sim1h/src/dht/bbdht/dynamodb/schema/fixture.rs", "rank": 91, "score": 108934.13959001265 }, { "content": "/// Successful data response for a `HandleFetchEntryData` request\n\n/// result of no-op is no-op\n\npub fn handle_fetch_entry_result(\n\n log_context: &LogContext,\n\n client: &Client,\n\n fetch_entry_result_data: &FetchEntryResultData,\n\n) -> BbDhtResult<()> {\n\n tracer(\n\n &log_context,\n\n 
&format!(\"handle_fetch_entry_result {:?}\", fetch_entry_result_data),\n\n );\n\n\n\n if fetch_entry_result_data.request_id == String::from(\"fetch-and-publish\") {\n\n publish_entry(\n\n log_context,\n\n client,\n\n &ProvidedEntryData {\n\n space_address: fetch_entry_result_data.space_address.clone(),\n\n provider_agent_id: fetch_entry_result_data.provider_agent_id.clone(),\n\n entry: fetch_entry_result_data.entry.clone(),\n\n },\n\n )?;\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "crates/sim1h/src/workflow/to_client_response/handle_fetch_entry_result.rs", "rank": 92, "score": 108480.82071574348 }, { "content": "// -- Entry lists -- //\n\n// database stored everything\n\n// no-op\n\npub fn handle_get_authoring_entry_list(\n\n log_context: &LogContext,\n\n _client: &Client,\n\n get_list_data: &EntryListData,\n\n) {\n\n tracer(\n\n &log_context,\n\n &format!(\"handle_get_gossiping_entry_list {:?}\", get_list_data),\n\n );\n\n}\n", "file_path": "crates/sim1h/src/workflow/to_client/handle_get_authoring_entry_list.rs", "rank": 93, "score": 106972.02859353376 }, { "content": "/// -- Entry lists -- //\n\n/// database stored everything\n\n/// no-op\n\npub fn handle_get_gossiping_entry_list(\n\n log_context: &LogContext,\n\n _client: &Client,\n\n get_list_data: &EntryListData,\n\n) {\n\n tracer(\n\n &log_context,\n\n &format!(\"handle_get_gossiping_entry_list {:?}\", get_list_data),\n\n );\n\n}\n", "file_path": "crates/sim1h/src/workflow/to_client/handle_get_gossiping_entry_list.rs", "rank": 94, "score": 106972.02859353376 }, { "content": "pub fn entry_data_fresh(entry_hash: &EntryHash) -> EntryData {\n\n EntryData {\n\n entry_address: entry_hash.clone(),\n\n aspect_list: aspect_list_fresh(),\n\n }\n\n}\n\n\n", "file_path": "crates/sim1h/src/entry/fixture.rs", "rank": 95, "score": 106531.00517295585 }, { "content": "/// Our response to a direct message from another agent.\n\n/// A sends message to B\n\n/// B told A it received the message\n\npub fn handle_send_direct_message_result(\n\n log_context: &LogContext,\n\n direct_message_data: &DirectMessageData,\n\n) {\n\n tracer(\n\n &log_context,\n\n &format!(\n\n \"handle_send_direct_message_result {:?}\",\n\n direct_message_data\n\n ),\n\n );\n\n}\n", "file_path": "crates/sim1h/src/workflow/to_client_response/handle_send_direct_message_result.rs", "rank": 96, "score": 105512.86478495036 }, { "content": "pub fn number_attribute_value(value: &u64) -> AttributeValue {\n\n AttributeValue {\n\n n: Some(value.to_string()),\n\n ..Default::default()\n\n }\n\n}\n\n\n", "file_path": "crates/sim1h/src/dht/bbdht/dynamodb/schema/mod.rs", "rank": 97, "score": 104586.68919563686 }, { "content": "pub fn bool_attribute_value(value: bool) -> AttributeValue {\n\n AttributeValue {\n\n bool: Some(value),\n\n ..Default::default()\n\n }\n\n}\n\n\n", "file_path": "crates/sim1h/src/dht/bbdht/dynamodb/schema/mod.rs", "rank": 98, "score": 104586.68919563686 }, { "content": "pub fn string_attribute_value(value: &str) -> AttributeValue {\n\n AttributeValue {\n\n s: Some(value.to_string()),\n\n ..Default::default()\n\n }\n\n}\n\n\n", "file_path": "crates/sim1h/src/dht/bbdht/dynamodb/schema/mod.rs", "rank": 99, "score": 104586.68919563686 } ]
Rust
src/process.rs
berkus/fectl
bcd4137848f1c4fdbf24753e23f6030be79cce9c
#![allow(dead_code)] use std; use std::io; use std::error::Error; use std::os::unix::io::RawFd; use std::time::{Duration, Instant}; use serde_json as json; use byteorder::{ByteOrder, BigEndian}; use bytes::{BytesMut, BufMut}; use tokio_io::AsyncRead; use tokio_io::io::WriteHalf; use tokio_io::codec::{FramedRead, Encoder, Decoder}; use nix::sys::signal::{kill, Signal}; use nix::unistd::{close, pipe, fork, ForkResult, Pid}; use actix::prelude::*; use config::ServiceConfig; use io::PipeFile; use worker::{WorkerMessage, WorkerCommand}; use event::Reason; use exec::exec_worker; use service::{self, FeService}; const HEARTBEAT: u64 = 2; const WORKER_TIMEOUT: i32 = 98; pub const WORKER_INIT_FAILED: i32 = 99; pub const WORKER_BOOT_FAILED: i32 = 100; pub struct Process { idx: usize, pid: Pid, state: ProcessState, hb: Instant, addr: Addr<Unsync, FeService>, timeout: Duration, startup_timeout: u64, shutdown_timeout: u64, framed: actix::io::FramedWrite<WriteHalf<PipeFile>, TransportCodec>, } impl Actor for Process { type Context = Context<Self>; fn stopping(&mut self, ctx: &mut Context<Self>) -> Running { self.kill(ctx, false); Running::Stop } } impl StreamHandler<ProcessMessage, io::Error> for Process { fn finished(&mut self, ctx: &mut Context<Self>) { self.kill(ctx, false); ctx.stop(); } fn handle(&mut self, msg: ProcessMessage, ctx: &mut Self::Context) { ctx.notify(msg); } } #[derive(Debug)] enum ProcessState { Starting, Failed, Running, Stopping, } #[derive(PartialEq, Debug, Message)] pub enum ProcessMessage { Message(WorkerMessage), StartupTimeout, StopTimeout, Heartbeat, Kill, } #[derive(Debug, Clone)] pub enum ProcessError { Heartbeat, FailedToStart(Option<String>), StartupTimeout, StopTimeout, ConfigError(String), InitFailed, BootFailed, Signal(usize), ExitCode(i8), } impl ProcessError { pub fn from(code: i8) -> ProcessError { match code as i32 { WORKER_TIMEOUT => ProcessError::StartupTimeout, WORKER_INIT_FAILED => ProcessError::InitFailed, WORKER_BOOT_FAILED => ProcessError::BootFailed, code => ProcessError::ExitCode(code as i8), } } } impl<'a> std::convert::From<&'a ProcessError> for Reason { fn from(ob: &'a ProcessError) -> Self { match *ob { ProcessError::Heartbeat => Reason::HeartbeatFailed, ProcessError::FailedToStart(ref err) => Reason::FailedToStart( if let &Some(ref e) = err { Some(format!("{}", e))} else {None}), ProcessError::StartupTimeout => Reason::StartupTimeout, ProcessError::StopTimeout => Reason::StopTimeout, ProcessError::ConfigError(ref err) => Reason::WorkerError(err.clone()), ProcessError::InitFailed => Reason::InitFailed, ProcessError::BootFailed => Reason::BootFailed, ProcessError::Signal(sig) => Reason::Signal(sig), ProcessError::ExitCode(code) => Reason::ExitCode(code), } } } impl Process { pub fn start(idx: usize, cfg: &ServiceConfig, addr: Addr<Unsync, FeService>) -> (Pid, Option<Addr<Unsync, Process>>) { let (pid, pipe) = match Process::fork(idx, cfg) { Ok(res) => res, Err(err) => { let pid = Pid::from_raw(-1); addr.do_send( service::ProcessFailed( idx, pid, ProcessError::FailedToStart(Some(format!("{}", err))))); return (pid, None) } }; let timeout = Duration::new(u64::from(cfg.timeout), 0); let startup_timeout = u64::from(cfg.startup_timeout); let shutdown_timeout = u64::from(cfg.shutdown_timeout); let addr = Process::create(move |ctx| { let (r, w) = pipe.split(); ctx.add_stream(FramedRead::new(r, TransportCodec)); ctx.notify_later(ProcessMessage::StartupTimeout, Duration::new(startup_timeout as u64, 0)); Process { idx, pid, addr, timeout, startup_timeout, 
shutdown_timeout, state: ProcessState::Starting, hb: Instant::now(), framed: actix::io::FramedWrite::new(w, TransportCodec, ctx) }}); (pid, Some(addr)) } fn fork(idx: usize, cfg: &ServiceConfig) -> Result<(Pid, PipeFile), io::Error> { let (p_read, p_write, ch_read, ch_write) = Process::create_pipes()?; let pid = match fork() { Ok(ForkResult::Parent{ child }) => child, Ok(ForkResult::Child) => { let _ = close(p_write); let _ = close(ch_read); exec_worker(idx, cfg, p_read, ch_write); unreachable!(); }, Err(err) => { error!("Fork failed: {}", err.description()); return Err(io::Error::new(io::ErrorKind::Other, err.description())) } }; let _ = close(p_read); let _ = close(ch_write); let pipe = PipeFile::new(ch_read, p_write, Arbiter::handle()); Ok((pid, pipe)) } fn create_pipes() -> Result<(RawFd, RawFd, RawFd, RawFd), io::Error> { let (p_read, p_write) = match pipe() { Ok((r, w)) => (r, w), Err(err) => { error!("Can not create pipe: {}", err); return Err(io::Error::new( io::ErrorKind::Other, format!("Can not create pipe: {}", err))) } }; let (ch_read, ch_write) = match pipe() { Ok((r, w)) => (r, w), Err(err) => { error!("Can not create pipe: {}", err); return Err(io::Error::new( io::ErrorKind::Other, format!("Can not create pipe: {}", err))) } }; Ok((p_read, p_write, ch_read, ch_write)) } fn kill(&self, ctx: &mut Context<Self>, graceful: bool) { if graceful { ctx.notify_later(ProcessMessage::Kill, Duration::new(1, 0)); } else { let _ = kill(self.pid, Signal::SIGKILL); ctx.terminate(); } } } impl Drop for Process { fn drop(&mut self) { let _ = kill(self.pid, Signal::SIGKILL); } } impl actix::io::WriteHandler<io::Error> for Process {} impl Handler<ProcessMessage> for Process { type Result = (); fn handle(&mut self, msg: ProcessMessage, ctx: &mut Context<Self>) { match msg { ProcessMessage::Message(msg) => match msg { WorkerMessage::forked => { debug!("Worker forked (pid:{})", self.pid); self.framed.write(WorkerCommand::prepare); } WorkerMessage::loaded => { match self.state { ProcessState::Starting => { debug!("Worker loaded (pid:{})", self.pid); self.addr.do_send( service::ProcessLoaded(self.idx, self.pid)); self.state = ProcessState::Running; self.hb = Instant::now(); ctx.notify_later( ProcessMessage::Heartbeat, Duration::new(HEARTBEAT, 0)); }, _ => { warn!("Received `loaded` message from worker (pid:{})", self.pid); } } } WorkerMessage::hb => { self.hb = Instant::now(); } WorkerMessage::reload => { info!("Worker requests reload (pid:{})", self.pid); self.addr.do_send( service::ProcessMessage( self.idx, self.pid, WorkerMessage::reload)); } WorkerMessage::restart => { info!("Worker requests restart (pid:{})", self.pid); self.addr.do_send( service::ProcessMessage( self.idx, self.pid, WorkerMessage::restart)); } WorkerMessage::cfgerror(msg) => { error!("Worker config error: {} (pid:{})", msg, self.pid); self.addr.do_send( service::ProcessFailed( self.idx, self.pid, ProcessError::ConfigError(msg))); } } ProcessMessage::StartupTimeout => { if let ProcessState::Starting = self.state { error!("Worker startup timeout after {} secs", self.startup_timeout); self.addr.do_send( service::ProcessFailed( self.idx, self.pid, ProcessError::StartupTimeout)); self.state = ProcessState::Failed; let _ = kill(self.pid, Signal::SIGKILL); ctx.stop(); return } } ProcessMessage::StopTimeout => { if let ProcessState::Stopping = self.state { info!("Worker shutdown timeout aftre {} secs", self.shutdown_timeout); self.addr.do_send( service::ProcessFailed( self.idx, self.pid, ProcessError::StopTimeout)); self.state = 
ProcessState::Failed; let _ = kill(self.pid, Signal::SIGKILL); ctx.stop(); return } } ProcessMessage::Heartbeat => { if let ProcessState::Running = self.state { if Instant::now().duration_since(self.hb) > self.timeout { error!("Worker heartbeat failed (pid:{}) after {:?} secs", self.pid, self.timeout); self.addr.do_send( service::ProcessFailed( self.idx, self.pid, ProcessError::Heartbeat)); } else { self.framed.write(WorkerCommand::hb); ctx.notify_later( ProcessMessage::Heartbeat, Duration::new(HEARTBEAT, 0)); } } } ProcessMessage::Kill => { let _ = kill(self.pid, Signal::SIGKILL); ctx.stop(); return } } } } #[derive(Message)] pub struct SendCommand(pub WorkerCommand); impl Handler<SendCommand> for Process { type Result = (); fn handle(&mut self, msg: SendCommand, _: &mut Context<Process>) { self.framed.write(msg.0); } } #[derive(Message)] pub struct StartProcess; impl Handler<StartProcess> for Process { type Result = (); fn handle(&mut self, _: StartProcess, _: &mut Context<Process>) { self.framed.write(WorkerCommand::start); } } #[derive(Message)] pub struct PauseProcess; impl Handler<PauseProcess> for Process { type Result = (); fn handle(&mut self, _: PauseProcess, _: &mut Context<Process>) { self.framed.write(WorkerCommand::pause); } } #[derive(Message)] pub struct ResumeProcess; impl Handler<ResumeProcess> for Process { type Result = (); fn handle(&mut self, _: ResumeProcess, _: &mut Context<Process>) { self.framed.write(WorkerCommand::resume); } } #[derive(Message)] pub struct StopProcess; impl Handler<StopProcess> for Process { type Result = (); fn handle(&mut self, _: StopProcess, ctx: &mut Context<Process>) { info!("Stopping worker: (pid:{})", self.pid); match self.state { ProcessState::Running => { self.state = ProcessState::Stopping; self.framed.write(WorkerCommand::stop); ctx.notify_later( ProcessMessage::StopTimeout, Duration::new(self.shutdown_timeout, 0)); let _ = kill(self.pid, Signal::SIGTERM); }, _ => { let _ = kill(self.pid, Signal::SIGQUIT); ctx.terminate(); } } } } #[derive(Message)] pub struct QuitProcess(pub bool); impl Handler<QuitProcess> for Process { type Result = (); fn handle(&mut self, msg: QuitProcess, ctx: &mut Context<Process>) { if msg.0 { let _ = kill(self.pid, Signal::SIGQUIT); self.kill(ctx, true); } else { self.kill(ctx, false); let _ = kill(self.pid, Signal::SIGKILL); ctx.terminate(); } } } pub struct TransportCodec; impl Decoder for TransportCodec { type Item = ProcessMessage; type Error = io::Error; fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> { let size = { if src.len() < 2 { return Ok(None) } BigEndian::read_u16(src.as_ref()) as usize }; if src.len() >= size + 2 { src.split_to(2); let buf = src.split_to(size); Ok(Some(ProcessMessage::Message(json::from_slice::<WorkerMessage>(&buf)?))) } else { Ok(None) } } } impl Encoder for TransportCodec { type Item = WorkerCommand; type Error = io::Error; fn encode(&mut self, msg: WorkerCommand, dst: &mut BytesMut) -> Result<(), Self::Error> { let msg = json::to_string(&msg).unwrap(); let msg_ref: &[u8] = msg.as_ref(); dst.reserve(msg_ref.len() + 2); dst.put_u16::<BigEndian>(msg_ref.len() as u16); dst.put(msg_ref); Ok(()) } }
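Editorial note on the file_code cell above: every master/worker message is framed as a big-endian u16 length prefix followed by a JSON body (`TransportCodec::encode` writes `put_u16::<BigEndian>` then the serialized bytes; `decode` waits for two bytes, reads the length, and splits the payload off). Below is a minimal, dependency-free sketch of just that framing, assuming nothing beyond what the codec shows; the payload string is a placeholder, not a claim about how `WorkerCommand` actually serializes.

// Length-prefixed framing as in TransportCodec: 2-byte big-endian length, then payload.
fn frame(payload: &[u8]) -> Vec<u8> {
    let mut out = Vec::with_capacity(payload.len() + 2);
    out.extend_from_slice(&(payload.len() as u16).to_be_bytes()); // put_u16::<BigEndian>
    out.extend_from_slice(payload);                               // put(msg_ref)
    out
}

// Mirrors decode(): return None while fewer than 2 + len bytes are buffered.
fn unframe(buf: &[u8]) -> Option<&[u8]> {
    if buf.len() < 2 {
        return None;
    }
    let len = u16::from_be_bytes([buf[0], buf[1]]) as usize;
    if buf.len() >= len + 2 {
        Some(&buf[2..2 + len])
    } else {
        None
    }
}

fn main() {
    let framed = frame(br#"{"cmd":"hb"}"#); // placeholder JSON payload
    assert_eq!(unframe(&framed), Some(&br#"{"cmd":"hb"}"#[..]));
}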
#![allow(dead_code)] use std; use std::io; use std::error::Error; use std::os::unix::io::RawFd; use std::time::{Duration, Instant}; use serde_json as json; use byteorder::{ByteOrder, BigEndian}; use bytes::{BytesMut, BufMut}; use tokio_io::AsyncRead; use tokio_io::io::WriteHalf; use tokio_io::codec::{FramedRead, Encoder, Decoder}; use nix::sys::signal::{kill, Signal}; use nix::unistd::{close, pipe, fork, ForkResult, Pid}; use actix::prelude::*; use config::ServiceConfig; use io::PipeFile; use worker::{WorkerMessage, WorkerCommand}; use event::Reason; use exec::exec_worker; use service::{self, FeService}; const HEARTBEAT: u64 = 2; const WORKER_TIMEOUT: i32 = 98; pub const WORKER_INIT_FAILED: i32 = 99; pub const WORKER_BOOT_FAILED: i32 = 100; pub struct Process { idx: usize, pid: Pid, state: ProcessState, hb: Instant, addr: Addr<Unsync, FeService>, timeout: Duration, startup_timeout: u64, shutdown_timeout: u64, framed: actix::io::FramedWrite<WriteHalf<PipeFile>, TransportCodec>, } impl Actor for Process { type Context = Context<Self>; fn stopping(&mut self, ctx: &mut Context<Self>) -> Running { self.kill(ctx, false); Running::Stop } } impl StreamHandler<ProcessMessage, io::Error> for Process { fn finished(&mut self, ctx: &mut Context<Self>) { self.kill(ctx, false); ctx.stop(); } fn handle(&mut self, msg: ProcessMessage, ctx: &mut Self::Context) { ctx.notify(msg); } } #[derive(Debug)] enum ProcessState { Starting, Failed, Running, Stopping, } #[derive(PartialEq, Debug, Message)] pub enum ProcessMessage { Message(WorkerMessage), StartupTimeout, StopTimeout, Heartbeat, Kill, } #[derive(Debug, Clone)] pub enum ProcessError { Heartbeat, FailedToStart(Option<String>), StartupTimeout, StopTimeout, ConfigError(String), InitFailed, BootFailed, Signal(usize), ExitCode(i8), } impl ProcessError { pub fn from(code: i8) -> ProcessError { match code as i32 { WORKER_TIMEOUT => ProcessError::StartupTimeout, WORKER_INIT_FAILED => ProcessError::InitFailed, WORKER_BOOT_FAILED => ProcessError::BootFailed, code => ProcessError::ExitCode(code as i8), } } } impl<'a> std::convert::From<&'a ProcessError> for Reason { fn from(ob: &'a ProcessError) -> Self { match *ob { ProcessError::Heartbeat => Reason::HeartbeatFailed, ProcessError::FailedToStart(ref err) => Reason::FailedToStart( if let &Some(ref e) = err { Some(format!("{}", e))} else {None}), ProcessError::StartupTimeout => Reason::StartupTimeout, ProcessError::StopTimeout => Reason::StopTimeout, ProcessError::ConfigError(ref err) => Reason::WorkerError(err.clone()), ProcessError::InitFailed => Reason::InitFailed, ProcessError::BootFailed => Reason::BootFailed, ProcessError::Signal(sig) => Reason::Signal(sig), ProcessError::ExitCode(code) => Reason::ExitCode(code), } } } impl Process { pub fn start(idx: usize, cfg: &ServiceConfig, addr: Addr<Unsync, FeService>) -> (Pid, Option<Addr<Unsync, Process>>) { let (pid, pipe) = match Process::fork(idx, cfg) { Ok(res) => res, Err(err) => { let pid = Pid::from_raw(-1); addr.do_send( service::ProcessFailed( idx, pid, ProcessError::FailedToStart(Some(format!("{}", err))))); return (pid, None) } }; let timeout = Duration::new(u64::from(cfg.timeout), 0); let startup_timeout = u64::from(cfg.startup_timeout); let shutdown_timeout = u64::from(cfg.shutdown_timeout); let addr = Process::create(move |ctx| { let (r, w) = pipe.split(); ctx.add_stream(FramedRead::new(r, TransportCodec)); ctx.notify_later(ProcessMessage::StartupTimeout, Duration::new(startup_timeout as u64, 0)); Process { idx, pid, addr, timeout, startup_timeout, 
shutdown_timeout, state: ProcessState::Starting, hb: Instant::now(), framed: actix::io::FramedWrite::new(w, TransportCodec, ctx) }}); (pid, Some(addr)) } fn fork(idx: usize, cfg: &ServiceConfig) -> Result<(Pid, PipeFile), io::Error> { let (p_read, p_write, ch_read, ch_write) = Process::create_pipes()?; let pid = match fork() { Ok(ForkResult::Parent{ child }) => child, Ok(ForkResult::Child) => { let _ = close(p_write); let _ = close(ch_read); exec_worker(idx, cfg, p_read, ch_write); unreachable!(); }, Err(err) => { error!("Fork failed: {}", err.description()); return Err(io::Error::new(io::ErrorKind::Other, err.description())) } }; let _ = close(p_read); let _ = close(ch_write); let pipe = PipeFile::new(ch_read, p_write, Arbiter::handle()); Ok((pid, pipe)) } fn create_pipes() -> Result<(RawFd, RawFd, RawFd, RawFd), io::Error> { let (p_read, p_write) = match pipe() { Ok((r, w)) => (r, w), Err(err) => { error!("Can not create pipe: {}", err); return Err(io::Error::new( io::ErrorKind::Other, format!("Can not create pipe: {}", err))) } }; let (ch_read, ch_write) = match pipe() { Ok((r, w)) => (r, w), Err(err) => { error!("Can not create pipe: {}", err); return Err(io::Error::new( io::ErrorKind::Other, format!("Can not create pipe: {}", err))) } }; Ok((p_read, p_write, ch_read, ch_write)) } fn kill(&self, ctx: &mut Context<Self>, graceful: bool) { if graceful { ctx.notify_later(ProcessMessage::Kill, Duration::new(1, 0)); } else { let _ = kill(self.pid, Signal::SIGKILL); ctx.terminate(); } } } impl Drop for Process { fn drop(&mut self) { let _ = kill(self.pid, Signal::SIGKILL); } } impl actix::io::WriteHandler<io::Error> for Process {} impl Handler<ProcessMessage> for Process { type Result = (); fn handle(&mut self, msg: ProcessMessage, ctx: &mut Context<Self>) { match msg { ProcessMessage::Message(msg) => match msg { WorkerMessage::forked => { debug!("Worker forked (pid:{})", self.pid); self.framed.write(WorkerCommand::prepare); } WorkerMessage::loaded => { match self.state { ProcessState::Starting => { debug!("Worker loaded (pid:{})", self.pid); self.addr.do_send( service::ProcessLoaded(self.idx, self.pid)); self.state = ProcessState::Running; self.hb = Instant::now(); ctx.notify_later( ProcessMessage::Heartbeat, Duration::new(HEARTBEAT, 0)); }, _ => { warn!("Received `loaded` message from worker (pid:{})", self.pid); } } } WorkerMessage::hb => { self.hb = Instant::now(); } WorkerMessage::reload => { info!("Worker requests reload (pid:{})", self.pid); self.addr.do_send( service::ProcessMessage( self.idx, self.pid, WorkerMessage::reload)); } WorkerMessage::restart => { info!("Worker requests restart (pid:{})", self.pid); self.addr.do_send( service::ProcessMessage( self.idx, self.pid, WorkerMessage::restart)); } WorkerMessage::cfgerror(msg) => { error!("Worker config error: {} (pid:{})", msg, self.pid); self.addr.do_send( service::ProcessFailed( self.idx, self.pid, ProcessError::ConfigError(msg))); } } ProcessMessage::StartupTimeout => { if let ProcessState::Starting = self.state { error!("Worker startup timeout after {} secs", self.startup_timeout); self.addr.do_send( service::ProcessFailed( self.idx, self.pid, ProcessError::StartupTimeout)); self.state = ProcessState::Failed; let _ = kill(self.pid, Signal::SIGKILL); ctx.stop(); return } } ProcessMessage::StopTimeout => { if let ProcessState::Stopping = self.state { info!("Worker shutdown timeout aftre {} secs", self.shutdown_timeout); self.addr.do_send( service::ProcessFailed( self.idx, self.pid, ProcessError::StopTimeout)); self.state = 
ProcessState::Failed; let _ = kill(self.pid, Signal::SIGKILL); ctx.stop(); return } } ProcessMessage::Heartbeat => { if let ProcessState::Running = self.state { if Instant::now().duration_since(self.hb) > self.timeout { error!("Worker heartbeat failed (pid:{}) after {:?} secs", self.pid, self.timeout); self.addr.do_send( service::ProcessFailed( self.idx, self.pid, ProcessError::Heartbeat)); } else { self.framed.write(WorkerCommand::hb); ctx.notify_later( ProcessMessage::Heartbeat, Duration::new(HEARTBEAT, 0)); } } } ProcessMessage::Kill => { let _ = kill(self.pid, Signal::SIGKILL); ctx.stop(); return } } } } #[derive(Message)] pub struct SendCommand(pub WorkerCommand); impl Handler<SendCommand> for Process { type Result = (); fn handle(&mut self, msg: SendCommand, _: &mut Context<Process>) { self.framed.write(msg.0); } } #[derive(Message)] pub struct StartProcess; impl Handler<StartProcess> for Process { type Result = (); fn handle(&mut self, _: StartProcess, _: &mut Context<Process>) { self.framed.write(WorkerCommand::start); } } #[derive(Message)] pub struct PauseProcess; impl Handler<PauseProcess> for Process { type Result = (); fn handle(&mut self, _: PauseProcess, _: &mut Context<Process>) { self.framed.write(WorkerCommand::pause); } } #[derive(Message)] pub struct ResumeProcess; impl Handler<ResumeProcess> for Process { type Result = (); fn handle(&mut self, _: ResumeProcess, _: &mut Context<Process>) { self.framed.write(WorkerCommand::resume); } } #[derive(Message)] pub struct StopProcess; impl Handler<StopProcess> for Process { type Result = (); fn handle(&mut self, _: StopProcess, ctx: &mut Context<Process>) { info!("Stopping worker: (pid:{})", self.pid); match self.state { ProcessState::Running => { self.state = ProcessState::Stopping; self.framed.write(WorkerCommand::stop); ctx.notify_later( ProcessMessage::StopTimeout, Duration::new(self.shutdown_timeout, 0)); let _ = kill(self.pid, Signal::SIGTERM); }, _ => { let _ = kill(self.pid, Signal::SIGQUIT); ctx.terminate(); } } } } #[derive(Message)] pub struct QuitProcess(pub bool); impl Handler<QuitProcess> for Process { type Result = (); fn handle(&mut self, msg: QuitProcess, ctx: &mut Context<Process>) { if msg.0 { let _ = kill(self.pid, Signal::SIGQUIT);
} pub struct TransportCodec; impl Decoder for TransportCodec { type Item = ProcessMessage; type Error = io::Error; fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> { let size = { if src.len() < 2 { return Ok(None) } BigEndian::read_u16(src.as_ref()) as usize }; if src.len() >= size + 2 { src.split_to(2); let buf = src.split_to(size); Ok(Some(ProcessMessage::Message(json::from_slice::<WorkerMessage>(&buf)?))) } else { Ok(None) } } } impl Encoder for TransportCodec { type Item = WorkerCommand; type Error = io::Error; fn encode(&mut self, msg: WorkerCommand, dst: &mut BytesMut) -> Result<(), Self::Error> { let msg = json::to_string(&msg).unwrap(); let msg_ref: &[u8] = msg.as_ref(); dst.reserve(msg_ref.len() + 2); dst.put_u16::<BigEndian>(msg_ref.len() as u16); dst.put(msg_ref); Ok(()) } }
self.kill(ctx, true); } else { self.kill(ctx, false); let _ = kill(self.pid, Signal::SIGKILL); ctx.terminate(); } }
function_block-function_prefix_line
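Editorial note on this row: it is a fill-in-the-middle sample. Under the `function_block-function_prefix_line` strategy the prefix stops right after the `Signal::SIGQUIT` line inside the `QuitProcess` handler, the middle carries the remainder of that handler body, and the suffix resumes at the brace closing the `impl` and continues with `TransportCodec`. For this row the three cells appear to concatenate back into the file_code cell exactly; below is a trivial, self-contained sketch of that invariant, using hypothetical names and toy strings rather than the real cells.

// Hypothetical helper (not part of the dataset tooling): checks that a row's
// prefix + middle + suffix reassemble into its file_code.
fn reassembles(prefix: &str, middle: &str, suffix: &str, file_code: &str) -> bool {
    let mut joined = String::with_capacity(prefix.len() + middle.len() + suffix.len());
    joined.push_str(prefix);
    joined.push_str(middle);
    joined.push_str(suffix);
    joined == file_code
}

fn main() {
    // Toy stand-ins shaped like this row's split point inside the QuitProcess handler.
    let prefix = "if msg.0 { let _ = kill(pid, SIGQUIT);";
    let middle = " self.kill(ctx, true); } else { self.kill(ctx, false); }";
    let suffix = " }";
    let file_code =
        "if msg.0 { let _ = kill(pid, SIGQUIT); self.kill(ctx, true); } else { self.kill(ctx, false); } }";
    assert!(reassembles(prefix, middle, suffix, file_code));
}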
[ { "content": "/// Start master process\n\npub fn start(cfg: Config) -> bool {\n\n // init logging\n\n logging::init_logging(&cfg.logging);\n\n\n\n info!(\"Starting fectl process\");\n\n\n\n // change working dir\n\n if let Err(err) = nix::unistd::chdir::<OsStr>(cfg.master.directory.as_ref()) {\n\n error!(\"Can not change directory {:?} err: {}\", cfg.master.directory, err);\n\n return false\n\n }\n\n\n\n // check if other app is running\n\n for idx in 0..10 {\n\n match std::net::TcpListener::bind(HOST) {\n\n Ok(listener) => {\n\n std::mem::forget(listener);\n\n break\n\n }\n\n Err(_) => {\n", "file_path": "src/master.rs", "rank": 0, "score": 238747.09527617702 }, { "content": "pub fn exec_worker(idx: usize, cfg: &ServiceConfig, read: RawFd, write: RawFd) {\n\n // notify master\n\n let mut file = unsafe{ std::fs::File::from_raw_fd(write) };\n\n send_msg(&mut file, WorkerMessage::forked);\n\n\n\n // read master response\n\n let mut buffer = [0; 2];\n\n let mut file = unsafe{ std::fs::File::from_raw_fd(read) };\n\n if let Err(err) = file.read_exact(&mut buffer) {\n\n error!(\"Failed to read master response: {}\", err);\n\n std::process::exit(WORKER_INIT_FAILED as i32);\n\n }\n\n let size = buffer.into_buf().get_u16::<BigEndian>();\n\n let mut buffer = Vec::with_capacity(size as usize);\n\n unsafe {buffer.set_len(size as usize)};\n\n if let Err(err) = file.read_exact(&mut buffer) {\n\n error!(\"Failed to read master response: {}\", err);\n\n std::process::exit(WORKER_INIT_FAILED as i32);\n\n }\n\n match json::from_slice::<WorkerCommand>(&buffer) {\n", "file_path": "src/exec.rs", "rank": 1, "score": 184143.64116786554 }, { "content": "fn send_msg(file: &mut std::fs::File, msg: WorkerMessage) {\n\n let msg = json::to_string(&msg).unwrap();\n\n let msg_ref: &[u8] = msg.as_ref();\n\n\n\n let mut buf = BytesMut::with_capacity(msg_ref.len() + 2);\n\n buf.put_u16::<BigEndian>(msg_ref.len() as u16);\n\n buf.put(msg_ref);\n\n if let Err(err) = file.write_all(buf.as_ref()) {\n\n error!(\"Failed to notify master: {}\", err);\n\n std::process::exit(WORKER_INIT_FAILED as i32);\n\n }\n\n}\n\n\n", "file_path": "src/exec.rs", "rank": 2, "score": 176314.95078524927 }, { "content": "pub fn load_config() -> Option<Config> {\n\n let args = Cli::from_args();\n\n\n\n let mut cfg_str = String::new();\n\n if let Err(err) = std::fs::File::open(args.config)\n\n .and_then(|mut f| f.read_to_string(&mut cfg_str))\n\n {\n\n println!(\"Can not read configuration file due to: {}\", err.description());\n\n return None\n\n }\n\n\n\n let cfg: TomlConfig = match toml::from_str(&cfg_str) {\n\n Ok(cfg) => cfg,\n\n Err(err) => {\n\n println!(\"Can not parse config file: {}\", err);\n\n return None\n\n }\n\n };\n\n\n\n // master config\n", "file_path": "src/config.rs", "rank": 3, "score": 165031.00761860845 }, { "content": "pub fn default_startup_timeout() -> u32 {\n\n 30\n\n}\n\n\n", "file_path": "src/config_helpers.rs", "rank": 4, "score": 158116.6370443177 }, { "content": "pub fn default_shutdown_timeout() -> u32 {\n\n 30\n\n}\n\n\n\n/// Deserialize `gid` field into `Gid`\n\npub(crate) fn deserialize_gid_field<'de, D>(de: D) -> Result<Option<Gid>, D::Error>\n\n where D: serde::Deserializer<'de>\n\n{\n\n let deser_result: json::Value = serde::Deserialize::deserialize(de)?;\n\n match deser_result {\n\n json::Value::String(ref s) =>\n\n if let Ok(name) = CString::new(s.as_str()) {\n\n unsafe {\n\n let ptr = libc::getgrnam(name.as_ptr());\n\n return if ptr.is_null() {\n\n Err(serde::de::Error::custom(\"Can not convert group name to 
group id\"))\n\n } else {\n\n Ok(Some(Gid::from_raw((*ptr).gr_gid)))\n\n };\n\n }\n", "file_path": "src/config_helpers.rs", "rank": 5, "score": 158107.32352069928 }, { "content": "/// Send command to master\n\npub fn send_command(stream: &mut UnixStream, req: MasterRequest) -> Result<(), io::Error> {\n\n let mut buf = BytesMut::new();\n\n ClientTransportCodec.encode(req, &mut buf)?;\n\n\n\n stream.write_all(buf.as_ref())\n\n}\n\n\n", "file_path": "client/client.rs", "rank": 6, "score": 155929.80205000233 }, { "content": "/// Send command to master\n\npub fn send_command(stream: &mut UnixStream, req: MasterRequest) -> Result<(), io::Error> {\n\n let mut buf = BytesMut::new();\n\n ClientTransportCodec.encode(req, &mut buf)?;\n\n\n\n stream.write_all(buf.as_ref())\n\n}\n\n\n", "file_path": "src/client.rs", "rank": 7, "score": 155929.80205000233 }, { "content": "pub fn init_logging(cfg: &LoggingConfig) {\n\n let level = cfg.level.as_ref().and_then(\n\n |s| match LevelFilter::from_str(&s) {\n\n Ok(lvl) => Some(lvl),\n\n Err(_) => {\n\n println!(\"Can not parse log level value, using `info` level\");\n\n Some(LevelFilter::Info)\n\n }\n\n }).unwrap_or(LevelFilter::Info);\n\n\n\n Builder::new()\n\n .format(|buf, record| {\n\n let t = time::now();\n\n write!(buf, \"{},{:03} - {} - {}\\n\",\n\n time::strftime(\"%Y-%m-%d %H:%M:%S\", &t).unwrap(),\n\n t.tm_nsec / 1000_000,\n\n record.level(),\n\n record.args()\n\n )})\n\n .filter(Some(PKG_INFO.name), level)\n\n .init();\n\n}\n", "file_path": "src/logging.rs", "rank": 8, "score": 144670.6704720874 }, { "content": "pub fn default_restarts() -> u16 {\n\n 3\n\n}\n\n\n", "file_path": "src/config_helpers.rs", "rank": 9, "score": 138236.17653944058 }, { "content": "pub fn default_timeout() -> u32 {\n\n 10\n\n}\n\n\n", "file_path": "src/config_helpers.rs", "rank": 10, "score": 138199.4421572495 }, { "content": "/// Run client command\n\npub fn run(cmd: ClientCommand, sock: &str) -> bool {\n\n // create commands listener and also check if service process is running\n\n let mut buf = BytesMut::new();\n\n let mut stream = match UnixStream::connect(&sock) {\n\n Ok(mut conn) => {\n\n conn.set_read_timeout(Some(Duration::new(1, 0))).expect(\"Couldn't set read timeout\");\n\n let _ = send_command(&mut conn, MasterRequest::Ping);\n\n\n\n if try_read_response(&mut conn, &mut buf).is_ok() {\n\n conn\n\n } else {\n\n error!(\"Master process is not responding.\");\n\n return false\n\n }\n\n }\n\n Err(err) => {\n\n match err.kind() {\n\n io::ErrorKind::PermissionDenied => {\n\n error!(\"Can not connect to master. Permission denied. {}\", sock);\n\n },\n", "file_path": "client/client.rs", "rank": 11, "score": 135884.62324180204 }, { "content": "/// Check if master process is alive. 
Try to connect over unix socket\n\n/// and send `Ping` command\n\npub fn is_alive(cfg: &MasterConfig) -> AliveStatus {\n\n match UnixStream::connect(&cfg.sock) {\n\n Ok(mut conn) => {\n\n conn.set_read_timeout(Some(Duration::new(1, 0))).expect(\"Couldn't set read timeout\");\n\n let _ = send_command(&mut conn, MasterRequest::Ping);\n\n\n\n if try_read_response(&mut conn, &mut BytesMut::new()).is_ok() {\n\n AliveStatus::Alive\n\n } else {\n\n AliveStatus::NotResponding\n\n }\n\n }\n\n Err(_) => {\n\n AliveStatus::NotAlive\n\n }\n\n }\n\n}\n\n\n\npub struct ClientTransportCodec;\n\n\n", "file_path": "src/client.rs", "rank": 12, "score": 134904.2068651173 }, { "content": "pub fn load_config() -> Option<(ClientCommand, String)> {\n\n // cmd arguments\n\n let args = Cli::from_args();\n\n let cmd = args.command.to_lowercase().trim().to_owned();\n\n let sock = args.sock.clone();\n\n\n\n // check client args\n\n match cmd.as_str() {\n\n \"pid\" =>\n\n return Some((ClientCommand::Pid, sock)),\n\n \"quit\" =>\n\n return Some((ClientCommand::Quit, sock)),\n\n \"version\" =>\n\n return Some((ClientCommand::Version, sock)),\n\n \"version-check\" =>\n\n return Some((ClientCommand::VersionCheck, sock)),\n\n _ => ()\n\n }\n\n\n\n let name = match args.name {\n", "file_path": "client/config.rs", "rank": 13, "score": 130740.1441807991 }, { "content": "pub fn str(pid: Pid) -> Option<String> {\n\n Some(format!(\"{}\", pid))\n\n}\n", "file_path": "src/utils.rs", "rank": 14, "score": 130416.66028964438 }, { "content": "enum WorkerState {\n\n Initial,\n\n Starting(ProcessInfo),\n\n Reloading(ProcessInfo, ProcessInfo),\n\n Restarting(ProcessInfo, ProcessInfo),\n\n Running(ProcessInfo),\n\n StoppingOld(ProcessInfo, ProcessInfo),\n\n Stopping(ProcessInfo),\n\n Failed,\n\n Stopped,\n\n}\n\n\n", "file_path": "src/worker.rs", "rank": 16, "score": 126506.3805794533 }, { "content": "/// Deserialize `uid` field into `Uid`\n\npub fn deserialize_uid_field<'de, D>(de: D) -> Result<Option<Uid>, D::Error>\n\n where D: serde::Deserializer<'de>\n\n{\n\n let deser_result: json::Value = serde::Deserialize::deserialize(de)?;\n\n match deser_result {\n\n json::Value::String(ref s) =>\n\n if let Ok(name) = CString::new(s.as_str()) {\n\n unsafe {\n\n let ptr = libc::getpwnam(name.as_ptr());\n\n return if ptr.is_null() {\n\n Err(serde::de::Error::custom(\"Can not convert user name to user id\"))\n\n } else {\n\n Ok(Some(Uid::from_raw((*ptr).pw_uid)))\n\n };\n\n }\n\n } else {\n\n return Err(serde::de::Error::custom(\"Can not convert to plain string\"))\n\n }\n\n json::Value::Number(num) => {\n\n if let Some(num) = num.as_u64() {\n\n if num <= u64::from(u32::max_value()) {\n\n return Ok(Some(Uid::from_raw(num as u32)));\n\n }\n\n }\n\n }\n\n _ => (),\n\n }\n\n Err(serde::de::Error::custom(\"Unexpected value\"))\n\n}\n", "file_path": "src/config_helpers.rs", "rank": 17, "score": 121807.51131075427 }, { "content": "#[derive(PartialEq, Debug)]\n\nenum State {\n\n Starting,\n\n Running,\n\n Stopping,\n\n}\n\n\n\npub struct CommandCenter {\n\n cfg: Rc<Config>,\n\n state: State,\n\n system: Addr<Syn, System>,\n\n services: HashMap<String, Addr<Unsync, FeService>>,\n\n stop_waiter: Option<actix::Condition<bool>>,\n\n stopping: usize,\n\n}\n\n\n\nimpl CommandCenter {\n\n\n\n pub fn start(cfg: Rc<Config>) -> Addr<Unsync, CommandCenter> {\n\n CommandCenter {\n\n cfg,\n", "file_path": "src/cmd.rs", "rank": 18, "score": 116601.17316424396 }, { "content": "struct ProcessInfo {\n\n pid: Pid,\n\n addr: Option<Addr<Unsync, Process>>,\n\n}\n\n\n\nimpl 
ProcessInfo {\n\n fn stop(&self) {\n\n if let Some(ref addr) = self.addr {\n\n addr.do_send(process::StopProcess);\n\n }\n\n }\n\n fn quit(&self, graceful: bool) {\n\n if let Some(ref addr) = self.addr {\n\n addr.do_send(process::QuitProcess(graceful));\n\n }\n\n }\n\n fn start(&self) {\n\n if let Some(ref addr) = self.addr {\n\n addr.do_send(process::StartProcess);\n\n }\n", "file_path": "src/worker.rs", "rank": 19, "score": 116199.86942707625 }, { "content": "/// read master response\n\npub fn read_response(stream: &mut UnixStream, buf: &mut BytesMut)\n\n -> Result<MasterResponse, io::Error>\n\n{\n\n loop {\n\n buf.reserve(1024);\n\n\n\n unsafe {\n\n match stream.read(buf.bytes_mut()) {\n\n Ok(n) => {\n\n buf.advance_mut(n);\n\n\n\n if let Some(resp) = ClientTransportCodec.decode(buf)? {\n\n return Ok(resp)\n\n } else {\n\n if n == 0 {\n\n return Err(io::Error::new(io::ErrorKind::Other, \"closed\"))\n\n }\n\n }\n\n },\n\n Err(e) => return Err(e),\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "client/client.rs", "rank": 20, "score": 114657.61196438217 }, { "content": "/// read master response\n\npub fn read_response(stream: &mut UnixStream, buf: &mut BytesMut)\n\n -> Result<MasterResponse, io::Error>\n\n{\n\n loop {\n\n buf.reserve(1024);\n\n\n\n unsafe {\n\n match stream.read(buf.bytes_mut()) {\n\n Ok(n) => {\n\n buf.advance_mut(n);\n\n\n\n if let Some(resp) = ClientTransportCodec.decode(buf)? {\n\n return Ok(resp)\n\n } else {\n\n if n == 0 {\n\n return Err(io::Error::new(io::ErrorKind::Other, \"closed\"))\n\n }\n\n }\n\n },\n\n Err(e) => return Err(e),\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/client.rs", "rank": 21, "score": 114657.61196438217 }, { "content": "pub fn default_sock() -> String {\n\n \"fectld.sock\".to_owned()\n\n}\n\n\n", "file_path": "src/config_helpers.rs", "rank": 22, "score": 112172.09950463538 }, { "content": "pub fn default_proto() -> Proto {\n\n Proto::tcp4\n\n}\n\n\n", "file_path": "src/config_helpers.rs", "rank": 23, "score": 112172.09950463538 }, { "content": "pub fn default_backlog() -> u16 {\n\n 256\n\n}\n\n\n", "file_path": "src/config_helpers.rs", "rank": 24, "score": 112172.09950463538 }, { "content": "fn sockaddr_to_addr(storage: &libc::sockaddr_storage, len: usize) -> io::Result<SocketAddr> {\n\n match storage.ss_family as c_int {\n\n libc::AF_INET => {\n\n assert!(len as usize >= mem::size_of::<libc::sockaddr_in>());\n\n Ok(\n\n unsafe {\n\n let sock = *(storage as *const _ as *const libc::sockaddr_in);\n\n let ip = &*(&sock.sin_addr as *const libc::in_addr as *const Ipv4Addr);\n\n SocketAddr::V4(SocketAddrV4::new(ip.clone(), u16::from_be(sock.sin_port)))\n\n }\n\n )\n\n }\n\n libc::AF_INET6 => {\n\n assert!(len as usize >= mem::size_of::<libc::sockaddr_in6>());\n\n Ok(\n\n unsafe {\n\n let sock = *(storage as *const _ as *const libc::sockaddr_in6);\n\n let ip = &*(&sock.sin6_addr as *const libc::in6_addr as *const Ipv6Addr);\n\n SocketAddr::V6(SocketAddrV6::new(\n\n ip.clone(), u16::from_be(sock.sin6_port),\n", "file_path": "src/addrinfo.rs", "rank": 25, "score": 106312.44573968371 }, { "content": "pub fn get_env_vars(all: bool) -> Vec<CString> {\n\n let mut env = Vec::new();\n\n for (k, v) in env::vars() {\n\n if (all && !k.starts_with('_')) || (\n\n k.starts_with(\"FECTL_FD\") || k.starts_with(\"LANG\") || k.starts_with(\"LC_\"))\n\n {\n\n env.push(CString::new(format!(\"{}={}\", k, v)).unwrap());\n\n }\n\n }\n\n env\n\n}\n\n\n\n\n", "file_path": "src/utils.rs", "rank": 26, "score": 105239.24922473743 }, { "content": "/// Lookup a addr info via 
dns, return an iterator of addr infos.\n\npub fn lookup_addrinfo(\n\n host: Option<String>, port: Option<String>,\n\n family: c_int, flags: c_int, socktype: SocketType) -> Result<LookupAddrInfo, LookupError> {\n\n let mut res = ptr::null_mut();\n\n let hints = libc::addrinfo {\n\n ai_flags: flags,\n\n ai_family: family,\n\n ai_socktype: socktype.to_int(),\n\n ai_protocol: 0,\n\n ai_addrlen: 0,\n\n ai_canonname: ptr::null_mut(),\n\n ai_addr: ptr::null_mut(),\n\n ai_next: ptr::null_mut(),\n\n };\n\n\n\n let tmp_h;\n\n let c_host = if let Some(host) = host {\n\n tmp_h = CString::new(host)?;\n\n tmp_h.as_ptr()\n\n } else {\n", "file_path": "src/addrinfo.rs", "rank": 27, "score": 99468.43349520252 }, { "content": "pub fn default_vec<T>() -> Vec<T> {\n\n Vec::new()\n\n}\n\n\n", "file_path": "src/config_helpers.rs", "rank": 28, "score": 97121.31257821506 }, { "content": "#[derive(Deserialize, Debug)]\n\nstruct TomlConfig {\n\n master: Option<TomlMasterConfig>,\n\n logging: Option<LoggingConfig>,\n\n #[serde(default = \"config_helpers::default_vec\")]\n\n socket: Vec<SocketConfig>,\n\n #[serde(default = \"config_helpers::default_vec\")]\n\n service: Vec<ServiceConfig>,\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 29, "score": 94959.14578475141 }, { "content": "#[derive(Deserialize, Debug)]\n\nstruct TomlMasterConfig {\n\n #[serde(default = \"config_helpers::default_sock\")]\n\n pub sock: String,\n\n pub pid: Option<String>,\n\n pub directory: Option<String>,\n\n\n\n #[serde(default)]\n\n #[serde(deserialize_with=\"config_helpers::deserialize_gid_field\")]\n\n pub gid: Option<Gid>,\n\n\n\n #[serde(default)]\n\n #[serde(deserialize_with=\"config_helpers::deserialize_uid_field\")]\n\n pub uid: Option<Uid>,\n\n\n\n pub stdout: Option<String>,\n\n pub stderr: Option<String>,\n\n}\n\n\n\n\n\n#[derive(Deserialize, Debug, PartialEq)]\n", "file_path": "src/config.rs", "rank": 30, "score": 91771.56662341635 }, { "content": "#[derive(StructOpt, Debug)]\n\nstruct Cli {\n\n /// Sets a custom config file for fectld\n\n #[structopt(long=\"config\", short=\"c\", default_value=\"fectld.toml\")]\n\n config: String,\n\n\n\n /// Run in background\n\n #[structopt(long=\"daemon\", short=\"d\")]\n\n daemon: bool,\n\n}\n\n\n\n\n", "file_path": "src/config.rs", "rank": 31, "score": 88953.7396149947 }, { "content": "#[derive(StructOpt, Debug)]\n\nstruct Cli {\n\n /// Master process unix socket file path\n\n #[structopt(long=\"sock\", short=\"m\", default_value=\"fectld.sock\")]\n\n sock: String,\n\n\n\n /// Run command (Supported commands: status, start, reload, restart, stop)\n\n command: String,\n\n\n\n /// Service name\n\n name: Option<String>,\n\n}\n\n\n\n\n", "file_path": "client/config.rs", "rank": 32, "score": 88953.7396149947 }, { "content": "/// Service state\n\nenum ServiceState {\n\n Running,\n\n Failed,\n\n Stopped,\n\n Starting(actix::Condition<StartStatus>),\n\n Reloading(actix::Condition<ReloadStatus>),\n\n Stopping(actix::Condition<()>),\n\n}\n\n\n\nimpl ServiceState {\n\n\n\n fn description(&self) -> &'static str {\n\n match *self {\n\n ServiceState::Running => \"running\",\n\n ServiceState::Failed => \"failed\",\n\n ServiceState::Stopped => \"stopped\",\n\n ServiceState::Starting(_) => \"starting\",\n\n ServiceState::Reloading(_) => \"reloading\",\n\n ServiceState::Stopping(_) => \"stopping\",\n\n }\n", "file_path": "src/service.rs", "rank": 33, "score": 87009.31262645403 }, { "content": "fn try_read_response(stream: &mut UnixStream, buf: &mut BytesMut)\n\n -> Result<MasterResponse, 
io::Error>\n\n{\n\n let mut retry = 5;\n\n loop {\n\n match read_response(stream, buf) {\n\n Ok(resp) => {\n\n debug!(\"Master response: {:?}\", resp);\n\n return Ok(resp);\n\n }\n\n Err(err) => match err.kind() {\n\n io::ErrorKind::TimedOut =>\n\n if retry > 0 {\n\n retry -= 1;\n\n continue\n\n }\n\n io::ErrorKind::WouldBlock => {\n\n thread::sleep(Duration::from_millis(100));\n\n continue\n\n }\n\n _ => return Err(err)\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/client.rs", "rank": 34, "score": 86665.48538777663 }, { "content": "fn try_read_response(stream: &mut UnixStream, buf: &mut BytesMut)\n\n -> Result<MasterResponse, io::Error>\n\n{\n\n let mut retry = 5;\n\n loop {\n\n match read_response(stream, buf) {\n\n Ok(resp) => {\n\n debug!(\"Master response: {:?}\", resp);\n\n return Ok(resp);\n\n }\n\n Err(err) => match err.kind() {\n\n io::ErrorKind::TimedOut =>\n\n if retry > 0 {\n\n retry -= 1;\n\n continue\n\n }\n\n io::ErrorKind::WouldBlock => {\n\n thread::sleep(Duration::from_millis(100));\n\n continue\n\n }\n\n _ => return Err(err)\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "client/client.rs", "rank": 35, "score": 86665.48538777663 }, { "content": "class Reload(Setting):\n\n name = \"reload\"\n\n section = 'Debugging'\n\n cli = ['--reload']\n\n validator = validate_bool\n\n action = 'store_true'\n\n default = False\n\n\n\n desc = '''\\\n\n Restart workers when code changes.\n\n\n\n This setting is intended for development. It will cause workers to be\n\n restarted whenever application code changes.\n\n\n\n The reloader is incompatible with application preloading. When using a\n\n paste configuration be sure that the server block does not import any\n\n application code or the reload will not work as designed.\n\n\n\n The default behavior is to attempt inotify with a fallback to file\n\n system polling. Generally, inotify should be preferred if available\n\n because it consumes less system resources.\n\n\n\n .. 
note::\n\n In order to use the inotify reloader, you must have the ``inotify``\n\n package installed.\n", "file_path": "fectl/config.py", "rank": 36, "score": 85634.55658964775 }, { "content": "#[derive(Message)]\n\nstruct NetStream(UnixStream, std::os::unix::net::SocketAddr);\n\n\n\nimpl StreamHandler<NetStream, io::Error> for Master {\n\n\n\n fn handle(&mut self, msg: NetStream, _: &mut Context<Self>) {\n\n let cmd = self.cmd.clone();\n\n\n\n MasterClient::create(|ctx| {\n\n let (r, w) = msg.0.split();\n\n ctx.add_stream(FramedRead::new(r, MasterTransportCodec));\n\n\n\n MasterClient{\n\n cmd,\n\n framed: actix::io::FramedWrite::new(w, MasterTransportCodec, ctx)}\n\n })\n\n }\n\n}\n\n\n\nimpl Drop for Master {\n\n fn drop(&mut self) {\n\n self.cfg.master.remove_files();\n\n }\n\n}\n\n\n", "file_path": "src/master.rs", "rank": 37, "score": 85460.79909403806 }, { "content": " def heartbeat(self):\n", "file_path": "fectl/workers/base.py", "rank": 38, "score": 83495.60481755422 }, { "content": " def load(cls):\n\n socks = {}\n\n apps = {}\n\n arguments = {}\n\n\n\n for key, value in os.environ.items():\n\n if key.startswith(\"FECTL_FD_\"):\n\n params = value.split(',')\n\n try:\n\n fd = int(params[0])\n\n params = dict(map(lambda s: s.split(':', 1), params[1:]))\n\n family = int(params.get('FAMILY', 0))\n\n socktype = int(params.get('SOCKETTYPE', 0))\n\n proto = int(params.get('PROTO', 0))\n\n sock = socket.fromfd(fd, family, socktype, proto)\n\n socks[key[9:]] = sock\n\n except OSError:\n\n raise\n\n except:\n\n raise RuntimeError(\"Can not decode %s: %s\" % (key, value))\n\n\n\n if key.startswith(\"FECTL_APP_\"):\n\n apps[key[10:]] = value.strip()\n\n\n\n if key.startswith(\"FECTL_ARGS_\"):\n\n args = {}\n\n for arg in json.loads(value.strip()):\n\n arg = [s.strip() for s in arg.split('=', 1)]\n\n if len(arg) == 1:\n\n args[arg[0]] = None\n\n else:\n\n args[arg[0]] = arg[1]\n\n\n\n arguments[key[11:]] = args\n\n\n\n sockets = {}\n\n for name, sock in socks.items():\n\n sockets[name] = Socket(\n\n name, sock, apps.get(name), arguments.get(name, {}))\n\n\n", "file_path": "fectl/workers/socket.py", "rank": 39, "score": 83450.04284958052 }, { "content": "class GracefulTimeout(Setting):\n\n name = \"graceful_timeout\"\n\n section = \"Worker Processes\"\n\n cli = [\"--graceful-timeout\"]\n\n meta = \"INT\"\n\n validator = validate_pos_int\n\n type = int\n\n default = 30\n\n desc = \"\"\"\\\n\n Timeout for graceful workers restart.\n\n\n\n After receiving a restart signal, workers have this much time to finish\n\n serving requests. 
Workers still alive after the timeout (starting from\n\n the receipt of the restart signal) are force killed.\n", "file_path": "fectl/config.py", "rank": 40, "score": 76680.53904026473 }, { "content": "class SlowRequestTimeout(Setting):\n\n name = \"slow_request_timeout\"\n\n section = \"Worker Processes\"\n\n cli = [\"--slow-request-timeout\"]\n\n meta = \"INT\"\n\n validator = validate_pos_int\n\n type = int\n\n default = 0\n\n desc = \"\"\"\\\n\n Slow request timeout.\n\n\n\n The maximum duration for request headers reading.\n\n If this is set to zero (the default) then the timeout is disabled\n", "file_path": "fectl/config.py", "rank": 41, "score": 72905.99730801673 }, { "content": "def run():\n\n try:\n\n args = ARGS.parse_args()\n\n except:\n\n sys.exit(WORKER_INIT_FAILED)\n\n\n\n sys.argv[1:] = []\n\n try:\n\n worker_cls = utils.load_class(args.worker)\n\n except ConfigurationError as exc:\n\n logging.error(\n\n \"Can not load worker class '%s': %s\", args.worker, exc)\n\n sys.exit(WORKER_INIT_FAILED)\n\n except:\n\n logging.exception(\"Can not load worker class: %s\", args.worker)\n\n sys.exit(WORKER_INIT_FAILED)\n\n\n\n if args.app:\n\n try:\n\n app_cls = DottedNameResolver().resolve(args.app)\n\n except ConfigurationError as exc:\n\n logging.error(\n\n \"Can not load application class '%s': %s\", args.app, exc)\n\n sys.exit(WORKER_INIT_FAILED)\n\n except:\n\n logging.exception(\"Can not load application class: %s\", args.app)\n\n sys.exit(WORKER_INIT_FAILED)\n\n else:\n\n app_cls = None\n\n\n\n try:\n\n worker = worker_cls(app_cls, os.getppid(), args)\n\n worker._init_process()\n\n except SystemExit:\n\n raise\n\n except ConfigurationError:\n\n sys.exit(WORKER_INIT_FAILED)\n\n except BaseException as exc:\n\n logging.exception(\"Can not initialize worker %r: %s\", worker_cls, exc)\n\n sys.exit(WORKER_BOOT_FAILED)\n\n\n\n try:\n\n worker._run()\n\n except SystemExit:\n\n raise\n\n except ConfigurationError:\n\n sys.exit(WORKER_INIT_FAILED)\n\n except BaseException as exc:\n\n logging.exception(\"Can not run worker: %s\", exc)\n\n sys.exit(WORKER_BOOT_FAILED)\n\n else:\n", "file_path": "fectl/run.py", "rank": 42, "score": 70194.48533766708 }, { "content": "class Worker(object):\n\n\n\n TYPE = None\n\n\n\n MSG_LOADED = 'loaded'\n\n MSG_RELOAD = 'reload'\n\n MSG_RESTART = 'restart'\n\n MSG_HEARTBEAT = 'hb'\n\n MSG_CFG_ERROR = 'cfgerror'\n\n\n\n CMD_PREPARE = 'prepare'\n\n CMD_START = 'start'\n\n CMD_PAUSE = 'pause'\n\n CMD_RESUME = 'resume'\n\n CMD_STOP = 'stop'\n\n CMD_HEARTBEAT = 'hb'\n\n\n\n ALL_COMMANDS = (CMD_PREPARE, CMD_START,\n\n CMD_PAUSE, CMD_RESUME, CMD_STOP, CMD_HEARTBEAT)\n\n\n\n SIGNALS = [getattr(signal, \"SIG%s\" % x)\n\n for x in \"ABRT HUP QUIT INT TERM USR1 WINCH CHLD\".split()]\n\n\n\n def __init__(self, application, ppid, args):\n\n self._application = application\n\n self._ppid = ppid\n\n self._alive = True\n\n self._pipe = None\n\n self._sockets = {}\n\n self._args = args\n\n self._on_msg = []\n\n self._on_shutdown = []\n\n\n\n # service name\n\n self._name = os.environ.get('FECTL_SRV_NAME')\n\n\n\n # extract master communication pipe\n\n fd = os.environ.get('FECTL_FD')\n\n if fd is None:\n\n raise utils.ConfigurationError(\n\n \"Can not get master process communication FD\")\n\n\n\n try:\n\n self._master_pipe = tuple(int(v) for v in fd.split(':', 1))\n\n except:\n\n raise utils.ConfigurationError(\n\n \"Can not decode FECTL_FD_R: %s\" % fd)\n\n\n\n self._sockets = Socket.load()\n\n\n\n def get_socket(self, name, default=_sentinel):\n\n sock = 
self._sockets.get(name, default)\n\n if sock is _sentinel:\n\n raise KeyError(name)\n\n\n\n return sock.socket\n\n\n\n def get_socket_fd(self, name, default=_sentinel):\n\n try:\n\n sock = self._sockets[name]\n\n return sock.socket.fileno()\n\n except KeyError:\n\n if default is not _sentinel:\n\n return default\n\n raise\n\n\n\n def notify(self, cmd, data=None):\n\n raise NotImplementedError()\n\n\n\n def heartbeat(self):\n\n self.notify(self.MSG_HEARTBEAT)\n\n\n\n def on_shutdown(self, cb):\n\n \"\"\" register callback for graceful shutdown process \"\"\"\n\n self._on_shutdown.append(cb)\n\n\n\n def _run(self):\n\n \"\"\"This is the mainloop of a worker process.\"\"\"\n\n raise NotImplementedError()\n\n\n\n def _init_process(self):\n\n try:\n\n import setproctitle\n\n if self._name is not None:\n\n setproctitle.setproctitle('fectl %s worker' % self._name)\n\n except:\n\n pass\n\n\n\n try:\n\n random.seed(os.urandom(64))\n\n except NotImplementedError:\n\n random.seed('%s.%s' % (time.time(), os.getpid()))\n\n\n\n self._pipe = os.pipe()\n\n for fd in itertools.chain(self._pipe, self._master_pipe):\n\n utils.close_on_exec(fd)\n\n utils.set_non_blocking(fd)\n\n\n\n self._f_read = os.fdopen(self._master_pipe[0], 'r')\n\n self._f_write = os.fdopen(self._master_pipe[1], 'w')\n\n\n\n self._init_signals()\n\n\n\n def _init_signals(self):\n\n # reset signaling\n\n [signal.signal(s, signal.SIG_DFL) for s in self.SIGNALS]\n\n\n\n # init new signaling\n\n signal.signal(signal.SIGQUIT, self._handle_quit)\n\n signal.signal(signal.SIGTERM, self._handle_exit)\n\n signal.signal(signal.SIGINT, self._handle_quit)\n\n signal.signal(signal.SIGWINCH, self._handle_winch)\n\n signal.signal(signal.SIGUSR1, self._handle_usr1)\n\n signal.signal(signal.SIGABRT, self._handle_abort)\n\n\n\n # Don't let SIGTERM and SIGUSR1 disturb active requests\n\n # by interrupting system calls\n\n signal.siginterrupt(signal.SIGTERM, False)\n\n signal.siginterrupt(signal.SIGUSR1, False)\n\n\n\n if hasattr(signal, 'set_wakeup_fd'):\n\n signal.set_wakeup_fd(self._pipe[1])\n\n\n\n def _handle_usr1(self, sig, frame):\n\n pass\n\n\n\n def _handle_exit(self, sig, frame):\n\n self._alive = False\n\n\n\n def _handle_quit(self, sig, frame):\n\n self._alive = False\n\n time.sleep(0.1)\n\n sys.exit(0)\n\n\n\n def _handle_abort(self, sig, frame):\n\n self._alive = False\n\n sys.exit(1)\n\n\n\n def _handle_winch(self, sig, fname):\n\n # Ignore SIGWINCH in worker. 
Fixes a crash on OpenBSD.\n", "file_path": "fectl/workers/base.py", "rank": 43, "score": 67652.65903200852 }, { "content": " def run(self):\n\n import subprocess\n\n import sys\n\n errno = subprocess.call([sys.executable, '-m', 'pytest', 'tests'])\n", "file_path": "setup.py", "rank": 44, "score": 61171.63215094819 }, { "content": "fn main() {\n\n let out = env::var(\"OUT_DIR\").expect(\"should not fail\");\n\n let dst = PathBuf::from(out).join(\"version.rs\");\n\n let mut f = std::fs::OpenOptions::new()\n\n .write(true).truncate(true).create(true).open(dst).expect(\"\");\n\n\n\n f.write_all(format!(\n\n\"pub struct PkgInfo {{\n\n pub name: &'static str,\n\n pub description: &'static str,\n\n pub authors: &'static str,\n\n pub version: &'static str,\n\n pub version_major: u8,\n\n pub version_minor: u8,\n\n pub version_patch: u8,\n\n pub version_pre: &'static str,\n\n}}\n\n\n\npub const PKG_INFO: PkgInfo = PkgInfo {{\n\n name: \\\"{}\\\",\n", "file_path": "build.rs", "rank": 45, "score": 59714.54357510613 }, { "content": "class WorkerType(Enum):\n\n Gevent = 'gevent'\n", "file_path": "fectl/workers/__init__.py", "rank": 46, "score": 58879.96312779653 }, { "content": " def stop(self):\n\n if self.server is not None:\n\n # stop accepting connections\n\n logging.info(\"Stopping aiohttp server: %s, connections: %s\",\n\n os.getpid(), len(self.handler.connections))\n\n self.server.close()\n\n yield from self.server.wait_closed()\n\n self.server = None\n\n\n\n if self.handler is not None:\n\n # stop alive connections\n\n yield from self.handler.shutdown(\n\n timeout=self.cfg.graceful_timeout / 100 * 95)\n\n self.handler = None\n\n\n\n # send on_shutdown event\n\n yield from self.app.shutdown()\n\n\n\n # cleanup application\n", "file_path": "fectl/apps/aiohttp.py", "rank": 47, "score": 58400.82636608318 }, { "content": " def start(self):\n\n if self.server is None:\n\n if (hasattr(socket, 'AF_UNIX') and\n\n self.sock.family == socket.AF_UNIX):\n\n self.server = yield from self.loop.create_unix_server(\n\n self.handler, sock=self.sock.dup(), ssl=self.ssl)\n\n else:\n\n self.server = yield from self.loop.create_server(\n", "file_path": "fectl/apps/aiohttp.py", "rank": 48, "score": 58360.153524858484 }, { "content": "fn main() {\n\n let sys = actix::System::new(\"fectl\");\n\n let loaded = match config::load_config() {\n\n Some(cfg) => master::start(cfg),\n\n None => false,\n\n };\n\n let code = if loaded {\n\n sys.run()\n\n } else {\n\n 1\n\n };\n\n std::process::exit(code);\n\n}\n", "file_path": "src/main.rs", "rank": 49, "score": 57543.76102897036 }, { "content": "fn main() {\n\n let _ = env_logger::init();\n\n\n\n let success = match config::load_config() {\n\n Some((cmd, sock)) => client::run(cmd, &sock),\n\n None => false,\n\n };\n\n std::process::exit(if success {0} else {1});\n\n}\n", "file_path": "client/main.rs", "rank": 50, "score": 57543.76102897036 }, { "content": "struct MasterClient {\n\n cmd: Addr<Unsync, CommandCenter>,\n\n framed: actix::io::FramedWrite<WriteHalf<UnixStream>, MasterTransportCodec>,\n\n}\n\n\n\nimpl Actor for MasterClient {\n\n type Context = Context<Self>;\n\n\n\n fn started(&mut self, ctx: &mut Self::Context) {\n\n self.hb(ctx);\n\n }\n\n}\n\n\n\nimpl actix::io::WriteHandler<io::Error> for MasterClient {}\n\n\n\nimpl StreamHandler<MasterRequest, io::Error> for MasterClient {\n\n\n\n fn handle(&mut self, msg: MasterRequest, ctx: &mut Self::Context) {\n\n ctx.notify(msg);\n\n }\n", "file_path": "src/master.rs", "rank": 51, "score": 56235.34729237148 }, { "content": 
"/// Codec for Master transport\n\nstruct MasterTransportCodec;\n\n\n\nimpl Decoder for MasterTransportCodec\n\n{\n\n type Item = MasterRequest;\n\n type Error = io::Error;\n\n\n\n fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {\n\n let size = {\n\n if src.len() < 2 {\n\n return Ok(None)\n\n }\n\n BigEndian::read_u16(src.as_ref()) as usize\n\n };\n\n\n\n if src.len() >= size + 2 {\n\n src.split_to(2);\n\n let buf = src.split_to(size);\n\n Ok(Some(json::from_slice::<MasterRequest>(&buf)?))\n\n } else {\n", "file_path": "src/master.rs", "rank": 52, "score": 54522.78984974858 }, { "content": "class OnStarting(Setting):\n\n name = \"on_starting\"\n\n section = \"Server Hooks\"\n\n validator = validate_callable(1)\n\n type = six.callable\n\n\n\n def on_starting(server):\n\n pass\n\n default = staticmethod(on_starting)\n\n desc = \"\"\"\\\n\n Called just before the master process is initialized.\n\n\n\n The callable needs to accept a single instance variable for the Arbiter.\n", "file_path": "fectl/config.py", "rank": 53, "score": 53306.652880057074 }, { "content": " def on_starting(server):\n", "file_path": "fectl/config.py", "rank": 54, "score": 53306.652880057074 }, { "content": " def on_reload(server):\n", "file_path": "fectl/config.py", "rank": 55, "score": 53288.025263958705 }, { "content": "class OnReload(Setting):\n\n name = \"on_reload\"\n\n section = \"Server Hooks\"\n\n validator = validate_callable(1)\n\n type = six.callable\n\n\n\n def on_reload(server):\n\n pass\n\n default = staticmethod(on_reload)\n\n desc = \"\"\"\\\n\n Called to recycle workers during a reload via SIGHUP.\n\n\n\n The callable needs to accept a single instance variable for the Arbiter.\n", "file_path": "fectl/config.py", "rank": 56, "score": 53288.025263958705 }, { "content": " def on_shutdown(self, cb):\n\n \"\"\" register callback for graceful shutdown process \"\"\"\n", "file_path": "fectl/workers/base.py", "rank": 57, "score": 51150.28547766919 }, { "content": " def _stop(self):\n\n # stop accepting connections\n\n try:\n\n tasks = [asyncio.ensure_future(app.pause(), loop=self._loop)\n\n for app in self._apps]\n\n yield from asyncio.gather(*tasks, loop=self.loop)\n\n except:\n\n pass\n\n\n\n # stop apps\n\n try:\n\n tasks = [asyncio.ensure_future(app.stop(), loop=self._loop)\n\n for app in self._apps]\n\n yield from asyncio.gather(*tasks, loop=self.loop)\n\n except:\n\n pass\n\n\n\n # on_stop callbacks\n\n try:\n\n tasks = [asyncio.ensure_future(cb(), loop=self._loop)\n\n for cb in self._on_shutdown]\n\n yield from asyncio.gather(*tasks, loop=self.loop)\n\n except:\n\n pass\n\n\n\n yield from asyncio.sleep(0.1, loop=self._loop)\n\n\n\n self._read_task.cancel()\n\n self._read_task = None\n\n self._write_task.cancel()\n", "file_path": "fectl/workers/asyncio.py", "rank": 58, "score": 51139.291497454105 }, { "content": "class UnsupportedWorker(ConfigurationError):\n", "file_path": "fectl/errors.py", "rank": 59, "score": 51098.345682406325 }, { "content": " def _run(self):\n\n \"\"\"This is the mainloop of a worker process.\"\"\"\n", "file_path": "fectl/workers/base.py", "rank": 60, "score": 51075.530666044215 }, { "content": " def _run(self):\n\n self._read_task = self._loop.create_task(self._read_loop())\n\n self._write_task = self._loop.create_task(self._write_loop())\n\n\n\n self._runner = asyncio.ensure_future(\n\n self._try_run_loop(), loop=self._loop)\n\n try:\n\n self._loop.run_until_complete(self._runner)\n\n finally:\n\n self._loop.close()\n\n\n", "file_path": 
"fectl/workers/asyncio.py", "rank": 61, "score": 51068.24903596854 }, { "content": " def _run(self):\n\n gevent.spawn(self._read_loop)\n\n gevent.spawn(self._write_loop)\n\n\n\n try:\n\n self._application(self)\n\n except BaseException as exc:\n\n logging.exception(\"Application init exception: %s\", exc)\n\n raise\n\n\n\n self.notify(self.MSG_LOADED)\n\n\n\n while self._alive:\n\n self.heartbeat()\n\n gevent.sleep(1.0)\n\n\n\n if self._ppid != os.getppid():\n\n logging.info(\"Parent changed, shutting down\")\n\n self._alive = False\n\n break\n\n\n\n for cb in self._on_shutdown:\n\n try:\n\n cb()\n\n except BaseException as exc:\n", "file_path": "fectl/workers/gevent.py", "rank": 62, "score": 51068.24903596854 }, { "content": "def validate_bool(val):\n\n if val is None:\n\n return\n\n\n\n if isinstance(val, bool):\n\n return val\n\n if not isinstance(val, six.string_types):\n\n raise TypeError(\"Invalid type for casting: %s\" % val)\n\n if val.lower().strip() == \"true\":\n\n return True\n\n elif val.lower().strip() == \"false\":\n\n return False\n\n else:\n", "file_path": "fectl/config.py", "rank": 63, "score": 50625.325065576195 }, { "content": "class ReloadEngine(Setting):\n\n name = \"reload_engine\"\n\n section = \"Debugging\"\n\n cli = [\"--reload-engine\"]\n\n meta = \"STRING\"\n\n validator = validate_reload_engine\n\n default = \"auto\"\n\n desc = \"\"\"\\\n\n The implementation that should be used to power :ref:`reload`.\n\n\n\n Valid engines are:\n\n\n\n * 'auto'\n\n * 'poll'\n\n * 'inotify' (requires inotify)\n\n\n\n .. versionadded:: 19.7\n", "file_path": "fectl/config.py", "rank": 64, "score": 50607.63441967693 }, { "content": "class ErrorLog(Setting):\n\n name = \"errorlog\"\n\n section = \"Logging\"\n\n cli = [\"--error-logfile\", \"--log-file\"]\n\n meta = \"FILE\"\n\n validator = validate_string\n\n default = '-'\n\n desc = \"\"\"\\\n\n The Error log file to write to.\n\n\n\n Using ``'-'`` for FILE makes log to stderr.\n", "file_path": "fectl/config.py", "rank": 65, "score": 50598.82085676154 }, { "content": "class PostRequest(Setting):\n\n name = \"post_request\"\n\n section = \"Server Hooks\"\n\n validator = validate_post_request\n\n type = six.callable\n\n\n\n def post_request(worker, req, environ, resp):\n\n pass\n\n default = staticmethod(post_request)\n\n desc = \"\"\"\\\n\n Called after a worker processes the request.\n\n\n\n The callable needs to accept two instance variables for the Worker and\n\n the Request.\n", "file_path": "fectl/config.py", "rank": 66, "score": 50545.83066182536 }, { "content": "class PreRequest(Setting):\n\n name = \"pre_request\"\n\n section = \"Server Hooks\"\n\n validator = validate_callable(2)\n\n type = six.callable\n\n\n\n def pre_request(worker, req):\n\n worker.log.debug(\"%s %s\" % (req.method, req.path))\n\n default = staticmethod(pre_request)\n\n desc = \"\"\"\\\n\n Called just before a worker processes the request.\n\n\n\n The callable needs to accept two instance variables for the Worker and\n\n the Request.\n", "file_path": "fectl/config.py", "rank": 67, "score": 50545.83066182536 }, { "content": "class MaxRequests(Setting):\n\n name = \"max_requests\"\n\n section = \"Worker Processes\"\n\n cli = [\"--max-requests\"]\n\n meta = \"INT\"\n\n validator = validate_pos_int\n\n type = int\n\n default = 0\n\n desc = \"\"\"\\\n\n The maximum number of requests a worker will process before restarting.\n\n\n\n Any value greater than zero will limit the number of requests a work\n\n will process before automatically restarting. 
This is a simple method\n\n to help limit the damage of memory leaks.\n\n\n\n If this is set to zero (the default) then the automatic worker\n\n restarts are disabled.\n", "file_path": "fectl/config.py", "rank": 68, "score": 50545.83066182536 }, { "content": " def pre_request(worker, req):\n", "file_path": "fectl/config.py", "rank": 69, "score": 50545.83066182536 }, { "content": " def post_request(worker, req, environ, resp):\n", "file_path": "fectl/config.py", "rank": 70, "score": 50545.83066182536 }, { "content": " def worker_exit(server, worker):\n", "file_path": "fectl/config.py", "rank": 71, "score": 49674.115809905394 }, { "content": "class WorkerAbort(Setting):\n\n name = \"worker_abort\"\n\n section = \"Server Hooks\"\n\n validator = validate_callable(1)\n\n type = six.callable\n\n\n\n def worker_abort(worker):\n\n pass\n\n\n\n default = staticmethod(worker_abort)\n\n desc = \"\"\"\\\n\n Called when a worker received the SIGABRT signal.\n\n\n\n This call generally happens on timeout.\n\n\n\n The callable needs to accept one instance variable for the initialized\n\n Worker.\n", "file_path": "fectl/config.py", "rank": 72, "score": 49674.115809905394 }, { "content": " def worker_int(worker):\n", "file_path": "fectl/config.py", "rank": 73, "score": 49674.115809905394 }, { "content": " def worker_abort(worker):\n", "file_path": "fectl/config.py", "rank": 74, "score": 49674.115809905394 }, { "content": "class WorkerInt(Setting):\n\n name = \"worker_int\"\n\n section = \"Server Hooks\"\n\n validator = validate_callable(1)\n\n type = six.callable\n\n\n\n def worker_int(worker):\n\n pass\n\n\n\n default = staticmethod(worker_int)\n\n desc = \"\"\"\\\n\n Called just after a worker exited on SIGINT or SIGQUIT.\n\n\n\n The callable needs to accept one instance variable for the initialized\n\n Worker.\n", "file_path": "fectl/config.py", "rank": 75, "score": 49674.115809905394 }, { "content": "class WorkerExit(Setting):\n\n name = \"worker_exit\"\n\n section = \"Server Hooks\"\n\n validator = validate_callable(2)\n\n type = six.callable\n\n\n\n def worker_exit(server, worker):\n\n pass\n\n default = staticmethod(worker_exit)\n\n desc = \"\"\"\\\n\n Called just after a worker has been exited, in the worker process.\n\n\n\n The callable needs to accept two instance variables for the Arbiter and\n\n the just-exited Worker.\n", "file_path": "fectl/config.py", "rank": 76, "score": 49674.115809905394 }, { "content": " def _init_signals(self):\n\n # Set up signals through the event loop API.\n\n self._loop.add_signal_handler(\n\n signal.SIGQUIT, self._handle_quit, signal.SIGQUIT, None)\n\n\n\n self._loop.add_signal_handler(\n\n signal.SIGTERM, self._handle_exit, signal.SIGTERM, None)\n\n\n\n self._loop.add_signal_handler(\n\n signal.SIGINT, self._handle_quit, signal.SIGINT, None)\n\n\n\n self._loop.add_signal_handler(\n\n signal.SIGWINCH, self._handle_winch, signal.SIGWINCH, None)\n\n\n\n self._loop.add_signal_handler(\n\n signal.SIGUSR1, self._handle_usr1, signal.SIGUSR1, None)\n\n\n\n self._loop.add_signal_handler(\n\n signal.SIGABRT, self._handle_abort, signal.SIGABRT, None)\n\n\n\n # Don't let SIGTERM and SIGUSR1 disturb active requests\n\n # by interrupting system calls\n\n signal.siginterrupt(signal.SIGTERM, False)\n", "file_path": "fectl/workers/asyncio.py", "rank": 77, "score": 48690.17290085601 }, { "content": " def _init_signals(self):\n\n # reset signaling\n\n [signal.signal(s, signal.SIG_DFL) for s in self.SIGNALS]\n\n\n\n # init new signaling\n\n signal.signal(signal.SIGQUIT, 
self._handle_quit)\n\n signal.signal(signal.SIGTERM, self._handle_exit)\n\n signal.signal(signal.SIGINT, self._handle_quit)\n\n signal.signal(signal.SIGWINCH, self._handle_winch)\n\n signal.signal(signal.SIGUSR1, self._handle_usr1)\n\n signal.signal(signal.SIGABRT, self._handle_abort)\n\n\n\n # Don't let SIGTERM and SIGUSR1 disturb active requests\n\n # by interrupting system calls\n\n signal.siginterrupt(signal.SIGTERM, False)\n\n signal.siginterrupt(signal.SIGUSR1, False)\n\n\n\n if hasattr(signal, 'set_wakeup_fd'):\n", "file_path": "fectl/workers/base.py", "rank": 78, "score": 48690.17290085601 }, { "content": " def load_app(self, worker):\n\n if self.app is None:\n\n return None\n\n\n\n app = utils.load_app(self.app)\n\n try:\n\n return app(worker, self.socket, self.arguments)\n\n except utils.ConfigurationError:\n\n raise\n\n except:\n\n logging.exception('Can not initialize app: %s', self.app)\n\n raise utils.ConfigurationError\n\n\n", "file_path": "fectl/workers/socket.py", "rank": 79, "score": 48656.10647075619 }, { "content": " def _run_loop(self):\n\n # init main application\n\n if self._application is not None:\n\n try:\n\n self._application(self)\n\n except utils.ConfigurationError:\n\n raise\n\n except BaseException as exc:\n\n logging.exception(\"Application init exception: %s\", exc)\n\n raise\n\n\n\n # load apps\n\n for sock in self._sockets.values():\n\n app = sock.load_app(self)\n\n if app is not None:\n\n yield from app.init()\n\n self._apps.append(app)\n\n\n\n self.notify(self.MSG_LOADED)\n\n\n\n try:\n\n while self._alive:\n\n self.heartbeat()\n\n\n\n # If our parent changed then we shutdown.\n\n if self._ppid != os.getppid():\n\n self._alive = False\n\n logging.info(\"Parent changed, shutting down: %s\", self)\n\n else:\n\n yield from self._wait_next_notify()\n\n except BaseException:\n\n logging.exception(\"Worker run loop exeception\")\n", "file_path": "fectl/workers/asyncio.py", "rank": 80, "score": 48622.532743322605 }, { "content": " def _init_process(self):\n\n # load event loop\n\n if self._loop_type == 'default':\n\n # use default event loop\n\n pass\n\n elif self._loop_type == 'uvloop':\n\n try:\n\n import uvloop\n\n except ImportError:\n\n raise utils.ConfigurationError('uvloop is not available')\n\n\n\n # Setup uvloop policy, so that every\n\n # asyncio.get_event_loop() will create an instance\n\n # of uvloop event loop.\n\n asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())\n\n elif self._loop_type == 'tokio':\n\n try:\n\n import tokio\n\n except ImportError:\n\n raise utils.ConfigurationError('tokio is not available')\n\n\n\n # Setup tokio policy, so that every\n\n # asyncio.get_event_loop() will create an instance\n\n # of uvloop event loop.\n\n asyncio.set_event_loop_policy(tokio.EventLoopPolicy())\n\n else:\n\n raise utils.ConfigurationError(\n\n 'Unknown loop type: %s' % self._loop_type)\n\n\n\n # create new event_loop after fork\n\n asyncio.get_event_loop().close()\n\n\n\n loop = asyncio.new_event_loop()\n\n if self._args.debug:\n\n loop.set_debug(True)\n\n\n\n self._loop = loop\n\n\n\n # read/write queues to master\n\n self._read_queue = asyncio.Queue(loop=loop)\n\n self._write_queue = asyncio.Queue(loop=loop)\n\n\n\n # convert callbacks to coroutine\n\n self._on_msg = [asyncio.coroutine(cb) for cb in self._on_msg]\n\n self._on_shutdown = [asyncio.coroutine(cb) for cb in self._on_shutdown]\n\n\n\n for sock in self._sockets.values():\n\n sock.set_nonblocking()\n\n\n\n asyncio.set_event_loop(loop)\n", "file_path": "fectl/workers/asyncio.py", 
"rank": 81, "score": 48477.446581181706 }, { "content": " def _init_process(self):\n\n try:\n\n import setproctitle\n\n if self._name is not None:\n\n setproctitle.setproctitle('fectl %s worker' % self._name)\n\n except:\n\n pass\n\n\n\n try:\n\n random.seed(os.urandom(64))\n\n except NotImplementedError:\n\n random.seed('%s.%s' % (time.time(), os.getpid()))\n\n\n\n self._pipe = os.pipe()\n\n for fd in itertools.chain(self._pipe, self._master_pipe):\n\n utils.close_on_exec(fd)\n\n utils.set_non_blocking(fd)\n\n\n\n self._f_read = os.fdopen(self._master_pipe[0], 'r')\n\n self._f_write = os.fdopen(self._master_pipe[1], 'w')\n\n\n", "file_path": "fectl/workers/base.py", "rank": 82, "score": 48477.446581181706 }, { "content": " def _init_process(self):\n\n # monkey patch here\n\n self._patch()\n\n\n\n # reinit the hub\n\n from gevent import hub\n\n hub.reinit()\n\n\n\n self._write_queue = Queue()\n\n self._read_queue = Queue()\n\n\n\n # then initialize the process\n", "file_path": "fectl/workers/gevent.py", "rank": 83, "score": 48477.446581181706 }, { "content": "class ReloadExtraFiles(Setting):\n\n name = \"reload_extra_files\"\n\n action = \"append\"\n\n section = \"Debugging\"\n\n cli = [\"--reload-extra-file\"]\n\n meta = \"FILES\"\n\n validator = validate_list_of_existing_files\n\n default = []\n\n desc = \"\"\"\\\n\n Extends :ref:`reload` option to also watch and reload on additional files\n\n (e.g., templates, configurations, specifications, etc.).\n\n\n\n .. versionadded:: 19.8\n", "file_path": "fectl/config.py", "rank": 84, "score": 48183.977482755174 }, { "content": "def validate_reload_engine(val):\n\n if val not in reloader_engines:\n\n raise ConfigurationError(\"Invalid reload_engine: %r\" % val)\n\n\n", "file_path": "fectl/config.py", "rank": 85, "score": 48183.977482755174 }, { "content": "def validate_post_request(val):\n\n val = validate_callable(-1)(val)\n\n\n\n largs = _compat.get_arity(val)\n\n if largs == 4:\n\n return val\n\n elif largs == 3:\n\n return lambda worker, req, env, _r: val(worker, req, env)\n\n elif largs == 2:\n\n return lambda worker, req, _e, _r: val(worker, req)\n\n else:\n", "file_path": "fectl/config.py", "rank": 86, "score": 48125.13357687382 }, { "content": "class LimitRequestLine(Setting):\n\n name = \"limit_request_line\"\n\n section = \"Security\"\n\n cli = [\"--limit-request-line\"]\n\n meta = \"INT\"\n\n validator = validate_pos_int\n\n type = int\n\n default = 4094\n\n desc = \"\"\"\\\n\n The maximum size of HTTP request line in bytes.\n\n\n\n This parameter is used to limit the allowed size of a client's\n\n HTTP request-line. Since the request-line consists of the HTTP\n\n method, URI, and protocol version, this directive places a\n\n restriction on the length of a request-URI allowed for a request\n\n on the server. A server needs this value to be large enough to\n\n hold any of its resource names, including any information that\n\n might be passed in the query part of a GET request. 
Value is a number\n\n from 0 (unlimited) to 8190.\n\n\n\n This parameter can be used to prevent any DDOS attack.\n", "file_path": "fectl/config.py", "rank": 87, "score": 48125.13357687382 }, { "content": "class MaxRequestsJitter(Setting):\n\n name = \"max_requests_jitter\"\n\n section = \"Worker Processes\"\n\n cli = [\"--max-requests-jitter\"]\n\n meta = \"INT\"\n\n validator = validate_pos_int\n\n type = int\n\n default = 0\n\n desc = \"\"\"\\\n\n The maximum jitter to add to the *max_requests* setting.\n\n\n\n The jitter causes the restart per worker to be randomized by\n\n ``randint(0, max_requests_jitter)``. This is intended to stagger worker\n\n restarts to avoid all workers restarting at the same time.\n", "file_path": "fectl/config.py", "rank": 88, "score": 48125.13357687382 }, { "content": "class LimitRequestFields(Setting):\n\n name = \"limit_request_fields\"\n\n section = \"Security\"\n\n cli = [\"--limit-request-fields\"]\n\n meta = \"INT\"\n\n validator = validate_pos_int\n\n type = int\n\n default = 100\n\n desc = \"\"\"\\\n\n Limit the number of HTTP headers fields in a request.\n\n\n\n This parameter is used to limit the number of headers in a request to\n\n prevent DDOS attack. Used with the *limit_request_field_size* it allows\n\n more safety. By default this value is 100 and can't be larger than\n\n 32768.\n", "file_path": "fectl/config.py", "rank": 89, "score": 48125.13357687382 }, { "content": " def post_worker_init(worker):\n", "file_path": "fectl/config.py", "rank": 90, "score": 47295.16613662602 }, { "content": "class PostWorkerInit(Setting):\n\n name = \"post_worker_init\"\n\n section = \"Server Hooks\"\n\n validator = validate_callable(1)\n\n type = six.callable\n\n\n\n def post_worker_init(worker):\n\n pass\n\n\n\n default = staticmethod(post_worker_init)\n\n desc = \"\"\"\\\n\n Called just after a worker has initialized the application.\n\n\n\n The callable needs to accept one instance variable for the initialized\n\n Worker.\n", "file_path": "fectl/config.py", "rank": 91, "score": 47295.16613662602 }, { "content": "class WorkerTmpDir(Setting):\n\n name = \"worker_tmp_dir\"\n\n section = \"Server Mechanics\"\n\n cli = [\"--worker-tmp-dir\"]\n\n meta = \"DIR\"\n\n validator = validate_string\n\n default = None\n\n desc = \"\"\"\\\n\n A directory to use for the worker heartbeat temporary file.\n\n\n\n If not set, the default temporary directory will be used.\n\n\n\n .. 
note::\n\n The current heartbeat system involves calling ``os.fchmod`` on\n\n temporary file handlers and may block a worker for arbitrary time\n\n if the directory is on a disk-backed filesystem.\n\n\n\n See :ref:`blocking-os-fchmod` for more detailed information\n\n and a solution for avoiding this problem.\n", "file_path": "fectl/config.py", "rank": 92, "score": 47295.16613662602 }, { "content": " def _try_run_loop(self):\n\n exc = None\n\n try:\n\n yield from self._run_loop()\n\n except utils.ConfigurationError as e:\n\n exc = e\n\n self.notify(self.MSG_CFG_ERROR, str(e))\n\n except BaseException as e:\n\n exc = e\n\n\n\n if self._stopping is None:\n\n self._stopping = asyncio.ensure_future(\n\n self._stop(), loop=self._loop)\n\n\n\n yield from self._stopping\n\n\n\n if exc is not None:\n", "file_path": "fectl/workers/asyncio.py", "rank": 93, "score": 46400.366626687086 }, { "content": "class LimitRequestFieldSize(Setting):\n\n name = \"limit_request_field_size\"\n\n section = \"Security\"\n\n cli = [\"--limit-request-field_size\"]\n\n meta = \"INT\"\n\n validator = validate_pos_int\n\n type = int\n\n default = 8190\n\n desc = \"\"\"\\\n\n Limit the allowed size of an HTTP request header field.\n\n\n\n Value is a positive number or 0. Setting it to 0 will allow unlimited\n\n header field sizes.\n\n\n\n .. warning::\n\n Setting this parameter to a very high or unlimited value can open\n\n up for DDOS attacks.\n", "file_path": "fectl/config.py", "rank": 94, "score": 45925.699792590465 }, { "content": "class ConfigurationError(Exception):\n", "file_path": "fectl/errors.py", "rank": 95, "score": 36614.775416506876 }, { "content": "class ConfigCheck(Setting):\n\n name = \"check_config\"\n\n section = \"Debugging\"\n\n cli = [\"--check-config\"]\n\n validator = validate_bool\n\n action = \"store_true\"\n\n default = False\n\n desc = \"\"\"\\\n\n Check the configuration.\n", "file_path": "fectl/config.py", "rank": 96, "score": 34608.20054503773 }, { "content": "class LogConfig(Setting):\n\n name = \"logconfig\"\n\n section = \"Logging\"\n\n cli = [\"--log-config\"]\n\n meta = \"FILE\"\n\n validator = validate_file_exists\n\n default = None\n\n desc = \"\"\"\\\n\n The log config file to use.\n\n FECTL uses the standard Python logging module's Configuration\n\n file format.\n", "file_path": "fectl/config.py", "rank": 97, "score": 34608.20054503773 }, { "content": "class ConfigFile(Setting):\n\n name = \"config\"\n\n section = \"Config File\"\n\n cli = [\"-c\", \"--config\"]\n\n meta = \"CONFIG\"\n\n validator = validate_string\n\n default = None\n\n desc = \"\"\"\\\n\n The fectl config file.\n\n\n\n A string of the form ``PATH``, ``file:PATH``, or ``python:MODULE_NAME``.\n\n\n\n Only has an effect when specified on the command line or as part of an\n\n application specific configuration.\n", "file_path": "fectl/config.py", "rank": 98, "score": 34608.20054503773 } ]
Rust
day18b/src/main.rs
LinAGKar/advent-of-code-2019-rust
f1de4d408f1c331f49a0928cf5abd733a3e15bb6
use std::cmp::Reverse; use std::collections::{BinaryHeap, HashSet}; use std::io::Read; #[derive(PartialEq)] enum Tile { Wall, Floor, Door(usize), Key(usize), } fn get_shortest_paths(map: &Vec<Vec<Tile>>, start: (i8, i8)) -> Vec<Vec<(u16, u32)>> { let mut queue = BinaryHeap::new(); let mut visited: Vec<Vec<_>> = map.iter().map(|row| { row.iter().map(|_| Vec::new()).collect() }).collect(); let mut shortest_paths = vec![Vec::new(); 26]; queue.push((Reverse(0), 0, 0, start)); while let Some((_, mut needed_keys, cost, pos)) = queue.pop() { let (y, x) = pos; let visited_this = &mut visited[y as usize][x as usize]; if visited_this.iter().any(|&old_needed_keys| needed_keys & old_needed_keys == old_needed_keys) { continue; } visited_this.push(needed_keys); let tile = &map[y as usize][x as usize]; match *tile { Tile::Door(door) => { needed_keys |= 1 << door; } Tile::Wall => { continue; } Tile::Floor => {} Tile::Key(key) => { shortest_paths[key as usize].push((cost, needed_keys)); needed_keys |= 1 << key; } } for (dy, dx) in &[ (-1, 0), (1, 0), (0, -1), (0, 1), ] { let (y, x) = (y + dy, x + dx); queue.push((Reverse(cost + 1), needed_keys, cost + 1, (y, x))); } } shortest_paths } fn main() { let mut input = String::new(); std::io::stdin().read_to_string(&mut input).unwrap(); let mut entrance = (0, 0); let mut keys = vec![(0, 0); 26]; let mut doors = vec![(0, 0); 26]; let mut map: Vec<Vec<_>> = input.lines().enumerate().map(|(y, line)| { line.chars().enumerate().map(|(x, c)| { match c { '@' => { entrance = (y as i8, x as i8); Tile::Floor } '.' => Tile::Floor, '#' => Tile::Wall, _ => { let ord = c as usize; if c.is_ascii_uppercase() { let index = ord - 'A' as usize; doors[index] = (y as i8, x as i8); Tile::Door(index) } else { let index = ord - 'a' as usize; keys[index] = (y as i8, x as i8); Tile::Key(index) } } } }).collect() }).collect(); map[entrance.0 as usize][entrance.1 as usize] = Tile::Wall; map[entrance.0 as usize - 1][entrance.1 as usize] = Tile::Wall; map[entrance.0 as usize + 1][entrance.1 as usize] = Tile::Wall; map[entrance.0 as usize][entrance.1 as usize - 1] = Tile::Wall; map[entrance.0 as usize][entrance.1 as usize + 1] = Tile::Wall; keys.push((entrance.0 - 1, entrance.1 - 1)); keys.push((entrance.0 - 1, entrance.1 + 1)); keys.push((entrance.0 + 1, entrance.1 - 1)); keys.push((entrance.0 + 1, entrance.1 + 1)); let costs_from_key: Vec<_> = keys.iter().map(|&pos| { get_shortest_paths(&map, pos) }).collect(); let mut queue = BinaryHeap::new(); queue.push((Reverse(0), 0, [26, 27, 28, 29], 0)); let mut visited = HashSet::new(); while let Some((_, cost, poses, keys)) = queue.pop() { if visited.contains(&(poses, keys)) { continue; } visited.insert((poses, keys)); if keys == 0x3FFFFFF { println!("{}", cost); break; } queue.extend(poses.iter().enumerate().flat_map(|(n, &pos)| { costs_from_key[pos].iter().enumerate().filter_map(move |( key, possibilities, )| { possibilities.iter().find_map(|&(new_cost, needed_keys)| { if key != pos && keys & 1 << key == 0 && needed_keys & keys == needed_keys { let new_cost = new_cost + cost; let mut new_poses = poses; new_poses[n] = key; Some((Reverse(new_cost), new_cost, new_poses, keys | 1 << key)) } else { None } }) }) })); } }
use std::cmp::Reverse; use std::collections::{BinaryHeap, HashSet}; use std::io::Read; #[derive(PartialEq)] enum Tile { Wall, Floor, Door(usize), Key(usize), } fn get_shortest_paths(map: &Vec<Vec<Tile>>, start: (i8, i8)) -> Vec<Vec<(u16, u32)>> { let mut queue = BinaryHeap::new(); let mut visited: Vec<Vec<_>> = map.iter().map(|row| { row.iter().map(|_| Vec::new()).collect() }).collect(); let mut shortest_paths = vec![Vec::new(); 26]; queue.push((Reverse(0), 0, 0, s
d_keys |= 1 << door; } Tile::Wall => { continue; } Tile::Floor => {} Tile::Key(key) => { shortest_paths[key as usize].push((cost, needed_keys)); needed_keys |= 1 << key; } } for (dy, dx) in &[ (-1, 0), (1, 0), (0, -1), (0, 1), ] { let (y, x) = (y + dy, x + dx); queue.push((Reverse(cost + 1), needed_keys, cost + 1, (y, x))); } } shortest_paths } fn main() { let mut input = String::new(); std::io::stdin().read_to_string(&mut input).unwrap(); let mut entrance = (0, 0); let mut keys = vec![(0, 0); 26]; let mut doors = vec![(0, 0); 26]; let mut map: Vec<Vec<_>> = input.lines().enumerate().map(|(y, line)| { line.chars().enumerate().map(|(x, c)| { match c { '@' => { entrance = (y as i8, x as i8); Tile::Floor } '.' => Tile::Floor, '#' => Tile::Wall, _ => { let ord = c as usize; if c.is_ascii_uppercase() { let index = ord - 'A' as usize; doors[index] = (y as i8, x as i8); Tile::Door(index) } else { let index = ord - 'a' as usize; keys[index] = (y as i8, x as i8); Tile::Key(index) } } } }).collect() }).collect(); map[entrance.0 as usize][entrance.1 as usize] = Tile::Wall; map[entrance.0 as usize - 1][entrance.1 as usize] = Tile::Wall; map[entrance.0 as usize + 1][entrance.1 as usize] = Tile::Wall; map[entrance.0 as usize][entrance.1 as usize - 1] = Tile::Wall; map[entrance.0 as usize][entrance.1 as usize + 1] = Tile::Wall; keys.push((entrance.0 - 1, entrance.1 - 1)); keys.push((entrance.0 - 1, entrance.1 + 1)); keys.push((entrance.0 + 1, entrance.1 - 1)); keys.push((entrance.0 + 1, entrance.1 + 1)); let costs_from_key: Vec<_> = keys.iter().map(|&pos| { get_shortest_paths(&map, pos) }).collect(); let mut queue = BinaryHeap::new(); queue.push((Reverse(0), 0, [26, 27, 28, 29], 0)); let mut visited = HashSet::new(); while let Some((_, cost, poses, keys)) = queue.pop() { if visited.contains(&(poses, keys)) { continue; } visited.insert((poses, keys)); if keys == 0x3FFFFFF { println!("{}", cost); break; } queue.extend(poses.iter().enumerate().flat_map(|(n, &pos)| { costs_from_key[pos].iter().enumerate().filter_map(move |( key, possibilities, )| { possibilities.iter().find_map(|&(new_cost, needed_keys)| { if key != pos && keys & 1 << key == 0 && needed_keys & keys == needed_keys { let new_cost = new_cost + cost; let mut new_poses = poses; new_poses[n] = key; Some((Reverse(new_cost), new_cost, new_poses, keys | 1 << key)) } else { None } }) }) })); } }
tart)); while let Some((_, mut needed_keys, cost, pos)) = queue.pop() { let (y, x) = pos; let visited_this = &mut visited[y as usize][x as usize]; if visited_this.iter().any(|&old_needed_keys| needed_keys & old_needed_keys == old_needed_keys) { continue; } visited_this.push(needed_keys); let tile = &map[y as usize][x as usize]; match *tile { Tile::Door(door) => { neede
function_block-random_span
[ { "content": "fn get_shortest_paths(map: &Vec<Vec<Tile>>, start: (i8, i8)) -> Vec<Vec<(u16, u32)>> {\n\n let mut queue = BinaryHeap::new();\n\n let mut visited: Vec<Vec<_>> = map.iter().map(|row| {\n\n row.iter().map(|_| Vec::new()).collect()\n\n }).collect();\n\n let mut shortest_paths = vec![Vec::new(); 26];\n\n\n\n queue.push((Reverse(0), 0, 0, start));\n\n\n\n while let Some((_, mut needed_keys, cost, pos)) = queue.pop() {\n\n let (y, x) = pos;\n\n\n\n let visited_this = &mut visited[y as usize][x as usize];\n\n if visited_this.iter().any(|&old_needed_keys| needed_keys & old_needed_keys == old_needed_keys) {\n\n continue;\n\n }\n\n visited_this.push(needed_keys);\n\n\n\n let tile = &map[y as usize][x as usize];\n\n match *tile {\n", "file_path": "day18a/src/main.rs", "rank": 1, "score": 188814.41691968968 }, { "content": "#[derive(PartialEq)]\n\nenum Tile {\n\n Wall,\n\n Floor,\n\n Door(usize),\n\n Key(usize),\n\n}\n\n\n", "file_path": "day18a/src/main.rs", "rank": 2, "score": 119871.48101470605 }, { "content": "#[derive(PartialEq)]\n\nenum Tile {\n\n Floor,\n\n Wall,\n\n Portal(usize, usize),\n\n}\n\n\n", "file_path": "day20a/src/main.rs", "rank": 3, "score": 119871.48101470605 }, { "content": "#[derive(PartialEq)]\n\nenum Tile {\n\n Floor,\n\n Wall,\n\n Portal(usize, usize, bool),\n\n}\n\n\n", "file_path": "day20b/src/main.rs", "rank": 4, "score": 119871.48101470605 }, { "content": "fn draw_tile(window: &Window, x: i64, y: i64, tile: i64) {\n\n window.mvaddstr((y + 1) as i32, x as i32, match tile {\n\n 0 => \" \",\n\n\n\n 1 => \"\\u{2588}\",\n\n\n\n 2 => \"\\u{2592}\",\n\n\n\n 3 => \"\\u{2550}\",\n\n\n\n 4 => \"0\",\n\n\n\n _ => \"\",\n\n });\n\n window.refresh();\n\n}\n\n\n", "file_path": "day13b/src/main.rs", "rank": 6, "score": 85424.08081483826 }, { "content": "fn pass_checkpoint(mut computer: intcode::IntCode, door: &str, mut items: Vec<&str>) {\n\n let mut line = String::new();\n\n\n\n for &item in &items {\n\n computer.put_line(&format!(\"drop {}\", item));\n\n while { computer.get_line(&mut line).unwrap(); line != \"Command?\" } {}\n\n }\n\n\n\n let mut new_items = Vec::new();\n\n\n\n loop {\n\n let mut eliminated_item = false;\n\n\n\n new_items.clear();\n\n\n\n for &item in &items {\n\n computer.put_line(&format!(\"take {}\", item));\n\n while { computer.get_line(&mut line).unwrap(); line != \"Command?\" } {}\n\n\n\n match try_checkpoint(&mut computer, door) {\n", "file_path": "day25/src/main.rs", "rank": 7, "score": 84369.06391761824 }, { "content": "fn try_checkpoint(computer: &mut intcode::IntCode, door: &str) -> CheckpointResult {\n\n computer.put_line(door);\n\n\n\n let mut code = 0;\n\n\n\n let mut line = String::new();\n\n while let Ok(_) = computer.get_line(&mut line) {\n\n for word in line.split_whitespace() {\n\n if let Ok(new_code) = word.parse() {\n\n code = new_code;\n\n }\n\n\n\n if word == \"heavier\" {\n\n while { computer.get_line(&mut line).unwrap(); line != \"Command?\" } {}\n\n return CheckpointResult::Heavier;\n\n }\n\n\n\n if word == \"lighter\" {\n\n while { computer.get_line(&mut line).unwrap(); line != \"Command?\" } {}\n\n return CheckpointResult::Lighter;\n\n }\n\n }\n\n }\n\n\n\n println!(\"{}\", code);\n\n CheckpointResult::Done\n\n}\n\n\n", "file_path": "day25/src/main.rs", "rank": 8, "score": 74186.1805206418 }, { "content": "fn try_take(mut computer: intcode::IntCode, item: &str, door: &str, current_room: &str) -> bool {\n\n computer.put_line(&format!(\"take {}\", item));\n\n computer.put_line(door);\n\n\n\n let mut seen_lines = 
Vec::new();\n\n\n\n let mut line = String::new();\n\n while let Ok(_) = computer.get_line(&mut line) {\n\n if line.starts_with(\"==\") {\n\n let room = &line[3..line.len() - 3];\n\n return room != current_room;\n\n }\n\n\n\n if line != \"\" {\n\n if seen_lines.contains(&line) {\n\n // Stuck in a loop\n\n return false;\n\n }\n\n seen_lines.push(line.clone());\n\n }\n\n }\n\n\n\n // Dead\n\n false\n\n}\n\n\n", "file_path": "day25/src/main.rs", "rank": 9, "score": 64332.03266015794 }, { "content": "enum Reps {\n\n NoRep,\n\n InternalRep,\n\n RepAtEnd,\n\n Triple,\n\n}\n\n\n", "file_path": "day4b/src/main.rs", "rank": 10, "score": 59836.1391101387 }, { "content": "enum RobotState {\n\n Painting,\n\n Moving,\n\n}\n\n\n", "file_path": "day11b/src/main.rs", "rank": 11, "score": 58664.41673229369 }, { "content": "#[derive(PartialEq)]\n\nenum CheckpointResult {\n\n Heavier,\n\n Lighter,\n\n Done,\n\n}\n\n\n", "file_path": "day25/src/main.rs", "rank": 12, "score": 58664.41673229369 }, { "content": "enum RobotState {\n\n Painting,\n\n Moving,\n\n}\n\n\n", "file_path": "day11a/src/main.rs", "rank": 13, "score": 58664.41673229369 }, { "content": "#[derive(Clone, Copy)]\n\nenum ParseState {\n\n None,\n\n Doors,\n\n Items,\n\n}\n\n\n", "file_path": "day25/src/main.rs", "rank": 14, "score": 58664.41673229369 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n std::io::stdin().read_to_string(&mut input).unwrap();\n\n\n\n const STACK_SIZE: i32 = 10007;\n\n\n\n println!(\"{}\", input.lines().fold(2019, |acc, line| {\n\n let mut words = line.split_whitespace();\n\n if words.next().unwrap() == \"cut\" {\n\n let count: i32 = words.next().unwrap().parse().unwrap();\n\n (acc - count).rem_euclid(STACK_SIZE)\n\n } else if words.next().unwrap() == \"with\" {\n\n let increment: i32 = words.nth(1).unwrap().parse().unwrap();\n\n (acc * increment).rem_euclid(STACK_SIZE)\n\n } else {\n\n STACK_SIZE - acc - 1\n\n }\n\n }));\n\n}\n", "file_path": "day22a/src/main.rs", "rank": 15, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n std::io::stdin().read_line(&mut input).unwrap();\n\n let mut computer = intcode::IntCode::new(input.trim().split(',').map(|x| x.parse().unwrap()).collect());\n\n computer.put_input(2);\n\n while let Some(output) = computer.run() {\n\n println!(\"{}\", output);\n\n }\n\n}\n", "file_path": "day9b/src/main.rs", "rank": 16, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n std::io::stdin().read_to_string(&mut input).unwrap();\n\n\n\n let re = regex::Regex::new(r\"(?m)^<x=(-?\\d+), y=(-?\\d+), z=(-?\\d+)>$\").unwrap();\n\n let mut moons: Vec<Moon> = re.captures_iter(&input).map(|x| {\n\n Moon::new(x[1].parse().unwrap(), x[2].parse().unwrap(), x[3].parse().unwrap())\n\n }).collect();\n\n\n\n for _ in 0..1000 {\n\n for i in 0..(moons.len() - 1) {\n\n let (moons_a, moons_b) = moons.split_at_mut(i + 1);\n\n let moon_a = &mut moons_a[i];\n\n for moon_b in moons_b {\n\n moon_a.accelerate(moon_b);\n\n }\n\n }\n\n\n\n for moon in &mut moons {\n\n moon.translate();\n\n }\n\n }\n\n\n\n println!(\"{}\", moons.iter().map(|x| x.energy()).sum::<i32>());\n\n}\n", "file_path": "day12a/src/main.rs", "rank": 17, "score": 49589.92988212962 }, { "content": "fn main() {\n\n const HEIGHT: usize = 6;\n\n const WIDTH: usize = 25;\n\n let mut input = String::new();\n\n std::io::stdin().read_line(&mut input).unwrap();\n\n\n\n let pixels: Vec<char> = input.trim().chars().collect();\n\n\n\n for y in 0..HEIGHT {\n\n for x in 
0..WIDTH {\n\n for layer in 0..pixels.len() / (HEIGHT * WIDTH) {\n\n let pixel = pixels[layer * HEIGHT * WIDTH + y * WIDTH + x];\n\n if pixel == '1' {\n\n print!(\"\\u{2588}\");\n\n break;\n\n } else if pixel != '2' {\n\n print!(\" \");\n\n break;\n\n }\n\n }\n\n }\n\n println!(\"\");\n\n }\n\n}\n", "file_path": "day8b/src/main.rs", "rank": 18, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut wire_a = HashSet::new();\n\n let mut closest_intersection = std::i32::MAX;\n\n for i in 0..2 {\n\n let mut input = String::new();\n\n io::stdin().read_line(&mut input).unwrap();\n\n let mut pos = Point { x: 0, y: 0 };\n\n for j in input.trim().split(',') {\n\n let length: usize = j.chars().skip(1).collect::<String>().parse().unwrap();\n\n let direction = match j.chars().nth(0).unwrap() {\n\n 'U' => Point { x: 0, y: 1 },\n\n\n\n 'D' => Point { x: 0, y: -1 },\n\n\n\n 'L' => Point { x: -1, y: 0 },\n\n\n\n 'R' => Point { x: 1, y: 0 },\n\n\n\n _ => panic!(\"Unknown token\"),\n\n };\n", "file_path": "day3a/src/main.rs", "rank": 20, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n std::io::stdin().read_line(&mut input).unwrap();\n\n let initial_memory: Vec<_> = input.trim().split(',').map(|x| x.parse().unwrap()).collect();\n\n\n\n let mut computers: Vec<_> = (0..50).map(|i| {\n\n let mut computer = intcode::IntCode::new(initial_memory.clone());\n\n computer.set_default_input(-1);\n\n computer.put_input(i);\n\n computer\n\n }).collect();\n\n\n\n let mut packets = Vec::new();\n\n let mut last_zero_y = 0;\n\n let mut nat_mem = [0, 0];\n\n\n\n 'outer: loop {\n\n for computer in &mut computers {\n\n computer.iterate();\n\n if let Some(packet) = computer.get_outputs(3) {\n", "file_path": "day23b/src/main.rs", "rank": 21, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n std::io::stdin().read_line(&mut input).unwrap();\n\n let mut computer = intcode::IntCode::new(input.trim().split(',').map(|x| x.parse().unwrap()).collect());\n\n\n\n let script = \"\\\n\n NOT A J\\n\\\n\n NOT B T\\n\\\n\n OR T J\\n\\\n\n NOT C T\\n\\\n\n OR T J\\n\\\n\n AND D J\\n\\\n\n WALK\\n\\\n\n \";\n\n\n\n for i in script.chars() {\n\n computer.put_input(i as i64);\n\n }\n\n\n\n while let Some(output) = computer.run() {\n\n if output >= 128 {\n\n println!(\"{}\", output);\n\n break;\n\n }\n\n }\n\n}\n", "file_path": "day21a/src/main.rs", "rank": 22, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n io::stdin().read_to_string(&mut input).unwrap();\n\n let mut graph = Graph::new();\n\n for i in input.split_whitespace() {\n\n let mut edge = i.split(')');\n\n let parent = edge.next().unwrap();\n\n let child = edge.next().unwrap();\n\n graph.add_edge(parent, child);\n\n }\n\n \n\n println!(\"{}\", graph.distance(graph.parent_of(\"YOU\"), graph.parent_of(\"SAN\")));\n\n}\n", "file_path": "day6b/src/main.rs", "rank": 23, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n std::io::stdin().read_line(&mut input).unwrap();\n\n\n\n let mut values: Vec<_> = std::iter::repeat(input.trim().chars().map(|c| c.to_digit(10).unwrap() as u8)).take(10000).flatten().collect();\n\n let offset = values.iter().take(7).fold(0, |acc, &val| acc * 10 + val as usize);\n\n assert!(offset > values.len() / 2);\n\n values = values[offset..].to_vec();\n\n let mut new_values = vec![0; values.len()];\n\n\n\n for _ in 0..100 {\n\n values.iter().zip(new_values.iter_mut()).rev().fold(0, 
|acc, (&val, new_val)| {\n\n let sum = acc + val as u32;\n\n *new_val = (sum % 10) as u8;\n\n sum\n\n });\n\n std::mem::swap(&mut values, &mut new_values);\n\n }\n\n\n\n println!(\n\n \"{}\",\n\n values.into_iter().take(8).map(|num| std::char::from_digit(num as u32, 10).unwrap()).collect::<String>(),\n\n );\n\n}\n", "file_path": "day16b/src/main.rs", "rank": 24, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n io::stdin().read_to_string(&mut input).unwrap();\n\n println!(\"{}\", input.lines().map(|x| {\n\n let mut fuel = 0;\n\n let mut mass = x.parse::<i32>().unwrap();\n\n while mass > 0 {\n\n mass = mass / 3 - 2;\n\n if mass <= 0 {\n\n break;\n\n }\n\n fuel += mass;\n\n }\n\n fuel\n\n }).fold(0, |acc, x| acc + x));\n\n}\n", "file_path": "day1b/src/main.rs", "rank": 25, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut wire_a = HashMap::new();\n\n let mut closest_intersection = std::i32::MAX;\n\n for i in 0..2 {\n\n let mut input = String::new();\n\n io::stdin().read_line(&mut input).unwrap();\n\n let mut pos = Point { x: 0, y: 0 };\n\n let mut steps = 0;\n\n for j in input.trim().split(',') {\n\n let length: usize = j.chars().skip(1).collect::<String>().parse().unwrap();\n\n let direction = match j.chars().nth(0).unwrap() {\n\n 'U' => Point { x: 0, y: 1 },\n\n\n\n 'D' => Point { x: 0, y: -1 },\n\n\n\n 'L' => Point { x: -1, y: 0 },\n\n\n\n 'R' => Point { x: 1, y: 0 },\n\n\n\n _ => panic!(\"Unknown token\"),\n", "file_path": "day3b/src/main.rs", "rank": 26, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n std::io::stdin().read_line(&mut input).unwrap();\n\n let mut computer = intcode::IntCode::new(input.trim().split(',').map(|x| x.parse().unwrap()).collect());\n\n computer.put_input(1);\n\n while let Some(output) = computer.run() {\n\n println!(\"{}\", output);\n\n }\n\n}\n", "file_path": "day9a/src/main.rs", "rank": 27, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n std::io::stdin().read_line(&mut input).unwrap();\n\n let mut computer = intcode::IntCode::new(input.trim().split(',').map(|x| x.parse().unwrap()).collect());\n\n\n\n let mut block_tiles = std::collections::HashSet::new();\n\n\n\n while computer.iterate() {\n\n while let Some(output) = computer.get_outputs(3) {\n\n if output[2] == 2 {\n\n block_tiles.insert((output[0], output[1]));\n\n }\n\n }\n\n }\n\n\n\n println!(\"{}\", block_tiles.len());\n\n}\n", "file_path": "day13a/src/main.rs", "rank": 28, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n std::io::stdin().read_to_string(&mut input).unwrap();\n\n\n\n let re = regex::Regex::new(r\"(?m)^<x=(-?\\d+), y=(-?\\d+), z=(-?\\d+)>$\").unwrap();\n\n\n\n let mut axes = [\n\n Vec::new(),\n\n Vec::new(),\n\n Vec::new(),\n\n ];\n\n\n\n for i in re.captures_iter(&input) {\n\n for j in 0..3 {\n\n axes[j].push((i[j + 1].parse::<i32>().unwrap(), 0));\n\n }\n\n }\n\n\n\n let initial: Vec<_> = axes.iter().map(|x| x.to_vec()).collect();\n\n\n", "file_path": "day12b/src/main.rs", "rank": 29, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n std::io::stdin().read_line(&mut input).unwrap();\n\n let initial_memory: Vec<_> = input.trim().split(',').map(|x| x.parse().unwrap()).collect();\n\n\n\n let mut computers: Vec<_> = (0..50).map(|i| {\n\n let mut computer = intcode::IntCode::new(initial_memory.clone());\n\n computer.set_default_input(-1);\n\n 
computer.put_input(i);\n\n computer\n\n }).collect();\n\n\n\n let mut packets = Vec::new();\n\n\n\n 'outer: loop {\n\n for computer in &mut computers {\n\n computer.iterate();\n\n if let Some(packet) = computer.get_outputs(3) {\n\n if packet[0] == 255 {\n\n println!(\"{}\", packet[2]);\n", "file_path": "day23a/src/main.rs", "rank": 30, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut args = std::env::args();\n\n let interactive = args.nth(1).unwrap_or_default() == \"--interactive\";\n\n\n\n let mut input = String::new();\n\n\n\n if interactive {\n\n let filename = &args.next().unwrap();\n\n let mut file = std::fs::File::open(filename).unwrap();\n\n file.read_to_string(&mut input).unwrap();\n\n } else {\n\n std::io::stdin().read_to_string(&mut input).unwrap();\n\n };\n\n\n\n let initial_memory: Vec<_> = input.trim().split(',').map(|x| x.parse().unwrap()).collect();\n\n let mut computer = intcode::IntCode::new(initial_memory.clone());\n\n\n\n if interactive {\n\n let mut line = String::new();\n\n while let Ok(_) = computer.get_line(&mut line) {\n", "file_path": "day25/src/main.rs", "rank": 31, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n std::io::stdin().read_line(&mut input).unwrap();\n\n let initial_program: Vec<i64> = input.trim().split(',').map(|x| x.parse().unwrap()).collect();\n\n let mut greatest_output = 0;\n\n\n\n for i in 0..5 * 4 * 3 * 2 {\n\n let mut selection = [0; 4];\n\n let mut select = i;\n\n for j in 0..4 {\n\n selection[j] = (select % (5 - j)) as u8;\n\n select /= 5 - j;\n\n }\n\n\n\n let mut signal = 0;\n\n\n\n for &phase in &construct_phases(&selection) {\n\n let mut computer = intcode::IntCode::new(initial_program.to_vec());\n\n computer.put_input(phase as i64);\n\n computer.put_input(signal);\n", "file_path": "day7a/src/main.rs", "rank": 32, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let display_simulation = std::env::args().any(|x| x == \"--display\");\n\n\n\n let mut input = String::new();\n\n std::io::stdin().read_line(&mut input).unwrap();\n\n let mut computer = intcode::IntCode::new(input.trim().split(',').map(|x| x.parse().unwrap()).collect());\n\n computer.set_at_address(0, 2);\n\n\n\n let mut tiles = HashMap::new();\n\n\n\n let window = if display_simulation { Some(pancurses::initscr()) } else { None };\n\n if let Some(_) = window {\n\n pancurses::curs_set(0);\n\n }\n\n\n\n let mut paddle_pos = (-1, -1);\n\n let mut ball_pos = (-1, -1);\n\n let mut ball_direction = (0, 0);\n\n let mut expected_paddle_x = -1;\n\n let mut score = -1;\n", "file_path": "day13b/src/main.rs", "rank": 33, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n std::io::stdin().read_line(&mut input).unwrap();\n\n let initial_memory: Vec<_> = input.trim().split(',').map(|x| x.parse().unwrap()).collect();\n\n\n\n let mut visited = HashSet::new();\n\n let mut next = VecDeque::new();\n\n\n\n visited.insert((0, 0));\n\n next.push_back((0i8, 0i8, 0u8, intcode::IntCode::new(initial_memory)));\n\n\n\n 'outer: while let Some((x, y, dist, computer)) = next.pop_front() {\n\n for (dx, dy, dir) in [\n\n (1, 0, 4),\n\n (-1, 0, 3),\n\n (0, 1, 2),\n\n (0, -1, 1),\n\n ] {\n\n let (x, y) = (x + dx, y + dy);\n\n if visited.contains(&(x, y)) {\n", "file_path": "day15a/src/main.rs", "rank": 34, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n std::io::stdin().read_to_string(&mut input).unwrap();\n\n\n\n let mut letters = 
HashMap::new();\n\n\n\n let mut map: Vec<Vec<_>> = input.lines().enumerate().map(|(y, line)| {\n\n line.chars().enumerate().map(|(x, tile)| {\n\n match tile {\n\n '#' => Tile::Wall,\n\n '.' => Tile::Floor,\n\n ' ' => Tile::Wall,\n\n\n\n c => {\n\n letters.insert((y, x), c);\n\n Tile::Wall\n\n }\n\n }\n\n }).collect()\n\n }).collect();\n", "file_path": "day20b/src/main.rs", "rank": 35, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n io::stdin().read_to_string(&mut input).unwrap();\n\n println!(\"{}\", input.lines().map(|x| x.parse::<i32>().unwrap() / 3 - 2).fold(0, |acc, x| acc + x));\n\n}\n", "file_path": "day1a/src/main.rs", "rank": 36, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n std::io::stdin().read_to_string(&mut input).unwrap();\n\n\n\n const STACK_SIZE: i128 = 119315717514047;\n\n const REPETIONS: u64 = 101741582076661;\n\n\n\n let (k, a) = input.lines().fold((1, 0), |(k, a), line| {\n\n let mut words = line.split_whitespace();\n\n if words.next().unwrap() == \"cut\" {\n\n let count: i128 = words.next().unwrap().parse().unwrap();\n\n (k, (a - count).rem_euclid(STACK_SIZE))\n\n } else if words.next().unwrap() == \"with\" {\n\n let increment: i128 = words.nth(1).unwrap().parse().unwrap();\n\n ((k * increment).rem_euclid(STACK_SIZE), (a * increment).rem_euclid(STACK_SIZE))\n\n } else {\n\n ((-k).rem_euclid(STACK_SIZE), (-a - 1).rem_euclid(STACK_SIZE))\n\n }\n\n });\n\n\n\n let pow = pow_mod(k, REPETIONS, STACK_SIZE);\n\n let (k, a) = (pow, mod_div(a * (pow - 1), k - 1, STACK_SIZE));\n\n println!(\"{}\", mod_div(2020 - a, k, STACK_SIZE));\n\n}\n", "file_path": "day22b/src/main.rs", "rank": 37, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n std::io::stdin().read_line(&mut input).unwrap();\n\n let initial_memory: Vec<_> = input.trim().split(',').map(|x| x.parse().unwrap()).collect();\n\n\n\n let mut count = 1;\n\n\n\n let (mut x, mut bottom_y) = (1..).find_map(|i| {\n\n (0..i).find_map(|j| {\n\n let x = i - j;\n\n let y = 1 + j;\n\n if point_pulled(initial_memory.clone(), x, y) {\n\n Some((x, y))\n\n } else {\n\n None\n\n }\n\n })\n\n }).unwrap();\n\n\n\n let mut top_y = bottom_y;\n", "file_path": "day19a/src/main.rs", "rank": 38, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n std::io::stdin().read_line(&mut input).unwrap();\n\n let mut computer = intcode::IntCode::new(input.trim().split(',').map(|x| x.parse().unwrap()).collect());\n\n\n\n computer.set_at_address(1, 12);\n\n computer.set_at_address(2, 2);\n\n\n\n while let Some(output) = computer.run() {\n\n println!(\"{}\", output);\n\n }\n\n\n\n println!(\"{}\", computer.get_at_address(0));\n\n}\n", "file_path": "day2a/src/main.rs", "rank": 39, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n std::io::stdin().read_to_string(&mut input).unwrap();\n\n\n\n let mut entrance = (0, 0);\n\n let mut keys = vec![(0, 0); 26];\n\n let mut doors = vec![(0, 0); 26];\n\n\n\n let map: Vec<Vec<_>> = input.lines().enumerate().map(|(y, line)| {\n\n line.chars().enumerate().map(|(x, c)| {\n\n match c {\n\n '@' => {\n\n entrance = (y as i8, x as i8);\n\n Tile::Floor\n\n }\n\n\n\n '.' 
=> Tile::Floor,\n\n '#' => Tile::Wall,\n\n\n\n _ => {\n", "file_path": "day18a/src/main.rs", "rank": 40, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n std::io::stdin().read_to_string(&mut input).unwrap();\n\n\n\n fn parse_substance(string: &str) -> (i32, &str) {\n\n let mut split = string.trim().split(\" \");\n\n let count = split.next().unwrap().trim().parse().unwrap();\n\n (count, split.next().unwrap().trim())\n\n }\n\n\n\n let reactions: HashMap<_, _> = input.lines().map(|x| {\n\n let mut sides = x.split(\"=>\");\n\n let sources = sides.next().unwrap().split(\", \").map(|y| parse_substance(y)).collect();\n\n let (count, target) = parse_substance(sides.next().unwrap());\n\n (target, Reaction { count: count, sources: sources })\n\n }).collect();\n\n\n\n let mut ore_needed = 0;\n\n let mut needed = vec![(1, \"FUEL\")];\n\n let mut surplus = HashMap::new();\n", "file_path": "day14a/src/main.rs", "rank": 41, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n std::io::stdin().read_line(&mut input).unwrap();\n\n let mut computer = intcode::IntCode::new(input.trim().split(',').map(|x| x.parse().unwrap()).collect());\n\n\n\n let mut state = RobotState::Painting;\n\n let mut position = Point { x: 0, y: 0 };\n\n let mut direction = Point { x: 0, y: -1 };\n\n let mut painted_panels = std::collections::HashMap::new();\n\n painted_panels.insert(Point { x: 0, y: 0 }, 1);\n\n computer.set_input(1);\n\n\n\n while let Some(output) = computer.run() {\n\n match state {\n\n RobotState::Painting => {\n\n painted_panels.insert(position, output);\n\n computer.set_input(output);\n\n state = RobotState::Moving;\n\n }\n\n\n", "file_path": "day11b/src/main.rs", "rank": 42, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n io::stdin().read_line(&mut input).unwrap();\n\n\n\n let mut limits = input.trim().split('-').map(|x| x.chars().map(|y| y.to_digit(10).unwrap() as u8).collect());\n\n let mut current: Vec<u8> = limits.next().unwrap();\n\n for i in 1..current.len() {\n\n if current[i] < current[i - 1] {\n\n current[i] = current[i - 1];\n\n }\n\n }\n\n let end: Vec<u8> = limits.next().unwrap();\n\n let mut count: u32 = 0;\n\n\n\n 'mainloop: loop {\n\n for (n, &i) in current.iter().enumerate() {\n\n if i > end[n] {\n\n break 'mainloop;\n\n } else if n == current.len() - 1 {\n\n match repeating(&current) {\n", "file_path": "day4b/src/main.rs", "rank": 43, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n io::stdin().read_to_string(&mut input).unwrap();\n\n let mut graph = Graph::new();\n\n for i in input.split_whitespace() {\n\n let mut edge = i.split(')');\n\n let parent = edge.next().unwrap();\n\n let child = edge.next().unwrap();\n\n graph.add_edge(parent, child);\n\n }\n\n println!(\"{}\", graph.count_orbits(graph.calc_root()));\n\n}\n", "file_path": "day6a/src/main.rs", "rank": 44, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n std::io::stdin().read_line(&mut input).unwrap();\n\n let initial_memory: Vec<_> = input.trim().split(',').map(|x| x.parse().unwrap()).collect();\n\n\n\n let (mut x, mut y) = (1..).find_map(|i| {\n\n (0..i).find_map(|j| {\n\n let x = i - j;\n\n let y = 1 + j;\n\n if point_pulled(initial_memory.clone(), x, y) {\n\n Some((x, y))\n\n } else {\n\n None\n\n }\n\n })\n\n }).unwrap();\n\n\n\n while !point_pulled(initial_memory.clone(), x + 99, y - 99) {\n\n x += 
1;\n\n\n\n while point_pulled(initial_memory.clone(), x, y + 1) {\n\n y += 1;\n\n }\n\n }\n\n\n\n println!(\"{}\", x * 10000 + (y - 99));\n\n}\n", "file_path": "day19b/src/main.rs", "rank": 45, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n std::io::stdin().read_line(&mut input).unwrap();\n\n let program_orig: Vec<i64> = input.trim().split(',').map(|x| x.parse().unwrap()).collect();\n\n for i in 0..100 {\n\n for j in 0..100 {\n\n let mut computer = intcode::IntCode::new(program_orig.to_vec());\n\n\n\n computer.set_at_address(1, i);\n\n computer.set_at_address(2, j);\n\n\n\n while let Some(output) = computer.run() {\n\n println!(\"{}\", output);\n\n }\n\n if computer.get_at_address(0) == 19690720 {\n\n println!(\"{}\", 100 * i + j);\n\n break;\n\n }\n\n }\n\n }\n\n}\n", "file_path": "day2b/src/main.rs", "rank": 46, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n std::io::stdin().read_line(&mut input).unwrap();\n\n let mut computer = intcode::IntCode::new(input.trim().split(',').map(|x| x.parse().unwrap()).collect());\n\n\n\n let script = \"\\\n\n NOT A J\\n\\\n\n NOT B T\\n\\\n\n OR T J\\n\\\n\n NOT C T\\n\\\n\n OR T J\\n\\\n\n AND D J\\n\\\n\n NOT E T\\n\\\n\n NOT T T\\n\\\n\n OR H T\\n\\\n\n AND T J\\n\\\n\n RUN\\n\\\n\n \";\n\n\n\n for i in script.chars() {\n", "file_path": "day21b/src/main.rs", "rank": 47, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n std::io::stdin().read_line(&mut input).unwrap();\n\n let initial_memory: Vec<_> = input.trim().split(',').map(|x| x.parse().unwrap()).collect();\n\n\n\n let mut visited = HashSet::new();\n\n let mut next = VecDeque::new();\n\n let mut walls = HashSet::new();\n\n let mut last_dist = 0;\n\n\n\n visited.insert((0, 0));\n\n next.push_back((0, 0, 0, intcode::IntCode::new(initial_memory)));\n\n\n\n 'outer: while let Some((x, y, dist, computer)) = next.pop_front() {\n\n last_dist = dist;\n\n\n\n for (dx, dy, dir) in [\n\n (1, 0, 4),\n\n (-1, 0, 3),\n\n (0, 1, 2),\n", "file_path": "day15b/src/main.rs", "rank": 48, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n std::io::stdin().read_line(&mut input).unwrap();\n\n let mut computer = intcode::IntCode::new(input.trim().split(',').map(|x| x.parse().unwrap()).collect());\n\n computer.put_input(5);\n\n\n\n while let Some(output) = computer.run() {\n\n println!(\"{}\", output);\n\n }\n\n}\n", "file_path": "day5b/src/main.rs", "rank": 49, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n io::stdin().read_line(&mut input).unwrap();\n\n\n\n let mut limits = input.trim().split('-').map(|x| x.chars().map(|y| y.to_digit(10).unwrap() as u8).collect());\n\n let mut current: Vec<u8> = limits.next().unwrap();\n\n for i in 1..current.len() {\n\n if current[i] < current[i - 1] {\n\n current[i] = current[i - 1];\n\n }\n\n }\n\n let end: Vec<u8> = limits.next().unwrap();\n\n let mut count: u32 = 0;\n\n 'mainloop: loop {\n\n for (n, &i) in current.iter().enumerate() {\n\n if i > end[n] {\n\n break 'mainloop;\n\n } else if n == current.len() - 1 {\n\n if repeating(&current) {\n\n count += (end[n] - i + 1) as u32;\n", "file_path": "day4a/src/main.rs", "rank": 50, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n std::io::stdin().read_to_string(&mut input).unwrap();\n\n\n\n let mut map = [[[false; 5]; 5]; SIZE];\n\n\n\n for (y, line) in 
input.lines().enumerate() {\n\n for (x, c) in line.chars().enumerate() {\n\n map[MARGIN][y][x] = c == '#';\n\n }\n\n }\n\n\n\n let mut start = MARGIN;\n\n let mut end = MARGIN + 1;\n\n\n\n let adjacent: Vec<Vec<_>> = (0isize..5).map(|y| (0isize..5).map(move |x| {\n\n let mut adjacent = Vec::new();\n\n\n\n for (dy, dx) in [\n\n (0, 1),\n", "file_path": "day24b/src/main.rs", "rank": 51, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n std::io::stdin().read_line(&mut input).unwrap();\n\n let mut computer = intcode::IntCode::new(input.trim().split(',').map(|x| x.parse().unwrap()).collect());\n\n computer.put_input(1);\n\n let mut prev_output = 0;\n\n\n\n while let Some(output) = computer.run() {\n\n assert_eq!(prev_output, 0);\n\n prev_output = output;\n\n }\n\n\n\n println!(\"{}\", prev_output);\n\n}\n", "file_path": "day5a/src/main.rs", "rank": 52, "score": 49589.92988212962 }, { "content": "fn explore(\n\n mut computer: intcode::IntCode,\n\n path_taken: &mut Vec<String>,\n\n item_paths: &mut Vec<(String, Vec<String>)>,\n\n through_checkpoint: &mut Vec<String>,\n\n) {\n\n let mut parse_state = ParseState::None;\n\n let mut doors = Vec::<String>::new();\n\n let mut items = Vec::<String>::new();\n\n let mut room = String::new();\n\n\n\n let mut line = String::new();\n\n while let Ok(_) = computer.get_line(&mut line) {\n\n match (line.as_str(), parse_state) {\n\n (\"Doors here lead:\", ParseState::None) => {\n\n parse_state = ParseState::Doors;\n\n }\n\n\n\n (\"Items here:\", ParseState::None) => {\n\n parse_state = ParseState::Items;\n", "file_path": "day25/src/main.rs", "rank": 53, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n std::io::stdin().read_line(&mut input).unwrap();\n\n let initial_memory: Vec<_> = input.trim().split(',').map(|x| x.parse().unwrap()).collect();\n\n\n\n let mut computer = intcode::IntCode::new(initial_memory.clone());\n\n let mut map = Vec::new(); \n\n let mut row = Vec::new();\n\n\n\n let mut pos = (0, 0);\n\n let mut direction = (0, 0);\n\n\n\n while let Some(output) = computer.run() {\n\n let c = output as u8;\n\n\n\n match c as char {\n\n '\\n' => {\n\n if !row.is_empty() {\n\n map.push(row);\n\n row = Vec::new();\n", "file_path": "day17b/src/main.rs", "rank": 54, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n std::io::stdin().read_to_string(&mut input).unwrap();\n\n\n\n let mut asteroids: std::collections::HashSet<Point> = input\n\n .lines()\n\n .enumerate()\n\n .flat_map(|(n, x)| x\n\n .chars()\n\n .enumerate()\n\n .filter_map(move |(m, y)| if y == '#' {\n\n Some(Point { x: m as i32, y: n as i32 })\n\n } else {\n\n None\n\n })\n\n )\n\n .collect();\n\n\n\n let station_pos = asteroids.iter().map(|&x| {\n\n (x, reachable_asteroids(x, &asteroids).count())\n", "file_path": "day10b/src/main.rs", "rank": 55, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n std::io::stdin().read_to_string(&mut input).unwrap();\n\n\n\n let mut letters = HashMap::new();\n\n\n\n let mut map: Vec<Vec<_>> = input.lines().enumerate().map(|(y, line)| {\n\n line.chars().enumerate().map(|(x, tile)| {\n\n match tile {\n\n '#' => Tile::Wall,\n\n '.' 
=> Tile::Floor,\n\n ' ' => Tile::Wall,\n\n\n\n c => {\n\n letters.insert((y, x), c);\n\n Tile::Wall\n\n }\n\n }\n\n }).collect()\n\n }).collect();\n", "file_path": "day20a/src/main.rs", "rank": 56, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n std::io::stdin().read_to_string(&mut input).unwrap();\n\n\n\n let map: Vec<Vec<_>> = input.lines().map(|line| {\n\n line.chars().map(|c| if c == '#' { 1 } else { 0 }).collect()\n\n }).collect();\n\n assert_eq!(map.len(), 5);\n\n for row in &map {\n\n assert_eq!(row.len(), 5);\n\n }\n\n\n\n let lookup_table: Vec<_> = (0..0b100000).map(|state| {\n\n let bits = (0..5).filter(|i| state >> i & 0b1 == 1).count();\n\n if bits == 2 || bits == 1 && state & 0b100 == 0 { 1 } else { 0 }\n\n }).collect();\n\n\n\n let mut map = (0..5).flat_map(|line| (0..5).map(move |col| (line, col))).fold(0u32, |acc, (line, col)| {\n\n acc | map[line][col] << line * 5 + col\n\n });\n", "file_path": "day24a/src/main.rs", "rank": 57, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n std::io::stdin().read_to_string(&mut input).unwrap();\n\n\n\n fn parse_substance(string: &str) -> (i64, &str) {\n\n let mut split = string.trim().split(\" \");\n\n let count = split.next().unwrap().trim().parse().unwrap();\n\n (count, split.next().unwrap().trim())\n\n }\n\n\n\n let reactions = input.lines().map(|x| {\n\n let mut sides = x.split(\"=>\");\n\n let sources = sides.next().unwrap().split(\", \").map(|y| parse_substance(y)).collect();\n\n let (count, target) = parse_substance(sides.next().unwrap());\n\n (target, Reaction { count: count, sources: sources })\n\n }).collect();\n\n\n\n const ORE_AVAILABLE: i64 = 1000000000000;\n\n\n\n let mut lower_bound = 0;\n", "file_path": "day14b/src/main.rs", "rank": 58, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let height = 6;\n\n let width = 25;\n\n let mut input = String::new();\n\n std::io::stdin().read_line(&mut input).unwrap();\n\n\n\n let mut chars = input.trim().chars().peekable();\n\n\n\n let mut min_zeroes = std::u32::MAX;\n\n let mut product = 0;\n\n\n\n while let Some(_) = chars.peek() {\n\n let mut count = 0;\n\n let mut zeroes = 0;\n\n let mut ones = 0;\n\n let mut twos = 0;\n\n while count < height * width {\n\n if let Some(i) = chars.next() {\n\n match i {\n\n '0' => { zeroes += 1; },\n", "file_path": "day8a/src/main.rs", "rank": 59, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n std::io::stdin().read_line(&mut input).unwrap();\n\n let initial_memory: Vec<_> = input.trim().split(',').map(|x| x.parse().unwrap()).collect();\n\n\n\n let mut computer = intcode::IntCode::new(initial_memory);\n\n let mut map = Vec::new(); \n\n let mut row = Vec::new();\n\n\n\n while let Some(output) = computer.run() {\n\n let c = output as u8;\n\n if c != '\\n' as u8 {\n\n row.push(c);\n\n } else if !row.is_empty() {\n\n map.push(row);\n\n row = Vec::new();\n\n }\n\n }\n\n\n\n println!(\"{}\", map.iter().enumerate().skip(1).take(map.len() - 2).flat_map(|(y, row)| {\n", "file_path": "day17a/src/main.rs", "rank": 60, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n std::io::stdin().read_to_string(&mut input).unwrap();\n\n\n\n let asteroids: std::collections::HashSet<Point> = input\n\n .lines()\n\n .enumerate()\n\n .flat_map(|(n, x)| x\n\n .chars()\n\n .enumerate()\n\n .filter_map(move |(m, y)| if y == '#' {\n\n Some(Point { x: m as i32, y: n as i32 })\n\n } else 
{\n\n None\n\n })\n\n )\n\n .collect();\n\n\n\n println!(\"{}\", asteroids.iter().map(|&x| {\n\n asteroids.iter().filter(|&&y| x != y).filter(|&&y| {\n\n let diff = y - x;\n\n let step_count = gcd(diff.x.abs(), diff.y.abs());\n\n let step = diff / step_count;\n\n !(1..step_count).any(|z| asteroids.contains(&(x + step * z)))\n\n }).count()\n\n }).max().unwrap());\n\n}\n", "file_path": "day10a/src/main.rs", "rank": 61, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n std::io::stdin().read_line(&mut input).unwrap();\n\n let mut computer = intcode::IntCode::new(input.trim().split(',').map(|x| x.parse().unwrap()).collect());\n\n\n\n let mut state = RobotState::Painting;\n\n let mut position = Point { x: 0, y: 0 };\n\n let mut direction = Point { x: 0, y: -1 };\n\n let mut painted_panels = std::collections::HashMap::new();\n\n\n\n while let Some(output) = computer.run() {\n\n match state {\n\n RobotState::Painting => {\n\n painted_panels.insert(position, output);\n\n computer.set_input(output);\n\n state = RobotState::Moving;\n\n }\n\n\n\n RobotState::Moving => {\n\n match output {\n", "file_path": "day11a/src/main.rs", "rank": 62, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n std::io::stdin().read_line(&mut input).unwrap();\n\n\n\n let mut values: Vec<_> = input.trim().chars().map(|c| c.to_digit(10).unwrap() as i16).collect();\n\n\n\n for _ in 0..100 {\n\n values = (1..=values.len()).map(|n| {\n\n values.iter().zip(\n\n [0, 1, 0, -1].iter().flat_map(move |num| std::iter::repeat(num).take(n)).cycle().skip(1)\n\n ).map(|(a, b)| a * b).sum::<i16>().abs() % 10\n\n }).collect();\n\n }\n\n\n\n println!(\n\n \"{}\",\n\n values.into_iter().take(8).map(|num| std::char::from_digit(num as u32, 10).unwrap()).collect::<String>(),\n\n );\n\n}\n", "file_path": "day16a/src/main.rs", "rank": 63, "score": 49589.92988212962 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n std::io::stdin().read_line(&mut input).unwrap();\n\n let initial_program: Vec<i64> = input.trim().split(',').map(|x| x.parse().unwrap()).collect();\n\n let mut greatest_output = 0;\n\n\n\n for i in 0..5 * 4 * 3 * 2 {\n\n let mut selection = [0; 4];\n\n let mut select = i;\n\n for j in 0..4 {\n\n selection[j] = (select % (5 - j)) as u8;\n\n select /= 5 - j;\n\n }\n\n\n\n let mut signal = 0;\n\n\n\n let mut amplifiers: Vec<intcode::IntCode> = construct_phases(&selection).iter().map(|&x| {\n\n let mut amp = intcode::IntCode::new(initial_program.to_vec());\n\n amp.put_input(x as i64);\n\n amp\n", "file_path": "day7b/src/main.rs", "rank": 64, "score": 49589.92988212962 }, { "content": "fn calc_expected_paddle_x(\n\n mut tiles: HashMap<(i64, i64), i64>,\n\n mut ball_pos: (i64, i64),\n\n mut ball_direction: (i64, i64),\n\n paddle_y: i64,\n\n) -> i64 {\n\n while ball_pos.1 < paddle_y - 1 || ball_direction.1 <= 0 {\n\n let next_x_pos = (ball_pos.0 + ball_direction.0, ball_pos.1);\n\n let next_y_pos = (ball_pos.0, ball_pos.1 + ball_direction.1);\n\n let next_xy_pos = (ball_pos.0 + ball_direction.0, ball_pos.1 + ball_direction.1);\n\n if let Some(&tile) = tiles.get(&next_x_pos) {\n\n if tile == 2 {\n\n tiles.remove(&next_x_pos);\n\n }\n\n ball_direction.0 *= -1;\n\n } else if let Some(&tile) = tiles.get(&next_y_pos) {\n\n if tile == 2 {\n\n tiles.remove(&next_y_pos);\n\n }\n\n ball_direction.1 *= -1;\n", "file_path": "day13b/src/main.rs", "rank": 65, "score": 47489.617278035075 }, { "content": "fn reachable_asteroids<'a>(\n\n pos: Point,\n\n 
asteroids: &'a std::collections::HashSet<Point>,\n\n) -> Box<dyn Iterator<Item=&Point> + 'a> {\n\n Box::new(asteroids.iter().filter(move |&&x| pos != x).filter(move |&&x| {\n\n let diff = x - pos;\n\n let step_count = gcd(diff.x.abs(), diff.y.abs());\n\n let step = diff / step_count;\n\n !(1..step_count).any(|y| asteroids.contains(&(pos + step * y)))\n\n }))\n\n}\n\n\n", "file_path": "day10b/src/main.rs", "rank": 66, "score": 46839.83207386664 }, { "content": "fn opposite_direction(dir: &str) -> &str {\n\n match dir {\n\n \"north\" => \"south\",\n\n \"south\" => \"north\",\n\n \"west\" => \"east\",\n\n \"east\" => \"west\",\n\n _ => panic!(),\n\n }\n\n}\n\n\n", "file_path": "day25/src/main.rs", "rank": 67, "score": 41387.23630351395 }, { "content": "fn repeating(digits: &Vec<u8>) -> Reps {\n\n let mut prev = digits[0];\n\n let mut streak = 0;\n\n for &i in digits.iter().skip(1).take(digits.len() - 2) {\n\n if prev == i {\n\n streak += 1;\n\n } else {\n\n if streak == 1 {\n\n return Reps::InternalRep;\n\n }\n\n streak = 0;\n\n }\n\n prev = i;\n\n }\n\n if streak == 1 {\n\n Reps::RepAtEnd\n\n } else if streak == 0 {\n\n Reps::NoRep\n\n } else {\n\n Reps::Triple\n\n }\n\n}\n\n\n", "file_path": "day4b/src/main.rs", "rank": 68, "score": 40333.561242339434 }, { "content": "fn repeating(digits: &Vec<u8>) -> bool {\n\n let mut prev = digits[0];\n\n for &i in digits.iter().skip(1).take(digits.len() - 2) {\n\n if prev == i {\n\n return true;\n\n }\n\n prev = i;\n\n }\n\n false\n\n}\n\n\n", "file_path": "day4a/src/main.rs", "rank": 69, "score": 40333.561242339434 }, { "content": "fn gcd(a: i32, b: i32) -> i32 {\n\n if b == 0 {\n\n a\n\n } else {\n\n gcd(b, a % b)\n\n }\n\n}\n\n\n", "file_path": "day10a/src/main.rs", "rank": 70, "score": 40287.26097485754 }, { "content": "fn gcd(a: i64, b: i64) -> i64 {\n\n if b == 0 {\n\n a\n\n } else {\n\n gcd(b, a % b)\n\n }\n\n}\n\n\n", "file_path": "day12b/src/main.rs", "rank": 71, "score": 40287.26097485754 }, { "content": "fn lcm(a: i64, b: i64) -> i64 {\n\n (a * b).abs() / gcd(a, b)\n\n}\n\n\n", "file_path": "day12b/src/main.rs", "rank": 72, "score": 40287.26097485754 }, { "content": "fn gcd(a: i32, b: i32) -> i32 {\n\n if b == 0 {\n\n a\n\n } else {\n\n gcd(b, a % b)\n\n }\n\n}\n\n\n", "file_path": "day10b/src/main.rs", "rank": 73, "score": 40287.26097485754 }, { "content": "fn draw_score(window: &Window, score: i64) {\n\n window.mvprintw(0, 0, &format!(\" \"));\n\n window.mvprintw(0, 0, &format!(\"{}\", score));\n\n window.refresh();\n\n}\n\n\n", "file_path": "day13b/src/main.rs", "rank": 74, "score": 39512.71724953406 }, { "content": "fn construct_phases(selection: &[u8; 4]) -> [u8; 5] {\n\n let mut phases = vec![0, 1, 2, 3, 4];\n\n let mut result = [0; 5];\n\n for (n, i) in selection.iter().map(|&x| phases.remove(x as usize)).enumerate() {\n\n result[n] = i;\n\n }\n\n result[4] = phases[0];\n\n result\n\n}\n\n\n", "file_path": "day7a/src/main.rs", "rank": 75, "score": 39409.47106929697 }, { "content": "fn construct_phases(selection: &[u8; 4]) -> [u8; 5] {\n\n let mut phases = vec![5, 6, 7, 8, 9];\n\n let mut result = [0; 5];\n\n for (n, i) in selection.iter().map(|&x| phases.remove(x as usize)).enumerate() {\n\n result[n] = i;\n\n }\n\n result[4] = phases[0];\n\n result\n\n}\n\n\n", "file_path": "day7b/src/main.rs", "rank": 76, "score": 39409.47106929697 }, { "content": "fn pow_mod(base: i128, exp: u64, m: i128) -> i128 {\n\n if exp == 0 {\n\n 1\n\n } else {\n\n let t = pow_mod(base, exp / 2, m);\n\n (if exp % 2 == 0 {\n\n t * t\n\n } else {\n\n (t * t) % 
m * base\n\n }) % m\n\n }\n\n}\n\n\n", "file_path": "day22b/src/main.rs", "rank": 77, "score": 35552.386568028676 }, { "content": "fn mod_div(numerator: i128, denominator: i128, m: i128) -> i128 {\n\n (numerator.rem_euclid(m) * pow_mod(denominator.rem_euclid(m), (m - 2) as u64, m)).rem_euclid(m)\n\n}\n\n\n", "file_path": "day22b/src/main.rs", "rank": 78, "score": 35552.386568028676 }, { "content": "fn point_pulled(initial_memory: Vec<i64>, x: i64, y: i64) -> bool {\n\n let mut computer = intcode::IntCode::new(initial_memory);\n\n computer.put_input(x);\n\n computer.put_input(y);\n\n computer.run().unwrap() != 0\n\n}\n\n\n", "file_path": "day19a/src/main.rs", "rank": 79, "score": 34220.68747945812 }, { "content": "fn point_pulled(initial_memory: Vec<i64>, x: i64, y: i64) -> bool {\n\n let mut computer = intcode::IntCode::new(initial_memory);\n\n computer.put_input(x);\n\n computer.put_input(y);\n\n computer.run().unwrap() != 0\n\n}\n\n\n", "file_path": "day19b/src/main.rs", "rank": 80, "score": 34220.68747945812 }, { "content": "fn calc_ore_needed(reactions: &HashMap<&str, Reaction>, fuel_count: i64) -> i64 {\n\n let mut ore_needed = 0;\n\n let mut needed = vec![(fuel_count, \"FUEL\")];\n\n let mut surplus = HashMap::new();\n\n\n\n while let Some((needed_count, needed_resource)) = needed.pop() {\n\n let resource_surplus = surplus.remove(needed_resource).unwrap_or_default();\n\n let needed_count = needed_count - resource_surplus;\n\n let reaction = &reactions[needed_resource];\n\n\n\n let mut needed_reactions = needed_count / reaction.count;\n\n if needed_reactions < 0 {\n\n needed_reactions = 0;\n\n } else if needed_reactions * reaction.count < needed_count {\n\n needed_reactions += 1;\n\n }\n\n\n\n let resulting_count = needed_reactions * reaction.count;\n\n if resulting_count > needed_count {\n\n surplus.insert(needed_resource, resulting_count - needed_count);\n", "file_path": "day14b/src/main.rs", "rank": 81, "score": 33055.08608216655 }, { "content": " (y, x + 1),\n\n ] {\n\n if map[y][x] != Tile::Wall && !visited[level as usize][y][x] {\n\n grow_visited(&mut visited, level + 2);\n\n visited[level as usize][y][x] = true;\n\n queue.push((Reverse(new_dist), new_dist, level, y, x));\n\n }\n\n }\n\n\n\n if let Tile::Portal(y, x, inner) = map[y][x] {\n\n let new_level = if inner { level + 1 } else { level - 1 };\n\n\n\n if new_level >= 0 && !visited[new_level as usize][y][x] {\n\n grow_visited(&mut visited, new_level + 2);\n\n visited[new_level as usize][y][x] = true;\n\n queue.push((Reverse(new_dist), new_dist, new_level, y, x));\n\n }\n\n }\n\n }\n\n}\n", "file_path": "day20b/src/main.rs", "rank": 83, "score": 11.126793290761027 }, { "content": "\n\n let new_dist = dist + 1;\n\n\n\n for (y, x) in [\n\n (y - 1, x),\n\n (y + 1, x),\n\n (y, x - 1),\n\n (y, x + 1),\n\n ] {\n\n if map[y][x] != Tile::Wall && !visited[y][x] {\n\n visited[y][x] = true;\n\n queue.push((Reverse(new_dist), new_dist, y, x));\n\n }\n\n }\n\n\n\n if let Tile::Portal(y, x) = map[y][x] {\n\n if !visited[y][x] {\n\n visited[y][x] = true;\n\n queue.push((Reverse(new_dist), new_dist, y, x));\n\n }\n\n }\n\n }\n\n}\n", "file_path": "day20a/src/main.rs", "rank": 84, "score": 11.095297270802275 }, { "content": " map[other_y][other_x] = Tile::Portal(portal_y, portal_x);\n\n map[portal_y][portal_x] = Tile::Portal(other_y, other_x);\n\n } else {\n\n portals.insert(name, (portal_y, portal_x));\n\n }\n\n }\n\n }\n\n\n\n let (ent_y, ent_x) = portals[&('A', 'A')];\n\n let exit = portals[&('Z', 'Z')];\n\n let mut visited: Vec<Vec<_>> = 
map.iter().map(|line| line.iter().map(|_| false).collect()).collect();\n\n visited[ent_y][ent_x] = true;\n\n let mut queue = BinaryHeap::new();\n\n queue.push((Reverse(0), 0, ent_y, ent_x));\n\n\n\n while let Some((_, dist, y, x)) = queue.pop() {\n\n if (y, x) == exit {\n\n println!(\"{}\", dist);\n\n break;\n\n }\n", "file_path": "day20a/src/main.rs", "rank": 85, "score": 10.852827172454864 }, { "content": " x - 1\n\n }), (c, c_right)))\n\n } else {\n\n None\n\n } {\n\n if let Some(&(other_y, other_x)) = portals.get(&name) {\n\n map[other_y][other_x] = Tile::Portal(portal_y, portal_x, is_inner(other_y, other_x));\n\n map[portal_y][portal_x] = Tile::Portal(other_y, other_x, is_inner(portal_y, portal_x));\n\n } else {\n\n portals.insert(name, (portal_y, portal_x));\n\n }\n\n }\n\n }\n\n\n\n let grow_visited = |visited: &mut Vec<Vec<Vec<_>>>, size| {\n\n while visited.len() < size as usize {\n\n visited.push((0..map_h).map(|_| (0..map_w).map(|_| false).collect()).collect());\n\n }\n\n };\n\n\n", "file_path": "day20b/src/main.rs", "rank": 86, "score": 9.261950029690372 }, { "content": " let (ent_y, ent_x) = portals[&('A', 'A')];\n\n let exit = portals[&('Z', 'Z')];\n\n let mut visited = Vec::new();\n\n grow_visited(&mut visited, 2);\n\n visited[0][ent_y][ent_x] = true;\n\n let mut queue = BinaryHeap::new();\n\n queue.push((Reverse(0), 0, 0, ent_y, ent_x));\n\n\n\n while let Some((_, dist, level, y, x)) = queue.pop() {\n\n if (y, x) == exit && level == 0 {\n\n println!(\"{}\", dist);\n\n break;\n\n }\n\n\n\n let new_dist = dist + 1;\n\n\n\n for (y, x) in [\n\n (y - 1, x),\n\n (y + 1, x),\n\n (y, x - 1),\n", "file_path": "day20b/src/main.rs", "rank": 87, "score": 8.918185120125234 }, { "content": " Tile::Door(door) => {\n\n needed_keys |= 1 << door;\n\n }\n\n\n\n Tile::Wall => { continue; }\n\n Tile::Floor => {}\n\n Tile::Key(key) => {\n\n shortest_paths[key as usize].push((cost, needed_keys));\n\n needed_keys |= 1 << key;\n\n }\n\n }\n\n\n\n for (dy, dx) in &[\n\n (-1, 0),\n\n (1, 0),\n\n (0, -1),\n\n (0, 1),\n\n ] {\n\n let (y, x) = (y + dy, x + dx);\n\n queue.push((Reverse(cost + 1), needed_keys, cost + 1, (y, x)));\n\n }\n\n }\n\n\n\n shortest_paths\n\n}\n\n\n", "file_path": "day18a/src/main.rs", "rank": 89, "score": 8.696372229564613 }, { "content": " next.clear();\n\n next.push_back((x, y, 0, computer));\n\n std::mem::swap(&mut visited, &mut walls);\n\n walls.clear();\n\n visited.insert((x, y));\n\n continue 'outer;\n\n }\n\n _ => panic!(),\n\n }\n\n } else {\n\n panic!();\n\n }\n\n }\n\n }\n\n\n\n println!(\"{}\", last_dist);\n\n}\n", "file_path": "day15b/src/main.rs", "rank": 90, "score": 8.682887843447999 }, { "content": " let ord = c as usize;\n\n if c.is_ascii_uppercase() {\n\n let index = ord - 'A' as usize;\n\n doors[index] = (y as i8, x as i8);\n\n Tile::Door(index)\n\n } else {\n\n let index = ord - 'a' as usize;\n\n keys[index] = (y as i8, x as i8);\n\n Tile::Key(index)\n\n }\n\n }\n\n }\n\n }).collect()\n\n }).collect();\n\n\n\n keys.push(entrance);\n\n\n\n let costs_from_key: Vec<_> = keys.iter().map(|&pos| {\n\n get_shortest_paths(&map, pos)\n\n }).collect();\n", "file_path": "day18a/src/main.rs", "rank": 92, "score": 8.506199998929482 }, { "content": "\n\n let mut queue = BinaryHeap::new();\n\n queue.push((Reverse(0), 0, 26, 0));\n\n\n\n\n\n let mut visited = HashSet::new();\n\n\n\n while let Some((_, cost, pos, keys)) = queue.pop() {\n\n if visited.contains(&(pos, keys)) {\n\n continue;\n\n }\n\n visited.insert((pos, keys));\n\n\n\n if keys == 0x3FFFFFF {\n\n 
println!(\"{}\", cost);\n\n break;\n\n }\n\n\n\n queue.extend(costs_from_key[pos].iter().enumerate().filter_map(|(\n\n key, possibilities,\n", "file_path": "day18a/src/main.rs", "rank": 93, "score": 8.278260209545524 }, { "content": "\n\n let mut portals = HashMap::<_, (usize, _)>::new();\n\n\n\n for (&(y, x), &c) in &letters {\n\n if let Some(((portal_y, portal_x), name)) = if let Some(&c_below) = letters.get(&(y + 1, x)) {\n\n Some(((if y == 0 || map[y - 1][x] == Tile::Wall {\n\n y + 2\n\n } else {\n\n y - 1\n\n }, x), (c, c_below)))\n\n } else if let Some(&c_right) = letters.get(&(y, x + 1)) {\n\n Some(((y, if x == 0 || map[y][x - 1] == Tile::Wall {\n\n x + 2\n\n } else {\n\n x - 1\n\n }), (c, c_right)))\n\n } else {\n\n None\n\n } {\n\n if let Some(&(other_y, other_x)) = portals.get(&name) {\n", "file_path": "day20a/src/main.rs", "rank": 94, "score": 7.696665398242461 }, { "content": "\n\n let mut portals = HashMap::<_, (usize, _)>::new();\n\n\n\n let map_h = map.len();\n\n let map_w = map[0].len();\n\n let is_inner = |y, x| {\n\n y > 2 && x > 2 && y < map_h - 3 && x < map_w - 3\n\n };\n\n\n\n for (&(y, x), &c) in &letters {\n\n if let Some(((portal_y, portal_x), name)) = if let Some(&c_below) = letters.get(&(y + 1, x)) {\n\n Some(((if y == 0 || map[y - 1][x] == Tile::Wall {\n\n y + 2\n\n } else {\n\n y - 1\n\n }, x), (c, c_below)))\n\n } else if let Some(&c_right) = letters.get(&(y, x + 1)) {\n\n Some(((y, if x == 0 || map[y][x - 1] == Tile::Wall {\n\n x + 2\n\n } else {\n", "file_path": "day20b/src/main.rs", "rank": 95, "score": 7.315112350911443 }, { "content": "use std::collections::HashMap;\n\nuse std::collections::HashSet;\n\nuse std::io;\n\nuse std::io::Read;\n\n\n", "file_path": "day6a/src/main.rs", "rank": 96, "score": 6.875169684900277 }, { "content": "use std::collections::HashMap;\n\nuse std::collections::HashSet;\n\nuse std::io;\n\nuse std::io::Read;\n\n\n", "file_path": "day6b/src/main.rs", "rank": 97, "score": 6.875169684900277 }, { "content": " (0, -1, 1),\n\n ] {\n\n let (x, y) = (x + dx, y + dy);\n\n if visited.contains(&(x, y)) {\n\n continue;\n\n }\n\n visited.insert((x, y));\n\n\n\n let mut computer = computer.clone();\n\n computer.put_input(dir);\n\n\n\n if let Some(output) = computer.run() {\n\n match output {\n\n 0 => {\n\n walls.insert((x, y));\n\n }\n\n 1 => {\n\n next.push_back((x, y, dist + 1, computer));\n\n }\n\n 2 => {\n", "file_path": "day15b/src/main.rs", "rank": 98, "score": 6.792061263444955 }, { "content": "use std::collections::HashMap;\n\nuse std::io::Read;\n\n\n", "file_path": "day14a/src/main.rs", "rank": 99, "score": 6.314341648371697 } ]
Rust
src/server/main.rs
brennie/chat
dfeec8c61f6f7f8a603241e055a4e214195cc2b0
extern crate chat_common; extern crate failure; extern crate futures; extern crate serde; #[macro_use] extern crate slog; extern crate slog_async; extern crate slog_term; extern crate structopt; #[macro_use] extern crate structopt_derive; extern crate tokio; extern crate tokio_io; use std::net::IpAddr; use futures::future; use slog::Drain; use structopt::StructOpt; use tokio::prelude::*; use chat_common::{join_stream, messages, split_stream, Recv, Send}; #[derive(Debug, StructOpt)] #[structopt(name = "server")] struct Options { #[structopt( short = "h", long = "host", default_value = "127.0.0.1", env = "CHAT_HOST", parse(try_from_str) )] host: IpAddr, #[structopt(short = "p", long = "port", default_value = "9999", env = "CHAT_PORT")] port: u16, #[structopt(short = "v", parse(from_occurrences))] verbosity: u8, } fn build_drain<D>( decorator: D, min_level: slog::Level, max_level: slog::Level, ) -> impl Drain<Ok = Option<()>, Err = slog::Never> where D: slog_term::Decorator, { slog_term::FullFormat::new(decorator) .use_original_order() .use_utc_timestamp() .build() .fuse() .filter(move |record: &slog::Record| { min_level <= record.level() && record.level() <= max_level }) } fn main() { let exit_code = { let options = Options::from_args(); let log_level = match options.verbosity { 0 => slog::Level::Info, 1 => slog::Level::Debug, _ => slog::Level::Trace, }; let stderr = build_drain( slog_term::TermDecorator::new().stderr().build(), slog::Level::Critical, slog::Level::Error, ); let stdout = build_drain( slog_term::TermDecorator::new().stdout().build(), slog::Level::Warning, log_level, ); let drain = slog::Duplicate::new(stdout, stderr).fuse(); let drain = slog_async::Async::new(drain).build().fuse(); let log = slog::Logger::root(drain, o!()); info!(log, "Started server"; "options" => ?options, "version" => env!("CARGO_PKG_VERSION")); match run_server(log.clone(), options) { Ok(_) => 0, Err(e) => { crit!(log, "An nexpected error occurred"; "error" => %e); 1 } } }; std::process::exit(exit_code); } fn run_server(log: slog::Logger, options: Options) -> Result<(), failure::Error> { use std::net::SocketAddr; let addr = SocketAddr::new(options.host, options.port); let server = tokio::net::TcpListener::bind(&addr)? 
.incoming() .for_each({ let log = log.clone(); move |conn| { let peer_addr = conn.peer_addr() .expect("Could not retrieve remote address") .clone(); let peer_addr = format!("{}", peer_addr); let log = log.new(o!("peer" => peer_addr)); info!(log, "New connection."); tokio::spawn(handle_conn(log, conn)); Ok(()) } }) .map_err({ let log = log.clone(); move |e| { error!(log, "Connection error."; "error" => %e); () } }); tokio::run(server); Ok(()) } fn handle_conn( log: slog::Logger, stream: tokio::net::TcpStream, ) -> impl Future<Item = (), Error = ()> { use chat_common::messages::{client::*, handshake, server::*}; future::ok(split_stream::<handshake::AuthRequest, handshake::AuthResponse>(stream)) .and_then({ let log = log.clone(); move |(recv, send)| { do_handshake(log.clone(), recv, send) .map_err({ let log = log.clone(); move |err| { error!(log, "An error occurred during handshaking: {}", err); } }) .and_then(|(log, recv, send)| { let stream = join_stream(recv, send).unwrap(); let (recv, send) = split_stream::<ClientMessageKind, ServerMessage>(stream); future::ok((log, recv, send)) }) } }) .and_then(move |(log, recv, send)| { send.send(ServerMessage::FromServer(ServerMessageKind::Greeting( GreetingMessage { motd: "Hello, world!".into(), }, ))).map_err(|err| failure::Error::from(err)) .and_then({ let log = log.clone(); move |_| read_loop(log, recv) }) .map_err({ let log = log.clone(); move |err| { error!(log, "An unexpected error occurred: {}", err); } }) }) .and_then(|_| future::ok(())) } fn do_handshake( log: slog::Logger, recv: Recv<messages::handshake::AuthRequest>, send: Send<messages::handshake::AuthResponse>, ) -> impl Future< Item = ( slog::Logger, Recv<messages::handshake::AuthRequest>, Send<messages::handshake::AuthResponse>, ), Error = failure::Error, > { use messages::handshake::{AuthRequest, AuthResponse}; recv.into_future() .map_err(|(err, _)| err.into()) .and_then(move |(maybe_msg, recv)| match maybe_msg { Some(AuthRequest::AuthRequest { username }) => future::ok(((send, recv), username)), None => future::err(failure::err_msg("Connection closed unexpectedly.")), }) .and_then({ let log = log.clone(); move |((send, recv), username)| { let log = log.new(o!("username" => username.clone())); send.send(AuthResponse::AuthResponse { result: Ok(username.clone()), }).map_err(|err| err.into()) .and_then(|send| { info!(log, "Client authenticated."); future::ok((log, recv, send)) }) } }) } fn read_loop( log: slog::Logger, recv: Recv<messages::client::ClientMessageKind>, ) -> impl Future<Item = (), Error = failure::Error> { use messages::client::{ClientMessageKind::*, *}; future::loop_fn(recv.into_future(), { let log = log.clone(); move |stream_fut| { stream_fut .map_err(|(err, _)| err.into()) .and_then(|(maybe_msg, stream)| match maybe_msg { Some(msg) => future::ok((msg, stream)), None => future::err(failure::err_msg("Client unexpectedly closed connection.")), }) .and_then({ let log = log.clone(); move |(msg, _stream)| match msg { Goodbye(GoodbyeMessage { reason }) => { info!(log, "Client disconnected."; "reason" => ?reason); Ok(future::Loop::Break(())) } } }) } }) }
extern crate chat_common; extern crate failure; extern crate futures; extern crate serde; #[macro_use] extern crate slog; extern crate slog_async; extern crate slog_term; extern crate structopt; #[macro_use] extern crate structopt_derive; extern crate tokio; extern crate tokio_io; use std::net::IpAddr; use futures::future; use slog::Drain; use structopt::StructOpt; use tokio::prelude::*; use chat_common::{join_stream, messages, split_stream, Recv, Send}; #[derive(Debug, StructOpt)] #[structopt(name = "server")] struct Options { #[structopt( short = "h", long = "host", default_value = "127.0.0.1", env = "CHAT_HOST", parse(try_from_str) )] host: IpAddr, #[structopt(short = "p", long = "port", default_value = "9999", env = "CHAT_PORT")] port: u16, #[structopt(short = "v", parse(from_occurrences))] verbosity: u8, } fn build_drain<D>( decorator: D, min_level: slog::Level, max_level: slog::Level, ) -> impl Drain<Ok = Option<()>, Err = slog::Never> where D: slog_term::Decorator, { slog_term::FullFormat::new(decorator) .use_original_order() .use_utc_timestamp() .build() .fuse() .filter(move |record: &slog::Record| { min_level <= record.level() && record.level() <= max_level }) } fn main() { let exit_code = { let options = Options::from_args(); let log_level = match options.verbosity { 0 => slog::Level::Info, 1 => slog::Level::Debug, _ => slog::Level::Trace, }; let stderr = build_drain( slog_term::TermDecorator::new().stderr().build(), slog::Level::Critical, slog::Level::Error, ); let stdout = build_drain( slog_term::TermDecorator::new().stdout().build(), slog::Level::Warning, log_level, ); let drain = slog::Duplicate::new(stdout, stderr).fuse(); let drain = slog_async::Async::new(drain).build().fuse(); let log = slog::Logger::root(drain, o!()); info!(log, "Started server"; "options" => ?options, "version" => env!("CARGO_PKG_VERSION")); match run_server(log.clone(), options) { Ok(_) => 0, Err(e) => { crit!(log, "An nexpected error occurred"; "error" => %e); 1 } } }; std::process::exit(exit_code); } fn run_server(log: slog::Logger, options: Options) -> Result<(), failure::Error> { use std::net::SocketAddr; let addr = SocketAddr::new(options.host, options.port); let server = tokio::net::TcpListener::bind(&addr)? 
.incoming() .for_each({ let log = log.clone(); move |conn| { let peer_addr = conn.peer_addr() .expect("Could not retrieve remote address") .clone(); let peer_addr = format!("{}", peer_addr); let log = log.new(o!("peer" => peer_addr)); info!(log, "New connection."); tokio::spawn(handle_conn(log, conn)); Ok(()) } }) .map_err({ let log = log.clone(); move |e| { error!(log, "Connection error."; "error" => %e); () } }); tokio::run(server); Ok(()) } fn handle_conn( log: slog::Logger, stream: tokio::net::TcpStream, ) -> impl Future<Item = (), Error = ()> { use chat_common::messages::{client::*, handshake, server::*}; future::ok(split_stream::<handshake::AuthRequest, handshake::AuthResponse>(stream)) .and_then({ let log = log.clone(); move |(recv, send)| { do_handshake(log.clone(), recv, send) .map_err({ let log = log.clone(); move |err| { error!(log, "An error occurred during handshaking: {}", err); } }) .and_then(|(log, recv, send)| { let stream = join_stream(recv, send).unwrap(); let (recv, send) = split_stream::<ClientMessageKind, ServerMessage>(stream); future::ok((log, recv, send)) }) } }) .and_then(move |(log, recv, send)| { send.send(ServerMessage::FromServer(ServerMessageKind::Greeting( GreetingMessage { motd: "Hello, world!".into(), }, ))).map_err(|err| failure::Error::from(err)) .and_then({ let log = log.clone(); move |_| read_loop(log, recv) }) .map_err({ let log = log.clone(); move |err| { error!(log, "An unexpected error occurred: {}", err); } }) }) .and_then(|_| future::ok(())) } fn do_handshake( log: slog::Logger, recv: Recv<messages::handshake::AuthRequest>, send: Send<messages::handshake::AuthResponse>, ) -> impl Future< Item = ( slog::Logger, Recv<messages::handshake::AuthRequest>, Send<messages::handshake::AuthResponse>, ), Error = failure::Error, > { use messages::handshake::{AuthRequest, AuthResponse}; recv.into_future() .map_err(|(err, _)| err.into()) .and_then(move |(maybe_msg, recv)| match maybe_msg { Some(AuthRequest::AuthRequest { username }) => future::ok(((send, recv), username)), None => future::err(failure::err_msg("Connection closed unexpectedly.")), }) .and_then({ let log = log.clone(); move |((send, recv), username)| { let log = log.new(o!("username" => username.clone())); send.send(AuthResponse::AuthResponse { result: Ok(username.clone()), }).map_err(|err| err.into()) .and_then(|send| { info!(log, "Client authenticated."); future::ok((log, recv, send)) }) } }) } fn read_loop( log: slog::Logger, recv: Recv<messages::client::ClientMessageKind>, ) -> impl Future<Item = (), Error = failure::Error> { use messages::client::{ClientMessageKind::*, *}; future::loop_fn(recv.into_future(), { let log = log.clone(); move |stream_fut| { stream_fut .map_err(|(err, _)| err.into()) .and_then(|(maybe_msg, stream)|
) .and_then({ let log = log.clone(); move |(msg, _stream)| match msg { Goodbye(GoodbyeMessage { reason }) => { info!(log, "Client disconnected."; "reason" => ?reason); Ok(future::Loop::Break(())) } } }) } }) }
match maybe_msg { Some(msg) => future::ok((msg, stream)), None => future::err(failure::err_msg("Client unexpectedly closed connection.")), }
if_condition
[ { "content": "fn read_loop(recv: Recv<messages::server::ServerMessage>) -> impl Future<Item = (), Error = failure::Error>\n\n{\n\n use messages::server::{*, ServerMessage::*, ServerMessageKind::*};\n\n\n\n recv.map_err(|err| err.into())\n\n .for_each(|msg| {\n\n match msg {\n\n _m @ FromClient { .. } => unimplemented!(),\n\n FromServer(msg) => match msg {\n\n Greeting(GreetingMessage { ref motd }) => {\n\n println!(\"MOTD: {}\", motd);\n\n }\n\n },\n\n };\n\n\n\n future::ok(())\n\n })\n\n .map(|_| ())\n\n}\n", "file_path": "src/client/main.rs", "rank": 0, "score": 179069.64943603292 }, { "content": "pub fn split_stream<RecvM, SendM>(stream: TcpStream) -> (Recv<RecvM>, Send<SendM>)\n\nwhere\n\n for<'a> RecvM: serde::Deserialize<'a>,\n\n SendM: serde::Serialize,\n\n{\n\n let (send, recv) = Framed::new(stream).split();\n\n\n\n let recv = ReadJson::<_, RecvM>::new(recv);\n\n let send = WriteJson::<_, SendM>::new(send);\n\n\n\n (recv, send)\n\n}\n\n\n", "file_path": "src/common/lib.rs", "rank": 3, "score": 97311.91749210327 }, { "content": "/// A simple chat server.\n\nstruct Options {\n\n /// The IP address to listen on.\n\n #[structopt(name = \"HOST\", parse(try_from_str))]\n\n host: IpAddr,\n\n\n\n /// The port to bind to.\n\n #[structopt(short = \"p\", long = \"port\", default_value = \"9999\")]\n\n port: u16,\n\n\n\n /// The username to connect with.\n\n username: String,\n\n}\n\n\n", "file_path": "src/client/main.rs", "rank": 5, "score": 84160.53601467627 }, { "content": "fn main() {\n\n let options = Options::from_args();\n\n\n\n let addr = SocketAddr::new(options.host, options.port);\n\n let client = TcpStream::connect(&addr)\n\n .map_err(|err| failure::Error::from(err))\n\n .and_then(|stream| {\n\n use messages::handshake::{AuthRequest, AuthResponse};\n\n\n\n let (recv, send) = split_stream::<AuthResponse, AuthRequest>(stream);\n\n\n\n Ok((recv, send))\n\n })\n\n .and_then(|(recv, send)| do_handshake(options.username, recv, send))\n\n .and_then(|(recv, send, username)| {\n\n use messages::{client::ClientMessageKind, server::ServerMessage};\n\n\n\n let stream = join_stream(recv, send).unwrap();\n\n let (recv, send) = split_stream::<ServerMessage, ClientMessageKind>(stream);\n\n\n", "file_path": "src/client/main.rs", "rank": 7, "score": 80439.67465758884 }, { "content": "fn do_handshake(\n\n username: String,\n\n recv: Recv<messages::handshake::AuthResponse>,\n\n send: Send<messages::handshake::AuthRequest>,\n\n) -> impl Future<\n\n Item = (\n\n Recv<messages::handshake::AuthResponse>,\n\n Send<messages::handshake::AuthRequest>,\n\n String,\n\n ),\n\n Error = failure::Error,\n\n> {\n\n use messages::handshake::{AuthRequest, AuthResponse};\n\n\n\n send.send(AuthRequest::AuthRequest {\n\n username: username,\n\n }).map_err(|err| err.into())\n\n .and_then(move |send| {\n\n recv.into_future()\n\n .map_err(|(err, _)| err.into())\n", "file_path": "src/client/main.rs", "rank": 9, "score": 78291.5374429679 }, { "content": "pub fn join_stream<RecvM, SendM>(\n\n recv: Recv<RecvM>,\n\n send: Send<SendM>,\n\n) -> Result<TcpStream, failure::Error> {\n\n recv.into_inner()\n\n .reunite(send.into_inner())\n\n .map_err(Into::into)\n\n .map(Framed::into_inner)\n\n}\n", "file_path": "src/common/lib.rs", "rank": 12, "score": 62355.06318528976 }, { "content": " .and_then(|(maybe_msg, recv)| match maybe_msg {\n\n Some(AuthResponse::AuthResponse { result }) => match result {\n\n Ok(username) => future::ok((recv, send, username)),\n\n Err(err) => future::err(failure::err_msg(err)),\n\n },\n\n\n\n None => 
future::err(failure::err_msg(\"Connection closed unexpectedly.\")),\n\n })\n\n })\n\n}\n\n\n", "file_path": "src/client/main.rs", "rank": 13, "score": 34042.28492072266 }, { "content": " future::ok((recv, send, username))\n\n })\n\n .and_then(|(recv, send, username)| {\n\n println!(\"authenticated as {}\", username);\n\n let reader = read_loop(recv);\n\n\n\n let writer = Delay::new(Instant::now() + Duration::from_secs(5))\n\n .map_err(|err| failure::Error::from(err))\n\n .and_then(|_| {\n\n use messages::client::{*, ClientMessageKind::*};\n\n\n\n send.send(Goodbye(GoodbyeMessage {\n\n reason: Some(\"timed out\".into()),\n\n })).map_err(|e| e.into())\n\n })\n\n .map(|_| ());\n\n\n\n reader.select(writer).map(|_| ()).map_err(|(err, _)| err)\n\n })\n\n .map_err(|err| {\n\n eprintln!(\"Error: {:?}\", err);\n\n ()\n\n });\n\n\n\n tokio::run(client);\n\n}\n\n\n", "file_path": "src/client/main.rs", "rank": 14, "score": 34039.7154294367 }, { "content": "extern crate chat_common;\n\nextern crate failure;\n\nextern crate futures;\n\nextern crate structopt;\n\n#[macro_use]\n\nextern crate structopt_derive;\n\nextern crate tokio;\n\nextern crate tokio_io;\n\nextern crate tokio_serde_json;\n\n\n\nuse std::net::{IpAddr, SocketAddr};\n\nuse std::time::{Duration, Instant};\n\n\n\nuse structopt::StructOpt;\n\nuse tokio::net::TcpStream;\n\nuse tokio::prelude::*;\n\nuse tokio::timer::Delay;\n\n\n\nuse chat_common::{join_stream, messages, split_stream, Recv, Send};\n\n\n\n#[derive(Debug, StructOpt)]\n\n#[structopt(name = \"server\")]\n\n/// A simple chat server.\n", "file_path": "src/client/main.rs", "rank": 15, "score": 34039.67890349075 }, { "content": "pub enum AuthResponse {\n\n AuthResponse {\n\n /// Whether or not authentication was successful.\n\n ///\n\n /// The `Ok` is the username.\n\n /// The `Err` is a string representation of why the authentication failed.\n\n #[serde(flatten)]\n\n result: Result<String, String>,\n\n },\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n use serde_json;\n\n\n\n #[test]\n\n fn test_serde_auth_request_message() {\n\n let msg = AuthRequest::AuthRequest {\n", "file_path": "src/common/messages/handshake.rs", "rank": 16, "score": 34020.71273171546 }, { "content": " (\n\n AuthResponse::AuthResponse {\n\n result: Ok(\"wiz\".into()),\n\n },\n\n json!({\n\n \"kind\": \"auth_response\",\n\n \"Ok\": \"wiz\",\n\n }),\n\n ),\n\n (\n\n AuthResponse::AuthResponse {\n\n result: Err(\"Invalid username.\".into()),\n\n },\n\n json!({\n\n \"kind\": \"auth_response\",\n\n \"Err\": \"Invalid username.\",\n\n }),\n\n ),\n\n ];\n\n\n", "file_path": "src/common/messages/handshake.rs", "rank": 17, "score": 34017.46021014648 }, { "content": "/// An authentication request.\n\n///\n\n/// This currently exists as an enum due to an [issue in serde][1].\n\n///\n\n/// [1]: https://github.com/serde-rs/serde/issues/271\n\n#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]\n\n#[serde(rename_all = \"snake_case\", tag = \"kind\")]\n\npub enum AuthRequest {\n\n AuthRequest { username: String },\n\n}\n\n\n\n/// An authentication response.\n\n///\n\n/// This is a response to [`AuthRequest`].\n\n///\n\n/// This currently exists as an enum due to an [issue in serde][1].\n\n///\n\n/// [1]: https://github.com/serde-rs/serde/issues/271\n\n#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]\n\n#[serde(rename_all = \"snake_case\", tag = \"kind\")]\n", "file_path": "src/common/messages/handshake.rs", "rank": 18, "score": 34015.5190995142 }, { "content": " username: 
\"wiz\".into(),\n\n };\n\n\n\n let expected_value = json!({\n\n \"kind\": \"auth_request\",\n\n \"username\": \"wiz\",\n\n });\n\n\n\n let serialized = serde_json::to_string(&msg).unwrap();\n\n\n\n assert_eq!(serde_json::to_value(&msg).unwrap(), expected_value);\n\n assert_eq!(\n\n serde_json::from_str::<AuthRequest>(&serialized).unwrap(),\n\n msg\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_serde_auth_response_message() {\n\n let tests = vec![\n", "file_path": "src/common/messages/handshake.rs", "rank": 19, "score": 34014.48688978151 }, { "content": " for (msg, expected_value) in tests {\n\n let serialized = serde_json::to_string(&msg).unwrap();\n\n assert_eq!(serde_json::to_value(&msg).unwrap(), expected_value);\n\n assert_eq!(\n\n serde_json::from_str::<AuthResponse>(&serialized).unwrap(),\n\n msg\n\n );\n\n }\n\n }\n\n}\n", "file_path": "src/common/messages/handshake.rs", "rank": 20, "score": 34012.06424646535 }, { "content": "#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]\n\n#[serde(rename_all = \"snake_case\", tag = \"kind\")]\n\npub enum ClientMessageKind {\n\n Goodbye(GoodbyeMessage),\n\n}\n\n\n\n\n\n#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]\n\npub struct GoodbyeMessage {\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub reason: Option<String>,\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use serde_json;\n\n\n\n use super::{*, ClientMessageKind::*};\n\n\n\n #[test]\n", "file_path": "src/common/messages/client.rs", "rank": 21, "score": 33469.62168337847 }, { "content": " let serialized = serde_json::to_string(&msg).unwrap();\n\n\n\n assert_eq!(serde_json::to_value(&msg).unwrap(), expected_value);\n\n assert_eq!(\n\n serde_json::from_str::<ClientMessageKind>(&serialized).unwrap(),\n\n msg\n\n );\n\n }\n\n }\n\n}", "file_path": "src/common/messages/client.rs", "rank": 22, "score": 33464.341429704844 }, { "content": " fn test_serde() {\n\n let tests = vec![\n\n (\n\n Goodbye(GoodbyeMessage { reason: None }),\n\n json!({\n\n \"kind\": \"goodbye\",\n\n }),\n\n ),\n\n (\n\n Goodbye(GoodbyeMessage {\n\n reason: Some(\"user quit\".into()),\n\n }),\n\n json!({\n\n \"kind\": \"goodbye\",\n\n \"reason\": \"user quit\",\n\n }),\n\n ),\n\n ];\n\n\n\n for (msg, expected_value) in tests {\n", "file_path": "src/common/messages/client.rs", "rank": 23, "score": 33464.004836033746 }, { "content": " FromServer(ServerMessageKind::Greeting(GreetingMessage {\n\n motd: \"Hello, world!\".into(),\n\n })),\n\n json!({\n\n \"kind\": \"greeting\",\n\n \"motd\": \"Hello, world!\",\n\n }),\n\n ),\n\n (\n\n FromClient {\n\n source: \"user\".into(),\n\n content: ClientMessageKind::Goodbye(GoodbyeMessage { reason: None }),\n\n },\n\n json!({\n\n \"kind\": \"goodbye\",\n\n \"source\": \"user\",\n\n }),\n\n ),\n\n (\n\n FromClient {\n", "file_path": "src/common/messages/server.rs", "rank": 29, "score": 31907.918213838035 }, { "content": "pub enum ServerMessageKind {\n\n Greeting(GreetingMessage),\n\n}\n\n\n\n#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]\n\npub struct GreetingMessage {\n\n pub motd: String,\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::{*, ServerMessage::*};\n\n use super::super::client::*;\n\n\n\n use serde_json;\n\n\n\n #[test]\n\n fn test_serde() {\n\n let tests = vec![\n\n (\n", "file_path": "src/common/messages/server.rs", "rank": 30, "score": 31907.401189476896 }, { "content": "use super::client::ClientMessageKind;\n\n\n\n#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]\n\n#[serde(untagged)]\n\npub enum 
ServerMessage {\n\n /// A message that is being forwarded by the server from another client.\n\n FromClient {\n\n /// The source of the message.\n\n source: String,\n\n\n\n /// The content of the message.\n\n #[serde(flatten)]\n\n content: ClientMessageKind,\n\n },\n\n\n\n FromServer(ServerMessageKind),\n\n}\n\n\n\n#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]\n\n#[serde(rename_all = \"snake_case\", tag = \"kind\")]\n", "file_path": "src/common/messages/server.rs", "rank": 31, "score": 31907.261480225963 }, { "content": " source: \"user\".into(),\n\n content: ClientMessageKind::Goodbye(GoodbyeMessage {\n\n reason: Some(\"Goodbye, world.\".into()),\n\n }),\n\n },\n\n json!({\n\n \"kind\": \"goodbye\",\n\n \"source\": \"user\",\n\n \"reason\": \"Goodbye, world.\",\n\n }),\n\n ),\n\n ];\n\n\n\n for (msg, expected_value) in tests {\n\n let serialized = serde_json::to_string(&msg).unwrap();\n\n\n\n assert_eq!(serde_json::to_value(&msg).unwrap(), expected_value);\n\n assert_eq!(\n\n serde_json::from_str::<ServerMessage>(&serialized).unwrap(),\n\n msg\n\n );\n\n }\n\n }\n\n}", "file_path": "src/common/messages/server.rs", "rank": 32, "score": 31902.59161787378 }, { "content": "pub mod handshake;\n\npub mod client;\n\npub mod server;", "file_path": "src/common/messages/mod.rs", "rank": 33, "score": 16318.016977708698 }, { "content": "extern crate failure;\n\nextern crate futures;\n\nextern crate serde;\n\n#[macro_use]\n\nextern crate serde_derive;\n\n#[macro_use]\n\nextern crate serde_json;\n\nextern crate tokio;\n\nextern crate tokio_io;\n\nextern crate tokio_serde_json;\n\n\n\nuse futures::stream::{SplitSink, SplitStream, Stream};\n\nuse tokio::net::TcpStream;\n\nuse tokio_io::codec::length_delimited::Framed;\n\nuse tokio_serde_json::{ReadJson, WriteJson};\n\n\n\npub mod messages;\n\n\n\npub type Send<M> = WriteJson<SplitSink<Framed<TcpStream>>, M>;\n\npub type Recv<M> = ReadJson<SplitStream<Framed<TcpStream>>, M>;\n\n\n", "file_path": "src/common/lib.rs", "rank": 34, "score": 15.65005479873265 } ]
Rust
src/config.rs
OtaK/vivid
ecaf0c95e2334971e0acc05ea572bec488d85f6f
use crate::error::VividError; use winapi::{ shared::ntdef::NULL, um::{shellapi::ShellExecuteA, winuser::SW_SHOWNORMAL}, }; pub const DEFAULT_CONFIG_FILENAME: &str = "vivid.toml"; #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] pub struct VideoMode { pub width: u32, pub height: u32, pub freq: u32, } #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] pub struct Program { pub exe_name: String, pub vibrance: u8, pub fullscreen_only: Option<bool>, pub resolution: Option<VideoMode>, } #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] pub struct Config { desktop_vibrance: u8, resolution: Option<VideoMode>, program_settings: Vec<Program>, } impl Default for Config { fn default() -> Self { Self { desktop_vibrance: 50, program_settings: vec![], resolution: None, } } } impl Config { fn sample() -> crate::VividResult<Self> { let vibrance = unsafe { crate::GPU.as_ref()?.write().get_vibrance()? }; let mut default = Self::default(); default.desktop_vibrance = vibrance; default.program_settings.push(Program { exe_name: "sample_program.exe".into(), vibrance, fullscreen_only: Some(false), resolution: None, }); Ok(default) } fn config_path() -> crate::VividResult<std::path::PathBuf> { let mut path = std::env::current_exe()?; path.set_file_name(DEFAULT_CONFIG_FILENAME); Ok(path) } fn load_file(maybe_path: Option<String>) -> crate::VividResult<std::fs::File> { use std::io::Write as _; let path = maybe_path.map_or_else(|| Self::config_path(), |path| Ok(path.into()))?; let res = std::fs::OpenOptions::new() .write(true) .read(true) .create_new(true) .open(path.clone()); if let Ok(mut file) = res { write!(file, "{}", toml::to_string_pretty(&Self::sample()?)?)?; Ok(file) } else { let file = std::fs::OpenOptions::new() .write(true) .read(true) .truncate(false) .open(path)?; Ok(file) } } pub fn load(maybe_path: Option<String>) -> crate::VividResult<Self> { use std::io::Read as _; let mut file = Self::load_file(maybe_path)?; let mut file_contents = vec![]; file.read_to_end(&mut file_contents)?; toml::from_slice(&file_contents).map_err(Into::into) } pub fn edit() -> crate::VividResult<()> { let _ = Self::load_file(None)?; let file_path = std::ffi::CString::new(Self::config_path()?.to_str().unwrap().as_bytes()).unwrap(); let hwnd = unsafe { ShellExecuteA( NULL as _, NULL as _, file_path.as_ptr(), NULL as _, NULL as _, SW_SHOWNORMAL, ) }; if hwnd as u32 > 32 { Ok(()) } else { return Err(VividError::windows_error()); } } pub fn vibrance_for_program(&self, program_exe: &str) -> Option<(u8, bool)> { self.program_settings .iter() .find(|&program| program.exe_name == program_exe) .map(|program| { ( program.vibrance, program.fullscreen_only.unwrap_or_default(), ) }) } pub fn default_vibrance(&self) -> u8 { self.desktop_vibrance } }
use crate::error::VividError; use winapi::{ shared::ntdef::NULL, um::{shellapi::ShellExecuteA, winuser::SW_SHOWNORMAL}, }; pub const DEFAULT_CONFIG_FILENAME: &str = "vivid.toml"; #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] pub struct VideoMode { pub width: u32, pub height: u32, pub freq: u32, } #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] pub struct Program { pub exe_name: String, pub vibrance: u8, pub fullscreen_only: Option<bool>, pub resolution: Option<VideoMode>, } #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] pub struct Config { desktop_vibrance: u8, resolution: Option<VideoMode>, program_settings: Vec<Program>, } impl Default for Config { fn default() -> Self { Self { desktop_vibrance: 50, program_settings: vec![], resolution: None, } } } impl Config { fn sample() -> crate::VividResult<Self> { let vibrance = unsafe { crate::GPU.as_ref()?.write().get_vibrance()? }; let mut default = Self::default(); default.desktop_vibrance = vibrance; default.program_settings.push(Program { exe_name: "sample_program.exe".into(), vibrance, fullscreen_only: Some(false), resolution: None, }); Ok(default) } fn config_path() -> crate::VividResult<std::path::PathBuf> { let mut path = std::env::current_exe()?; path.set_file_name(DEFAULT_CONFIG_FILENAME); Ok(path) } fn load_file(maybe_path: Option<String>) -> crate::VividResult<std::fs::File> { use std::io::Write as _; let path = maybe_path.map_or_else(|| Self::config_path(), |path| Ok(path.into()))?; let res = std::fs::OpenOptions::new() .write(true) .read(true) .create_new(true) .open(path.clone()); if let Ok(mut file) = res { write!(file, "{}", toml::to_string_pretty(&Self::sample()?)?)?; Ok(file) } else { let file = std::fs::OpenOptions::new() .write(true) .read(true) .truncate(false) .open(path)?; Ok(file) } } pub fn load(maybe_path: Option<String>) -> crate::VividResult<Self> { use std::io::Read as _; let mut file = Self::load_file(maybe_path)?; let mut file_contents = vec![]; file.read_to_end(&mut file_contents)?; toml::from_slice(&file_contents).map_err(Into::into) } pub fn edit() -> crate::VividResult<()> { let _ = Self::load_file(None)?; let file_path = std::ffi::CString::new(Self::config_path()?.to_str().unwrap().as_bytes()).unwrap(); let hwnd = unsafe { ShellExecuteA( NULL as _, NULL as _,
; } } pub fn vibrance_for_program(&self, program_exe: &str) -> Option<(u8, bool)> { self.program_settings .iter() .find(|&program| program.exe_name == program_exe) .map(|program| { ( program.vibrance, program.fullscreen_only.unwrap_or_default(), ) }) } pub fn default_vibrance(&self) -> u8 { self.desktop_vibrance } }
file_path.as_ptr(), NULL as _, NULL as _, SW_SHOWNORMAL, ) }; if hwnd as u32 > 32 { Ok(()) } else { return Err(VividError::windows_error())
function_block-random_span
[ { "content": "#[inline(always)]\n\nfn dll_exists(path: *const winapi::ctypes::c_char) -> bool {\n\n let hwnd = unsafe {\n\n winapi::um::libloaderapi::LoadLibraryExA(\n\n path,\n\n winapi::shared::ntdef::NULL,\n\n winapi::um::libloaderapi::LOAD_LIBRARY_AS_DATAFILE\n\n | winapi::um::libloaderapi::LOAD_LIBRARY_AS_IMAGE_RESOURCE,\n\n )\n\n };\n\n if hwnd.is_null() {\n\n false\n\n } else {\n\n unsafe {\n\n winapi::um::libloaderapi::DisableThreadLibraryCalls(hwnd);\n\n winapi::um::libloaderapi::FreeLibrary(hwnd);\n\n }\n\n true\n\n }\n\n}\n\n\n", "file_path": "src/adapter/mod.rs", "rank": 0, "score": 88768.92954033597 }, { "content": "#[inline(always)]\n\npub fn read_message(msg: &mut winapi::um::winuser::MSG) -> VividResult<()> {\n\n let message_result =\n\n unsafe { winapi::um::winuser::GetMessageA(msg, winapi::shared::ntdef::NULL as _, 0, 0) };\n\n\n\n if message_result != 0 {\n\n return Err(VividError::message_loop_error());\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n/// Processes win32 messages. Will return a boolean telling whether we should exit the message loop or not\n", "file_path": "src/w32_msgloop.rs", "rank": 1, "score": 77244.58287899065 }, { "content": "#[inline(always)]\n\npub fn process_message(msg: &winapi::um::winuser::MSG) -> bool {\n\n if msg.message == winapi::um::winuser::WM_QUIT {\n\n return true;\n\n }\n\n\n\n unsafe {\n\n winapi::um::winuser::TranslateMessage(msg);\n\n winapi::um::winuser::DispatchMessageW(msg);\n\n }\n\n\n\n false\n\n}\n", "file_path": "src/w32_msgloop.rs", "rank": 2, "score": 61241.45021766158 }, { "content": "pub fn register() -> crate::VividResult<()> {\n\n use std::os::windows::ffi::OsStrExt as _;\n\n use std::convert::TryInto as _;\n\n\n\n let hwnd = unsafe { GetActiveWindow() };\n\n\n\n let mut notify_icon_data = NOTIFYICONDATAW::default();\n\n notify_icon_data.hWnd = hwnd;\n\n // FIXME: exit code: 0xc0000138, STATUS_ORDINAL_NOT_FOUND\n\n // if unsafe { LoadIconMetric(\n\n // hwnd as _,\n\n // MAKEINTRESOURCEW(*IDI_APPLICATION),\n\n // LIM_SMALL.try_into().unwrap(),\n\n // &mut notify_icon_data.hIcon\n\n // ) } != S_OK {\n\n // return Err(crate::VividError::windows_error());\n\n // }\n\n notify_icon_data.uFlags = NIF_ICON | NIF_TIP | NIF_SHOWTIP | NIF_GUID;\n\n notify_icon_data.guidItem = GUID {\n\n Data1: 0x23995d22,\n", "file_path": "src/w32_notifyicon.rs", "rank": 3, "score": 50057.08547781024 }, { "content": "pub trait VibranceAdapter: std::fmt::Debug {\n\n fn set_vibrance(&mut self, vibrance: u8) -> VividResult<u8>;\n\n fn get_vibrance(&mut self) -> VividResult<u8>;\n\n fn get_sku(&mut self) -> VividResult<String>;\n\n fn get_vendor(&mut self) -> VividResult<GpuVendor>;\n\n fn get_system_type(&mut self) -> VividResult<SystemType>;\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, Eq, PartialEq)]\n\n#[allow(dead_code)]\n\npub enum GpuVendor {\n\n Nvidia,\n\n Amd,\n\n Ambiguous,\n\n Nothing,\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, Eq, PartialEq)]\n\npub enum SystemType {\n\n Desktop,\n", "file_path": "src/adapter/mod.rs", "rank": 4, "score": 39723.4225349252 }, { "content": "#[no_mangle]\n\npub fn handler(args: &crate::foreground_watch::ForegroundWatcherEvent) -> VividResult<()> {\n\n let gpu = unsafe { crate::GPU.as_ref()? };\n\n let previous_vibrance = gpu.write().get_vibrance()?;\n\n log::trace!(\"callback args: {:#?}\", args);\n\n let (vibrance, fullscreen_only) = unsafe { crate::CONFIG.as_ref()? }\n\n .vibrance_for_program(&args.process_exe)\n\n .map_or_else(\n\n || VividResult::Ok((unsafe { crate::CONFIG.as_ref()? 
}.default_vibrance(), false)),\n\n |values| Ok(values),\n\n )?;\n\n\n\n let apply = if fullscreen_only {\n\n log::trace!(\"{} requires fullscreen, detecting...\", args.process_exe);\n\n use winapi::um::shellapi;\n\n let mut notification_state: shellapi::QUERY_USER_NOTIFICATION_STATE =\n\n shellapi::QUERY_USER_NOTIFICATION_STATE::default();\n\n let api_result = unsafe { shellapi::SHQueryUserNotificationState(&mut notification_state) };\n\n if api_result == winapi::shared::winerror::S_OK {\n\n log::trace!(\"Found notification state: {}\", notification_state);\n\n match notification_state {\n", "file_path": "src/foreground_callback.rs", "rank": 5, "score": 39030.94505256871 }, { "content": "struct Opts {\n\n /// Launch an editor to edit the config file\n\n #[structopt(short, long)]\n\n edit: bool,\n\n /// Pass a custom configuration file path\n\n #[structopt(short = \"c\", long = \"config\")]\n\n config_file: Option<String>,\n\n /// Bypasses GPU detection and forces to load the NVidia-specific code.\n\n /// It can provoke errors if you don't own an NVidia GPU or if drivers cannot be found on your system.\n\n #[structopt(long)]\n\n nvidia: bool,\n\n /// Bypasses GPU detection and forces to load the AMD-specific code.\n\n /// It can provoke errors if you don't own an AMD GPU or if drivers cannot be found on your system.\n\n /// Warning: This is a placeholder flag and will not work, as AMD GPUs are not currently supported.\n\n #[structopt(long)]\n\n amd: bool,\n\n}\n\n\n\npub static mut GPU: VividResult<parking_lot::RwLock<adapter::Gpu>> = Err(VividError::NoGpuDetected);\n\npub static mut CONFIG: VividResult<config::Config> = Err(VividError::NoConfigurationLoaded);\n\n\n\n// lazy_static::lazy_static! {\n\n// pub static ref CONFIG: config::Config = config::Config::load().unwrap_or_default();\n\n// }\n\n\n", "file_path": "src/main.rs", "rank": 6, "score": 36858.735597753766 }, { "content": "#[cfg(not(windows))]\n\nfn main() {\n\n panic!(\"This product cannot be built on other platforms than Windows!\")\n\n}\n", "file_path": "build.rs", "rank": 7, "score": 33798.68164765324 }, { "content": "#[paw::main]\n\nfn main(opts: Opts) -> error::VividResult<()> {\n\n pretty_env_logger::init();\n\n\n\n if opts.edit {\n\n config::Config::edit()?;\n\n return Ok(());\n\n }\n\n\n\n unsafe {\n\n CONFIG = config::Config::load(opts.config_file);\n\n }\n\n\n\n let adapter = if opts.nvidia {\n\n adapter::Gpu::new_nvidia()?\n\n } else if opts.amd {\n\n adapter::Gpu::new_amd()?\n\n } else {\n\n adapter::Gpu::detect_gpu()?\n\n };\n\n\n", "file_path": "src/main.rs", "rank": 8, "score": 23515.880660748833 }, { "content": "pub struct ForegroundWatcherEvent {\n\n pub hwnd: HWND,\n\n pub process_id: usize,\n\n pub process_exe: String,\n\n pub process_path: std::path::PathBuf,\n\n}\n\n\n\n#[derive(Default, Clone)]\n\npub struct ForegroundWatcher {\n\n registered: bool,\n\n hook: Option<windef::HWINEVENTHOOK>,\n\n proc: winuser::WINEVENTPROC,\n\n}\n\n\n\nimpl ForegroundWatcher {\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n\n\n pub fn add_event_callback(&mut self, cb: fn(&ForegroundWatcherEvent) -> VividResult<()>) {\n", "file_path": "src/foreground_watch.rs", "rank": 16, "score": 16.15216578789217 }, { "content": "use crate::error::VividResult;\n\n\n\n#[cfg(all(windows, target_pointer_width = \"32\"))]\n\npub const LIBRARY_NAME: &[u8; 13] = b\"atiadlxx.dll\\0\";\n\n#[cfg(all(windows, target_pointer_width = \"64\"))]\n\npub const LIBRARY_NAME: &[u8; 13] = b\"atiadlxy.dll\\0\";\n\n#[derive(Debug)]\n\npub struct Amd 
{}\n\n\n\nimpl Amd {\n\n pub fn new() -> VividResult<Self> {\n\n todo!()\n\n }\n\n}\n\n\n\nimpl super::VibranceAdapter for Amd {\n\n fn set_vibrance(&mut self, _vibrance: u8) -> VividResult<u8> {\n\n todo!()\n\n }\n\n\n", "file_path": "src/adapter/amd.rs", "rank": 17, "score": 15.751827115266241 }, { "content": "use crate::arcmutex;\n\nuse crate::{\n\n error::{VividError, VividResult},\n\n ArcMutex,\n\n};\n\nuse nvapi_hi::{Display, Gpu};\n\n\n\n\n\n#[cfg(all(windows, target_pointer_width = \"32\"))]\n\npub const LIBRARY_NAME: &[u8; 10] = b\"nvapi.dll\\0\";\n\n#[cfg(all(windows, target_pointer_width = \"64\"))]\n\npub const LIBRARY_NAME: &[u8; 12] = b\"nvapi64.dll\\0\";\n\n\n\npub struct Nvidia {\n\n gpu: ArcMutex<Gpu>,\n\n displays: Vec<Display>,\n\n}\n\n\n\nunsafe impl Send for Nvidia {}\n\nunsafe impl Sync for Nvidia {}\n", "file_path": "src/adapter/nvidia.rs", "rank": 18, "score": 14.044991679730053 }, { "content": " };\n\n\n\n log::trace!(\"Adapter: {:#?}\", adapter);\n\n\n\n Self::new_with_adapter(adapter)\n\n }\n\n\n\n pub(crate) fn get_primary_monitor_name() -> VividResult<String> {\n\n let primary_monitor_hwnd = unsafe { winapi::um::winuser::MonitorFromWindow(std::ptr::null_mut(), winapi::um::winuser::MONITOR_DEFAULTTOPRIMARY) };\n\n let mut monitor_info = winapi::um::winuser::MONITORINFOEXW::default();\n\n monitor_info.cbSize = std::mem::size_of::<winapi::um::winuser::MONITORINFOEXW>() as u32;\n\n let res = unsafe { winapi::um::winuser::GetMonitorInfoW(primary_monitor_hwnd, &mut monitor_info as *mut _ as *mut _) };\n\n if res != winapi::shared::minwindef::TRUE {\n\n return Err(VividError::NoDisplayDetected);\n\n }\n\n let bytes: Vec<u16> = monitor_info.szDevice.iter().take_while(|b| **b != 0u16).map(|b| *b).collect();\n\n let monitor_name: std::ffi::OsString = std::os::windows::ffi::OsStringExt::from_wide(&bytes);\n\n let monitor_name = monitor_name.into_string().unwrap();\n\n Ok(monitor_name)\n\n }\n", "file_path": "src/adapter/mod.rs", "rank": 19, "score": 13.654547197197942 }, { "content": "\n\n pub(crate) fn new_nvidia() -> VividResult<Self> {\n\n Self::new_with_adapter(Box::new(nvidia::Nvidia::new()?))\n\n }\n\n\n\n pub(crate) fn new_amd() -> VividResult<Self> {\n\n Self::new_with_adapter(Box::new(amd::Amd::new()?))\n\n }\n\n\n\n fn new_with_adapter(mut adapter: Box<dyn VibranceAdapter + Send + Sync>) -> VividResult<Self> {\n\n Ok(Self {\n\n sku: adapter.get_sku()?,\n\n vendor: adapter.get_vendor()?,\n\n system_type: adapter.get_system_type()?,\n\n adapter,\n\n })\n\n }\n\n\n\n pub fn set_vibrance(&mut self, vibrance: u8) -> VividResult<u8> {\n\n self.adapter.set_vibrance(vibrance)\n\n }\n\n\n\n pub fn get_vibrance(&mut self) -> VividResult<u8> {\n\n self.adapter.get_vibrance()\n\n }\n\n}\n", "file_path": "src/adapter/mod.rs", "rank": 20, "score": 11.518552947159975 }, { "content": " Laptop,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Gpu {\n\n pub sku: String,\n\n pub vendor: GpuVendor,\n\n pub system_type: SystemType,\n\n pub adapter: Box<dyn VibranceAdapter + Send + Sync>,\n\n}\n\n\n\nimpl Gpu {\n\n pub fn detect_gpu() -> VividResult<Self> {\n\n let nvidia_exists =\n\n dll_exists(nvidia::LIBRARY_NAME.as_ptr() as *const winapi::ctypes::c_char);\n\n let amd_adl_exists =\n\n dll_exists(amd::LIBRARY_NAME.as_ptr() as *const winapi::ctypes::c_char);\n\n\n\n log::trace!(\n\n \"Detecting driver API DLLs: AMD = {} / Nvidia = {}\",\n", "file_path": "src/adapter/mod.rs", "rank": 21, "score": 9.541724968876322 }, { "content": "use crate::error::{VividError, VividResult, 
WindowsHookError};\n\nuse winapi::shared::windef::HWND;\n\nuse winapi::{\n\n shared::{minwindef::DWORD, ntdef::NULL, windef},\n\n um::{winnt::LONG, winuser},\n\n};\n\n\n\nlazy_static::lazy_static! {\n\n static ref CALLBACKS: parking_lot::RwLock<Vec<fn(&ForegroundWatcherEvent) -> VividResult<()>>> = parking_lot::RwLock::new(vec![]);\n\n pub(crate) static ref SYSTEM: parking_lot::RwLock<sysinfo::System> = {\n\n use sysinfo::SystemExt as _;\n\n parking_lot::RwLock::new(\n\n sysinfo::System::new_with_specifics(\n\n sysinfo::RefreshKind::default().with_processes()\n\n )\n\n )\n\n };\n\n}\n\n\n\n#[derive(Debug)]\n", "file_path": "src/foreground_watch.rs", "rank": 22, "score": 9.023892503427422 }, { "content": "\n\nimpl std::fmt::Debug for Nvidia {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n f.debug_struct(\"Nvidia\")\n\n .field(\"gpu\", &\"[OPAQUE]\")\n\n .field(\"displays\", &self.displays)\n\n .finish()\n\n }\n\n}\n\n\n\nimpl Nvidia {\n\n pub fn new() -> VividResult<Self> {\n\n for gpu in Gpu::enumerate()? {\n\n let displays = gpu.connected_displays()?;\n\n return Ok(Self {\n\n gpu: arcmutex(gpu),\n\n displays,\n\n });\n\n }\n\n\n", "file_path": "src/adapter/nvidia.rs", "rank": 23, "score": 8.899743587311782 }, { "content": "#![allow(dead_code)]\n\n\n\nuse crate::error::{VividError, VividResult};\n\nuse winapi::shared::minwindef::{BOOL, DWORD, FALSE, TRUE};\n\nuse winapi::um::consoleapi::SetConsoleCtrlHandler;\n\nuse winapi::um::winuser::WM_QUIT;\n\n\n\nstatic mut THREAD_ID: std::sync::atomic::AtomicU32 = std::sync::atomic::AtomicU32::new(0);\n\n\n\nunsafe extern \"system\" fn ctrlc_handler(_: DWORD) -> BOOL {\n\n log::trace!(\"received ctrl + c\");\n\n winapi::um::winuser::PostThreadMessageA(\n\n THREAD_ID.load(std::sync::atomic::Ordering::SeqCst),\n\n WM_QUIT,\n\n 0,\n\n 0,\n\n );\n\n TRUE\n\n}\n\n\n", "file_path": "src/w32_ctrlc.rs", "rank": 24, "score": 8.764821117487577 }, { "content": " unsafe {\n\n GPU = Ok(parking_lot::RwLock::new(adapter));\n\n }\n\n\n\n // Touch config and GPU to avoid way too lazy loading\n\n log::info!(\"current vibrance is: {}\", unsafe {\n\n GPU.as_ref()?.write().get_vibrance()?\n\n });\n\n log::info!(\"config loaded: {:#?}\", unsafe { CONFIG.as_ref()? });\n\n\n\n let mut watcher = foreground_watch::ForegroundWatcher::new();\n\n watcher.add_event_callback(foreground_callback::handler);\n\n watcher.register()?;\n\n log::trace!(\"is watcher registered? 
-> {}\", watcher.is_registered());\n\n\n\n //w32_notifyicon::register()?;\n\n\n\n let mut msg = unsafe { std::mem::zeroed() };\n\n #[cfg(debug_assertions)]\n\n unsafe {\n", "file_path": "src/main.rs", "rank": 25, "score": 8.399060122509493 }, { "content": "# Vivid\n\n\n\nSmol utility to change digital vibrance / saturation when a program within a list starts.\n\n\n\nBasically VibranceGUI but without the GUI and the bloat.\n\n\n\nOnly compatible with Nvidia GPUs for now, AMD support is planned (but we'll need testers!)\n\n\n\n## Installation\n\n\n\n* Grab the .exe in the releases\n\n* Create a `.vivid` folder in your user directory\n\n* Put the exe inside\n\n* Run a `cmd`/`powershell` session and navigate to the new directory, run `vivid.exe --edit` to create and edit a new configuration file.\n\n* Input the settings you want\n\n* Right click on the vivid.exe file in the aforementioned folder, click on \"Create a shortcut\"\n\n* Press Windows + R, type `shell:startup`\n\n* Drag the newly created shortcut to the folder that just opened\n\n* Double click `vivid.exe`\n\n* Enjoy!\n\n\n\nThe installation process will be streamlined at some point (self-installing executable with a flag) but for now please bear with the tedious installation.\n\n\n\n## Usage\n\n\n\n```text\n\nVivid 0.2.0\n\nby Mathieu Amiot / @OtaK_\n\nSmol utility to change digital vibrance / saturation when a program within a list starts\n\n\n\nUSAGE:\n\n vivid.exe [FLAGS]\n\n\n\nFLAGS:\n\n --amd Bypasses GPU detection and forces to load the AMD-specific code. It can provoke errors if you don't\n\n own an AMD GPU or if drivers cannot be found on your system. Warning: This is a placeholder flag\n\n and will not work, as AMD GPUs are not currently supported\n\n -e, --edit Launch an editor to edit the config file\n\n -h, --help Prints help information\n\n --nvidia Bypasses GPU detection and forces to load the NVidia-specific code. 
It can provoke errors if you\n\n don't own an NVidia GPU or if drivers cannot be found on your system\n\n -V, --version Prints version information\n\n```\n\n\n\n## Configuration format\n\n\n\nThe file format used is [TOML](https://toml.io/en/).\n\n\n\nSample structure:\n\n\n\n```toml\n\n# Vibrance to restore when any non-selected program comes to foreground, included explorer.exe\n\ndesktop_vibrance = 50\n\n\n", "file_path": "README.md", "rank": 26, "score": 8.398615068538282 }, { "content": " CALLBACKS.write().push(cb);\n\n }\n\n\n\n pub fn is_registered(&self) -> bool {\n\n self.registered\n\n }\n\n\n\n pub fn register(&mut self) -> VividResult<()> {\n\n self.proc = Some(Self::event_proc);\n\n let inner_hook = unsafe {\n\n winuser::SetWinEventHook(\n\n winuser::EVENT_SYSTEM_FOREGROUND,\n\n winuser::EVENT_SYSTEM_FOREGROUND,\n\n NULL as _,\n\n self.proc,\n\n 0,\n\n 0,\n\n winuser::WINEVENT_OUTOFCONTEXT | winuser::WINEVENT_SKIPOWNPROCESS,\n\n )\n\n };\n", "file_path": "src/foreground_watch.rs", "rank": 27, "score": 8.310066472828513 }, { "content": "\n\n let mut inspection_result: Option<ForegroundWatcherEvent> = (*SYSTEM)\n\n .read()\n\n .get_process(process_id)\n\n .map(move |process| {\n\n log::trace!(\n\n \"Found process {} [{}]\",\n\n process.name(),\n\n process.exe().display()\n\n );\n\n let process_path: std::path::PathBuf = process.exe().into();\n\n let mut process_exe: String = process.name().into();\n\n if process_exe.len() == 0 {\n\n if let Some(exe_name) =\n\n process_path.file_name().and_then(std::ffi::OsStr::to_str)\n\n {\n\n process_exe.push_str(exe_name);\n\n }\n\n }\n\n ForegroundWatcherEvent {\n", "file_path": "src/foreground_watch.rs", "rank": 28, "score": 8.040094096336333 }, { "content": " fn get_vibrance(&mut self) -> VividResult<u8> {\n\n todo!()\n\n }\n\n\n\n fn get_sku(&mut self) -> VividResult<String> {\n\n todo!()\n\n }\n\n\n\n fn get_vendor(&mut self) -> VividResult<super::GpuVendor> {\n\n todo!()\n\n }\n\n\n\n fn get_system_type(&mut self) -> VividResult<super::SystemType> {\n\n todo!()\n\n }\n\n}\n", "file_path": "src/adapter/amd.rs", "rank": 29, "score": 7.910306889305108 }, { "content": " Err(VividError::NoGpuDetected)\n\n }\n\n\n\n fn get_target_display(&mut self) -> VividResult<&Display> {\n\n self.displays = self.gpu.lock().connected_displays()?;\n\n let target_display = super::Gpu::get_primary_monitor_name()?;\n\n self.displays\n\n .iter()\n\n .find(|display| display.display_name == target_display)\n\n .ok_or_else(|| VividError::NoDisplayDetected)\n\n }\n\n}\n\n\n\nimpl super::VibranceAdapter for Nvidia {\n\n fn set_vibrance(&mut self, vibrance: u8) -> VividResult<u8> {\n\n self.get_target_display()?\n\n .set_vibrance(vibrance)\n\n .map_err(Into::into)\n\n }\n\n\n", "file_path": "src/adapter/nvidia.rs", "rank": 30, "score": 7.718661978138762 }, { "content": " id_event_thread: DWORD,\n\n dwms_event_time: DWORD,\n\n ) {\n\n use sysinfo::{ProcessExt as _, SystemExt as _};\n\n log::trace!(\n\n \"ForegroundWatcher::event_proc({:?}, {}, {:?}, {}, {}, {}, {})\",\n\n event_hook,\n\n event,\n\n hwnd,\n\n id_object,\n\n id_child,\n\n id_event_thread,\n\n dwms_event_time\n\n );\n\n let mut process_id = 0u32;\n\n let _ = winapi::um::winuser::GetWindowThreadProcessId(hwnd, &mut process_id);\n\n let process_id = process_id as usize;\n\n log::trace!(\"Found process id #{} from hwnd\", process_id);\n\n\n\n let _ = (*SYSTEM).write().refresh_process(process_id);\n", "file_path": "src/foreground_watch.rs", "rank": 31, "score": 7.615815177652611 }, { "content": " Data2: 
0x5b28,\n\n Data3: 0x4300,\n\n Data4: 0x8fdbccebf071u64.to_le_bytes()\n\n };\n\n unsafe { *notify_icon_data.u.uVersion_mut() = NOTIFYICON_VERSION_4; }\n\n notify_icon_data.uID = 0x1337;\n\n notify_icon_data.szTip = {\n\n let mut s = [0u16; 128];\n\n s[..6].copy_from_slice(&OsStr::new(\"Vivid\\0\").encode_wide().collect::<Vec<u16>>());\n\n s\n\n };\n\n notify_icon_data.dwState = NIS_HIDDEN;\n\n notify_icon_data.szInfoTitle = {\n\n let mut s = [0u16; 64];\n\n s[..6].copy_from_slice(&OsStr::new(\"Vivid\\0\").encode_wide().collect::<Vec<u16>>());\n\n s\n\n };\n\n notify_icon_data.dwInfoFlags = NIIF_NOSOUND | NIIF_RESPECT_QUIET_TIME | NIIF_USER;\n\n notify_icon_data.hBalloonIcon = NULL as _;\n\n\n", "file_path": "src/w32_notifyicon.rs", "rank": 32, "score": 7.44289782434544 }, { "content": "\n\n if inner_hook != NULL as _ {\n\n self.hook = Some(inner_hook);\n\n self.registered = true;\n\n log::trace!(\"ForegroundWatcher::register() -> successful\");\n\n } else {\n\n self.proc = None;\n\n log::error!(\"ForegroundWatcher::register() -> failed\");\n\n return Err(WindowsHookError::SetWinEventHook(std::io::Error::last_os_error()).into());\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn unregister(&mut self) -> VividResult<()> {\n\n if let Some(hook) = self.hook.take() {\n\n if unsafe { winuser::UnhookWinEvent(hook) } != 0 {\n\n log::trace!(\"ForegroundWatcher::unregister() -> successful\");\n\n self.proc = None;\n\n self.registered = false;\n", "file_path": "src/foreground_watch.rs", "rank": 33, "score": 7.0402593753787635 }, { "content": " NvAPIError(#[from] nvapi_hi::sys::Status),\n\n #[error(transparent)]\n\n Other(#[from] anyhow::Error),\n\n}\n\n\n\nimpl VividError {\n\n pub fn message_loop_error() -> Self {\n\n Self::WindowsMessageLoopError(std::io::Error::last_os_error())\n\n }\n\n\n\n pub fn windows_error() -> Self {\n\n Self::WindowsOtherError(std::io::Error::last_os_error())\n\n }\n\n}\n\n\n\npub type VividResult<T> = Result<T, VividError>;\n", "file_path": "src/error.rs", "rank": 34, "score": 6.35677799916547 }, { "content": "pub(crate) type ArcMutex<T> = std::sync::Arc<parking_lot::Mutex<T>>;\n\npub(crate) fn arcmutex<T: Into<parking_lot::Mutex<T>>>(x: T) -> ArcMutex<T> {\n\n std::sync::Arc::new(x.into())\n\n}\n\n\n\n#[derive(Debug, structopt::StructOpt)]\n\n#[structopt(\n\n name = \"Vivid\",\n\n about = \"Smol utility to change digital vibrance / saturation when a program within a list starts\",\n\n author = \"by Mathieu Amiot / @OtaK_\"\n\n)]\n", "file_path": "src/main.rs", "rank": 35, "score": 6.296646331975742 }, { "content": " fn get_vibrance(&mut self) -> VividResult<u8> {\n\n self.get_target_display()?\n\n .get_vibrance()\n\n .map_err(From::from)\n\n }\n\n\n\n fn get_sku(&mut self) -> VividResult<String> {\n\n Ok(self.gpu.lock().info()?.name)\n\n }\n\n\n\n fn get_vendor(&mut self) -> VividResult<super::GpuVendor> {\n\n Ok(super::GpuVendor::Nvidia)\n\n }\n\n\n\n fn get_system_type(&mut self) -> VividResult<super::SystemType> {\n\n Ok(match self.gpu.lock().info()?.system_type {\n\n nvapi_hi::SystemType::Desktop | nvapi_hi::SystemType::Unknown => {\n\n super::SystemType::Desktop\n\n }\n\n nvapi_hi::SystemType::Laptop => super::SystemType::Laptop,\n\n })\n\n }\n\n}\n", "file_path": "src/adapter/nvidia.rs", "rank": 36, "score": 6.234688403063299 }, { "content": " hwnd,\n\n process_id,\n\n process_exe,\n\n process_path,\n\n }\n\n });\n\n\n\n if let Some(event) = inspection_result.take() {\n\n CALLBACKS.read().iter().for_each(|f| {\n\n if let Err(e) = f(&event) {\n\n 
log::error!(\"ForegroundWatcher::event_proc: Error in callback: {}\", e);\n\n }\n\n })\n\n } else {\n\n log::error!(\"{}\", VividError::ProcessNotAvailable(process_id));\n\n }\n\n }\n\n}\n\n\n\nimpl Drop for ForegroundWatcher {\n\n fn drop(&mut self) {\n\n while self.registered {\n\n let _ = self.unregister();\n\n }\n\n\n\n CALLBACKS.write().clear();\n\n }\n\n}\n", "file_path": "src/foreground_watch.rs", "rank": 37, "score": 6.183147238705668 }, { "content": "use winapi::{\n\n um::{\n\n shellapi::{\n\n NOTIFYICONDATAW, Shell_NotifyIconW, NOTIFYICON_VERSION_4,\n\n NIM_SETVERSION, NIM_ADD, NIS_HIDDEN, NIIF_NOSOUND,\n\n NIIF_RESPECT_QUIET_TIME, NIIF_USER,\n\n NIF_GUID, NIF_SHOWTIP, NIF_TIP, NIF_ICON,\n\n },\n\n winuser::{IDI_APPLICATION, GetActiveWindow, MAKEINTRESOURCEW},\n\n commctrl::{LIM_SMALL, LoadIconMetric},\n\n },\n\n shared::{\n\n minwindef::TRUE,\n\n ntdef::NULL,\n\n guiddef::GUID,\n\n winerror::S_OK,\n\n }\n\n};\n\n\n\nuse std::ffi::OsStr;\n\n\n", "file_path": "src/w32_notifyicon.rs", "rank": 38, "score": 5.848498448550005 }, { "content": "# Program-specific settings\n\n[[program_settings]]\n\nexe_name = \"r5apex.exe\" # Name of the program to react on\n\nvibrance = 72 # Vibrance value in percentage to apply when this program comes to foreground.\n\nfullscreen_only = false # Whether or not we only apply settings when the program comes to foreground in FullScreen mode\n\n\n\n[[program_settings]]\n\nexe_name = \"your_favorite_program.exe\"\n\nvibrance = 100\n\nfullscreen_only = true\n\n```\n\n\n\n## Roadmap\n\n\n\n* [x] Docs improvements\n\n* [ ] Shell Icon (notification area) support\n\n* [ ] NSIS installer\n\n* [ ] Resolution / Display mode change support\n\n\n\n## Credits\n\n\n\n* Mathieu Amiot / OtaK_ - Author of this program\n\n* The nvapi-rs developers\n\n* VibranceGUI for the inspiration\n\n\n\n## License\n\n\n\nLicensed under either of these:\n\n\n\n* Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or\n\n [https://www.apache.org/licenses/LICENSE-2.0](https://www.apache.org/licenses/LICENSE-2.0)\n\n* MIT license ([LICENSE-MIT](LICENSE-MIT) or\n\n [https://opensource.org/licenses/MIT](https://opensource.org/licenses/MIT))\n", "file_path": "README.md", "rank": 39, "score": 5.784527572015979 }, { "content": "pub unsafe fn init_ctrlc() -> VividResult<()> {\n\n THREAD_ID.store(\n\n winapi::um::processthreadsapi::GetCurrentThreadId(),\n\n std::sync::atomic::Ordering::SeqCst,\n\n );\n\n if SetConsoleCtrlHandler(Some(ctrlc_handler), TRUE) == FALSE {\n\n return Err(VividError::windows_error());\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "src/w32_ctrlc.rs", "rank": 40, "score": 4.951676651024977 }, { "content": " return Ok(());\n\n } else {\n\n log::error!(\"ForegroundWatcher::unregister() -> failed\");\n\n self.proc = None;\n\n self.registered = false;\n\n return Err(\n\n WindowsHookError::UnhookWinEvent(std::io::Error::last_os_error()).into(),\n\n );\n\n }\n\n }\n\n\n\n Err(WindowsHookError::NoHookToUnRegister(std::io::Error::last_os_error()).into())\n\n }\n\n\n\n unsafe extern \"system\" fn event_proc(\n\n event_hook: windef::HWINEVENTHOOK,\n\n event: DWORD,\n\n hwnd: HWND,\n\n id_object: LONG,\n\n id_child: LONG,\n", "file_path": "src/foreground_watch.rs", "rank": 41, "score": 4.802120182542945 }, { "content": " if unsafe { Shell_NotifyIconW(NIM_ADD, &mut notify_icon_data) } == TRUE {\n\n if unsafe { Shell_NotifyIconW(NIM_SETVERSION, &mut notify_icon_data) } == TRUE {\n\n Ok(())\n\n } else {\n\n Err(crate::VividError::windows_error())\n\n }\n\n } else {\n\n 
Err(crate::VividError::windows_error())\n\n }\n\n}\n", "file_path": "src/w32_notifyicon.rs", "rank": 42, "score": 4.421620695548883 }, { "content": "#![cfg_attr(not(debug_assertions), windows_subsystem = \"windows\")]\n\n\n\n// TODO: Support AMD GPUs\n\n// TODO: Create NotificationArea Icon with `Shell_NotifyIconA`\n\n// TODO: Tweak release process to build a NSIS-powered installer\n\n// TODO: Support changing desktop resolution on application start\n\n\n\nmod adapter;\n\nmod config;\n\nmod foreground_callback;\n\nmod foreground_watch;\n\nmod w32_msgloop;\n\n// mod w32_notifyicon;\n\n#[cfg(debug_assertions)]\n\nmod w32_ctrlc;\n\n\n\nmod error;\n\n\n\nuse self::error::*;\n\n\n", "file_path": "src/main.rs", "rank": 43, "score": 3.928531425130874 }, { "content": "#[derive(Debug, thiserror::Error)]\n\npub enum WindowsHookError {\n\n #[error(\"Failed to hook w32 event [SetWinEventHook]\")]\n\n SetWinEventHook(std::io::Error),\n\n #[error(\"Failed to unhook w32 event [UnhookWinEvent]\")]\n\n UnhookWinEvent(std::io::Error),\n\n #[error(\"There's no hook to unhook! You should call register() first.\")]\n\n NoHookToUnRegister(std::io::Error),\n\n}\n\n\n\n#[derive(Debug, thiserror::Error)]\n\npub enum VividError {\n\n #[error(transparent)]\n\n SelfError(#[from] &'static Self),\n\n #[error(transparent)]\n\n IoError(#[from] std::io::Error),\n\n #[error(transparent)]\n\n SerializeError(#[from] toml::ser::Error),\n\n #[error(transparent)]\n\n DeserializeError(#[from] toml::de::Error),\n", "file_path": "src/error.rs", "rank": 44, "score": 3.2140653585823418 }, { "content": " shellapi::QUNS_RUNNING_D3D_FULL_SCREEN\n\n | shellapi::QUNS_PRESENTATION_MODE\n\n | shellapi::QUNS_ACCEPTS_NOTIFICATIONS => true,\n\n _ => false,\n\n }\n\n } else {\n\n false\n\n }\n\n } else {\n\n true\n\n };\n\n\n\n log::trace!(\n\n \"Vibrance: old = {} / new = {} | to be applied = {}\",\n\n previous_vibrance,\n\n vibrance,\n\n apply\n\n );\n\n if apply && vibrance != previous_vibrance {\n\n log::trace!(\"Applying new vibrance = {}\", vibrance);\n\n gpu.write().set_vibrance(vibrance)?;\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "src/foreground_callback.rs", "rank": 45, "score": 3.1459088893030174 }, { "content": "use crate::error::VividResult;\n\n\n\n#[no_mangle]\n", "file_path": "src/foreground_callback.rs", "rank": 46, "score": 2.51368953995902 }, { "content": "use crate::error::{VividError, VividResult};\n\n\n\nmod amd;\n\nmod nvidia;\n\n\n\n#[inline(always)]\n", "file_path": "src/adapter/mod.rs", "rank": 47, "score": 2.2318774534021526 }, { "content": "use crate::error::{VividError, VividResult};\n\n\n\n/// Fetches a message from the Win32 event loop and raises an error if any error occured.\n\n#[inline(always)]\n", "file_path": "src/w32_msgloop.rs", "rank": 48, "score": 2.0664015852610147 }, { "content": " w32_ctrlc::init_ctrlc()?;\n\n }\n\n\n\n log::trace!(\"w32 waitloop started\");\n\n loop {\n\n w32_msgloop::read_message(&mut msg)?;\n\n log::trace!(\"Got W32 Message: {}\", msg.message);\n\n if w32_msgloop::process_message(&msg) {\n\n break;\n\n }\n\n }\n\n\n\n log::info!(\"Exiting...\");\n\n Ok(())\n\n}\n", "file_path": "src/main.rs", "rank": 49, "score": 1.6132906012541677 }, { "content": " amd_adl_exists,\n\n nvidia_exists\n\n );\n\n\n\n let vendor = if nvidia_exists && amd_adl_exists {\n\n GpuVendor::Ambiguous\n\n } else if nvidia_exists {\n\n GpuVendor::Nvidia\n\n } else if amd_adl_exists {\n\n GpuVendor::Amd\n\n } else {\n\n GpuVendor::Nothing\n\n };\n\n\n\n log::trace!(\"Creating adapter...\");\n\n let adapter: 
Box<dyn VibranceAdapter + Send + Sync> = match vendor {\n\n GpuVendor::Nvidia => Box::new(nvidia::Nvidia::new()?),\n\n GpuVendor::Amd => Box::new(amd::Amd::new()?),\n\n GpuVendor::Ambiguous => return Err(VividError::DualDriversDetected),\n\n GpuVendor::Nothing => return Err(VividError::NoGpuDetected),\n", "file_path": "src/adapter/mod.rs", "rank": 50, "score": 1.1289403225998016 }, { "content": " #[error(transparent)]\n\n WindowsHookError(#[from] WindowsHookError),\n\n #[error(transparent)]\n\n WindowsMessageLoopError(std::io::Error),\n\n #[error(transparent)]\n\n WindowsOtherError(std::io::Error),\n\n #[error(r#\"Vivid detected both AMD and Nvidia drivers on your system.\n\nPlease launch the app with the appropriate flag to choose which driver you use to display.\"#r)]\n\n DualDriversDetected,\n\n #[error(\"Vivid couldn't detect any GPU on your system. Is your computer okay?\")]\n\n NoGpuDetected,\n\n #[error(\"Vivid couldn't detect any Displays on your system. How are you seeing this?\")]\n\n NoDisplayDetected,\n\n #[error(\"Configuration isn't loaded just yet!\")]\n\n NoConfigurationLoaded,\n\n #[error(\n\n \"Vivid couldn't inspect the process with PID #{0}. Probably because it's system owned.\"\n\n )]\n\n ProcessNotAvailable(usize),\n\n #[error(transparent)]\n", "file_path": "src/error.rs", "rank": 51, "score": 1.0443363109925579 } ]
Rust
adapter/deps/lldb/src/sb/sbdata.rs
naari3/vscode-lldb
bed54848119cb0e8846ed198c5bfd652f650922b
use super::*; use std::marker::PhantomData; cpp_class!(unsafe struct _SBData as "SBData"); unsafe impl Send for _SBData {} #[repr(transparent)] pub struct SBData<'a> { _inner: _SBData, _marker: PhantomData<&'a ()>, } pub type SBDataOwned = SBData<'static>; impl<'b> SBData<'b> { pub fn new() -> SBDataOwned { cpp!(unsafe [] -> SBData as "SBData" { return SBData(); }) } pub fn borrow_bytes<'a>(bytes: &'a [u8], endian: ByteOrder, addr_size: usize) -> SBData<'a> { let buf = bytes.as_ptr(); let size = bytes.len(); let inner = cpp!(unsafe [buf as "void*", size as "size_t", endian as "ByteOrder", addr_size as "size_t"] -> _SBData as "SBData" { SBData data; SBError error; data.SetData(error, buf, size, endian, addr_size); return data; }); SBData { _inner: inner, _marker: PhantomData, } } pub fn from_cstr(cstr: &CStr, endian: ByteOrder, addr_size: usize) -> SBDataOwned { let ptr = cstr.as_ptr(); cpp!(unsafe [ptr as "const char*", endian as "ByteOrder", addr_size as "size_t"] -> SBData as "SBData" { return SBData::CreateDataFromCString(endian, addr_size, ptr); }) } pub fn clear(&mut self) { cpp!(unsafe [self as "SBData*"] { return self->Clear(); }) } pub fn byte_order(&self) -> ByteOrder { cpp!(unsafe [self as "SBData*"] -> ByteOrder as "ByteOrder" { return self->GetByteOrder(); }) } pub fn address_byte_size(&self) -> usize { cpp!(unsafe [self as "SBData*"] -> usize as "size_t" { return (size_t)self->GetAddressByteSize(); }) } pub fn byte_size(&self) -> usize { cpp!(unsafe [self as "SBData*"] -> usize as "size_t" { return self->GetByteSize(); }) } pub fn read_f32(&self, offset: u64) -> Result<f32, SBError> { let mut error = SBError::new(); let result = cpp!(unsafe [self as "SBData*", mut error as "SBError", offset as "offset_t"] -> f32 as "float" { return self->GetFloat(error, offset); }); if error.is_success() { Ok(result) } else { Err(error) } } pub fn read_f64(&self, offset: u64) -> Result<f64, SBError> { let mut error = SBError::new(); let result = cpp!(unsafe [self as "SBData*", mut error as "SBError", offset as "offset_t"] -> f64 as "double" { return self->GetDouble(error, offset); }); if error.is_success() { Ok(result) } else { Err(error) } } pub fn read_address(&self, offset: u64) -> Result<Address, SBError> { let mut error = SBError::new(); let result = cpp!(unsafe [self as "SBData*", mut error as "SBError", offset as "offset_t"] -> Address as "addr_t" { return self->GetAddress(error, offset); }); if error.is_success() { Ok(result) } else { Err(error) } } pub fn read_u8(&self, offset: u64) -> Result<u8, SBError> { let mut error = SBError::new(); let result = cpp!(unsafe [self as "SBData*", mut error as "SBError", offset as "offset_t"] -> u8 as "uint8_t" { return self->GetUnsignedInt8(error, offset); }); if error.is_success() { Ok(result) } else { Err(error) } } pub fn read_u16(&self, offset: u64) -> Result<u16, SBError> { let mut error = SBError::new(); let result = cpp!(unsafe [self as "SBData*", mut error as "SBError", offset as "offset_t"] -> u16 as "uint16_t" { return self->GetUnsignedInt16(error, offset); }); if error.is_success() { Ok(result) } else { Err(error) } } pub fn read_u32(&self, offset: u64) -> Result<u32, SBError> { let mut error = SBError::new(); let result = cpp!(unsafe [self as "SBData*", mut error as "SBError", offset as "offset_t"] -> u32 as "uint32_t" { return self->GetUnsignedInt32(error, offset); }); if error.is_success() { Ok(result) } else { Err(error) } } pub fn read_u64(&self, offset: u64) -> Result<u64, SBError> { let mut error = SBError::new(); let result = 
cpp!(unsafe [self as "SBData*", mut error as "SBError", offset as "offset_t"] -> u64 as "uint64_t" { return self->GetUnsignedInt64(error, offset); }); if error.is_success() { Ok(result) } else { Err(error) } } pub fn read_string(&self, offset: u64) -> Result<*const c_char, SBError> { let mut error = SBError::new(); let result = cpp!(unsafe [self as "SBData*", mut error as "SBError", offset as "offset_t"] -> *const c_char as "const char*" { return self->GetString(error, offset); }); if error.is_success() { Ok(result) } else { Err(error) } } pub fn read_raw_data(&self, offset: u64, buffer: &mut [u8]) -> Result<(), SBError> { let ptr = buffer.as_ptr(); let size = buffer.len(); let mut error = SBError::new(); cpp!(unsafe [self as "SBData*", mut error as "SBError", offset as "offset_t", ptr as "void*", size as "size_t"] -> usize as "size_t" { return self->ReadRawData(error, offset, ptr, size); }); if error.is_success() { Ok(()) } else { Err(error) } } } impl<'a> IsValid for SBData<'a> { fn is_valid(&self) -> bool { cpp!(unsafe [self as "SBData*"] -> bool as "bool" { return self->IsValid(); }) } } impl<'a> fmt::Debug for SBData<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { debug_descr(f, |descr| { cpp!(unsafe [self as "SBData*", descr as "SBStream*"] -> bool as "bool" { return self->GetDescription(*descr); }) }) } } #[derive(Clone, Copy, Eq, PartialEq, Debug)] #[repr(u32)] pub enum ByteOrder { Invalid = 0, Big = 1, PDP = 2, Little = 4, }
use super::*; use std::marker::PhantomData; cpp_class!(unsafe struct _SBData as "SBData"); unsafe impl Send for _SBData {} #[repr(transparent)] pub struct SBData<'a> { _inner: _SBData, _marker: PhantomData<&'a ()>, } pub type SBDataOwned = SBData<'static>; impl<'b> SBData<'b> { pub fn new() -> SBDataOwned { cpp!(unsafe [] -> SBData as "SBData" { return SBData(); }) } pub fn borrow_bytes<'a>(bytes: &'a [u8], endian: ByteOrder, addr_size: usize) -> SBData<'a> { let buf = bytes.as_ptr(); let size = bytes.len(); let inner = cpp!(unsafe [buf as "void*", size as "size_t", endian as "ByteOrder", addr_size as "size_t"] -> _SBData as "SBData" { SBData data; SBError error; data.SetData(error, buf, size, endian, addr_size); return data; }); SBData { _inner: inner, _marker: PhantomData, } } pub fn from_cstr(cstr: &CStr, endian: ByteOrder, addr_size: usize) -> SBDataOwned { let ptr = cstr.as_ptr(); cpp!(unsafe [ptr as "const char*", endian as "ByteOrder", addr_size as "size_t"] -> SBData as "SBData" { return SBData::CreateDataFromCString(endian, addr_size, ptr); }) } pub fn clear(&mut self) { cpp!(unsafe [self as "SBData*"] { return self->Clear(); }) } pub fn byte_order(&self) -> ByteOrder { cpp!(unsafe [self as "SBData*"] -> ByteOrder as "ByteOrder" { return self->GetByteOrder(); }) } pub fn address_byte_size(&self) -> usize { cpp!(unsafe [self as "SBData*"] -> usize as "size_t" { return (size_t)self->GetAddressByteSize(); }) } pub fn byte_size(&self) -> usize { cpp!(unsafe [self as "SBData*"] -> usize as "size_t" { return self->GetByteSize(); }) } pub fn read_f32(&self, offset: u64) -> Result<f32, SBError> { let mut error = SBError::new(); let result = cpp!(unsafe [self as "SBData*", mut error as "SBError", offset as "offset_t"] -> f32 as "float" { return self->GetFloat(error, offset); }); if error.is_success() { Ok(result) } else { Err(error) } } pub fn read_f64(&self, offset: u64) -> Result<f64, SBError> { let mut error = SBError::new(); let result = cpp!(unsafe [self as "SBData*", mut error as "SBError", offset as "offset_t"] -> f64 as "double" { return self->GetDouble(error, offset); }); if error.is_success() { Ok(result) } else { Err(error) } } pub fn read_address(&self, offset: u64) -> Result<Address, SBError> { let mut error = SBError::new(); let result = cpp!(unsafe [self as "SBData*", mut error as "SBError", offset as "offset_t"] -> Address as "addr_t" { return self->GetAddress(error, offset); }); if error.is_success() { Ok(result) } else { Err(error) } } pub fn read_u8(&self, offset: u64) -> Result<u8, SBError> { let mut error = SBError::new(); let result = cpp!(unsafe [self as "SBData*", mut error as "SBError", offset as "offset_t"] -> u8 as "uint8_t" { return self->GetUnsignedInt8(error, offset); }); if error.is_success() { Ok(result) } else { Err(error) } } pub fn read_u16(&self, offset: u64) -> Result<u16, SBError> { let mut error = SBError::new(); let result = cpp!(unsafe [self as "SBData*", mut error as "SBError", offset as "offset_t"] -> u16 as "uint16_t" { return self->GetUnsignedInt16(error, offset); }); if error.is_success() { Ok(result) } else { Err(error) } } pub fn read_u32(&self, offset: u64) -> Result<u32, SBError> { let mut error = SBError::new(); let result = cpp!(unsafe [self as "SBData*", mut error as "SBError", offset as "offset_t"] -> u32 as "uint32_t" { return self->GetUnsignedInt32(error, offset); }); if error.is_success() { Ok(result) } else { Err(error) } } pub fn read_u64(&self, offset: u64) -> Result<u64, SBError> { let mut error = SBError::new(); let result = 
cpp!(unsafe [self as "SBData*", mut error as "SBError", offset as "offset_t"] -> u64 as "uint64_t" { return self->GetUnsignedInt64(error, offset); }); if error.is_success() { Ok(result) } else { Err(error) } } pub fn read_string(&self, offset: u64) -> Result<*const c_char, SBError> { let mut error = SBError::new(); let result = cpp!(unsafe [self as "SBData*", mut error as "SBError", offset as "offset_t"] -> *const c_char as "const char*" { return self->GetString(error, offset); }); if error.is_success() { Ok(result) } else { Err(error) } } pub fn read_raw_data(&self, offset: u64, buffer: &mut [u8]) -> Result<(), SBError> { let ptr = buffer.as_ptr(); let size = buffer.len(); let mut error = SBError::new(); cpp!(unsafe [self as "SBData*", mut error as "SBError", offset as "offset_t", ptr as "void*", size as "size_t"] -> usize as "size_t" { return self->ReadRawData(error, offset, ptr, size); }); if error.is_success() { Ok(()) } else { Err(error) } } } impl<'a> IsValid for SBData<'a> { fn is_valid(&self) -> bool { cpp!(unsafe [self as "SBData*"] -> bool as "bool" { return self->IsValid(); }) } } impl<'a> fmt::Debug for SBData<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
} } #[derive(Clone, Copy, Eq, PartialEq, Debug)] #[repr(u32)] pub enum ByteOrder { Invalid = 0, Big = 1, PDP = 2, Little = 4, }
debug_descr(f, |descr| { cpp!(unsafe [self as "SBData*", descr as "SBStream*"] -> bool as "bool" { return self->GetDescription(*descr); }) })
call_expression
[ { "content": "// The returned FILE takes ownership of file's descriptor.\n\npub fn cfile_from_file(file: File, write: bool) -> Result<*mut FILE, SBError> {\n\n #[cfg(unix)]\n\n let fd = file.into_raw_fd() as isize;\n\n #[cfg(windows)]\n\n let fd = file.into_raw_handle() as isize;\n\n\n\n let mut error = SBError::new();\n\n let cfile = cpp!(unsafe [fd as \"intptr_t\", write as \"bool\", mut error as \"SBError\"] -> *mut FILE as \"FILE*\" {\n\n FILE* cfile;\n\n #ifdef _WIN32\n\n cfile = fdopen(_open_osfhandle(fd, write ? 0 : _O_RDONLY), write ? \"w\" : \"r\");\n\n #else\n\n cfile = fdopen(fd, write ? \"w\" : \"r\");\n\n #endif\n\n if (cfile) {\n\n setvbuf(cfile, nullptr, _IOLBF, BUFSIZ);\n\n int x = fileno(cfile);\n\n if (x < 0)\n\n return nullptr;\n\n return cfile;\n", "file_path": "adapter/deps/lldb/src/cfile.rs", "rank": 0, "score": 297752.31473311316 }, { "content": "#[cfg(unix)]\n\npub fn get_fs_path_case(path: &Path) -> Result<PathBuf, std::io::Error> {\n\n Ok(path.into())\n\n}\n\n\n", "file_path": "adapter/src/platform.rs", "rank": 1, "score": 242922.55783165398 }, { "content": "pub fn from_i64(v: i64) -> Result<Handle, Error> {\n\n match Handle::new(v as u32) {\n\n Some(h) => Ok(h),\n\n None => Err(\"Expected non-zero handle value\".into()),\n\n }\n\n}\n\n\n\npub struct HandleTree<Value> {\n\n obj_by_handle: HashMap<Handle, (Option<Handle>, Rc<String>, Value)>,\n\n handle_tree: HashMap<(Option<Handle>, Rc<String>), Handle>,\n\n prev_handle_tree: HashMap<(Option<Handle>, Rc<String>), Handle>,\n\n next_handle_value: u32,\n\n}\n\n\n\nimpl<Value> HandleTree<Value> {\n\n pub fn new() -> Self {\n\n HandleTree {\n\n obj_by_handle: HashMap::new(),\n\n handle_tree: HashMap::new(),\n\n prev_handle_tree: HashMap::new(),\n", "file_path": "adapter/src/handles.rs", "rank": 2, "score": 238001.78721343388 }, { "content": "pub fn terminal_agent(matches: &ArgMatches) -> Result<(), Error> {\n\n let data;\n\n #[cfg(unix)]\n\n {\n\n unsafe {\n\n let ptr = libc::ttyname(1);\n\n assert!(!ptr.is_null());\n\n data = std::ffi::CStr::from_ptr(ptr).to_str()?;\n\n }\n\n }\n\n #[cfg(windows)]\n\n {\n\n data = std::process::id();\n\n }\n\n\n\n let port: u16 = matches.value_of(\"port\").unwrap().parse().unwrap();\n\n let addr = net::SocketAddr::new(net::Ipv4Addr::new(127, 0, 0, 1).into(), port);\n\n let mut stream = net::TcpStream::connect(addr)?;\n\n writeln!(stream, \"{}\", data)?;\n\n\n", "file_path": "adapter/loader/terminal_agent.rs", "rank": 3, "score": 232983.61065326974 }, { "content": "#[cfg(any(windows, test))]\n\npub fn make_case_folder() -> impl FnMut(&str) -> String {\n\n use std::collections::hash_map::Entry;\n\n use std::collections::HashMap;\n\n\n\n let mut case_map: HashMap<String, String> = HashMap::new();\n\n move |k| {\n\n let uk = k.to_uppercase();\n\n match case_map.entry(uk) {\n\n Entry::Occupied(e) => e.get().into(),\n\n Entry::Vacant(e) => {\n\n e.insert(k.into());\n\n k.into()\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "adapter/src/platform.rs", "rank": 4, "score": 228012.03585059693 }, { "content": "#[cfg(windows)]\n\npub fn pipe() -> Result<(fs::File, fs::File), Error> {\n\n use fs::File;\n\n use std::os::windows::prelude::*;\n\n use std::os::windows::raw::HANDLE;\n\n use std::ptr;\n\n use winapi::um::namedpipeapi::CreatePipe;\n\n\n\n unsafe {\n\n let mut r: HANDLE = ptr::null_mut();\n\n let mut w: HANDLE = ptr::null_mut();\n\n if CreatePipe(&mut r, &mut w, ptr::null_mut(), 65536) != 0 {\n\n let r = File::from_raw_handle(r);\n\n let w = File::from_raw_handle(w);\n\n Ok((r, w))\n\n } 
else {\n\n bail!(\"Failed to create a pipe.\");\n\n }\n\n }\n\n}\n\n\n", "file_path": "adapter/src/platform.rs", "rank": 5, "score": 225427.57109389137 }, { "content": "fn main() -> Result<(), Error> {\n\n env_logger::Builder::from_default_env().init();\n\n\n\n let matches = App::new(\"codelldb\")\n\n .arg(Arg::with_name(\"port\").long(\"port\").takes_value(true))\n\n .arg(Arg::with_name(\"multi-session\").long(\"multi-session\"))\n\n .arg(Arg::with_name(\"preload\").long(\"preload\").multiple(true).takes_value(true))\n\n .arg(Arg::with_name(\"liblldb\").long(\"liblldb\").takes_value(true))\n\n .arg(Arg::with_name(\"params\").long(\"params\").takes_value(true))\n\n .subcommand(SubCommand::with_name(\"terminal-agent\").arg(Arg::with_name(\"port\").long(\"port\").takes_value(true)))\n\n .get_matches();\n\n\n\n if let Some(matches) = matches.subcommand_matches(\"terminal-agent\") {\n\n terminal_agent::terminal_agent(&matches)\n\n } else {\n\n debug_server(&matches)\n\n }\n\n}\n\n\n", "file_path": "adapter/loader/main.rs", "rank": 6, "score": 213380.94607497932 }, { "content": "pub fn normalize_path(path: impl AsRef<Path>) -> PathBuf {\n\n let path = path.as_ref();\n\n let mut normalized = PathBuf::new();\n\n for component in Path::new(path).components() {\n\n match component {\n\n Component::Prefix(_) | Component::RootDir | Component::Normal(_) => normalized.push(component),\n\n Component::CurDir => {}\n\n Component::ParentDir => {\n\n normalized.pop();\n\n }\n\n }\n\n }\n\n normalized\n\n}\n\n\n", "file_path": "adapter/src/fsutil.rs", "rank": 7, "score": 211359.92797020858 }, { "content": "// AsyncResponse is used to \"smuggle\" futures out of request handlers\n\n// in the few cases when we need to respond asynchronously.\n\nstruct AsyncResponse(pub Box<dyn Future<Output = Result<ResponseBody, Error>> + 'static>);\n\n\n\nimpl std::error::Error for AsyncResponse {}\n\nimpl std::fmt::Debug for AsyncResponse {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n write!(f, \"AsyncResponse\")\n\n }\n\n}\n\nimpl std::fmt::Display for AsyncResponse {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n write!(f, \"AsyncResponse\")\n\n }\n\n}\n\n\n\n////////////////////////////////////////////////////////////////////////////////////////////////////////////\n\n\n\nunsafe impl Send for DebugSession {}\n\n\n\nimpl DebugSession {\n\n pub fn run(dap_session: DAPSession, settings: AdapterSettings) -> impl Future {\n", "file_path": "adapter/src/debug_session.rs", "rank": 8, "score": 201441.27551752975 }, { "content": "fn debug_server(matches: &ArgMatches) -> Result<(), Error> {\n\n use loading::*;\n\n use std::mem::transmute;\n\n use std::path::{Path, PathBuf};\n\n\n\n let multi_session = matches.is_present(\"multi-session\");\n\n let port = matches.value_of(\"port\").map(|s| s.parse().unwrap()).unwrap_or(0);\n\n let adapter_params = matches.value_of(\"params\");\n\n\n\n unsafe {\n\n // Preload anything passed via --preload\n\n for dylib in matches.values_of(\"preload\").unwrap_or_default() {\n\n load_library(Path::new(dylib), true)?;\n\n }\n\n\n\n let mut codelldb_dir = std::env::current_exe()?;\n\n codelldb_dir.pop();\n\n\n\n // Load liblldb\n\n let liblldb_path = match matches.value_of(\"liblldb\") {\n", "file_path": "adapter/loader/main.rs", "rank": 9, "score": 185571.42426523732 }, { "content": "fn structs() {\n\n let tuple = (1, \"a\", 42.0);\n\n let tuple_ref = &(1, \"a\", 42.0);\n\n\n\n let tuple_struct = TupleStruct(3, \"xxx\", -3.0);\n\n let reg_struct = 
RegularStruct {\n\n a: 1,\n\n b: \"b\",\n\n c: 12.0,\n\n d: vec![12, 34, 56],\n\n };\n\n let reg_struct_ref = &reg_struct;\n\n\n\n reg_struct.print();\n\n\n\n println!(\"---\"); // #BP_structs\n\n println!(\"---\");\n\n println!(\"---\");\n\n}\n\n\n", "file_path": "debuggee/rust/types.rs", "rank": 10, "score": 180717.1722491855 }, { "content": "struct TupleStruct<'a>(i32, &'a str, f32);\n\n\n", "file_path": "debuggee/rust/types.rs", "rank": 11, "score": 180374.14256808686 }, { "content": "#[allow(unused)]\n\npub fn is_same_path(path1: &Path, path2: &Path) -> bool {\n\n if path1 == path2 {\n\n true\n\n } else {\n\n match (path1.canonicalize(), path2.canonicalize()) {\n\n (Ok(path1), Ok(path2)) => path1 == path2,\n\n _ => false,\n\n }\n\n }\n\n}\n\n\n", "file_path": "adapter/src/fsutil.rs", "rank": 12, "score": 175605.4771979737 }, { "content": "pub fn as_user_error<E: ToString>(err: E) -> UserError {\n\n UserError(err.to_string())\n\n}\n\n\n\npub type Error = Box<dyn std::error::Error>;\n\n\n\nmacro_rules! bail(($err:expr) => (return Err(From::from($err))));\n\n\n\nmacro_rules! log_errors(($e:expr) => (if let Err(err) = $e { error!(\"{}\", err); }));\n", "file_path": "adapter/src/error.rs", "rank": 13, "score": 172509.20187543056 }, { "content": "pub fn parse_hit_condition(expr: &str) -> Result<HitCondition, ()> {\n\n fn parser(input: Span) -> IResult<Span, HitCondition> {\n\n alt((\n\n map(preceded(tag(\"<=\"), preceded(space0, unsigned)), |n| HitCondition::LE(n)),\n\n map(preceded(tag(\"<\"), preceded(space0, unsigned)), |n| HitCondition::LT(n)),\n\n map(preceded(tag(\"==\"), preceded(space0, unsigned)), |n| HitCondition::EQ(n)),\n\n map(preceded(tag(\"=\"), preceded(space0, unsigned)), |n| HitCondition::EQ(n)),\n\n map(preceded(tag(\">=\"), preceded(space0, unsigned)), |n| HitCondition::GE(n)),\n\n map(preceded(tag(\">\"), preceded(space0, unsigned)), |n| HitCondition::GT(n)),\n\n map(preceded(tag(\"%\"), preceded(space0, unsigned)), |n| HitCondition::MOD(n)),\n\n map(unsigned, |n| HitCondition::GE(n)),\n\n ))(input)\n\n }\n\n\n\n match parser.parse(expr.trim()) {\n\n Ok((_, hc)) => Ok(hc),\n\n Err(_) => Err(()),\n\n }\n\n}\n\n\n\n///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////\n\n\n", "file_path": "adapter/src/expressions/hit_condition.rs", "rank": 14, "score": 169663.63094961343 }, { "content": "fn debug_descr<CPP>(f: &mut fmt::Formatter, cpp: CPP) -> fmt::Result\n\nwhere\n\n CPP: FnOnce(&mut SBStream) -> bool,\n\n{\n\n let mut descr = SBStream::new();\n\n if cpp(&mut descr) {\n\n match str::from_utf8(descr.data()) {\n\n Ok(s) => f.write_str(s),\n\n Err(_) => Err(fmt::Error),\n\n }\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n\n/////////////////////////////////////////////////////////////////////////////////////////////////////\n\n\n", "file_path": "adapter/deps/lldb/src/lib.rs", "rank": 15, "score": 165535.6717045342 }, { "content": "#[derive(Debug)]\n\nstruct Inner {\n\n flag: AtomicBool,\n\n receiver_count: AtomicU16,\n\n}\n\n#[derive(Debug)]\n\npub struct Sender(Arc<Inner>);\n\n\n\nimpl Sender {\n\n pub fn new() -> Self {\n\n Sender(Arc::new(Inner {\n\n flag: AtomicBool::new(false),\n\n receiver_count: AtomicU16::new(0),\n\n }))\n\n }\n\n\n\n pub fn subscribe(&self) -> Receiver {\n\n self.0.receiver_count.fetch_add(1, Ordering::Relaxed);\n\n Receiver(self.0.clone())\n\n }\n\n\n", "file_path": "adapter/src/cancellation.rs", "rank": 16, "score": 165434.54454028845 }, { "content": "type Error = Box<dyn 
std::error::Error>;\n\n\n\nmod terminal_agent;\n\n\n", "file_path": "adapter/loader/main.rs", "rank": 17, "score": 156217.65724591952 }, { "content": "// Initialize Python interface.\n\n// In order to maintain compatibility with Python 2 (in case we need to load an older liblldb), we eschew Python's C API,\n\n// instead preferring to interact with it via the `ctypes` module:\n\n// - Use LLDB's SBCommandInterpreter to import `codelldb` module and invoke `codelldb.initialize()`.\n\n// - Python code calls us back via `init_callback()` providing pointers to C ABI wrappers of the functions we need.\n\n// We stash these pointers and later call them directly, bypassing slow SBCommandInterpreter API.\n\n// - If any of the above fails, we declare Python scripting defunct and proceed in reduced functionality mode.\n\npub fn initialize(\n\n interpreter: SBCommandInterpreter,\n\n adapter_dir: &Path,\n\n console_stream: Option<std::fs::File>,\n\n) -> Result<(Box<PythonInterface>, mpsc::Receiver<EventBody>), Error> {\n\n let mut command_result = SBCommandReturnObject::new();\n\n\n\n // Import debugger.py into script interpreter's namespace.\n\n // This also adds our bin directory to sys.path, so we can import the rest of the modules below.\n\n let init_script = adapter_dir.join(\"debugger.py\");\n\n let command = format!(\"command script import '{}'\", init_script.to_str().unwrap());\n\n interpreter.handle_command(&command, &mut command_result, false);\n\n if !command_result.succeeded() {\n\n bail!(format!(\"{:?}\", command_result));\n\n }\n\n let (sender, receiver) = mpsc::channel(10);\n\n let interface = Box::new(PythonInterface {\n\n initialized: false,\n\n event_sender: sender,\n\n postinit_ptr: NotInitialized,\n", "file_path": "adapter/src/python.rs", "rank": 18, "score": 151923.64031426437 }, { "content": "// Parse expression type and preprocess it.\n\npub fn prepare(expression: &str, default_type: Expressions) -> PreparedExpression {\n\n let (expr, ty) = get_expression_type(expression, default_type);\n\n match ty {\n\n Expressions::Native => PreparedExpression::Native(expr.to_owned()),\n\n Expressions::Simple => PreparedExpression::Simple(preprocess_simple_expr(expr)),\n\n Expressions::Python => PreparedExpression::Python(preprocess_python_expr(expr)),\n\n }\n\n}\n\n\n", "file_path": "adapter/src/expressions/mod.rs", "rank": 19, "score": 151633.385542421 }, { "content": "pub fn qualified_ident(input: Span) -> IResult<Span, QIdent> {\n\n preceded(opt(terminated(tag(\"::\"), space0)), separated_list1(ws(tag(\"::\")), qident_segment))(input)\n\n}\n\n\n\n///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////\n\n\n", "file_path": "adapter/src/expressions/qualified_ident.rs", "rank": 20, "score": 149504.9979763039 }, { "content": "// Same as prepare(), but also parses formatting options at the end of expression,\n\n// for example, `value,x` to format value as hex or `ptr,[50]` to interpret `ptr` as an array of 50 elements.\n\npub fn prepare_with_format(\n\n expression: &str,\n\n default_type: Expressions,\n\n) -> Result<(PreparedExpression, Option<FormatSpec>), String> {\n\n let (expr, ty) = get_expression_type(expression, default_type);\n\n let (expr, format) = get_expression_format(expr)?;\n\n let pp_expr = match ty {\n\n Expressions::Native => PreparedExpression::Native(expr.to_owned()),\n\n Expressions::Simple => PreparedExpression::Simple(preprocess_simple_expr(expr)),\n\n Expressions::Python => 
PreparedExpression::Python(preprocess_python_expr(expr)),\n\n };\n\n Ok((pp_expr, format))\n\n}\n\n\n", "file_path": "adapter/src/expressions/mod.rs", "rank": 21, "score": 147412.7147968455 }, { "content": "pub fn get_expression_format<'a>(expr: &'a str) -> Result<(&'a str, Option<FormatSpec>), String> {\n\n fn array_spec(input: Span) -> IResult<Span, u32> {\n\n delimited(tag(\"[\"), unsigned, tag(\"]\"))(input)\n\n }\n\n\n\n if let Some(pos) = expr.rfind(',') {\n\n let spec = &expr[pos + 1..];\n\n let expr = &expr[..pos];\n\n if let Ok((\"\", n)) = array_spec.parse(spec) {\n\n return Ok((expr, Some(FormatSpec::Array(n))));\n\n } else if spec.len() == 1 {\n\n let f = match spec {\n\n \"c\" => lldb::Format::Char,\n\n \"h\" => lldb::Format::Hex,\n\n \"x\" => lldb::Format::Hex,\n\n \"o\" => lldb::Format::Octal,\n\n \"d\" => lldb::Format::Decimal,\n\n \"b\" => lldb::Format::Binary,\n\n \"f\" => lldb::Format::Float,\n\n \"p\" => lldb::Format::Pointer,\n", "file_path": "adapter/src/expressions/expression_format.rs", "rank": 22, "score": 144112.04061991133 }, { "content": "pub fn dummy() -> Receiver {\n\n Sender::new().subscribe()\n\n}\n\n\n\n\n", "file_path": "adapter/src/cancellation.rs", "rank": 23, "score": 142045.7558457875 }, { "content": "fn into_string_lossy(cstr: &CStr) -> String {\n\n cstr.to_string_lossy().into_owned()\n\n}\n", "file_path": "adapter/src/debug_session.rs", "rank": 24, "score": 139471.55689170075 }, { "content": "#[derive(Clone)]\n\nstruct RegularStruct<'a> {\n\n b: &'a str,\n\n a: i32,\n\n c: f32,\n\n d: Vec<u32>,\n\n}\n\n\n\nimpl RegularStruct<'_> {\n\n fn print(&self) {\n\n println!(\"{} {} {} {:?}\", self.a, self.b, self.c, self.d);\n\n }\n\n}\n\n\n\nimpl<'a> Drop for RegularStruct<'a> {\n\n fn drop(&mut self) {\n\n self.b = \"invalid\";\n\n self.a = 0;\n\n self.c = 0.0;\n\n self.d.clear();\n\n }\n\n}\n\n\n", "file_path": "debuggee/rust/types.rs", "rank": 25, "score": 134785.94533625792 }, { "content": "fn hashes() {\n\n let mut hash: HashMap<String, i32> = HashMap::default();\n\n hash.insert(\"Einar\".into(), 25);\n\n hash.insert(\"Olaf\".into(), 24);\n\n hash.insert(\"Harald\".into(), 12);\n\n hash.insert(\"Conan\".into(), 29);\n\n\n\n let set = hash.iter().map(|(name, age)| name.clone()).collect::<HashSet<String>>();\n\n\n\n println!(\"---\"); // #BP_hashes\n\n println!(\"---\");\n\n println!(\"---\");\n\n}\n\n\n", "file_path": "debuggee/rust/types.rs", "rank": 26, "score": 131235.32370620023 }, { "content": "fn arrays() {\n\n let array = [1, 2, 3, 4, 5];\n\n let slice = &array[..];\n\n let mut array2 = [1000, 2000, 3000, 4000, 5000];\n\n let mut_slice = &mut array2[..];\n\n let empty_vec = Vec::<i32>::new();\n\n let vec_int = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];\n\n let vec_str = vec![\"111\", \"2222\", \"3333\", \"4444\", \"5555\"];\n\n let vec_tuple = vec![(1, 2), (2, 3), (3, 4)];\n\n let large_vec: Vec<i32> = (0..20000).collect();\n\n\n\n println!(\"---\"); // #BP_arrays\n\n println!(\"---\");\n\n println!(\"---\");\n\n}\n\n\n", "file_path": "debuggee/rust/types.rs", "rank": 27, "score": 131235.32370620023 }, { "content": "fn main() {\n\n primitives();\n\n enums();\n\n structs();\n\n arrays();\n\n boxes();\n\n strings();\n\n hashes();\n\n misc();\n\n}\n", "file_path": "debuggee/rust/types.rs", "rank": 28, "score": 131235.32370620023 }, { "content": "fn boxes() {\n\n let reg_struct = RegularStruct {\n\n a: 1,\n\n b: \"b\",\n\n c: 12.0,\n\n d: vec![12, 34, 56],\n\n };\n\n\n\n let boxed = Box::new(\"boxed\");\n\n let rc_box = rc::Rc::new(reg_struct.clone());\n\n 
let rc_box2 = rc::Rc::new(reg_struct.clone());\n\n let rc_box2c = rc_box2.clone();\n\n let rc_box3 = rc::Rc::new(reg_struct.clone());\n\n let rc_weak = rc::Rc::downgrade(&rc_box3);\n\n let arc_box = sync::Arc::new(reg_struct.clone());\n\n let arc_weak = sync::Arc::downgrade(&arc_box);\n\n let mutex_box = sync::Mutex::new(reg_struct.clone());\n\n\n\n let rc_weak_dropped = rc::Rc::downgrade(&rc::Rc::new(reg_struct.clone()));\n\n let arc_weak_dropped = sync::Arc::downgrade(&sync::Arc::new(reg_struct.clone()));\n", "file_path": "debuggee/rust/types.rs", "rank": 29, "score": 131235.32370620023 }, { "content": "fn strings() {\n\n let empty_string = String::from(\"\");\n\n let string = String::from(\"A String\");\n\n let str_slice = \"String slice\";\n\n let wstr1 = \"Превед йожэг!\";\n\n let wstr2 = String::from(\"Ḥ̪͔̦̺E͍̹̯̭͜ C̨͙̹̖̙O̡͍̪͖ͅM̢̗͙̫̬E̜͍̟̟̮S̢̢̪̘̦!\");\n\n\n\n let cstring = std::ffi::CString::new(\"C String\").unwrap();\n\n let cstr = &cstring[..];\n\n\n\n let osstring = std::ffi::OsString::from(\"OS String\");\n\n let osstr = &osstring[..];\n\n\n\n let mut path_buf = path::PathBuf::new();\n\n path_buf.push(\"foo\");\n\n path_buf.push(\"bar\");\n\n let path = path_buf.as_path();\n\n\n\n let str_tuple = (\n\n string.clone(),\n", "file_path": "debuggee/rust/types.rs", "rank": 30, "score": 131235.32370620023 }, { "content": "fn misc() {\n\n let i32_ = 32;\n\n let f32_ = 42.0;\n\n let closure = move |x: i32| (x + i32_) as f32 * f32_;\n\n\n\n let class = PyKeywords {\n\n finally: 1,\n\n import: 2,\n\n lambda: 3,\n\n raise: 4,\n\n };\n\n\n\n println!(\"---\"); // #BP_misc\n\n println!(\"---\");\n\n println!(\"---\");\n\n}\n\n\n", "file_path": "debuggee/rust/types.rs", "rank": 31, "score": 131235.32370620023 }, { "content": "fn primitives() {\n\n let char_: char = 'A';\n\n let bool_: bool = true;\n\n\n\n let i8_: i8 = -8;\n\n let u8_: u8 = 8;\n\n let i16_: i16 = -16;\n\n let u16_: u16 = 16;\n\n let i32_: i32 = -32;\n\n let u32_: u32 = 32;\n\n let i64_: i64 = -64;\n\n let u64_: u64 = 64;\n\n let i128_: i128 = -128;\n\n let u128_: u128 = 128;\n\n let isize_: isize = -2;\n\n let usize_: usize = 2;\n\n\n\n let f32_: f32 = 3.1415926535;\n\n let f64_: f64 = 3.1415926535 * 2.0;\n\n\n\n let unit = ();\n\n\n\n println!(\"---\"); // #BP_primitives\n\n println!(\"---\");\n\n println!(\"---\");\n\n}\n\n\n", "file_path": "debuggee/rust/types.rs", "rank": 32, "score": 131235.32370620023 }, { "content": "fn enums() {\n\n let reg_enum1 = RegularEnum::A;\n\n let reg_enum2 = RegularEnum::B(100, 200);\n\n let reg_enum3 = RegularEnum::C {\n\n x: 11.35,\n\n y: 20.5,\n\n };\n\n let reg_enum_ref = &reg_enum3;\n\n\n\n let cstyle_enum1 = CStyleEnum::A;\n\n let cstyle_enum2 = CStyleEnum::B;\n\n\n\n let enc_enum1: EncodedEnum<&str> = EncodedEnum::Some(\"string\");\n\n let enc_enum2: EncodedEnum<&str> = EncodedEnum::Nothing;\n\n\n\n let opt_str1: Option<&str> = Some(\"string\");\n\n let opt_str2: Option<&str> = None;\n\n\n\n let result_ok: Result<&str, String> = Ok(\"ok\");\n\n let result_err: Result<&str, String> = Err(\"err\".into());\n", "file_path": "debuggee/rust/types.rs", "rank": 33, "score": 131235.32370620023 }, { "content": "struct PyKeywords {\n\n finally: i32,\n\n import: i32,\n\n lambda: i32,\n\n raise: i32,\n\n}\n\n\n", "file_path": "debuggee/rust/types.rs", "rank": 34, "score": 129299.92907914365 }, { "content": "#[test]\n\nfn test_adapter_data() {\n\n let addresses = &[10, 20, 23, 25, 30, 35, 41, 42, 50];\n\n let adapter_data = AdapterData {\n\n start: 10,\n\n end: 55,\n\n line_offsets: 
addresses.windows(2).map(|w| (w[1] - w[0]) as u32).collect(),\n\n };\n\n let addresses2 = DisassembledRange::lines_from_adapter_data(&adapter_data);\n\n assert_eq!(addresses, &addresses2[3..]);\n\n}\n", "file_path": "adapter/src/disassembly.rs", "rank": 35, "score": 126850.52616526186 }, { "content": "class ValueObjectConstResultImpl;\n", "file_path": "adapter/deps/lldb/include/lldb/lldb-forward.h", "rank": 36, "score": 126230.91892584055 }, { "content": "pub fn to_i64(h: Option<Handle>) -> i64 {\n\n match h {\n\n None => 0,\n\n Some(v) => v.get() as i64,\n\n }\n\n}\n\n\n", "file_path": "adapter/src/handles.rs", "rank": 37, "score": 123821.18663348409 }, { "content": "#[cfg(test)]\n\nfn test_pair(input: &str, expected: &str, preprocessor: impl Fn(&str) -> String) {\n\n {\n\n let prepr = preprocessor(input);\n\n assert_eq!(prepr, expected);\n\n }\n\n {\n\n let input = format!(\" {}\", input);\n\n let expected = format!(\" {}\", expected);\n\n let prepr = preprocessor(&input);\n\n assert_eq!(prepr, expected);\n\n }\n\n {\n\n let input = format!(\"{} \", input);\n\n let expected = format!(\"{} \", expected);\n\n let prepr = preprocessor(&input);\n\n assert_eq!(prepr, expected);\n\n }\n\n}\n\n\n", "file_path": "adapter/src/expressions/preprocess.rs", "rank": 38, "score": 122950.96997489748 }, { "content": "// Replaces identifiers that are invalid according to Python syntax in simple expressions:\n\n// - identifiers that happen to be Python keywords (e.g.`for`),\n\n// - qualified identifiers (e.g. `foo::bar::baz`),\n\n// - raw identifiers of the form $xxxxxx,\n\n// with access via `__frame_vars`, or `__getattr__()` (if prefixed by a dot).\n\n// For example, `for + foo::bar::baz + foo::bar::baz.class() + $SomeClass<int>::value` will be translated to\n\n// `__frame_vars[\"for\"] + __frame_vars[\"foo::bar::baz\"] +\n\n// __frame_vars[\"foo::bar::baz\"].__getattr__(\"class\") + __frame_vars[\"SomeClass<int>::value\"]`\n\npub fn preprocess_simple_expr(expr: &str) -> String {\n\n let mut pref_qident = pair(opt(char('.')), recognize(qualified_ident));\n\n let mut pref_eident = pair(opt(char('.')), escaped_ident);\n\n let handle_prefixed = |result: &mut String, (prefix, ident): (Option<char>, &str)| {\n\n if prefix.is_none() {\n\n write!(result, \"__frame_vars[\\\"{}\\\"]\", ident).unwrap();\n\n } else {\n\n write!(result, \".__getattr__(\\\"{}\\\")\", ident).unwrap();\n\n }\n\n };\n\n fn logical_keyword(input: Span) -> IResult<Span, Span> {\n\n alt((tag(\"and\"), tag(\"or\"), tag(\"not\")))(input)\n\n }\n\n\n\n let mut expr = expr;\n\n let mut result = String::new();\n\n loop {\n\n if let Ok((rest, s)) = python_string(expr) {\n\n result.push_str(s);\n\n expr = rest;\n", "file_path": "adapter/src/expressions/preprocess.rs", "rank": 39, "score": 121704.0974445543 }, { "content": "// Replaces variable placeholders in native Python expressions with access via __frame_vars,\n\n// or `__getattr__()` (if prefixed by a dot).\n\n// For example, `$var + 42` will be translated to `__frame_vars[\"var\"] + 42`.\n\npub fn preprocess_python_expr(expr: &str) -> String {\n\n let mut expr = expr;\n\n let mut result = String::new();\n\n let mut pref_eident = pair(opt(char('.')), escaped_ident);\n\n\n\n loop {\n\n if let Ok((rest, s)) = python_string(expr) {\n\n result.push_str(s);\n\n expr = rest;\n\n } else if let Ok((rest, (prefix, ident))) = pref_eident(expr) {\n\n if prefix.is_none() {\n\n write!(result, \"__frame_vars[\\\"{}\\\"]\", ident).unwrap();\n\n } else {\n\n write!(result, \".__getattr__(\\\"{}\\\")\", 
ident).unwrap();\n\n }\n\n expr = rest;\n\n } else {\n\n let mut chars = expr.chars();\n\n if let Some(ch) = chars.next() {\n\n result.push(ch);\n", "file_path": "adapter/src/expressions/preprocess.rs", "rank": 40, "score": 121704.0974445543 }, { "content": "// Every register is described in detail including its name, alternate name\n\n// (optional), encoding, size in bytes and the default display format.\n\nstruct RegisterInfo {\n\n const char *name; // Name of this register, can't be NULL\n\n const char *alt_name; // Alternate name of this register, can be NULL\n\n uint32_t byte_size; // Size in bytes of the register\n\n uint32_t byte_offset; // The byte offset in the register context data where\n\n // this register's value is found.\n\n // This is optional, and can be 0 if a particular RegisterContext does not\n\n // need to address its registers by byte offset.\n\n lldb::Encoding encoding; // Encoding of the register bits\n\n lldb::Format format; // Default display format\n\n uint32_t kinds[lldb::kNumRegisterKinds]; // Holds all of the various register\n\n // numbers for all register kinds\n\n uint32_t *value_regs; // List of registers (terminated with\n\n // LLDB_INVALID_REGNUM). If this value is not null,\n\n // all registers in this list will be read first, at\n\n // which point the value for this register will be\n\n // valid. For example, the value list for ah would be\n\n // eax (x86) or rax (x64).\n\n uint32_t *invalidate_regs; // List of registers (terminated with\n\n // LLDB_INVALID_REGNUM). If this value is not\n", "file_path": "adapter/deps/lldb/include/lldb/lldb-private-types.h", "rank": 41, "score": 120698.46210968832 }, { "content": "struct OptionValidator {\n\n virtual ~OptionValidator() {}\n\n virtual bool IsValid(Platform &platform,\n\n const ExecutionContext &target) const = 0;\n\n virtual const char *ShortConditionString() const = 0;\n\n virtual const char *LongConditionString() const = 0;\n\n};\n\n\n", "file_path": "adapter/deps/lldb/include/lldb/lldb-private-types.h", "rank": 42, "score": 120693.71355338133 }, { "content": "struct OptionDefinition {\n\n uint32_t usage_mask; // Used to mark options that can be used together. 
If (1\n\n // << n & usage_mask) != 0\n\n // then this option belongs to option set n.\n\n bool required; // This option is required (in the current usage level)\n\n const char *long_option; // Full name for this option.\n\n int short_option; // Single character for this option.\n\n int option_has_arg; // no_argument, required_argument or optional_argument\n\n OptionValidator *validator; // If non-NULL, option is valid iff\n\n // |validator->IsValid()|, otherwise always valid.\n\n OptionEnumValues enum_values; // If not empty, an array of enum values.\n\n uint32_t completion_type; // Cookie the option class can use to do define the\n\n // argument completion.\n\n lldb::CommandArgumentType argument_type; // Type of argument this option takes\n\n const char *usage_text; // Full text explaining what this options does and\n\n // what (if any) argument to\n\n // pass it.\n\n};\n\n\n\ntypedef struct type128 { uint64_t x[2]; } type128;\n\ntypedef struct type256 { uint64_t x[4]; } type256;\n\n\n\n} // namespace lldb_private\n\n\n\n#endif // #if defined(__cplusplus)\n\n\n\n#endif // liblldb_lldb_private_types_h_\n", "file_path": "adapter/deps/lldb/include/lldb/lldb-private-types.h", "rank": 43, "score": 120693.71355338133 }, { "content": "// Registers are grouped into register sets\n\nstruct RegisterSet {\n\n const char *name; // Name of this register set\n\n const char *short_name; // A short name for this register set\n\n size_t num_registers; // The number of registers in REGISTERS array below\n\n const uint32_t *registers; // An array of register indices in this set. The\n\n // values in this array are\n\n // *indices* (not register numbers) into a particular RegisterContext's\n\n // register array. For example, if eax is defined at index 4 for a\n\n // particular RegisterContext, eax would be included in this RegisterSet by\n\n // adding the value 4. Not by adding the value lldb_eax_i386.\n\n};\n\n\n", "file_path": "adapter/deps/lldb/include/lldb/lldb-private-types.h", "rank": 44, "score": 120693.71355338133 }, { "content": "struct OptionEnumValueElement {\n\n int64_t value;\n\n const char *string_value;\n\n const char *usage;\n\n};\n\n\n\nusing OptionEnumValues = llvm::ArrayRef<OptionEnumValueElement>;\n\n\n", "file_path": "adapter/deps/lldb/include/lldb/lldb-private-types.h", "rank": 45, "score": 116943.55049937137 }, { "content": "struct VecMapVisitor<K, V>(PhantomData<(K, V)>);\n\n\n\nimpl<'de, K, V> Visitor<'de> for VecMapVisitor<K, V>\n\nwhere\n\n K: Deserialize<'de>,\n\n V: Deserialize<'de>,\n\n{\n\n type Value = VecMap<K, V>;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"a map\")\n\n }\n\n\n\n fn visit_map<M>(self, mut access: M) -> Result<Self::Value, M::Error>\n\n where\n\n M: MapAccess<'de>,\n\n {\n\n let mut vec = Vec::with_capacity(access.size_hint().unwrap_or(0));\n\n while let Some((key, value)) = access.next_entry()? 
{\n\n vec.push((key, value));\n", "file_path": "adapter/src/vec_map.rs", "rank": 46, "score": 114960.83792803466 }, { "content": "pub fn start_polling(event_listener: &SBListener) -> mpsc::Receiver<SBEvent> {\n\n let mut event_listener = event_listener.clone();\n\n let (sender, receiver) = mpsc::channel(1000);\n\n\n\n tokio::task::spawn(async move {\n\n let mut event = SBEvent::new();\n\n loop {\n\n let result = tokio::task::spawn_blocking(move || {\n\n event_listener.wait_for_event(1, &mut event);\n\n (event_listener, event)\n\n })\n\n .await\n\n .unwrap();\n\n\n\n event_listener = result.0;\n\n event = result.1;\n\n\n\n if event.is_valid() {\n\n match sender.try_send(event) {\n\n Ok(_) => {}\n", "file_path": "adapter/src/debug_event_listener.rs", "rank": 47, "score": 106931.12070713374 }, { "content": "fn get_expression_type<'a>(expr: &'a str, default_type: Expressions) -> (&'a str, Expressions) {\n\n if expr.starts_with(\"/nat \") {\n\n (&expr[5..], Expressions::Native)\n\n } else if expr.starts_with(\"/py \") {\n\n (&expr[4..], Expressions::Python)\n\n } else if expr.starts_with(\"/se \") {\n\n (&expr[4..], Expressions::Simple)\n\n } else {\n\n (expr, default_type)\n\n }\n\n}\n", "file_path": "adapter/src/expressions/mod.rs", "rank": 48, "score": 103164.20758688648 }, { "content": "pub trait IsValid {\n\n fn is_valid(&self) -> bool;\n\n\n\n /// If `self.is_valid()` is `true`, returns `Some(self)`, otherwise `None`.\n\n fn check(self) -> Option<Self>\n\n where\n\n Self: Sized,\n\n {\n\n if self.is_valid() {\n\n Some(self)\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n\n/////////////////////////////////////////////////////////////////////////////////////////////////////\n\n\n\nmod cfile;\n\nmod strings;\n", "file_path": "adapter/deps/lldb/src/lib.rs", "rank": 49, "score": 101921.89210249583 }, { "content": "#[cfg(windows)]\n\nfn get_raw_fd(stream: std::fs::File) -> usize {\n\n use std::os::windows::prelude::*;\n\n stream.into_raw_handle() as usize\n\n}\n\n\n", "file_path": "adapter/src/python.rs", "rank": 50, "score": 101004.07099893162 }, { "content": "fn ident(input: Span) -> IResult<Span, Span> {\n\n recognize(pair(alt((alpha1, tag(\"_\"))), many0_count(alt((alphanumeric1, tag(\"_\"))))))(input)\n\n}\n\n\n", "file_path": "adapter/src/expressions/qualified_ident.rs", "rank": 51, "score": 100935.80271852852 }, { "content": "// Recognize Python strings\n\nfn python_string(input: Span) -> IResult<Span, Span> {\n\n fn body(delim: &'static str) -> impl Fn(Span) -> IResult<Span, Span> {\n\n move |input| {\n\n recognize(many0_count(alt((\n\n recognize(pair(char('\\\\'), anychar)), //.\n\n recognize(none_of(delim)),\n\n ))))(input)\n\n }\n\n }\n\n recognize(alt((\n\n delimited(char('\\\"'), body(\"\\\"\"), char('\\\"')), //.\n\n delimited(char('\\''), body(\"\\'\"), char('\\'')),\n\n delimited(tag(\"r\\\"\"), is_not(\"\\\"\"), char('\"')),\n\n delimited(tag(\"r\\'\"), is_not(\"\\'\"), char('\\'')),\n\n )))(input)\n\n}\n\n\n", "file_path": "adapter/src/expressions/preprocess.rs", "rank": 52, "score": 100935.80271852852 }, { "content": "fn escaped_ident(input: Span) -> IResult<Span, Span> {\n\n preceded(\n\n tag(\"$\"),\n\n alt((\n\n recognize(qualified_ident), //\n\n delimited(tag(\"{\"), recognize(is_not(\"}\")), tag(\"}\")),\n\n )),\n\n )(input)\n\n}\n\n\n", "file_path": "adapter/src/expressions/preprocess.rs", "rank": 53, "score": 100935.80271852852 }, { "content": "fn template_param(input: Span) -> IResult<Span, QIdentParam> {\n\n match qualified_ident(input) {\n\n Ok((rest, result)) => 
Ok((rest, QIdentParam::QIdent(result))),\n\n Err(_) => match recognize(is_not(\"<,>\"))(input) {\n\n Ok((rest, result)) => Ok((rest, QIdentParam::Other(result.trim()))),\n\n Err(err) => Err(err),\n\n },\n\n }\n\n}\n\n\n", "file_path": "adapter/src/expressions/qualified_ident.rs", "rank": 54, "score": 95867.23353106796 }, { "content": "fn qident_segment(input: Span) -> IResult<Span, QIdentSegment> {\n\n let (rest, (ident, parameters)) = pair(ident, opt(preceded(space0, template_params)))(input)?;\n\n let parameters = match parameters {\n\n Some(parameters) => parameters,\n\n None => Vec::new(),\n\n };\n\n Ok((\n\n rest,\n\n QIdentSegment {\n\n ident,\n\n parameters,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "adapter/src/expressions/qualified_ident.rs", "rank": 55, "score": 95867.23353106796 }, { "content": "fn template_params(input: Span) -> IResult<Span, Vec<QIdentParam>> {\n\n let (rest, parameters) = delimited(tag(\"<\"), separated_list0(tag(\",\"), ws(template_param)), tag(\">\"))(input)?;\n\n Ok((rest, parameters))\n\n}\n\n\n", "file_path": "adapter/src/expressions/qualified_ident.rs", "rank": 56, "score": 92814.95366819369 }, { "content": "class SBCommandReturnObjectImpl;\n\n}\n\n\n\nnamespace lldb {\n\n\n", "file_path": "adapter/deps/lldb/include/lldb/API/SBCommandReturnObject.h", "rank": 57, "score": 91516.81124555063 }, { "content": "class DataBuffer;\n", "file_path": "adapter/deps/lldb/include/lldb/lldb-forward.h", "rank": 58, "score": 89949.71374203546 }, { "content": "class TypeImpl;\n", "file_path": "adapter/deps/lldb/include/lldb/lldb-forward.h", "rank": 59, "score": 89538.76150066004 }, { "content": "class StructuredDataImpl;\n", "file_path": "adapter/deps/lldb/include/lldb/lldb-forward.h", "rank": 60, "score": 87847.58930063323 }, { "content": "class TypeListImpl;\n", "file_path": "adapter/deps/lldb/include/lldb/lldb-forward.h", "rank": 61, "score": 87535.34342427787 }, { "content": "class TypeSummaryImpl;\n", "file_path": "adapter/deps/lldb/include/lldb/lldb-forward.h", "rank": 62, "score": 87535.34342427787 }, { "content": "class TypeCategoryImpl;\n", "file_path": "adapter/deps/lldb/include/lldb/lldb-forward.h", "rank": 63, "score": 87535.34342427787 }, { "content": "class TypeFormatImpl;\n", "file_path": "adapter/deps/lldb/include/lldb/lldb-forward.h", "rank": 64, "score": 87535.34342427787 }, { "content": "class TypeMemberImpl;\n", "file_path": "adapter/deps/lldb/include/lldb/lldb-forward.h", "rank": 65, "score": 87535.34342427787 }, { "content": "class TypeFilterImpl;\n", "file_path": "adapter/deps/lldb/include/lldb/lldb-forward.h", "rank": 66, "score": 87535.34342427787 }, { "content": "class ValueObjectConstResult;\n", "file_path": "adapter/deps/lldb/include/lldb/lldb-forward.h", "rank": 67, "score": 86026.62190688749 }, { "content": "export function char(ch: string): ValidatorFn {\n\n assert.equal(ch.length, 1);\n\n return v => parseInt(v.value) == ch.charCodeAt(0) || v.value == `'${ch}'`;\n", "file_path": "tests/testUtils.ts", "rank": 68, "score": 85914.26828671558 }, { "content": "class SBError(ctypes.Structure):\n\n _fields_ = [('_opaque', c_int64)]\n", "file_path": "adapter/codelldb.py", "rank": 69, "score": 85840.46790824554 }, { "content": "class TypeNameSpecifierImpl;\n", "file_path": "adapter/deps/lldb/include/lldb/lldb-forward.h", "rank": 70, "score": 85619.615722045 }, { "content": "class TypeEnumMemberImpl;\n", "file_path": "adapter/deps/lldb/include/lldb/lldb-forward.h", "rank": 71, "score": 85619.615722045 }, { "content": "class TypeMemberFunctionImpl;\n", 
"file_path": "adapter/deps/lldb/include/lldb/lldb-forward.h", "rank": 72, "score": 85619.615722045 }, { "content": "class ValueObjectConstResultChild;\n", "file_path": "adapter/deps/lldb/include/lldb/lldb-forward.h", "rank": 73, "score": 84184.23387686092 }, { "content": "class TypeEnumMemberListImpl;\n", "file_path": "adapter/deps/lldb/include/lldb/lldb-forward.h", "rank": 74, "score": 83785.94433468646 }, { "content": "fn main() {\n\n let target_os = env::var(\"CARGO_CFG_TARGET_OS\").unwrap();\n\n let weak_linkage = match env::var(\"CARGO_FEATURE_WEAK_LINKAGE\") {\n\n Ok(_) => true,\n\n Err(_) => false,\n\n };\n\n\n\n if weak_linkage {\n\n if target_os == \"linux\" {\n\n println!(\"cargo:rustc-cdylib-link-arg=-Wl,-Bstatic\");\n\n println!(\"cargo:rustc-cdylib-link-arg=-lstdc++\");\n\n println!(\"cargo:rustc-cdylib-link-arg=-Wl,-Bdynamic\");\n\n } else if target_os == \"macos\" {\n\n println!(\"cargo:rustc-cdylib-link-arg=-undefined\");\n\n println!(\"cargo:rustc-cdylib-link-arg=dynamic_lookup\");\n\n }\n\n } else {\n\n if target_os == \"linux\" || target_os == \"macos\" {\n\n #[rustfmt::skip]\n\n let origin = if target_os == \"linux\" { \"$ORIGIN\" } else { \"@loader_path\" };\n\n // Relative to adapter/\n\n println!(\"cargo:rustc-cdylib-link-arg=-Wl,-rpath,{}/../lldb/lib\", origin);\n\n // Relative to target/debug/deps/ - for `cargo test`\n\n println!(\"cargo:rustc-cdylib-link-arg=-Wl,-rpath,{}/../../../lldb/lib\", origin);\n\n }\n\n }\n\n}\n", "file_path": "adapter/build.rs", "rank": 75, "score": 83408.94786799664 }, { "content": "struct Ranges {\n\n pub by_handle: HashMap<Handle, Rc<DisassembledRange>>,\n\n pub by_address: Vec<Rc<DisassembledRange>>,\n\n}\n\npub struct AddressSpace {\n\n target: SBTarget,\n\n ranges: RefCell<Ranges>,\n\n}\n\n\n\nimpl AddressSpace {\n\n pub fn new(target: &SBTarget) -> AddressSpace {\n\n AddressSpace {\n\n target: target.clone(),\n\n ranges: RefCell::new(Ranges {\n\n by_handle: HashMap::new(),\n\n by_address: Vec::new(),\n\n }),\n\n }\n\n }\n\n\n", "file_path": "adapter/src/disassembly.rs", "rank": 76, "score": 82622.3827772855 }, { "content": "#[test]\n\nfn refcounts() {\n\n let sender = Sender::new();\n\n assert_eq!(sender.receiver_count(), 0);\n\n\n\n let recv1 = sender.subscribe();\n\n assert_eq!(sender.receiver_count(), 1);\n\n\n\n let recv2 = sender.subscribe();\n\n assert_eq!(sender.receiver_count(), 2);\n\n\n\n drop(recv1);\n\n assert_eq!(sender.receiver_count(), 1);\n\n\n\n let sender2 = sender.clone();\n\n let recv3 = sender2.subscribe();\n\n assert_eq!(sender.receiver_count(), 2);\n\n\n\n drop(recv2);\n\n drop(recv3);\n\n assert_eq!(sender.receiver_count(), 0);\n\n assert_eq!(sender2.receiver_count(), 0);\n\n}\n", "file_path": "adapter/src/cancellation.rs", "rank": 77, "score": 82135.19916704058 }, { "content": "#[test]\n\nfn evaluate() {\n\n use lldb::*;\n\n let interp = DEBUGGER.command_interpreter();\n\n let cwd = std::env::current_dir().unwrap(); // Should be \"build/adapter\"\n\n let (python, _) = initialize(interp, &cwd, None).unwrap();\n\n let context = SBExecutionContext::from_target(&DEBUGGER.dummy_target());\n\n let pycode = python.compile_code(\"2+2\", \"<string>\").unwrap();\n\n let result = python.evaluate(&pycode, true, &context);\n\n println!(\"result = {:?}\", result);\n\n let value = result.unwrap().value_as_signed(0);\n\n assert_eq!(value, 4);\n\n}\n", "file_path": "adapter/src/python.rs", "rank": 78, "score": 82135.19916704058 }, { "content": "#[test]\n\nfn test1() {\n\n println!(\"test1\");\n\n}\n\n\n", "file_path": 
"debuggee/rust/tests.rs", "rank": 79, "score": 82135.19916704058 }, { "content": "#[test]\n\nfn test2() {\n\n println!(\"test2\");\n\n}\n", "file_path": "debuggee/rust/tests.rs", "rank": 80, "score": 82135.19916704058 }, { "content": "#[test]\n\n#[allow(unused)]\n\nfn test1() {\n\n let mut handles = HandleTree::new();\n\n let a1 = handles.create(None, \"1\", 0xa1);\n\n let a2 = handles.create(None, \"2\", 0xa2);\n\n let a11 = handles.create(Some(a1), \"1.1\", 0xa11);\n\n let a12 = handles.create(Some(a1), \"1.2\", 0xa12);\n\n let a121 = handles.create(Some(a12), \"1.2.1\", 0xa121);\n\n let a21 = handles.create(Some(a2), \"2.1\", 0xa21);\n\n\n\n assert_eq!(handles.get(a1).unwrap(), &0xa1);\n\n assert_eq!(handles.get(a12).unwrap(), &0xa12);\n\n assert_eq!(handles.get(a121).unwrap(), &0xa121);\n\n\n\n handles.reset();\n\n let b1 = handles.create(None, \"1\", 0xb1);\n\n let b3 = handles.create(None, \"3\", 0xb3);\n\n let b11 = handles.create(Some(b1), \"1.1\", 0xb11);\n\n let b12 = handles.create(Some(b1), \"1.2\", 0xb12);\n\n let b13 = handles.create(Some(b1), \"1.3\", 0xb13);\n\n let b121 = handles.create(Some(b12), \"1.2.1\", 0xb121);\n", "file_path": "adapter/src/handles.rs", "rank": 81, "score": 82135.19916704058 }, { "content": "#[test]\n\n#[should_panic]\n\n#[allow(unused)]\n\nfn test2() {\n\n let mut handles = HandleTree::new();\n\n let h1 = handles.create(None, \"12345\", 12345);\n\n // Should panic because parent handle is invalid\n\n let h2 = handles.create(Some(Handle::new(h1.get() + 1).unwrap()), \"12345\", 12345);\n\n}\n", "file_path": "adapter/src/handles.rs", "rank": 82, "score": 82135.19916704058 }, { "content": "fn main() {\n\n println!(\"Build script\");\n\n}\n", "file_path": "debuggee/rust/build.rs", "rank": 83, "score": 82135.19916704058 }, { "content": "#[test]\n\nfn pypath() {\n\n use lldb::*;\n\n let interp = DEBUGGER.command_interpreter();\n\n let mut result = SBCommandReturnObject::new();\n\n let status = interp.handle_command(\"script import sys; print(sys.path)\", &mut result, false);\n\n println!(\"result = {:?}\", result.output());\n\n assert_eq!(status, ReturnStatus::SuccessFinishNoResult);\n\n}\n\n\n", "file_path": "adapter/src/python.rs", "rank": 84, "score": 82135.19916704058 }, { "content": "class LLDB_API SBError {\n\npublic:\n\n SBError();\n\n\n\n SBError(const lldb::SBError &rhs);\n\n\n\n ~SBError();\n\n\n\n const SBError &operator=(const lldb::SBError &rhs);\n\n\n\n const char *GetCString() const;\n\n\n\n void Clear();\n\n\n\n bool Fail() const;\n\n\n\n bool Success() const;\n\n\n\n uint32_t GetError() const;\n\n\n", "file_path": "adapter/deps/lldb/include/lldb/API/SBError.h", "rank": 85, "score": 82112.77310727697 }, { "content": "class LLDB_API SBData {\n\npublic:\n\n SBData();\n\n\n\n SBData(const SBData &rhs);\n\n\n\n const SBData &operator=(const SBData &rhs);\n\n\n\n ~SBData();\n\n\n\n uint8_t GetAddressByteSize();\n\n\n\n void SetAddressByteSize(uint8_t addr_byte_size);\n\n\n\n void Clear();\n\n\n\n explicit operator bool() const;\n\n\n\n bool IsValid();\n\n\n", "file_path": "adapter/deps/lldb/include/lldb/API/SBData.h", "rank": 86, "score": 82073.73542186097 }, { "content": " enum class Type { eTypeKeepSame, eTypeFormat, eTypeEnum };\n\n\n\n bool CopyOnWrite_Impl(Type);\n\n};\n\n\n\n} // namespace lldb\n\n\n\n#endif // LLDB_SBTypeFormat_h_\n", "file_path": "adapter/deps/lldb/include/lldb/API/SBTypeFormat.h", "rank": 87, "score": 81738.55696761717 }, { "content": "fn main() {\n\n let target_os = env::var(\"CARGO_CFG_TARGET_OS\").unwrap();\n\n let 
weak_linkage = match env::var(\"CARGO_FEATURE_WEAK_LINKAGE\") {\n\n Ok(_) => true,\n\n Err(_) => false,\n\n };\n\n\n\n // Generate C++ bindings\n\n let mut build_config = cpp_build::Config::new();\n\n build_config.include(\"include\");\n\n if weak_linkage {\n\n build_config.cpp_link_stdlib(None);\n\n }\n\n\n\n build_config.build(\"src/lib.rs\");\n\n for entry in fs::read_dir(\"src\").unwrap() {\n\n println!(\"cargo:rerun-if-changed={}\", entry.unwrap().path().display());\n\n }\n\n\n\n if target_os == \"windows\" {\n", "file_path": "adapter/deps/lldb/build.rs", "rank": 88, "score": 80924.05330890154 }, { "content": "fn finalize_reproducer() {\n\n if CREATING_REPRODUCER.load(Ordering::Acquire) {\n\n if let Some(path) = SBReproducer::path() {\n\n if SBReproducer::generate() {\n\n info!(\"Reproducer saved to {:?}\", path);\n\n } else {\n\n error!(\"finalize_reproducer: failed\");\n\n }\n\n }\n\n }\n\n}\n", "file_path": "adapter/src/lib.rs", "rank": 89, "score": 80924.05330890154 }, { "content": "#[cfg(unix)]\n\nfn hook_crashes() {\n\n extern \"C\" fn handler(sig: libc::c_int) {\n\n let sig_name = match sig {\n\n libc::SIGSEGV => \"SIGSEGV\",\n\n libc::SIGBUS => \"SIGBUS\",\n\n libc::SIGILL => \"SIGILL\",\n\n libc::SIGFPE => \"SIGFPE\",\n\n libc::SIGABRT => \"SIGABRT\",\n\n _ => unreachable!(),\n\n };\n\n let bt = backtrace::Backtrace::new();\n\n eprintln!(\"Received signal: {}\", sig_name);\n\n eprintln!(\"{:?}\", bt);\n\n finalize_reproducer();\n\n std::process::exit(255);\n\n }\n\n\n\n unsafe {\n\n libc::signal(libc::SIGSEGV, handler as usize);\n\n libc::signal(libc::SIGBUS, handler as usize);\n\n libc::signal(libc::SIGILL, handler as usize);\n\n libc::signal(libc::SIGFPE, handler as usize);\n\n libc::signal(libc::SIGABRT, handler as usize);\n\n }\n\n}\n\n\n", "file_path": "adapter/src/lib.rs", "rank": 90, "score": 80924.05330890154 }, { "content": "#[test]\n\nfn case_folder() {\n\n let mut folder = make_case_folder();\n\n assert_eq!(folder(\"Path\"), \"Path\");\n\n assert_eq!(folder(\"PATH\"), \"Path\");\n\n\n\n assert_eq!(folder(\"Foo\"), \"Foo\");\n\n assert_eq!(folder(\"foo\"), \"Foo\");\n\n}\n\n\n\n// #[cfg(unix)]\n\n// pub fn waitpid(pid: u32) -> Result<(), io::Error> {\n\n// use std::ptr;\n\n\n\n// unsafe {\n\n// if libc::waitpid(pid as libc::pid_t, ptr::null_mut(), 0) < 0 {\n\n// return Err(io::Error::last_os_error()).into();\n\n// }\n\n// }\n\n// Ok(())\n\n// }\n", "file_path": "adapter/src/platform.rs", "rank": 91, "score": 80924.05330890154 }, { "content": "#[cfg(windows)]\n\nfn hook_crashes() {}\n\n\n\nstatic CREATING_REPRODUCER: AtomicBool = AtomicBool::new(false);\n\n\n", "file_path": "adapter/src/lib.rs", "rank": 92, "score": 80924.05330890154 }, { "content": "#[test]\n\nfn test_sizeof() {\n\n // codelldb.py makes assumptions about sizes of these types:\n\n assert_eq!(mem::size_of::<SBError>(), 8);\n\n assert_eq!(mem::size_of::<SBExecutionContext>(), 16);\n\n assert_eq!(mem::size_of::<SBValue>(), 16);\n\n assert_eq!(mem::size_of::<SBModule>(), 16);\n\n assert_eq!(mem::size_of::<PyObject>(), 16);\n\n}\n\n\n\n#[cfg(test)]\n\nlazy_static::lazy_static! 
{\n\n static ref DEBUGGER: SBDebugger = {\n\n use lldb::*;\n\n std::env::remove_var(\"PYTHONHOME\");\n\n std::env::remove_var(\"PYTHONPATH\");\n\n SBDebugger::initialize();\n\n SBDebugger::create(false)\n\n };\n\n}\n\n\n", "file_path": "adapter/src/python.rs", "rank": 93, "score": 80924.05330890154 }, { "content": "#[test]\n\nfn test_normalize_path() {\n\n assert_eq!(normalize_path(\"/foo/bar\"), Path::new(\"/foo/bar\"));\n\n assert_eq!(normalize_path(\"foo/bar\"), Path::new(\"foo/bar\"));\n\n assert_eq!(normalize_path(\"/foo/bar/./baz/./../\"), Path::new(\"/foo/bar\"));\n\n assert_eq!(normalize_path(r\"c:\\foo\\bar/./baz/./../\"), Path::new(r\"c:\\foo\\bar\"));\n\n #[cfg(windows)]\n\n assert_eq!(normalize_path(r\"C:/QQQ/WWW/..\\..\\FOO/\\bar.baz\"), Path::new(r\"c:\\FOO/bar.baz\"));\n\n}\n", "file_path": "adapter/src/fsutil.rs", "rank": 94, "score": 79771.00573563772 }, { "content": "fn strong_linkage() {\n\n // Find CMakeCache\n\n let mut path = PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n let cmakecache = loop {\n\n let f = path.with_file_name(\"CMakeCache.txt\");\n\n if f.is_file() {\n\n break f;\n\n }\n\n if !path.pop() {\n\n println!(\"cargo:warning=Could not find CMakeCache.txt\");\n\n return;\n\n }\n\n };\n\n println!(\"cargo:rerun-if-changed={}\", cmakecache.display());\n\n let config = parse_cmakecache(&cmakecache);\n\n\n\n if let Some(value) = config.get(\"LLDB_LinkSearch\") {\n\n for path in value.split_terminator(';') {\n\n println!(\"cargo:rustc-link-search=native={}\", path);\n\n }\n", "file_path": "adapter/deps/lldb/build.rs", "rank": 95, "score": 79771.00573563772 } ]
Rust
src/once.rs
tiqwab/xv6-rust
525899393df10855a274ce0a9d4e5841aa032aeb
use core::cell::UnsafeCell; use core::fmt; use core::fmt::Formatter; use core::hint::unreachable_unchecked as unreachable; use core::sync::atomic::{spin_loop_hint as cpu_relax, AtomicUsize, Ordering}; pub(crate) struct Once<T> { state: AtomicUsize, data: UnsafeCell<Option<T>>, } impl<T: fmt::Debug> fmt::Debug for Once<T> { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { match self.try_get() { Some(s) => write!(f, "Once {{ data: ") .and_then(|()| s.fmt(f)) .and_then(|()| write!(f, "}}")), None => write!(f, "Once {{ <uninitialized> }}"), } } } unsafe impl<T: Send + Sync> Sync for Once<T> {} unsafe impl<T: Send> Send for Once<T> {} const INCOMPLETE: usize = 0x0; const RUNNING: usize = 0x01; const COMPLETE: usize = 0x2; const PANICKED: usize = 0x3; impl<T> Once<T> { pub(crate) const INIT: Self = Once { state: AtomicUsize::new(INCOMPLETE), data: UnsafeCell::new(None), }; pub(crate) const fn new() -> Once<T> { Self::INIT } fn force_get(&self) -> &T { match unsafe { &*self.data.get() }.as_ref() { None => unsafe { unreachable() }, Some(p) => p, } } pub fn call_once<F: FnOnce() -> T>(&self, builder: F) -> &T { let mut status = self.state.load(Ordering::SeqCst); if status == INCOMPLETE { status = self .state .compare_and_swap(INCOMPLETE, RUNNING, Ordering::SeqCst); let mut finish = Finish { state: &self.state, panicked: true, }; unsafe { *self.data.get() = Some(builder()) }; finish.panicked = false; status = COMPLETE; self.state.store(status, Ordering::SeqCst); return self.force_get(); } loop { match status { INCOMPLETE => unreachable!(), RUNNING => { cpu_relax(); status = self.state.load(Ordering::SeqCst) } PANICKED => panic!("Once has panicked"), COMPLETE => return self.force_get(), _ => unreachable!(), } } } pub(crate) fn try_get(&self) -> Option<&T> { match self.state.load(Ordering::SeqCst) { COMPLETE => Some(self.force_get()), _ => None, } } pub(crate) fn wait(&self) -> Option<&T> { loop { match self.state.load(Ordering::SeqCst) { INCOMPLETE => return None, RUNNING => cpu_relax(), COMPLETE => return Some(self.force_get()), PANICKED => panic!("Once has panicked"), _ => unreachable!(), } } } } struct Finish<'a> { state: &'a AtomicUsize, panicked: bool, } impl<'a> Drop for Finish<'a> { fn drop(&mut self) { if self.panicked { self.state.store(PANICKED, Ordering::SeqCst); } } }
use core::cell::UnsafeCell;
use core::fmt;
use core::fmt::Formatter;
use core::hint::unreachable_unchecked as unreachable;
use core::sync::atomic::{spin_loop_hint as cpu_relax, AtomicUsize, Ordering};

pub(crate) struct Once<T> {
    state: AtomicUsize,
    data: UnsafeCell<Option<T>>,
}

impl<T: fmt::Debug> fmt::Debug for Once<T> {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        match self.try_get() {
            Some(s) => write!(f, "Once {{ data: ")
                .and_then(|()| s.fmt(f))
                .and_then(|()| write!(f, "}}")),
            None => write!(f, "Once {{ <uninitialized> }}"),
        }
    }
}

unsafe impl<T: Send + Sync> Sync for Once<T> {}
unsafe impl<T: Send> Send for Once<T> {}

const INCOMPLETE: usize = 0x0;
const RUNNING: usize = 0x01;
const COMPLETE: usize = 0x2;
const PANICKED: usize = 0x3;

impl<T> Once<T> {
    pub(crate) const INIT: Self = Once {
        state: AtomicUsize::new(INCOMPLETE),
        data: UnsafeCell::new(None),
    };

    pub(crate) const fn new() -> Once<T> {
        Self::INIT
    }
    pub fn call_once<F: FnOnce() -> T>(&self, builder: F) -> &T {
        let mut status = self.state.load(Ordering::SeqCst);

        if status == INCOMPLETE {
            status = self
                .state
                .compare_and_swap(INCOMPLETE, RUNNING, Ordering::SeqCst);

            let mut finish = Finish {
                state: &self.state,
                panicked: true,
            };
            unsafe { *self.data.get() = Some(builder()) };
            finish.panicked = false;

            status = COMPLETE;
            self.state.store(status, Ordering::SeqCst);

            return self.force_get();
        }

        loop {
            match status {
                INCOMPLETE => unreachable!(),
                RUNNING => {
                    cpu_relax();
                    status = self.state.load(Ordering::SeqCst)
                }
                PANICKED => panic!("Once has panicked"),
                COMPLETE => return self.force_get(),
                _ => unreachable!(),
            }
        }
    }

    pub(crate) fn try_get(&self) -> Option<&T> {
        match self.state.load(Ordering::SeqCst) {
            COMPLETE => Some(self.force_get()),
            _ => None,
        }
    }

    pub(crate) fn wait(&self) -> Option<&T> {
        loop {
            match self.state.load(Ordering::SeqCst) {
                INCOMPLETE => return None,
                RUNNING => cpu_relax(),
                COMPLETE => return Some(self.force_get()),
                PANICKED => panic!("Once has panicked"),
                _ => unreachable!(),
            }
        }
    }
}

struct Finish<'a> {
    state: &'a AtomicUsize,
    panicked: bool,
}

impl<'a> Drop for Finish<'a> {
    fn drop(&mut self) {
        if self.panicked {
            self.state.store(PANICKED, Ordering::SeqCst);
        }
    }
}
    fn force_get(&self) -> &T {
        match unsafe { &*self.data.get() }.as_ref() {
            None => unsafe { unreachable() },
            Some(p) => p,
        }
    }
function_block-full_function
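Taken together, the prefix, suffix, and middle fields above form a spin-based one-time initialization primitive: `call_once` compare-and-swaps the state from INCOMPLETE to RUNNING, runs the builder closure exactly once, stores the result, and publishes COMPLETE, while concurrent callers spin with `cpu_relax` until the value is ready; the `Finish` drop guard flips the state to PANICKED if the builder panics. The context items below show real call sites in the same repository, e.g. `SUPER_BLOCK.call_once(|| read_sb(1))` and `LOG.call_once(|| Mutex::new(log_init(ROOT_DEV)))`. The following is a minimal usage sketch assuming only the `Once<T>` API shown above; the static name `ANSWER` and its builder closure are hypothetical, not taken from the repository:

// Hypothetical usage sketch of the Once<T> API above;
// `ANSWER` and the builder closure are illustrative only.
static ANSWER: Once<u32> = Once::new();

fn answer() -> u32 {
    // The first caller runs the closure; concurrent callers spin (cpu_relax)
    // until the state reaches COMPLETE, then all of them read the cached value.
    *ANSWER.call_once(|| 40 + 2)
}

This is the same pattern the kernel code in the context items uses to lazily build global state (the log, the inode cache, the superblock, the device switch table) in a `const`-initializable static without heap allocation at static-init time.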
[ { "content": "/// Modify mappings in kern_pgdir to support SMP\n\n/// - Map the per-CPU stacks in the region [KSTACKTOP-PTSIZE, KSTACKTOP)\n\nfn mem_init_mp(kern_pgdir: &mut PageDirectory, allocator: &mut PageAllocator) {\n\n // Map per-CPU stacks starting at KSTACKTOP, for up to 'NCPU' CPUs.\n\n //\n\n // For CPU i, use the physical memory that 'percpu_kstacks[i]' refers\n\n // to as its kernel stack. CPU i's kernel stack grows down from virtual\n\n // address kstacktop_i = KSTACKTOP - i * (KSTKSIZE + KSTKGAP), and is\n\n // divided into two pieces, just like the single stack you set up in\n\n // mem_init:\n\n // * [kstacktop_i - KSTKSIZE, kstacktop_i)\n\n // -- backed by physical memory\n\n // * [kstacktop_i - (KSTKSIZE + KSTKGAP), kstacktop_i - KSTKSIZE)\n\n // -- not backed; so if the kernel overflows its stack,\n\n // it will fault rather than overwrite another CPU's stack.\n\n // Known as a \"guard page\".\n\n // Permissions: kernel RW, user NONE\n\n\n\n for i in 0..MAX_NUM_CPU {\n\n let start_va = VirtAddr(KSTACKTOP - (KSTKSIZE + KSTKGAP) * (i as u32) - KSTKSIZE);\n\n let start_pa = unsafe { VirtAddr(&PERCPU_KSTACKS[i] as *const _ as u32).to_pa() };\n\n kern_pgdir.boot_map_region(\n", "file_path": "src/pmap.rs", "rank": 0, "score": 131759.39109564264 }, { "content": "/// Look up and return the inode for a path name.\n\n/// If does_want_parent == true, return the inode for the parent and copy the final\n\n/// path element into name, which must have room for DIRSIZ bytes.\n\n/// Must be called inside a transaction since it calls iput().\n\nfn namex(mut path: *const u8, does_want_parent: bool, name: *mut u8) -> Option<Arc<RwLock<Inode>>> {\n\n let mut ip: Arc<RwLock<Inode>>;\n\n\n\n unsafe {\n\n if *path == '/' as u8 {\n\n ip = iget(ROOT_DEV, ROOT_INUM);\n\n } else {\n\n let cur_env = env::cur_env().unwrap();\n\n ip = idup(cur_env.get_cwd())\n\n }\n\n\n\n loop {\n\n path = skip_elem(path, name);\n\n if path.is_null() {\n\n break;\n\n }\n\n\n\n let mut inode = ilock(&ip);\n\n\n\n if !inode.is_dir() {\n", "file_path": "src/fs.rs", "rank": 1, "score": 119175.45255560821 }, { "content": "fn set_mask_8259a(new_mask: u16, mut mask: MutexGuard<u16>) {\n\n *mask = new_mask;\n\n if !DID_INIT.load(Ordering::Acquire) {\n\n return;\n\n }\n\n x86::outb(IO_MASTER_DATA, new_mask as u8);\n\n x86::outb(IO_SLAVE_DATA, (new_mask >> 8) as u8);\n\n print!(\"enabled interrupts:\");\n\n for i in 0..16 {\n\n if !new_mask & (1 << i) != 0 {\n\n print!(\" {}\", i);\n\n }\n\n }\n\n println!();\n\n}\n", "file_path": "src/picirq.rs", "rank": 2, "score": 116793.74576505853 }, { "content": "fn do_nothing_write(_inode: &Inode, _buf: *const u8, _count: usize) -> i32 {\n\n 0\n\n}\n\n\n\nunsafe impl Sync for DevSw {}\n\nunsafe impl Send for DevSw {}\n\n\n\nstatic DEV_SW: Once<[Option<DevSw>; NDEV]> = Once::new();\n\n\n\npub(crate) fn get_dev_sw(idx: usize) -> Option<&'static DevSw> {\n\n let dev_sw = DEV_SW.call_once(|| {\n\n let mut res = [None; NDEV];\n\n\n\n res[CONSOLE] = Some(DevSw {\n\n read: Box::new(console::console_read),\n\n write: Box::new(console::console_write),\n\n });\n\n\n\n res\n\n });\n\n\n\n dev_sw.get(idx).and_then(|sw_opt| sw_opt.as_ref())\n\n}\n", "file_path": "src/device.rs", "rank": 3, "score": 112807.43001352283 }, { "content": "fn sys_write(fd: FileDescriptor, buf: *const u8, len: usize) -> i32 {\n\n match env::cur_env_mut().unwrap().fd_get(fd) {\n\n None => SysError::IllegalFileDescriptor.err_no(),\n\n Some(ent) => {\n\n let mut f = ent.file.write();\n\n match f.write(buf, len) {\n\n Err(err) 
=> err.err_no(),\n\n Ok(cnt) => cnt as i32,\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/syscall.rs", "rank": 4, "score": 110233.54393089723 }, { "content": "// This MUST be initialized first with `init()`\n\nstruct KernelPageDirectory(*mut PageDirectory);\n\n// Get the lock of KERN_PGDIR first if you use both of KERN_PGDIR and PAGE_ALLOCATOR.\n\nstatic KERN_PGDIR: Mutex<KernelPageDirectory> = Mutex::new(KernelPageDirectory(null_mut()));\n\n\n\nunsafe impl Send for KernelPageDirectory {}\n\nunsafe impl Sync for KernelPageDirectory {}\n\n\n\nimpl KernelPageDirectory {\n\n fn init(&mut self, pgdir: *mut PageDirectory) {\n\n self.0 = pgdir;\n\n }\n\n\n\n fn paddr(&self) -> PhysAddr {\n\n VirtAddr(self.0 as u32).to_pa()\n\n }\n\n}\n\n\n\nimpl Deref for KernelPageDirectory {\n\n type Target = PageDirectory;\n\n\n", "file_path": "src/pmap.rs", "rank": 5, "score": 110053.17866238645 }, { "content": "fn commit(log: &mut Log) {\n\n if log.lh.n > 0 {\n\n write_log(log); // write modified blocks from cache to log\n\n write_head(log); // write header to disk -- the real commit\n\n install_trans(log); // now install writes to home locations\n\n log.lh.n = 0;\n\n write_head(log); // erase the transaction from the log\n\n }\n\n}\n\n\n", "file_path": "src/log.rs", "rank": 6, "score": 109359.38533287351 }, { "content": "// Truncate inode (discard contents).\n\n// Only called when the inode has no links\n\n// to it (no directory entries referring to it)\n\n// and has no in-memory reference to it (is\n\n// not an open file or current directory).\n\nfn itrunc(inode: &mut Inode) {\n\n for i in 0..NDIRECT {\n\n if inode.addrs[i] > 0 {\n\n bfree(inode.dev, inode.addrs[i]);\n\n inode.addrs[i] = 0;\n\n }\n\n }\n\n\n\n if inode.addrs[NDIRECT] > 0 {\n\n // there are indirect inodes too.\n\n let mut bcache = buf::buf_cache();\n\n let mut bp = bcache.get(inode.dev, inode.addrs[NDIRECT]);\n\n bp.read();\n\n\n\n let a = bp.data().as_ptr().cast::<u32>();\n\n for i in 0..NINDIRECT {\n\n let inum = unsafe { *a.add(i) };\n\n if inum > 0 {\n\n bfree(inode.dev, inum);\n\n }\n", "file_path": "src/fs.rs", "rank": 7, "score": 109359.38533287351 }, { "content": "fn do_nothing_read(_inode: &Inode, _buf: *mut u8, _count: usize) -> Option<i32> {\n\n Some(0)\n\n}\n\n\n", "file_path": "src/device.rs", "rank": 8, "score": 108541.75338959119 }, { "content": "fn load_from_disk(mut dst: VirtAddr, inode: &mut Inode, mut off: u32, mut remain_sz: u32) {\n\n while remain_sz > 0 {\n\n let sz = cmp::min(PGSIZE, remain_sz);\n\n if fs::readi(inode, dst.as_mut_ptr(), off, sz) != Some(sz) {\n\n panic!(\"load_from_disk: failed to readi\");\n\n }\n\n dst += sz;\n\n off += sz;\n\n remain_sz -= sz;\n\n }\n\n}\n\n\n\npub(crate) fn exec(path: *const u8, argv: &[*const u8], env: &mut Env) -> Result<(), SysError> {\n\n // check path and return error without changing pgdir if path is illegal.\n\n let ip = fs::namei(path)\n\n .into_result()\n\n .map_err(|_| SysError::InvalidArg)?;\n\n let mut inode = fs::ilock(&ip);\n\n\n\n // Allocate and set up the page directory for this environment.\n", "file_path": "src/env.rs", "rank": 9, "score": 107337.17668209237 }, { "content": "/// Check a system call argument for path.\n\n/// It should be in user space and less than MAX_CMD_ARG_LEN.\n\n/// If check fails, the functino doesn't return.\n\nfn path_check(arg: *const u8) {\n\n let curenv = env::cur_env_mut().expect(\"curenv should be exist\");\n\n let len = util::strnlen(arg, MAX_PATH_LEN + 1);\n\n if len > MAX_PATH_LEN {\n\n let env_table = env::env_table();\n\n 
env::env_destroy(curenv.get_env_id(), env_table);\n\n }\n\n env::user_mem_assert(curenv, VirtAddr(arg as u32), len, 0);\n\n}\n\n\n\n/// Dispatched to the correct kernel function, passing the arguments.\n\npub(crate) unsafe fn syscall(syscall_no: u32, a1: u32, a2: u32, a3: u32, a4: u32, a5: u32) -> i32 {\n\n if syscall_no == SYS_CPUTS {\n\n // SYS_CPUTS is deprecated, use SYS_WRITE instead.\n\n let raw_s = a1 as *const u8;\n\n let len = a2 as usize;\n\n let curenv = env::cur_env_mut().expect(\"curenv should exist\");\n\n env::user_mem_assert(curenv, VirtAddr(raw_s as u32), len, 0);\n\n sys_write(FileDescriptor(1), raw_s, len)\n\n } else if syscall_no == SYS_EXIT {\n", "file_path": "src/syscall.rs", "rank": 10, "score": 106963.64485816518 }, { "content": "/// Read the log header from disk into the in-memory log header\n\nfn read_head(log: &mut Log) {\n\n let mut bcache = buf::buf_cache();\n\n\n\n let buf = bcache.get(log.dev, log.start as u32);\n\n\n\n let lh_on_disk = unsafe {\n\n let ptr = buf.data_mut().as_mut_ptr().cast::<LogHeader>();\n\n &mut *ptr\n\n };\n\n\n\n log.lh.init(lh_on_disk);\n\n\n\n bcache.release(buf);\n\n}\n\n\n", "file_path": "src/log.rs", "rank": 11, "score": 106845.94472972251 }, { "content": "fn trap_dispatch(tf: &mut Trapframe) {\n\n // Handle processor exceptions.\n\n if tf.tf_trapno == (IRQ_OFFSET + IRQ_TIMER) as u32 {\n\n lapic::eoi();\n\n } else if tf.tf_trapno == (IRQ_OFFSET + IRQ_KBD) as u32 {\n\n console::console_intr();\n\n } else if tf.tf_trapno == (IRQ_OFFSET + IRQ_IDE) as u32 {\n\n panic!(\"unexpected interrupt from IDE\");\n\n } else if tf.tf_trapno == T_SYSCALL {\n\n unsafe {\n\n let ret = syscall::syscall(\n\n tf.tf_regs.reg_eax,\n\n tf.tf_regs.reg_edx,\n\n tf.tf_regs.reg_ecx,\n\n tf.tf_regs.reg_ebx,\n\n tf.tf_regs.reg_edi,\n\n tf.tf_regs.reg_esi,\n\n );\n\n tf.tf_regs.reg_eax = ret as u32;\n\n }\n", "file_path": "src/trap.rs", "rank": 12, "score": 106845.94472972251 }, { "content": "fn recover_from_log(log: &mut Log) {\n\n read_head(log);\n\n install_trans(log); // if committed, copy from log to disk\n\n log.lh.n = 0;\n\n write_head(log); // clear the log\n\n}\n\n\n\n/// Caller has modified b->data and is done with the buffer.\n\n/// Record the block number and pin in the cache with B_DIRTY.\n\n/// commit()/write_log() will do the disk write.\n\n///\n\n/// log_write() replaces bwrite(); a typical use is:\n\n/// bp = bread(...)\n\n/// modify bp->data[]\n\n/// log_write(bp)\n\n/// brelse(bp)\n\npub(crate) fn log_write(buf: &mut BufCacheHandler) {\n\n let mut log = get_log();\n\n\n\n if log.lh.n >= LOG_SIZE || log.lh.n >= log.size - 1 {\n", "file_path": "src/log.rs", "rank": 13, "score": 106845.94472972251 }, { "content": "/// Restores the register values in the Trapframe with the 'iret' instruction.\n\n/// This exits the kernel and starts executing some environment's code.\n\n///\n\n/// This function does not return.\n\nfn env_pop_tf(tf: *const Trapframe) -> ! 
{\n\n unsafe {\n\n llvm_asm!(\n\n \"movl $0, %esp; \\\n\n popal; \\\n\n popl %es; \\\n\n popl %ds; \\\n\n addl $1, %esp; \\\n\n iret\"\n\n : : \"rmi\" (tf), \"i\" (0x8) : \"memory\" : \"volatile\"\n\n );\n\n }\n\n\n\n panic!(\"iret failed\")\n\n}\n\n\n\n/// Context switch from curenv to env e.\n\n/// Note: if this is the first call to env_run, curenv is NULL.\n\n/// Note: This function unlock a passed MutexGuard<ENV_TABLE>.\n\n///\n", "file_path": "src/env.rs", "rank": 14, "score": 104616.88608304589 }, { "content": "/// Return inode pointer in the block.\n\n/// Assume that a passed block is calculated correctly by block_for_inode.\n\nfn ref_to_inode(inum: u32, bp: &mut BufCacheHandler) -> &mut DInode {\n\n let data = bp.data_mut().as_mut_ptr();\n\n let dip = data.cast::<DInode>();\n\n unsafe { &mut *dip.add((inum as usize) % IPB) }\n\n}\n\n\n\n/// Allocate an inode on device dev.\n\npub(crate) fn ialloc(dev: u32, typ: InodeType, major: u16, minor: u16) -> Arc<RwLock<Inode>> {\n\n let sb = superblock::get();\n\n\n\n for inum in 1..(sb.ninodes) {\n\n let mut bcache = buf::buf_cache();\n\n let mut bp = bcache.get(dev, block_for_inode(inum, sb));\n\n bp.read();\n\n\n\n let dinode = ref_to_inode(inum, &mut bp);\n\n if dinode.typ == InodeType::Empty {\n\n // a free node\n\n unsafe {\n\n util::memset(\n", "file_path": "src/fs.rs", "rank": 15, "score": 100874.96586474398 }, { "content": "pub fn mem_init() {\n\n // Find out how much memory the machine has (npages & npages_basemem).\n\n let (npages, npages_basemem) = i386_detect_memory();\n\n\n\n // create initial page directory.\n\n let bss_end = VirtAddr(unsafe { &end as *const _ as u32 }).round_up(PGSIZE as usize);\n\n unsafe {\n\n println!(\n\n \"end: {:p}, bss_end(end rounded up by page size): 0x{:x}\",\n\n &end, bss_end.0\n\n )\n\n };\n\n let mut boot_allocator = BootAllocator::new(bss_end);\n\n let kern_pgdir_va = boot_allocator.alloc(PGSIZE);\n\n let mut kern_pgdir = KERN_PGDIR.lock();\n\n kern_pgdir.init(kern_pgdir_va.0 as *mut PageDirectory);\n\n println!(\"kern_pgdir: 0x{:x}\", kern_pgdir_va.0);\n\n // memset(kern_pgdir, 0, PGSIZE);\n\n\n\n // Allocate an array of npages 'struct PageInfo's and store it in 'pages'.\n", "file_path": "src/pmap.rs", "rank": 16, "score": 99230.24458742022 }, { "content": "/// This is just for debug.\n\nfn print_file_name(label: &str, p: *const u8) {\n\n let mut buf = [0 as u8; DIR_SIZ];\n\n for i in 0..(util::strnlen(p, DIR_SIZ)) {\n\n buf[i] = unsafe { *p.add(i) };\n\n }\n\n let sli = core::str::from_utf8(&buf).unwrap();\n\n println!(\"{}: {}\", label, sli);\n\n}\n\n\n", "file_path": "src/fs.rs", "rank": 17, "score": 95309.36762835551 }, { "content": "/// Allocate a zeroed disk block.\n\nfn balloc(dev: u32, bcache: &mut BufCache) -> u32 {\n\n let sb = superblock::get();\n\n\n\n for blockno in 0..sb.size {\n\n let mut bp = bcache.get(dev, block_for_bitmap(blockno, sb));\n\n bp.read();\n\n\n\n let mut bi = 0;\n\n while bi < BPB && blockno + (bi as u32) < sb.size {\n\n let m = 1 << (bi % 8);\n\n // is block free?\n\n if bp.data()[bi / 8] & m == 0 {\n\n bp.data_mut()[bi / 8] |= m; // mark block in use\n\n log::log_write(&mut bp);\n\n bcache.release(bp);\n\n bzero(dev, blockno + (bi as u32), bcache);\n\n #[cfg(feature = \"debug\")]\n\n println!(\"[balloc] allocated blockno {}\", blockno + (bi as u32));\n\n return blockno + (bi as u32);\n\n }\n\n bi += 1;\n\n }\n\n\n\n bcache.release(bp);\n\n }\n\n\n\n panic!(\"balloc: out of blocks\");\n\n}\n\n\n", "file_path": "src/fs.rs", "rank": 18, "score": 90536.51458129546 
}, { "content": "fn log_init(dev: u32) -> Log {\n\n if mem::size_of::<LogHeader>() >= BLK_SIZE {\n\n panic!(\"log_init: too big logheader\");\n\n }\n\n\n\n let sb = superblock::get();\n\n let mut log = Log::new(sb.log_start as usize, sb.nlog as usize, dev);\n\n\n\n recover_from_log(&mut log);\n\n\n\n log\n\n}\n", "file_path": "src/log.rs", "rank": 19, "score": 88292.36759263382 }, { "content": "/// Zero a block\n\nfn bzero(dev: u32, blockno: u32, bcache: &mut BufCache) {\n\n let bp = bcache.get(dev, blockno);\n\n unsafe { util::memset(VirtAddr(bp.data().as_ptr() as u32), 0, BLK_SIZE) };\n\n bcache.release(bp);\n\n}\n\n\n", "file_path": "src/fs.rs", "rank": 20, "score": 86413.06246392793 }, { "content": "fn main() {\n\n // TODO: Is it possible to specify filename with wildcard such as '*.c'?\n\n cc::Build::new()\n\n .file(\"src/entry.S\")\n\n .file(\"src/entrypgdir.c\")\n\n .file(\"src/vectors.S\")\n\n .file(\"src/alltraps.S\")\n\n .file(\"src/mpentry.S\")\n\n .file(\"src/kbdmap.c\")\n\n .include(\"inc\")\n\n .compile(\"xv6rustkernel\");\n\n}\n\n\n", "file_path": "build.rs", "rank": 21, "score": 70583.01221696331 }, { "content": "struct Input {\n\n buf: [u8; INPUT_BUF],\n\n r: usize, // read index\n\n w: usize, // write index\n\n e: usize, // edit index\n\n}\n\n\n\nimpl Input {\n\n const fn new() -> Input {\n\n Input {\n\n buf: [0; INPUT_BUF],\n\n r: 0,\n\n w: 0,\n\n e: 0,\n\n }\n\n }\n\n}\n\n\n\nstatic INPUT: Mutex<Input> = Mutex::new(Input::new());\n\n\n", "file_path": "src/console.rs", "rank": 22, "score": 69609.2705369986 }, { "content": "#[repr(C, packed)]\n\nstruct Mp {\n\n signature: [u8; 4], // \"_MP_\"\n\n phys_addr: PhysAddr, // the physical address of the beginning of the MP configuration table.\n\n length: u8, // the length of the floating pointer structure table in paragraph (16-byte) units. This is always 0x01.\n\n spec_rev: u8, // the version number of the MP spec supported.\n\n checksum: u8, // all bytes must add up to 0.\n\n typ: u8, // MP system config type\n\n imcrp: u8, // set if IMCR is present and PIC Mode is implemented\n\n reserved: [u8; 3],\n\n}\n\n\n\nimpl Mp {\n\n /// Search for the MP Floating Pointer Structure, which according to\n\n /// MP 4 is in one of the following three locations:\n\n /// 1) in the first KB of the EBDA;\n\n /// 2) if there is no EBDA, in the last KB of system base memory;\n\n /// 3) in the BIOS ROM between 0xF0000 and 0xFFFFF.\n\n unsafe fn new() -> Option<&'static Mp> {\n\n assert_eq!(mem::size_of::<Mp>(), 16);\n\n\n", "file_path": "src/mpconfig.rs", "rank": 23, "score": 69609.2705369986 }, { "content": "struct Log {\n\n start: usize,\n\n size: usize,\n\n outstanding: usize, // how many FS sys calls are executing\n\n // committing: bool, // true if someone is in commit(). 
Please wait\n\n dev: u32,\n\n lh: LogHeader,\n\n}\n\n\n\nimpl Log {\n\n /// Create a new Log.\n\n fn new(start: usize, size: usize, dev: u32) -> Log {\n\n Log {\n\n start,\n\n size,\n\n outstanding: 0,\n\n dev,\n\n lh: LogHeader::empty(),\n\n }\n\n }\n\n}\n\n\n\nstatic LOG: Once<Mutex<Log>> = Once::new();\n\n\n", "file_path": "src/log.rs", "rank": 24, "score": 69609.2705369986 }, { "content": "fn create(\n\n path: *const u8,\n\n typ: InodeType,\n\n major: u16,\n\n minor: u16,\n\n) -> Result<Arc<RwLock<Inode>>, SysError> {\n\n let mut name = [0; DIR_SIZ];\n\n\n\n let dp = fs::nameiparent(path, name.as_mut_ptr())\n\n .into_result()\n\n .map_err(|_| {\n\n log::end_op();\n\n SysError::InvalidArg\n\n })?;\n\n\n\n let mut dir_inode = fs::ilock(&dp);\n\n\n\n let ip = fs::dir_lookup_with_name(&mut dir_inode, name.as_ptr(), null_mut());\n\n let ip = match ip {\n\n Some(p) => {\n", "file_path": "src/sysfile.rs", "rank": 25, "score": 68885.95005600045 }, { "content": "// Contents of the header block, used for both the on-disk header block\n\n// and to keep track in memory of logged block# before commit.\n\n//\n\n// This is stored at the top of log blocks of disk\n\nstruct LogHeader {\n\n n: usize,\n\n block: [u32; LOG_SIZE],\n\n}\n\n\n\nimpl LogHeader {\n\n /// Create a new empty LogHeader.\n\n /// It should be initialized with recover_from_log.\n\n const fn empty() -> LogHeader {\n\n LogHeader {\n\n n: 0,\n\n block: [0; LOG_SIZE],\n\n }\n\n }\n\n\n\n fn init(&mut self, lh: &LogHeader) {\n\n *self = LogHeader {\n\n n: lh.n,\n\n block: lh.block,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/log.rs", "rank": 26, "score": 68036.5025022018 }, { "content": "// FIXME: how to represent it in rust way\n\n// This MUST be protected by Mutex\n\nstruct PageAllocator {\n\n page_free_list: *mut PageInfo,\n\n pages: *mut PageInfo,\n\n}\n\n\n", "file_path": "src/pmap.rs", "rank": 27, "score": 68032.6824273753 }, { "content": "#[repr(C, packed)]\n\nstruct MpProc {\n\n typ: u8, // entry type (0 for Processor Entries)\n\n apicid: u8, // local APIC id\n\n version: u8, // local APIC version\n\n flags: u8, // CPU flags\n\n signature: [u8; 4], // CPU signature\n\n feature: u32, // feature flags from CPUID instruction\n\n reserved: [u8; 8],\n\n}\n\n\n\nimpl MpProc {\n\n fn is_bsp(&self) -> bool {\n\n self.flags & MPPROC_FLAGS_BP != 0\n\n }\n\n}\n\n\n\nunsafe fn check_sum<T>(mp: *const T, size: usize) -> bool {\n\n // checksum\n\n // Rust detects overflow, so accumulates as u32.\n\n let p = mp.cast::<u8>();\n", "file_path": "src/mpconfig.rs", "rank": 28, "score": 68032.6824273753 }, { "content": "#[repr(align(4096))]\n\n#[repr(C)]\n\nstruct PageTable {\n\n entries: [PTE; NPTENTRIES],\n\n}\n\n\n\nimpl Index<usize> for PageTable {\n\n type Output = PTE;\n\n fn index(&self, index: usize) -> &Self::Output {\n\n &self.entries[index]\n\n }\n\n}\n\n\n\nimpl IndexMut<usize> for PageTable {\n\n fn index_mut(&mut self, index: usize) -> &mut Self::Output {\n\n &mut self.entries[index]\n\n }\n\n}\n\n\n\nimpl Index<PTX> for PageTable {\n\n type Output = PTE;\n\n fn index(&self, index: PTX) -> &Self::Output {\n", "file_path": "src/pmap.rs", "rank": 29, "score": 68032.6824273753 }, { "content": "struct BootAllocator {\n\n bss_end: VirtAddr,\n\n next_free: Option<VirtAddr>,\n\n}\n\n\n\nimpl BootAllocator {\n\n pub fn new(bss_end: VirtAddr) -> BootAllocator {\n\n BootAllocator {\n\n bss_end: bss_end,\n\n next_free: None,\n\n }\n\n }\n\n\n\n /// This simple physical memory allocator is used only while JOS is setting\n\n /// up its virtual memory 
system. page_alloc() is the real allocator.\n\n ///\n\n /// If n>0, allocates enough pages of contiguous physical memory to hold 'n'\n\n /// bytes. Doesn't initialize the memory. Returns a kernel virtual address.\n\n ///\n\n /// If n==0, returns the address of the next free page without allocating\n", "file_path": "src/pmap.rs", "rank": 30, "score": 68032.6824273753 }, { "content": "#[repr(C, align(8))]\n\nstruct GateDesc {\n\n offsetl: u16,\n\n selector: u16,\n\n count: u8,\n\n typ: u8,\n\n offseth: u16,\n\n}\n\n\n\nimpl GateDesc {\n\n const fn empty() -> GateDesc {\n\n GateDesc {\n\n offsetl: 0,\n\n selector: 0,\n\n count: 0,\n\n typ: 0,\n\n offseth: 0,\n\n }\n\n }\n\n\n\n /// Set up a normal interrupt/trap gate descriptor.\n", "file_path": "src/trap.rs", "rank": 31, "score": 68032.6824273753 }, { "content": "#[repr(C, packed)]\n\nstruct MpConf {\n\n signature: [u8; 4], // \"PCMP\"\n\n length: u16, // the length of the base configuration table in bytes.\n\n version: u8, // the version number of the MP specification.\n\n checksum: u8,\n\n product: [u8; 20], // product id\n\n oem_table: PhysAddr, // OEM table pointer\n\n oem_length: u16, // OEM table length\n\n entry: u16, // the number of entries in the variable portion of the base table\n\n lapic_addr: PhysAddr, // the physical address of local APIC\n\n xlength: u16, // the length in bytes of the extended entries\n\n xchecksum: u8, // the checksum for the extended entries\n\n reserved: u8,\n\n entries: [u8; 0], // table entries (the number of entries is in 'entry' field)\n\n}\n\n\n\nimpl MpConf {\n\n unsafe fn new() -> Result<&'static MpConf, &'static str> {\n\n let mp = {\n\n let p = Mp::new().ok_or(\"MP floating pointer structure is not found\")?;\n", "file_path": "src/mpconfig.rs", "rank": 32, "score": 68032.6824273753 }, { "content": "struct InodeCache {\n\n inodes: BTreeMap<InodeCacheKey, Arc<RwLock<Inode>>>,\n\n n: usize,\n\n}\n\n\n\nimpl InodeCache {\n\n fn new() -> InodeCache {\n\n InodeCache {\n\n inodes: BTreeMap::new(),\n\n n: 0,\n\n }\n\n }\n\n\n\n fn get(&self, dev: u32, inum: u32) -> Option<Arc<RwLock<Inode>>> {\n\n let key = InodeCacheKey { dev, inum };\n\n self.inodes.get(&key).map(|v| v.clone())\n\n }\n\n\n\n fn create(&mut self, dev: u32, inum: u32) -> Option<Arc<RwLock<Inode>>> {\n\n if self.n >= NINODE {\n", "file_path": "src/fs.rs", "rank": 33, "score": 68032.6824273753 }, { "content": "#[derive(Debug)]\n\n#[repr(C)]\n\nstruct PageInfo {\n\n pp_link: *mut PageInfo,\n\n pp_ref: u16,\n\n}\n\n\n", "file_path": "src/pmap.rs", "rank": 34, "score": 68032.6824273753 }, { "content": "#[inline(always)]\n\nfn compare_exchange(\n\n atomic: &AtomicUsize,\n\n current: usize,\n\n new: usize,\n\n success: Ordering,\n\n failure: Ordering,\n\n strong: bool,\n\n) -> Result<usize, usize> {\n\n if strong {\n\n atomic.compare_exchange(current, new, success, failure)\n\n } else {\n\n atomic.compare_exchange_weak(current, new, success, failure)\n\n }\n\n}\n", "file_path": "src/rwlock.rs", "rank": 35, "score": 67320.3119596632 }, { "content": "fn dir_lookup(\n\n dir: &mut Inode,\n\n p_off: *mut u32,\n\n cond: Box<dyn Fn(&DirEnt) -> bool>,\n\n) -> Option<Arc<RwLock<Inode>>> {\n\n if dir.typ != InodeType::Dir {\n\n panic!(\"dir_lookup: inode is not dir\");\n\n }\n\n\n\n let dir_ent_size = mem::size_of::<DirEnt>() as u32;\n\n let mut ent = DirEnt::empty();\n\n let mut off = 0;\n\n\n\n #[cfg(feature = \"debug\")]\n\n println!(\n\n \"[dir_lookup] dir.inum: {}, dir.size: {}\",\n\n dir.inum, dir.size\n\n );\n\n\n\n while off < dir.size {\n", 
"file_path": "src/fs.rs", "rank": 36, "score": 67320.3119596632 }, { "content": "fn sys_yield() {\n\n sched::sched_yield();\n\n}\n\n\n", "file_path": "src/syscall.rs", "rank": 37, "score": 67320.3119596632 }, { "content": "#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]\n\nstruct InodeCacheKey {\n\n dev: u32,\n\n inum: u32,\n\n}\n\n\n\nimpl InodeCacheKey {\n\n fn new() -> InodeCacheKey {\n\n InodeCacheKey { dev: 0, inum: 0 }\n\n }\n\n}\n\n\n", "file_path": "src/fs.rs", "rank": 39, "score": 66573.63723300314 }, { "content": "#[inline]\n\nfn imcr_pic_to_apic() {\n\n // Select IMCR register\n\n x86::outb(0x22, 0x70);\n\n // NMI and 8259 INTR go through APIC\n\n let orig = x86::inb(0x23);\n\n x86::outb(0x23, orig | 0x01);\n\n}\n\n\n\npub(crate) fn lapic_addr() -> Option<PhysAddr> {\n\n unsafe { LAPIC_ADDR.clone() }\n\n}\n\n\n\npub(crate) fn this_cpu() -> &'static CpuInfo {\n\n unsafe { &CPUS[lapic::cpu_num() as usize] }\n\n}\n\n\n\npub(crate) fn this_cpu_mut() -> &'static mut CpuInfo {\n\n unsafe { &mut CPUS[lapic::cpu_num() as usize] }\n\n}\n\n\n", "file_path": "src/mpconfig.rs", "rank": 40, "score": 65871.40039763662 }, { "content": "#[derive(Debug)]\n\n#[repr(C)]\n\nstruct PTE(u32);\n\n\n\nimpl PTE {\n\n fn new(pa: PhysAddr, attr: u32) -> PTE {\n\n let mut pte = PTE(0);\n\n pte.set(pa, attr);\n\n pte\n\n }\n\n\n\n fn exists(&self) -> bool {\n\n self.0 & PTE_P == 0x1\n\n }\n\n\n\n fn addr(&self) -> PhysAddr {\n\n PhysAddr(self.0 & 0xfffff000)\n\n }\n\n\n\n fn attr(&self) -> u32 {\n\n self.0 & 0x00000fff\n\n }\n\n\n\n fn set(&mut self, pa: PhysAddr, attr: u32) {\n\n self.0 = pa.0 | attr;\n\n }\n\n\n\n fn clear(&mut self) {\n\n self.0 = 0;\n\n }\n\n}\n\n\n", "file_path": "src/pmap.rs", "rank": 41, "score": 63514.64693209684 }, { "content": "#[derive(Debug, Clone, Copy)]\n\n#[repr(C)]\n\nstruct PTX(VirtAddr);\n\n\n", "file_path": "src/pmap.rs", "rank": 42, "score": 62055.601737724675 }, { "content": "#[no_mangle]\n\npub fn lib_main() {\n\n unsafe {\n\n let vga_buffer = &mut *((0xb8000 + KERN_BASE) as *mut Buffer);\n\n vga_buffer::init_writer(vga_buffer);\n\n pmap::mem_init();\n\n HeapAllocator::init(KHEAP_BASE as usize, KHEAP_SIZE);\n\n gdt::init_percpu();\n\n trap::trap_init();\n\n mpconfig::mp_init();\n\n lapic::lapic_init();\n\n // do mp::boot_aps() after preparing processes\n\n picirq::pic_init();\n\n ide::ide_init();\n\n buf::buf_init();\n\n kbd::kbd_init();\n\n {\n\n let mut env_table = env::env_table();\n\n env::env_create_for_init(&mut env_table);\n\n }\n\n mp::boot_aps();\n\n sched::sched_yield();\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 43, "score": 61422.25156230734 }, { "content": "struct LocalAPIC(VirtAddr);\n\n\n\nimpl LocalAPIC {\n\n fn write(&self, index: isize, value: i32) {\n\n unsafe {\n\n let p = self.as_mut_ptr();\n\n p.offset(index).write_volatile(value);\n\n p.offset(ID).read_volatile(); // wait for write to finish, by reading\n\n }\n\n }\n\n\n\n fn read(&self, index: isize) -> i32 {\n\n unsafe {\n\n let p = self.as_ptr();\n\n p.offset(index).read_volatile()\n\n // `*(p.offset(index))` also works, but `p.offset(index).read()` not.\n\n }\n\n }\n\n\n\n /// See Intel SDM Vol.3 10.4.6 Local APIC ID\n", "file_path": "src/lapic.rs", "rank": 44, "score": 60701.42670711608 }, { "content": "#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\n#[repr(transparent)]\n\nstruct ColorCode(u8);\n\n\n\nimpl ColorCode {\n\n const fn new(foreground: Color, background: Color) -> ColorCode {\n\n ColorCode((background as u8) << 4 | (foreground as u8))\n\n 
}\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\n#[repr(C)]\n\npub(crate) struct ScreenChar {\n\n ascii_character: u8,\n\n color_code: ColorCode,\n\n}\n\n\n\nconst BUFFER_HEIGHT: usize = 25;\n\nconst BUFFER_WIDTH: usize = 80;\n\n\n\n#[repr(transparent)]\n\npub struct Buffer {\n", "file_path": "src/vga_buffer.rs", "rank": 45, "score": 60701.42670711608 }, { "content": "/// Check whether Device 1 exists.\n\n/// (With qemu, it means that we have an option like `-drive file=fs.img,index=1,media=disk,format=raw`)\n\nfn ide_probe_disk1() -> bool {\n\n // wait for Device 0 to be ready\n\n if !ide_wait_ready(true) {\n\n panic!(\"something wrong with ide\");\n\n }\n\n\n\n // switch to Device 1\n\n // ref. 7.2.8 Drive/head register in Spec\n\n x86::outb(PRIMARY_COMMAND_BASE_REG + REG_HDDEVSEL, 0xe0 | (1 << 4));\n\n\n\n // check whether Device 1 exists and get ready\n\n let mut found: bool = false;\n\n for _ in 0..1000 {\n\n let r = x86::inb(PRIMARY_COMMAND_BASE_REG + REG_STATUS);\n\n if r != 0 {\n\n if r & (SR_BSY | SR_DWF | SR_ERR) == 0 {\n\n found = true;\n\n break;\n\n }\n\n }\n", "file_path": "src/ide.rs", "rank": 46, "score": 60077.48180025703 }, { "content": "fn sys_fork() -> EnvId {\n\n let cur_env = env::cur_env_mut().unwrap();\n\n env::fork(cur_env)\n\n}\n\n\n", "file_path": "src/syscall.rs", "rank": 47, "score": 60077.48180025703 }, { "content": "fn ide_start(b: &Buf) {\n\n if b.blockno >= (FS_SIZE as u32) {\n\n panic!(\"ide_start: incorrect blockno\");\n\n }\n\n\n\n let sector_per_block = (BLK_SIZE / SECTOR_SIZE) as u32;\n\n let sector = b.blockno * sector_per_block;\n\n let read_cmd = if sector_per_block == 1 {\n\n IDE_CMD_READ\n\n } else {\n\n IDE_CMD_RDMUL\n\n };\n\n let write_cmd = if sector_per_block == 1 {\n\n IDE_CMD_WRITE\n\n } else {\n\n IDE_CMD_WRMUL\n\n };\n\n\n\n if sector_per_block > 7 {\n\n panic!(\"ide_start: illegal sector per block\");\n", "file_path": "src/ide.rs", "rank": 48, "score": 58176.39303493178 }, { "content": "fn sys_cputs(s: &str) {\n\n print!(\"{}\", s);\n\n}\n\n\n", "file_path": "src/syscall.rs", "rank": 49, "score": 58176.39303493178 }, { "content": "fn sys_get_env_id() -> EnvId {\n\n let cur_env = env::cur_env().unwrap();\n\n cur_env.get_env_id()\n\n}\n\n\n", "file_path": "src/syscall.rs", "rank": 50, "score": 57658.467578763455 }, { "content": "/// Copy committed blocks from log to their home location.\n\nfn install_trans(log: &Log) {\n\n let mut bcache = buf::buf_cache();\n\n\n\n for tail in 0..(log.lh.n) {\n\n let mut buf_to = bcache.get(log.dev, log.lh.block[tail]);\n\n buf_to.read();\n\n let mut buf_from = bcache.get(log.dev, (log.start + tail + 1) as u32);\n\n buf_from.read();\n\n\n\n unsafe {\n\n let dst = VirtAddr(buf_to.data().as_ptr() as u32);\n\n let src = VirtAddr(buf_from.data().as_ptr() as u32);\n\n let len = BLK_SIZE;\n\n util::memmove(dst, src, len);\n\n }\n\n\n\n buf_to.write();\n\n bcache.release(buf_from);\n\n bcache.release(buf_to);\n\n }\n\n}\n\n\n", "file_path": "src/log.rs", "rank": 51, "score": 56831.62327288147 }, { "content": "#[panic_handler]\n\nfn panic(info: &PanicInfo) -> ! 
{\n\n println!(\"{}\", info);\n\n loop {}\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 52, "score": 56831.62327288147 }, { "content": "/// Copy modified blocks from cache to log.\n\nfn write_log(log: &Log) {\n\n let mut bcache = buf::buf_cache();\n\n\n\n for tail in 0..(log.lh.n) {\n\n let mut buf_to = bcache.get(log.dev, (log.start + tail + 1) as u32);\n\n buf_to.read();\n\n let mut buf_from = bcache.get(log.dev, log.lh.block[tail]);\n\n buf_from.read();\n\n\n\n unsafe {\n\n let dst = VirtAddr(buf_to.data().as_ptr() as u32);\n\n let src = VirtAddr(buf_from.data().as_ptr() as u32);\n\n let len = BLK_SIZE;\n\n util::memmove(dst, src, len);\n\n }\n\n\n\n buf_to.write();\n\n bcache.release(buf_from);\n\n bcache.release(buf_to);\n\n }\n\n}\n\n\n", "file_path": "src/log.rs", "rank": 53, "score": 56831.62327288147 }, { "content": "/// Write in-memory log header to disk.\n\n/// This is the true point at which the current transaction commits.\n\nfn write_head(log: &Log) {\n\n let mut bcache = buf::buf_cache();\n\n\n\n let mut buf = bcache.get(log.dev, log.start as u32);\n\n buf.read();\n\n\n\n let lh_on_disk = unsafe {\n\n let ptr = buf.data_mut().as_mut_ptr().cast::<LogHeader>();\n\n &mut *ptr\n\n };\n\n\n\n lh_on_disk.n = log.lh.n;\n\n\n\n for i in 0..(log.lh.n) {\n\n lh_on_disk.block[i] = log.lh.block[i];\n\n }\n\n\n\n buf.write();\n\n bcache.release(buf);\n\n}\n\n\n", "file_path": "src/log.rs", "rank": 54, "score": 56831.62327288147 }, { "content": "#[repr(align(4096))]\n\nstruct InterruptDescriptorTable([GateDesc; 256]);\n\n\n\n// #[repr(C, packed)]\n", "file_path": "src/trap.rs", "rank": 55, "score": 56145.0931098287 }, { "content": "/// Return (npages, npages_basemem).\n\n/// npages: the amount of physical memory (in pages).\n\n/// napages_basemem: the amount of base memory (in pages).\n\nfn i386_detect_memory() -> (u32, u32) {\n\n // Use CMOS calls to measure available base & extended memory.\n\n // (CMOS calls return results in kilobytes.)\n\n let basemem = nvram_read(kclock::NVRAM_BASELO) as u32;\n\n let extmem = nvram_read(kclock::NVRAM_EXTLO) as u32;\n\n let ext16mem = (nvram_read(kclock::NVRAM_EXT16LO) as u32) * 64;\n\n\n\n let totalmem = if ext16mem > 0 {\n\n 16 * 1024 + ext16mem\n\n } else if extmem > 0 {\n\n 1 * 1024 + extmem\n\n } else {\n\n basemem\n\n };\n\n\n\n let npages = totalmem / (PGSIZE / 1024);\n\n let npages_basemem = basemem / (PGSIZE / 1024);\n\n\n\n println!(\n\n \"Physical memory: {}KB available, base = {}K, extended = {}K\",\n", "file_path": "src/pmap.rs", "rank": 56, "score": 55580.156895192886 }, { "content": "// Initialize the kernel virtual memory layout for environment e.\n\n// Allocate a page directory, set e->env_pgdir accordingly,\n\n// and initialize the kernel portion of the new environment's address space.\n\n// Do NOT (yet) map anything into the user portion\n\n// of the environment's virtual address space.\n\n//\n\n// Returns 0 on success, < 0 on error. 
Errors include:\n\n//\t-E_NO_MEM if page directory or table could not be allocated.\n\nfn env_setup_vm() -> Box<PageDirectory> {\n\n PageDirectory::new_for_user()\n\n}\n\n\n\nuse crate::file::{FileDescriptor, FileTableEntry};\n\nuse crate::fs::Inode;\n\nuse crate::rwlock::RwLock;\n\nuse alloc::sync::Arc;\n\nuse core::ops::{Add, Try};\n\n\n\n/// Allocates a new env with env_alloc, loads the named elf\n\n/// binary into it with load_icode, and sets its env_type.\n\n/// This function is ONLY called during kernel initialization,\n\n/// before running the first user-mode environment.\n\n/// The new env's parent ID is set to 0.\n\npub(crate) fn env_create_for_init(env_table: &mut EnvTable) -> EnvId {\n\n extern \"C\" {\n\n static _binary_obj_user_init_start: u8;\n\n static _binary_obj_user_init_end: u8;\n\n static _binary_obj_user_init_size: usize;\n", "file_path": "src/env.rs", "rank": 57, "score": 54415.740487892355 }, { "content": "fn nvram_read(reg: u8) -> u16 {\n\n let low = kclock::mc146818_read(reg) as u16;\n\n let high = kclock::mc146818_read(reg + 1) as u16;\n\n low | (high << 8)\n\n}\n\n\n", "file_path": "src/pmap.rs", "rank": 58, "score": 53107.58472937738 }, { "content": "/// Should call after kernel heap set up\n\nfn inode_cache() -> &'static Mutex<InodeCache> {\n\n INODE_CACHE.call_once(|| Mutex::new(InodeCache::new()))\n\n}\n\n\n", "file_path": "src/fs.rs", "rank": 59, "score": 51940.03688557239 }, { "content": "fn get_log() -> MutexGuard<'static, Log> {\n\n LOG.call_once(|| Mutex::new(log_init(ROOT_DEV))).lock()\n\n}\n\n\n\n/// Called at the start of each FS system call.\n\npub(crate) fn begin_op() {\n\n // xv6 use sleep to wait, but use spin here for the simplicity.\n\n loop {\n\n let mut log = get_log();\n\n\n\n if log.lh.n + (log.outstanding + 1) * MAX_OP_BLOCKS > LOG_SIZE {\n\n // this op might exhaust log space; wait for commit\n\n continue;\n\n }\n\n\n\n log.outstanding += 1;\n\n break;\n\n }\n\n}\n\n\n", "file_path": "src/log.rs", "rank": 60, "score": 51940.03688557239 }, { "content": "fn get_input() -> MutexGuard<'static, Input> {\n\n INPUT.lock()\n\n}\n\n\n\npub(crate) fn console_intr() {\n\n match kbd::kbd_getc() {\n\n None => {\n\n // do nothing\n\n }\n\n Some(c) => {\n\n let mut input = get_input();\n\n let orig_e = input.e;\n\n\n\n {\n\n if c == '\\n' as u8 || input.e == input.r + INPUT_BUF {\n\n print!(\"{}\", c as char);\n\n input.buf[orig_e as usize % INPUT_BUF] = c;\n\n input.e = orig_e + 1;\n\n input.w = input.e;\n\n } else if c == 0x08 {\n", "file_path": "src/console.rs", "rank": 61, "score": 51940.03688557239 }, { "content": "fn read_sb(dev: u32) -> SuperBlock {\n\n let mut sb = SuperBlock::empty();\n\n\n\n let mut bcache = buf::buf_cache();\n\n let mut b = bcache.get(dev, 1);\n\n b.read();\n\n let data = b.data();\n\n\n\n let disk_sb = unsafe { &*data.as_ptr().cast::<SuperBlock>() };\n\n sb.init(disk_sb);\n\n println!(\n\n \"log_start: {}, inode_start: {}, bmap_start: {}\",\n\n sb.log_start, sb.inode_start, sb.bmap_start\n\n );\n\n\n\n bcache.release(b);\n\n\n\n sb\n\n}\n\n\n\n/// Should be called only after ide_init and buf_init\n\npub(crate) fn get() -> &'static SuperBlock {\n\n SUPER_BLOCK.call_once(|| read_sb(1))\n\n}\n", "file_path": "src/superblock.rs", "rank": 62, "score": 51940.03688557239 }, { "content": "#[doc(hidden)]\n\npub fn _print(args: fmt::Arguments) {\n\n serial().write_fmt(args).unwrap();\n\n}\n\n\n\n#[allow(dead_code)]\n\nconst COM1: u16 = 0x3F8;\n\n\n\n#[allow(dead_code)]\n\nconst COM_RX: u16 = 0; // In: Receive buffer 
(DLAB=0)\n\n#[allow(dead_code)]\n\nconst COM_TX: u16 = 0; // Out: Transmit buffer (DLAB=0)\n\n#[allow(dead_code)]\n\nconst COM_DLL: u16 = 0; // Out: Divisor Latch Low (DLAB=1)\n\n#[allow(dead_code)]\n\nconst COM_DLM: u16 = 1; // Out: Divisor Latch High (DLAB=1)\n\n#[allow(dead_code)]\n\nconst COM_IER: u16 = 1; // Out: Interrupt Enable Register\n\n#[allow(dead_code)]\n\nconst COM_IER_RDI: u8 = 0x01; // Enable receiver data interrupt\n\n#[allow(dead_code)]\n", "file_path": "src/serial.rs", "rank": 63, "score": 51161.37331640208 }, { "content": "/// Free a disk block\n\nfn bfree(dev: u32, blockno: u32) {\n\n let sb = superblock::get();\n\n let mut bcache = buf::buf_cache();\n\n\n\n let mut bp = bcache.get(dev, block_for_bitmap(blockno, sb));\n\n bp.read();\n\n\n\n let bi = (blockno % (BPB as u32)) as usize;\n\n let m = 1 << (bi % 8);\n\n if bp.data()[bi / 8] & m == 0 {\n\n panic!(\"bfree: freeing free block\");\n\n }\n\n bp.data_mut()[bi / 8] &= !m;\n\n log::log_write(&mut bp);\n\n\n\n bcache.release(bp);\n\n}\n\n\n\n// ---------------------------------------------------------------------------------\n\n// Dir\n", "file_path": "src/fs.rs", "rank": 64, "score": 51161.37331640208 }, { "content": "pub fn print(args: fmt::Arguments) {\n\n let _lock = CONSOLE_LOCK.lock();\n\n vga_buffer::_print(args);\n\n serial::_print(args);\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! print {\n\n ($($arg:tt)*) => {\n\n $crate::console::print(format_args!($($arg)*));\n\n }\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! println {\n\n () => ($crate::print!(\"\\n\"));\n\n ($($arg:tt)*) => {\n\n $crate::print!(\"{}\\n\", format_args!($($arg)*));\n\n }\n\n}\n", "file_path": "src/console.rs", "rank": 65, "score": 51161.37331640208 }, { "content": "fn trapname(trapno: u32) -> &'static str {\n\n match trapno {\n\n T_DIVIDE => \"Divide error\",\n\n T_DEBUG => \"Debug\",\n\n T_NMI => \"Non-Maskable Interrupt\",\n\n T_BRKPT => \"Breakpoint\",\n\n T_OFLOW => \"Overflow\",\n\n T_BOUND => \"BOUND Rnage Exceeded\",\n\n T_ILLOP => \"Invalid Opcode\",\n\n T_DEVICE => \"Device Not Available\",\n\n T_DBLFLT => \"Double Fault\",\n\n T_COPROC => \"Coporocessor Segment Overrun\",\n\n T_TSS => \"Invalid TSS\",\n\n T_SEGNP => \"Segment Not Present\",\n\n T_STACK => \"Stack Fault\",\n\n T_GPFLT => \"General Protection\",\n\n T_PGFLT => \"Page Fault\",\n\n T_RES => \"(unknown trap)\",\n\n T_FPERR => \"x87 FPU Floating-Point Error\",\n\n T_ALIGN => \"Alignment Check\",\n", "file_path": "src/trap.rs", "rank": 66, "score": 51161.37331640208 }, { "content": "/// Wait until disk to be ready.\n\nfn ide_wait_ready(check_error: bool) -> bool {\n\n let mut r: u8;\n\n\n\n loop {\n\n // ref. 7.2.13 Status register in Spec\n\n r = x86::inb(PRIMARY_COMMAND_BASE_REG + REG_STATUS);\n\n if (r & (SR_BSY | SR_DRDY)) == SR_DRDY {\n\n break;\n\n }\n\n }\n\n\n\n !check_error || ((r & (SR_DWF | SR_ERR)) == 0)\n\n}\n\n\n", "file_path": "src/ide.rs", "rank": 67, "score": 50848.24051901529 }, { "content": "#[repr(align(16))]\n\nstruct GlobalDescriptorTable([SegDesc; 5 + MAX_NUM_CPU]);\n\n\n\n/// Global descriptor table.\n\n///\n\n/// Set up global descriptor table (GDT) with separate segments for\n\n/// kernel mode and user mode. Segments serve many purposes on the x86.\n\n/// We don't use any of their memory-mapping capabilities, but we need\n\n/// them to switch privilege levels.\n\n///\n\n/// The kernel and user segments are identical except for the DPL.\n\n/// To load the SS register, the CPL must equal the DPL. 
Thus,\n\n/// we must duplicate the segments for the user and the kernel.\n\n///\n\n/// In particular, the last argument to the SEG macro used in the\n\n/// definition of gdt specifies the Descriptor Privilege Level (DPL)\n\n/// of that descriptor: 0 for kernel and 3 for user.\n\nstatic mut GDT: GlobalDescriptorTable = GlobalDescriptorTable([\n\n // NULL\n\n SegDesc::new(0x0, 0x0, 0x0, 0x0),\n\n // kernel code segment\n", "file_path": "src/gdt.rs", "rank": 68, "score": 50328.75026533862 }, { "content": "#[doc(hidden)]\n\npub fn _print(args: fmt::Arguments) {\n\n writer().write_fmt(args).unwrap();\n\n}\n\n\n\n#[allow(dead_code)]\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\n#[repr(u8)]\n\npub enum Color {\n\n Black = 0,\n\n Blue = 1,\n\n Green = 2,\n\n Cyan = 3,\n\n Red = 4,\n\n Magenda = 5,\n\n Brown = 6,\n\n LightGray = 7,\n\n DarkGray = 8,\n\n LightBlue = 9,\n\n LightGreen = 10,\n\n LightCyan = 11,\n\n LightRed = 12,\n\n Pink = 13,\n\n Yellow = 14,\n\n White = 15,\n\n}\n\n\n", "file_path": "src/vga_buffer.rs", "rank": 69, "score": 49993.82547259709 }, { "content": "#[alloc_error_handler]\n\nfn alloc_error_handler(layout: alloc::alloc::Layout) -> ! {\n\n panic!(\"allocation error: {:?}\", layout);\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 70, "score": 48902.029106040005 }, { "content": "fn round_down_u32(x: u32, base: u32) -> u32 {\n\n (x / base) * base\n\n}\n\n\n", "file_path": "src/pmap.rs", "rank": 71, "score": 48422.06653246233 }, { "content": "fn round_up_u32(x: u32, base: u32) -> u32 {\n\n ((x - 1 + base) / base) * base\n\n}\n\n\n", "file_path": "src/pmap.rs", "rank": 72, "score": 48422.06653246233 }, { "content": "/// Calculate a bitmap brock appropriate for blockno\n\nfn block_for_bitmap(blockno: u32, sb: &SuperBlock) -> u32 {\n\n blockno / (BPB as u32) + sb.bmap_start\n\n}\n\n\n", "file_path": "src/fs.rs", "rank": 73, "score": 46307.0846916736 }, { "content": "/// Return inode block corresponding to a passed inum.\n\nfn block_for_inode(inum: u32, sb: &SuperBlock) -> u32 {\n\n inum / (IPB as u32) + sb.inode_start\n\n}\n\n\n", "file_path": "src/fs.rs", "rank": 74, "score": 46307.0846916736 }, { "content": "/// Allocate a file descriptor for the given file.\n\n/// Takes over file reference from caller on success.\n\n/// Return the passed ent when an error occurred.\n\nfn fd_alloc(ent: FileTableEntry) -> Result<FileDescriptor, FileTableEntry> {\n\n let cur_env = env::cur_env_mut().unwrap();\n\n cur_env.fd_alloc(ent)\n\n}\n\n\n\npub(crate) fn open(path: *const u8, mode: u32) -> Result<FileDescriptor, SysError> {\n\n log::begin_op();\n\n\n\n let ip = if mode & O_CREATE != 0 {\n\n match create(path, InodeType::File, 0, 0) {\n\n Ok(ip) => Ok(ip),\n\n Err(err) => {\n\n log::end_op();\n\n Err(err)\n\n }\n\n }\n\n } else {\n\n match fs::namei(path) {\n\n Some(ip) => Ok(ip),\n\n None => {\n", "file_path": "src/sysfile.rs", "rank": 75, "score": 42785.54402828835 }, { "content": "void umain(int argc, char **argv) {\n\n sys_mknod(\"console\", 1, 1);\n\n int fd = open(\"console\", O_RDWR); // stdin\n\n sys_dup(fd); // stdout\n\n sys_dup(fd); // stderr\n\n\n\n int child = sys_fork();\n\n if (child < 0) {\n\n printf(\"Error in fork\\n\");\n\n return;\n\n } else if (child == 0) {\n\n // child\n\n sys_exec(\"/sh\", NULL, 0);\n\n } else {\n\n // parent\n\n wait_env_id(child);\n\n }\n", "file_path": "user/init.c", "rank": 76, "score": 36449.251556579744 }, { "content": " lock: &'a AtomicUsize,\n\n data: NonNull<T>,\n\n #[doc(hidden)]\n\n _invariant: PhantomData<&'a mut T>,\n\n}\n\n\n\n// 
Same unsafe impls as `std::sync::RwLock`\n\nunsafe impl<T: ?Sized + Send> Send for RwLock<T> {}\n\nunsafe impl<T: ?Sized + Send + Sync> Sync for RwLock<T> {}\n\n\n\nimpl<T> RwLock<T> {\n\n /// Creates a new spinlock wrapping the supplied data.\n\n ///\n\n /// May be used statically:\n\n ///\n\n /// ```\n\n /// use spin;\n\n ///\n\n /// static RW_LOCK: spin::RwLock<()> = spin::RwLock::new(());\n\n ///\n", "file_path": "src/rwlock.rs", "rank": 81, "score": 29.484889895056916 }, { "content": "// Some of codes come from https://github.com/redox-os/kernel/blob/master/src/allocator/linked_list.rs\n\n\n\nuse core::alloc::{GlobalAlloc, Layout};\n\nuse core::ptr::NonNull;\n\nuse linked_list_allocator::Heap;\n\n\n\nstatic mut HEAP: Option<Heap> = None;\n\n\n\npub struct HeapAllocator;\n\n\n\nimpl HeapAllocator {\n\n pub unsafe fn init(offset: usize, size: usize) {\n\n HEAP = Some(Heap::new(offset, size));\n\n }\n\n}\n\n\n\nunsafe impl GlobalAlloc for HeapAllocator {\n\n unsafe fn alloc(&self, layout: Layout) -> *mut u8 {\n\n let heap = HEAP.as_mut().expect(\"HEAP is not initialized yet\");\n\n match heap.allocate_first_fit(layout) {\n", "file_path": "src/allocator.rs", "rank": 84, "score": 24.730979665405478 }, { "content": "\n\nunsafe impl Send for BufCache {}\n\nunsafe impl Sync for BufCache {}\n\n\n\nimpl BufCache {\n\n const fn new() -> BufCache {\n\n BufCache {\n\n entries: [None; NBUF],\n\n }\n\n }\n\n\n\n pub(crate) fn get(&mut self, dev: u32, blockno: u32) -> BufCacheHandler {\n\n let mut empty_entry = None;\n\n\n\n // Is the block already cached?\n\n for entry_opt in self.entries.iter_mut() {\n\n match entry_opt {\n\n None => {\n\n empty_entry = Some(entry_opt);\n\n }\n", "file_path": "src/buf.rs", "rank": 85, "score": 24.065401589010477 }, { "content": "}\n\n\n\n// Same unsafe impls as `std::sync::Mutex`\n\nunsafe impl<T: ?Sized + Send> Sync for Mutex<T> {}\n\nunsafe impl<T: ?Sized + Send> Send for Mutex<T> {}\n\n\n\nimpl<T> Mutex<T> {\n\n /// Creates a new spinlock wrapping the supplied data.\n\n ///\n\n /// May be used statically:\n\n ///\n\n /// ```\n\n /// use spinlock;\n\n ///\n\n /// static MUTEX: spinlock::Mutex<()> = spinlock::Mutex::new(());\n\n ///\n\n /// fn demo() {\n\n /// let lock = MUTEX.lock();\n\n /// // do something with lock\n\n /// drop(lock);\n", "file_path": "src/spinlock.rs", "rank": 87, "score": 23.624678437011212 }, { "content": "// This file is base on spin crate (MIT license). See COPYRIGHT for copyright information.\n\n// spin-rs (https://github.com/mvdnes/spin-rs)\n\n\n\nuse core::cell::UnsafeCell;\n\nuse core::fmt::Formatter;\n\nuse core::marker::PhantomData;\n\nuse core::ops::{Deref, DerefMut};\n\nuse core::ptr::NonNull;\n\nuse core::sync::atomic::{spin_loop_hint as cpu_relax, AtomicUsize, Ordering};\n\nuse core::{fmt, mem};\n\n\n\n/// A reader-writer lock\n\n///\n\n/// This type of lock allows a number of readers or at most one writer at any\n\n/// point in time. The write portion of this lock typically allows modification\n\n/// of the underlying data (exclusive access) and the read portion of this lock\n\n/// typically allows for read-only access (shared access).\n\n///\n\n/// The type parameter `T` represents the data that this lock protects. It is\n\n/// required that `T` satisfies `Send` to be shared across tasks and `Sync` to\n", "file_path": "src/rwlock.rs", "rank": 89, "score": 22.13200097988771 }, { "content": "// This file is base on spin crate (MIT license). 
See COPYRIGHT for copyright information.\n\n// spin-rs (https://github.com/mvdnes/spin-rs)\n\n\n\nuse core::cell::UnsafeCell;\n\nuse core::fmt;\n\nuse core::fmt::Formatter;\n\nuse core::ops::{Deref, DerefMut};\n\nuse core::sync::atomic::{spin_loop_hint as cpu_relax, AtomicBool, Ordering};\n\n\n\npub(crate) struct Mutex<T: ?Sized> {\n\n lock: AtomicBool,\n\n data: UnsafeCell<T>,\n\n}\n\n\n\n/// A guard to which the protected data can be accessed\n\n///\n\n/// When the guard falls out of scope it will release the lock\n\npub(crate) struct MutexGuard<'a, T: ?Sized + 'a> {\n\n lock: &'a AtomicBool,\n\n data: &'a mut T,\n", "file_path": "src/spinlock.rs", "rank": 90, "score": 19.943724419244923 }, { "content": " fn deref(&self) -> &PageDirectory {\n\n unsafe { &*self.0 }\n\n }\n\n}\n\n\n\nimpl DerefMut for KernelPageDirectory {\n\n fn deref_mut(&mut self) -> &mut PageDirectory {\n\n unsafe { &mut *self.0 }\n\n }\n\n}\n\n\n\n// MUST be initialized first with `init()`\n\n// Get the lock of KERN_PGDIR first if you use both of KERN_PGDIR and PAGE_ALLOCATOR.\n\nstatic PAGE_ALLOCATOR: Mutex<PageAllocator> = Mutex::new(PageAllocator {\n\n page_free_list: null_mut(),\n\n pages: null_mut(),\n\n});\n\n\n\n#[repr(align(4096))]\n\npub(crate) struct CpuStack([u8; KSTKSIZE as usize]);\n", "file_path": "src/pmap.rs", "rank": 91, "score": 19.475373928999005 }, { "content": " /// Force unlock the spinlock.\n\n ///\n\n /// This is *extremely* unsafe if the lock is not held by the current\n\n /// thread. However, this can be useful in some instances for exposing the\n\n /// lock to FFI that doesn't know how to deal with RAII.\n\n ///\n\n /// If the lock isn't held, this is a no-op.\n\n pub unsafe fn force_unlock(&self) {\n\n self.lock.store(false, Ordering::Release);\n\n }\n\n\n\n /// Tries to lock the mutex. If it is already locked,\n\n /// it will return None. 
Otherwise it returns a guard within Some.\n\n pub fn try_lock(&self) -> Option<MutexGuard<T>> {\n\n if self.lock.compare_and_swap(false, true, Ordering::Acquire) == false {\n\n Some(MutexGuard {\n\n lock: &self.lock,\n\n data: unsafe { &mut *self.data.get() },\n\n })\n\n } else {\n", "file_path": "src/spinlock.rs", "rank": 92, "score": 19.203666559313596 }, { "content": "use crate::console;\n\nuse crate::constants::*;\n\nuse crate::fs::Inode;\n\nuse crate::once::Once;\n\nuse alloc::boxed::Box;\n\n\n\npub(crate) struct DevSw {\n\n /// Return None if device is not prepared for read.\n\n pub(crate) read: Box<dyn Fn(&Inode, *mut u8, usize) -> Option<i32>>,\n\n pub(crate) write: Box<dyn Fn(&Inode, *const u8, usize) -> i32>,\n\n}\n\n\n", "file_path": "src/device.rs", "rank": 93, "score": 18.820282268901504 }, { "content": "\n\nimpl<'rwlock, T: ?Sized> Deref for RwLockWriteGuard<'rwlock, T> {\n\n type Target = T;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n unsafe { self.data.as_ref() }\n\n }\n\n}\n\n\n\nimpl<'rwlock, T: ?Sized> DerefMut for RwLockWriteGuard<'rwlock, T> {\n\n fn deref_mut(&mut self) -> &mut T {\n\n unsafe { self.data.as_mut() }\n\n }\n\n}\n\n\n\nimpl<'rwlock, T: ?Sized> Drop for RwLockReadGuard<'rwlock, T> {\n\n fn drop(&mut self) {\n\n debug_assert!(self.lock.load(Ordering::Relaxed) & !(WRITER | UPGRADED) > 0);\n\n self.lock.fetch_sub(READER, Ordering::Release);\n\n }\n", "file_path": "src/rwlock.rs", "rank": 94, "score": 18.23773215068718 }, { "content": " #[inline(always)]\n\n fn try_write_internal(&self, strong: bool) -> Option<RwLockWriteGuard<T>> {\n\n if compare_exchange(\n\n &self.lock,\n\n 0,\n\n WRITER,\n\n Ordering::Acquire,\n\n Ordering::Relaxed,\n\n strong,\n\n )\n\n .is_ok()\n\n {\n\n Some(RwLockWriteGuard {\n\n lock: &self.lock,\n\n data: unsafe { NonNull::new_unchecked(self.data.get()) },\n\n _invariant: PhantomData,\n\n })\n\n } else {\n\n None\n\n }\n", "file_path": "src/rwlock.rs", "rank": 95, "score": 18.23385862241281 }, { "content": "// ref. https://wiki.osdev.org/8259_PIC\n\n// ref. 
[8259A doc](https://pdos.csail.mit.edu/6.828/2018/readings/hardware/8259A.pdf)\n\n\n\nuse crate::spinlock::{Mutex, MutexGuard};\n\nuse crate::trap::consts::IRQ_OFFSET;\n\nuse crate::x86;\n\nuse consts::*;\n\nuse core::sync::atomic::{AtomicBool, Ordering};\n\n\n\nstatic DID_INIT: AtomicBool = AtomicBool::new(false);\n\nstatic IRQ_MASK_8259A: Mutex<u16> = Mutex::new(0xffff & !((1 << IRQ_SLAVE) as u16));\n\n\n\nmod consts {\n\n // I/O ports to communicate with 8259 PIC\n\n // IRQs 0-7 for Master, IRQs 8-15 for Slave\n\n pub(crate) const IO_MASTER_COMMAND: u16 = 0x20;\n\n pub(crate) const IO_MASTER_DATA: u16 = 0x21;\n\n pub(crate) const IO_SLAVE_COMMAND: u16 = 0xA0;\n\n pub(crate) const IO_SLAVE_DATA: u16 = 0xA1;\n\n\n", "file_path": "src/picirq.rs", "rank": 96, "score": 17.622452828345335 }, { "content": " }\n\n\n\n pub(crate) fn get_mut(&mut self) -> &mut T {\n\n // We know statically that there are no other references to `self`, so\n\n // there's no need to lock the inner lock.\n\n unsafe { &mut *self.data.get() }\n\n }\n\n}\n\n\n\nimpl<T: ?Sized + fmt::Debug> fmt::Debug for RwLock<T> {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n match self.try_read() {\n\n Some(guard) => write!(f, \"RwLock {{ data: \")\n\n .and_then(|()| (&*guard).fmt(f))\n\n .and_then(|()| write!(f, \"}}\")),\n\n None => write!(f, \"RwLock {{ <locked> }}\"),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/rwlock.rs", "rank": 97, "score": 17.503006327205654 }, { "content": " let value = self.lock.fetch_add(READER, Ordering::Acquire);\n\n\n\n // We check the UPGRADED bit here so that new readers are prevented when\n\n // an UPGRADED lock is held. This helps reduce writer starvation.\n\n if value & (WRITER | UPGRADED) != 0 {\n\n // Lock is taken, undo.\n\n self.lock.fetch_sub(READER, Ordering::Release);\n\n None\n\n } else {\n\n Some(RwLockReadGuard {\n\n lock: &self.lock,\n\n data: unsafe { NonNull::new_unchecked(self.data.get()) },\n\n })\n\n }\n\n }\n\n\n\n /// Force decrement the reader count.\n\n ///\n\n /// This is *extremely* unsafe if there are outstanding `RwLockReadGuard`s\n\n /// live, or if called more times than `read` has been called, but can be\n", "file_path": "src/rwlock.rs", "rank": 98, "score": 17.381846626724766 }, { "content": " // Note that virtual addresses are handled in the parent page mapping (... is it right?)\n\n // unsafe { util::memmove(pa.to_va(), va, PGSIZE as usize) };\n\n unsafe { util::memmove(pa.to_va(), pte.addr().to_va(), PGSIZE as usize) };\n\n }\n\n }\n\n va += PGSIZE;\n\n }\n\n }\n\n}\n\n\n\nimpl Index<usize> for PageDirectory {\n\n type Output = PDE;\n\n fn index(&self, index: usize) -> &Self::Output {\n\n &self.entries[index]\n\n }\n\n}\n\n\n\nimpl IndexMut<usize> for PageDirectory {\n\n fn index_mut(&mut self, index: usize) -> &mut Self::Output {\n\n &mut self.entries[index]\n", "file_path": "src/pmap.rs", "rank": 99, "score": 17.033742264250243 } ]
Rust
src/server/agents.rs
bofh69/crater
29d2aebfa4723bba8e64889e728953d223418447
use chrono::Duration; use chrono::{DateTime, Utc}; use db::{Database, QueryUtils}; use errors::*; use experiments::{Assignee, Experiment}; use server::tokens::Tokens; use std::collections::HashSet; const INACTIVE_AFTER: i64 = 300; #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum AgentStatus { Working, Idle, Unreachable, } pub struct Agent { name: String, experiment: Option<Experiment>, last_heartbeat: Option<DateTime<Utc>>, git_revision: Option<String>, } impl Agent { fn with_experiment(mut self, db: &Database) -> Result<Self> { self.experiment = Experiment::run_by(db, &Assignee::Agent(self.name.clone()))?; Ok(self) } pub fn git_revision(&self) -> Option<&String> { self.git_revision.as_ref() } pub fn name(&self) -> &str { &self.name } pub fn assigned_experiment(&self) -> Option<&Experiment> { self.experiment.as_ref() } pub fn last_heartbeat(&self) -> Option<&DateTime<Utc>> { self.last_heartbeat.as_ref() } pub fn status(&self) -> AgentStatus { if let Some(ref heartbeat) = self.last_heartbeat { if Utc::now() - Duration::seconds(INACTIVE_AFTER) < *heartbeat { if self.experiment.is_some() { return AgentStatus::Working; } else { return AgentStatus::Idle; } } } AgentStatus::Unreachable } } #[derive(Clone)] pub struct Agents { db: Database, } impl Agents { pub fn new(db: Database, tokens: &Tokens) -> Result<Self> { let agents = Agents { db }; agents.synchronize(tokens)?; Ok(agents) } fn synchronize(&self, tokens: &Tokens) -> Result<()> { self.db.transaction(|trans| { let mut real = tokens.agents.values().collect::<HashSet<&String>>(); for agent in self.all()? { if !real.remove(&agent.name) { trans.execute("DELETE FROM agents WHERE name = ?1;", &[&agent.name])?; } } for missing in &real { trans.execute( "INSERT INTO agents (name) VALUES (?1);", &[&missing.as_str()], )?; } Ok(()) }) } pub fn all(&self) -> Result<Vec<Agent>> { self.db .query("SELECT * FROM agents ORDER BY name;", &[], |row| { Agent { name: row.get("name"), last_heartbeat: row.get("last_heartbeat"), git_revision: row.get("git_revision"), experiment: None, } })?.into_iter() .map(|agent| agent.with_experiment(&self.db)) .collect() } #[cfg(test)] fn get(&self, name: &str) -> Result<Option<Agent>> { let row = self .db .get_row("SELECT * FROM agents WHERE name = ?1;", &[&name], |row| { Agent { name: row.get("name"), last_heartbeat: row.get("last_heartbeat"), git_revision: row.get("git_revision"), experiment: None, } })?; Ok(if let Some(agent) = row { Some(agent.with_experiment(&self.db)?) 
} else { None }) } pub fn record_heartbeat(&self, agent: &str) -> Result<()> { let changes = self.db.execute( "UPDATE agents SET last_heartbeat = ?1 WHERE name = ?2;", &[&Utc::now(), &agent], )?; assert_eq!(changes, 1); Ok(()) } pub fn set_git_revision(&self, agent: &str, revision: &str) -> Result<()> { let changes = self.db.execute( "UPDATE agents SET git_revision = ?1 WHERE name = ?2;", &[&revision, &agent], )?; assert_eq!(changes, 1); Ok(()) } } #[cfg(test)] mod tests { use super::{AgentStatus, Agents}; use actions::CreateExperiment; use config::Config; use db::Database; use experiments::{Assignee, Experiment}; use server::tokens::Tokens; #[test] fn test_agents_synchronize() { let db = Database::temp().unwrap(); let agents = Agents::new(db, &Tokens::default()).unwrap(); let mut tokens = Tokens::default(); tokens.agents.insert("token1".into(), "agent1".into()); tokens.agents.insert("token2".into(), "agent2".into()); agents.synchronize(&tokens).unwrap(); assert_eq!( agents .all() .unwrap() .into_iter() .map(|a| a.name) .collect::<Vec<_>>(), vec!["agent1".to_string(), "agent2".to_string()] ); tokens.agents.remove("token1"); tokens.agents.insert("token3".into(), "agent3".into()); agents.synchronize(&tokens).unwrap(); assert_eq!( agents .all() .unwrap() .into_iter() .map(|a| a.name) .collect::<Vec<_>>(), vec!["agent2".to_string(), "agent3".to_string()] ); } #[test] fn test_heartbeat_recording() { let db = Database::temp().unwrap(); let mut tokens = Tokens::default(); tokens.agents.insert("token".into(), "agent".into()); let agents = Agents::new(db, &tokens).unwrap(); let agent = agents.get("agent").unwrap().unwrap(); assert!(agent.last_heartbeat.is_none()); agents.record_heartbeat("agent").unwrap(); let agent = agents.get("agent").unwrap().unwrap(); let first_heartbeat = agent.last_heartbeat.unwrap(); agents.record_heartbeat("agent").unwrap(); let agent = agents.get("agent").unwrap().unwrap(); assert!(first_heartbeat < agent.last_heartbeat.unwrap()); } #[test] fn test_agent_status() { let db = Database::temp().unwrap(); let config = Config::default(); let mut tokens = Tokens::default(); tokens.agents.insert("token".into(), "agent".into()); let agents = Agents::new(db.clone(), &tokens).unwrap(); ::crates::lists::setup_test_lists(&db, &config).unwrap(); let agent = agents.get("agent").unwrap().unwrap(); assert_eq!(agent.status(), AgentStatus::Unreachable); agents.record_heartbeat("agent").unwrap(); let agent = agents.get("agent").unwrap().unwrap(); assert_eq!(agent.status(), AgentStatus::Idle); CreateExperiment::dummy("dummy") .apply(&db, &config) .unwrap(); Experiment::next(&db, &Assignee::Agent("agent".to_string())).unwrap(); let agent = agents.get("agent").unwrap().unwrap(); assert_eq!(agent.status(), AgentStatus::Working); } }
use chrono::Duration; use chrono::{DateTime, Utc}; use db::{Database, QueryUtils}; use errors::*; use experiments::{Assignee, Experiment}; use server::tokens::Tokens; use std::collections::HashSet; const INACTIVE_AFTER: i64 = 300; #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum AgentStatus { Working, Idle, Unreachable, } pub struct Agent { name: String, experiment: Option<Experiment>, last_heartbeat: Option<DateTime<Utc>>, git_revision: Option<String>, } impl Agent { fn with_experiment(mut self, db: &Database) -> Result<Self> { self.experiment = Experiment::run_by(db, &Assignee::Agent(self.name.clone()))?; Ok(self) } pub fn git_revision(&self) -> Option<&String> { self.git_revision.as_ref() } pub fn name(&self) -> &str { &self.name } pub fn assigned_experiment(&self) -> Option<&Experiment> { self.experiment.as_ref() } pub fn last_heartbeat(&self) -> Option<&DateTime<Utc>> { self.last_heartbeat.as_ref() } pub fn status(&self) -> AgentStatus { if let Some(ref heartbeat) = self.last_heartbeat { if Utc::now() - Duration::seconds(INACTIVE_AFTER) < *heartbeat { if self.experiment.is_some() { return AgentStatus::Working; } else { return AgentStatus::Idle; } } } AgentStatus::Unreachable } } #[derive(Clone)] pub struct Agents { db: Database, } impl Agents { pub fn new(db: Database, tokens: &Tokens) -> Result<Self> { let agents = Agents { db }; agents.synchronize(tokens)?; Ok(agents) } fn synchronize(&self, tokens: &Tokens) -> Result<()> { self.db.transaction(|trans| { let mut real = tokens.agents.values().collect::<HashSet<&String>>(); for agent in self.all()? { if !real.remove(&agent.name) { trans.execute("DELETE FROM agents WHERE name = ?1;", &[&agent.name])?; } } for missing in &real { trans.execute( "INSERT INTO agents (name) VALUES (?1);", &[&missing.as_str()], )?; } Ok(()) }) } pub fn all(&self) -> Result<Vec<Agent>> { self.db .query("SELECT * FROM agents ORDER BY name;", &[], |row| { Agent { name: row.get("name"), last_heartbeat: row.get("last_heartbeat"), git_revision: row.get("git_revision"), experiment: None, } })?.into_iter() .map(|agent| agent.with_experiment(&self.db)) .collect() } #[cfg(test)] fn get(&self, name: &str) -> Result<Option<Agent>> { let row = self .db .get_row("SELECT * FROM agents WHERE name = ?1;", &[&name], |row| { Agent { name: row.get("name"), last_heartbeat: row.get("last_heartbeat"), git_revision: row.get("git_revision"), experiment: None, } })?;
} pub fn record_heartbeat(&self, agent: &str) -> Result<()> { let changes = self.db.execute( "UPDATE agents SET last_heartbeat = ?1 WHERE name = ?2;", &[&Utc::now(), &agent], )?; assert_eq!(changes, 1); Ok(()) } pub fn set_git_revision(&self, agent: &str, revision: &str) -> Result<()> { let changes = self.db.execute( "UPDATE agents SET git_revision = ?1 WHERE name = ?2;", &[&revision, &agent], )?; assert_eq!(changes, 1); Ok(()) } } #[cfg(test)] mod tests { use super::{AgentStatus, Agents}; use actions::CreateExperiment; use config::Config; use db::Database; use experiments::{Assignee, Experiment}; use server::tokens::Tokens; #[test] fn test_agents_synchronize() { let db = Database::temp().unwrap(); let agents = Agents::new(db, &Tokens::default()).unwrap(); let mut tokens = Tokens::default(); tokens.agents.insert("token1".into(), "agent1".into()); tokens.agents.insert("token2".into(), "agent2".into()); agents.synchronize(&tokens).unwrap(); assert_eq!( agents .all() .unwrap() .into_iter() .map(|a| a.name) .collect::<Vec<_>>(), vec!["agent1".to_string(), "agent2".to_string()] ); tokens.agents.remove("token1"); tokens.agents.insert("token3".into(), "agent3".into()); agents.synchronize(&tokens).unwrap(); assert_eq!( agents .all() .unwrap() .into_iter() .map(|a| a.name) .collect::<Vec<_>>(), vec!["agent2".to_string(), "agent3".to_string()] ); } #[test] fn test_heartbeat_recording() { let db = Database::temp().unwrap(); let mut tokens = Tokens::default(); tokens.agents.insert("token".into(), "agent".into()); let agents = Agents::new(db, &tokens).unwrap(); let agent = agents.get("agent").unwrap().unwrap(); assert!(agent.last_heartbeat.is_none()); agents.record_heartbeat("agent").unwrap(); let agent = agents.get("agent").unwrap().unwrap(); let first_heartbeat = agent.last_heartbeat.unwrap(); agents.record_heartbeat("agent").unwrap(); let agent = agents.get("agent").unwrap().unwrap(); assert!(first_heartbeat < agent.last_heartbeat.unwrap()); } #[test] fn test_agent_status() { let db = Database::temp().unwrap(); let config = Config::default(); let mut tokens = Tokens::default(); tokens.agents.insert("token".into(), "agent".into()); let agents = Agents::new(db.clone(), &tokens).unwrap(); ::crates::lists::setup_test_lists(&db, &config).unwrap(); let agent = agents.get("agent").unwrap().unwrap(); assert_eq!(agent.status(), AgentStatus::Unreachable); agents.record_heartbeat("agent").unwrap(); let agent = agents.get("agent").unwrap().unwrap(); assert_eq!(agent.status(), AgentStatus::Idle); CreateExperiment::dummy("dummy") .apply(&db, &config) .unwrap(); Experiment::next(&db, &Assignee::Agent("agent".to_string())).unwrap(); let agent = agents.get("agent").unwrap().unwrap(); assert_eq!(agent.status(), AgentStatus::Working); } }
Ok(if let Some(agent) = row { Some(agent.with_experiment(&self.db)?) } else { None })
call_expression
[ { "content": "pub fn execute(db: &mut Connection) -> Result<()> {\n\n // If the database version is 0, create the migrations table and bump it\n\n let version: i32 = db.query_row(\"PRAGMA user_version;\", &[], |r| r.get(0))?;\n\n if version == 0 {\n\n db.execute(\"CREATE TABLE migrations (name TEXT PRIMARY KEY);\", &[])?;\n\n db.execute(\"PRAGMA user_version = 1;\", &[])?;\n\n }\n\n\n\n let executed_migrations = {\n\n let mut prepared = db.prepare(\"SELECT name FROM migrations;\")?;\n\n let mut result = HashSet::new();\n\n for value in prepared.query_map(&[], |row| -> String { row.get(\"name\") })? {\n\n result.insert(value?);\n\n }\n\n\n\n result\n\n };\n\n\n\n for &(name, ref migration) in &migrations() {\n\n if !executed_migrations.contains(&name.to_string()) {\n", "file_path": "src/db/migrations.rs", "rank": 0, "score": 263625.0637562888 }, { "content": "pub fn run(url: &str, token: &str, threads_count: usize) -> Result<()> {\n\n let agent = Agent::new(url, token)?;\n\n let db = results::ResultsUploader::new(&agent.api);\n\n\n\n run_heartbeat(url, token);\n\n\n\n loop {\n\n let ex = agent.experiment()?;\n\n run_graph::run_ex(&ex, &db, threads_count, &agent.config)?;\n\n agent.api.complete_experiment()?;\n\n }\n\n}\n", "file_path": "src/agent/mod.rs", "rank": 1, "score": 263548.23832694127 }, { "content": "#[allow(unused_variables)]\n\npub fn render_template<C: Serialize>(name: &str, context: &C) -> Result<String> {\n\n // On debug builds the cache is rebuilt every time to pick up changed templates\n\n let tera_owned: Tera;\n\n let tera;\n\n\n\n #[cfg(debug_assertions)]\n\n {\n\n tera_owned = build_tera_cache()?;\n\n tera = &tera_owned;\n\n }\n\n\n\n #[cfg(not(debug_assertions))]\n\n {\n\n tera = &TERA_CACHE;\n\n }\n\n\n\n Ok(tera.render(name, context)?)\n\n}\n", "file_path": "src/assets.rs", "rank": 2, "score": 258192.4723712359 }, { "content": "pub fn load(name: &str) -> Result<&Asset> {\n\n if let Some(ref asset) = ASSETS.get(name) {\n\n Ok(asset)\n\n } else {\n\n bail!(\n\n \"unknown static file (did you add it to src/assets.rs?): {}\",\n\n name\n\n );\n\n }\n\n}\n\n\n", "file_path": "src/assets.rs", "rank": 3, "score": 257790.26407349744 }, { "content": "fn store_experiment_name(db: &Database, issue: &Issue, name: &str) -> Result<()> {\n\n // Store the provided experiment name to provide it automatically on next command\n\n // We don't have to worry about conflicts here since the table is defined with\n\n // ON CONFLICT IGNORE.\n\n db.execute(\n\n \"INSERT INTO saved_names (issue, experiment) VALUES (?1, ?2);\",\n\n &[&issue.number, &name],\n\n )?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/server/routes/webhooks/commands.rs", "rank": 4, "score": 251359.20895444992 }, { "content": "pub fn endpoint_experiment(name: String, data: Arc<Data>) -> Result<Response<Body>> {\n\n if let Some(ex) = Experiment::get(&data.db, &name)? 
{\n\n let (completed_jobs, total_jobs) = ex.raw_progress(&data.db)?;\n\n\n\n let (duration, estimated_end, average_job_duration) = if completed_jobs > 0\n\n && total_jobs > 0\n\n {\n\n if let Some(started_at) = ex.started_at {\n\n let res = if let Some(completed_at) = ex.completed_at {\n\n let total = completed_at.signed_duration_since(started_at);\n\n (\n\n Some(total),\n\n None,\n\n Some((total / completed_jobs as i32).num_seconds()),\n\n )\n\n } else {\n\n let total = Utc::now().signed_duration_since(started_at);\n\n let job_duration = total / completed_jobs as i32;\n\n (\n\n None,\n", "file_path": "src/server/routes/ui/experiments.rs", "rank": 5, "score": 237375.52203513094 }, { "content": "fn get_name(db: &Database, issue: &Issue, name: Option<String>) -> Result<String> {\n\n if let Some(name) = name {\n\n store_experiment_name(db, issue, &name)?;\n\n Ok(name)\n\n } else if let Some(default) = default_experiment_name(db, issue)? {\n\n Ok(default)\n\n } else {\n\n Err(\"missing experiment name\".into())\n\n }\n\n}\n\n\n", "file_path": "src/server/routes/webhooks/commands.rs", "rank": 6, "score": 234437.84113854406 }, { "content": "fn run_heartbeat(url: &str, token: &str) {\n\n let api = AgentApi::new(url, token);\n\n\n\n thread::spawn(move || loop {\n\n if let Err(e) = api.heartbeat().chain_err(|| \"failed to send heartbeat\") {\n\n utils::report_error(&e);\n\n }\n\n thread::sleep(Duration::from_secs(60));\n\n });\n\n}\n\n\n", "file_path": "src/agent/mod.rs", "rank": 7, "score": 233947.08432648762 }, { "content": "fn installed_binary(name: &str) -> String {\n\n format!(\"{}/bin/{}{}\", *CARGO_HOME, name, EXE_SUFFIX)\n\n}\n\n\n", "file_path": "src/toolchain.rs", "rank": 8, "score": 224093.69909441948 }, { "content": "pub fn capture_shas<DB: WriteResults>(ex: &Experiment, crates: &[Crate], db: &DB) -> Result<()> {\n\n for krate in crates {\n\n if let Crate::GitHub(ref repo) = *krate {\n\n let dir = repo.mirror_dir();\n\n let r = RunCommand::new(\"git\", &[\"rev-parse\", \"HEAD\"])\n\n .cd(&dir)\n\n .run_capture();\n\n\n\n let sha = match r {\n\n Ok((stdout, _)) => if let Some(shaline) = stdout.get(0) {\n\n if !shaline.is_empty() {\n\n info!(\"sha for GitHub repo {}: {}\", repo.slug(), shaline);\n\n shaline.to_string()\n\n } else {\n\n bail!(\"bogus output from git log for {}\", dir.display());\n\n }\n\n } else {\n\n bail!(\"bogus output from git log for {}\", dir.display());\n\n },\n\n Err(e) => {\n", "file_path": "src/ex_prepare.rs", "rank": 9, "score": 222126.30652380834 }, { "content": "fn default_experiment_name(db: &Database, issue: &Issue) -> Result<Option<String>> {\n\n let name = db.get_row(\n\n \"SELECT experiment FROM saved_names WHERE issue = ?1\",\n\n &[&issue.number],\n\n |r| r.get(0),\n\n )?;\n\n\n\n Ok(if let Some(name) = name {\n\n Some(name)\n\n } else if issue.pull_request.is_some() {\n\n Some(format!(\"pr-{}\", issue.number))\n\n } else {\n\n None\n\n })\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::{default_experiment_name, store_experiment_name};\n\n use db::Database;\n", "file_path": "src/server/routes/webhooks/commands.rs", "rank": 10, "score": 220593.47594839134 }, { "content": "pub fn shallow_clone_or_pull(url: &str, dir: &Path) -> Result<()> {\n\n let url = frob_url(url);\n\n\n\n if !dir.exists() {\n\n info!(\"cloning {} into {}\", url, dir.display());\n\n let r = RunCommand::new(\n\n \"git\",\n\n &[\"clone\", \"--depth\", \"1\", &url, &dir.to_string_lossy()],\n\n ).run()\n\n .chain_err(|| format!(\"unable to clone {}\", url));\n\n\n\n if r.is_err() && 
dir.exists() {\n\n fs::remove_dir_all(dir)?;\n\n }\n\n\n\n r\n\n } else {\n\n info!(\"pulling existing url {} into {}\", url, dir.display());\n\n RunCommand::new(\"git\", &[\"fetch\", \"--all\"]).cd(dir).run()?;\n\n RunCommand::new(\"git\", &[\"reset\", \"--hard\", \"@{upstream}\"])\n\n .cd(dir)\n\n .run()\n\n .chain_err(|| format!(\"unable to pull {}\", url))\n\n }\n\n}\n\n\n", "file_path": "src/git.rs", "rank": 11, "score": 215280.94606917005 }, { "content": "/// Builds the docker container image, 'crater', what will be used\n\n/// to isolate builds from each other. This expects the Dockerfile\n\n/// to exist in the `docker` directory, at runtime.\n\npub fn build_container(docker_env: &str) -> Result<()> {\n\n let dockerfile = format!(\"docker/Dockerfile.{}\", docker_env);\n\n RunCommand::new(\n\n \"docker\",\n\n &[\"build\", \"-f\", &dockerfile, \"-t\", IMAGE_NAME, \"docker\"],\n\n ).enable_timeout(false)\n\n .run()\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub enum MountPerms {\n\n ReadWrite,\n\n ReadOnly,\n\n}\n\n\n", "file_path": "src/docker.rs", "rank": 12, "score": 204956.60415600767 }, { "content": "/// Load and parse and environment variable.\n\nfn get_env<T>(name: &str) -> Result<T>\n\nwhere\n\n T: FromStr,\n\n T::Err: ::std::error::Error + Send + 'static,\n\n{\n\n env::var(name)\n\n .chain_err(|| {\n\n format!{\"Need to specify {:?} in environment or `.env`.\", name}\n\n })?.parse()\n\n .chain_err(|| format!{\"Couldn't parse {:?}.\", name})\n\n}\n", "file_path": "src/cli.rs", "rank": 13, "score": 201324.44533546193 }, { "content": "fn dl_registry(name: &str, vers: &str, dir: &Path) -> Result<()> {\n\n if dir.exists() {\n\n info!(\n\n \"crate {}-{} exists at {}. skipping\",\n\n name,\n\n vers,\n\n dir.display()\n\n );\n\n return Ok(());\n\n }\n\n info!(\"downloading crate {}-{} to {}\", name, vers, dir.display());\n\n let url = format!(\"{0}/{1}/{1}-{2}.crate\", CRATES_ROOT, name, vers);\n\n let bin = ::utils::http::get(&url).chain_err(|| format!(\"unable to download {}\", url))?;\n\n\n\n fs::create_dir_all(&dir)?;\n\n\n\n let mut tar = Archive::new(GzDecoder::new(bin));\n\n let r = unpack_without_first_dir(&mut tar, dir).chain_err(|| \"unable to unpack crate tarball\");\n\n\n\n if r.is_err() {\n\n let _ = ::utils::fs::remove_dir_all(dir);\n\n }\n\n\n\n r\n\n}\n\n\n", "file_path": "src/crates/sources/registry.rs", "rank": 14, "score": 197136.59294186425 }, { "content": "#[cfg_attr(feature = \"cargo-clippy\", allow(too_many_arguments))]\n\npub fn run_test<DB: WriteResults>(\n\n config: &Config,\n\n action: &str,\n\n ex: &Experiment,\n\n tc: &Toolchain,\n\n krate: &Crate,\n\n db: &DB,\n\n quiet: bool,\n\n test_fn: fn(&Config, &Experiment, &Path, &Toolchain, bool) -> Result<TestResult>,\n\n) -> Result<RunTestResult> {\n\n if let Some(res) = db.get_result(ex, tc, krate)? {\n\n info!(\"skipping crate {}. 
existing result: {}\", krate, res);\n\n Ok(RunTestResult {\n\n result: res,\n\n skipped: true,\n\n })\n\n } else {\n\n with_work_crate(ex, tc, krate, |source_path| {\n\n with_frobbed_toml(ex, krate, source_path)?;\n\n with_captured_lockfile(config, ex, krate, source_path)?;\n", "file_path": "src/ex_run.rs", "rank": 15, "score": 187080.4634211215 }, { "content": "pub fn generate_report<DB: ReadResults>(\n\n db: &DB,\n\n config: &Config,\n\n ex: &Experiment,\n\n) -> Result<TestResults> {\n\n let shas = db.load_all_shas(ex)?;\n\n let res = ex\n\n .crates\n\n .clone()\n\n .into_iter()\n\n .map(|krate| {\n\n // Any errors here will turn into unknown results\n\n let crate_results = ex.toolchains.iter().map(|tc| -> Result<BuildTestResult> {\n\n let res = db\n\n .load_test_result(ex, tc, &krate)?\n\n .ok_or_else(|| \"no result\")?;\n\n\n\n Ok(BuildTestResult {\n\n res,\n\n log: crate_to_path_fragment(tc, &krate, true)\n", "file_path": "src/report/mod.rs", "rank": 16, "score": 187080.4634211215 }, { "content": "pub fn get_client_for_bucket(bucket: &str) -> Result<Box<S3>> {\n\n let make_client = |region| -> Result<S3Client> {\n\n let credentials = DefaultCredentialsProvider::new().unwrap();\n\n Ok(S3Client::new_with(HttpClient::new()?, credentials, region))\n\n };\n\n let client = make_client(Region::UsEast1)?;\n\n let response = client\n\n .get_bucket_location(GetBucketLocationRequest {\n\n bucket: bucket.into(),\n\n }).sync()\n\n .chain_err(|| \"S3 failure to get bucket location\")?;\n\n let region = match response.location_constraint.as_ref() {\n\n Some(region) if region == \"\" => Region::UsEast1,\n\n Some(region) => region.parse().chain_err(|| \"Unknown bucket region.\")?,\n\n None => bail!{\"Couldn't determine bucket region\"},\n\n };\n\n\n\n Ok(Box::new(make_client(region)?))\n\n}\n\n\n", "file_path": "src/report/s3.rs", "rank": 17, "score": 183775.96103421145 }, { "content": "pub fn run_ex<DB: WriteResults + Sync>(\n\n ex: &Experiment,\n\n db: &DB,\n\n threads_count: usize,\n\n config: &Config,\n\n) -> Result<()> {\n\n let res = run_ex_inner(ex, db, threads_count, config);\n\n\n\n // Remove all the target dirs even if the experiment failed\n\n let target_dir = &::toolchain::ex_target_dir(&ex.name);\n\n if target_dir.exists() {\n\n utils::fs::remove_dir_all(target_dir)?;\n\n }\n\n\n\n res\n\n}\n\n\n", "file_path": "src/run_graph.rs", "rank": 18, "score": 178625.44662095833 }, { "content": "pub fn report_error(e: &Error) {\n\n error!(\"{}\", e);\n\n\n\n for e in e.iter().skip(1) {\n\n error!(\"caused by: {}\", e)\n\n }\n\n\n\n if let Some(backtrace) = e.backtrace() {\n\n error!(\"{:?}\", backtrace);\n\n }\n\n}\n\n\n", "file_path": "src/utils/mod.rs", "rank": 19, "score": 176120.88090065782 }, { "content": "pub fn ex_target_dir(ex_name: &str) -> PathBuf {\n\n TARGET_DIR.join(ex_name)\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub enum CargoState {\n\n Locked,\n\n Unlocked,\n\n}\n\n\n\nlazy_static! 
{\n\n /// This is the main toolchain used by Crater for everything not experiment-specific, such as\n\n /// generating lockfiles or fetching dependencies.\n\n pub static ref MAIN_TOOLCHAIN: Toolchain = Toolchain {\n\n source: ToolchainSource::Dist {\n\n name: \"stable\".to_string()\n\n },\n\n rustflags: None,\n\n };\n\n}\n", "file_path": "src/toolchain.rs", "rank": 20, "score": 175248.93951577356 }, { "content": "fn froml_path(ex_name: &str, name: &str, vers: &str) -> PathBuf {\n\n froml_dir(ex_name).join(format!(\"{}-{}.Cargo.toml\", name, vers))\n\n}\n\n\n", "file_path": "src/ex_prepare.rs", "rank": 21, "score": 172071.9927167133 }, { "content": "fn crate_to_name(c: &Crate, shas: &HashMap<GitHubRepo, String>) -> Result<String> {\n\n Ok(match *c {\n\n Crate::Registry(ref details) => format!(\"{}-{}\", details.name, details.version),\n\n Crate::GitHub(ref repo) => {\n\n if let Some(sha) = shas.get(repo) {\n\n format!(\"{}.{}.{}\", repo.org, repo.name, sha)\n\n } else {\n\n format!(\"{}.{}\", repo.org, repo.name)\n\n }\n\n }\n\n Crate::Local(ref name) => format!(\"{} (local)\", name),\n\n })\n\n}\n\n\n", "file_path": "src/report/mod.rs", "rank": 22, "score": 168605.4036500411 }, { "content": "fn frob_url(url: &str) -> String {\n\n // With https git will interactively ask for a password for private repos.\n\n // Switch to the unauthenticated git protocol to just generate an error instead.\n\n url.replace(\"https://\", \"git://\")\n\n}\n", "file_path": "src/git.rs", "rank": 23, "score": 166146.8834130289 }, { "content": "#[cfg_attr(feature = \"cargo-clippy\", allow(match_ref_pats))]\n\npub fn frob_toml(ex: &Experiment, krate: &Crate) -> Result<()> {\n\n if let Crate::Registry(ref details) = *krate {\n\n fs::create_dir_all(&froml_dir(&ex.name))?;\n\n let source = krate.dir();\n\n let out = froml_path(&ex.name, &details.name, &details.version);\n\n\n\n let mut frobber = TomlFrobber::new(krate, &source)?;\n\n frobber.frob();\n\n frobber.save(&out)?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/ex_prepare.rs", "rank": 24, "score": 165990.78578821436 }, { "content": "fn cmd(args: &[&str]) -> Option<String> {\n\n if let Ok(out) = Command::new(args[0]).args(&args[1..]).output() {\n\n if out.status.success() {\n\n return Some(String::from_utf8_lossy(&out.stdout).trim().to_string());\n\n }\n\n }\n\n\n\n None\n\n}\n\n\n", "file_path": "build.rs", "rank": 25, "score": 165491.63457190953 }, { "content": "pub fn write_logs_archives<DB: ReadResults, W: ReportWriter>(\n\n db: &DB,\n\n ex: &Experiment,\n\n dest: &W,\n\n config: &Config,\n\n) -> Result<Vec<Archive>> {\n\n let mut archives = Vec::new();\n\n let mut all = TarBuilder::new(GzEncoder::new(Vec::new(), Compression::default()));\n\n let mut by_comparison = HashMap::new();\n\n\n\n for krate in &ex.crates {\n\n if config.should_skip(krate) {\n\n continue;\n\n }\n\n\n\n let res1 = db.load_test_result(ex, &ex.toolchains[0], krate)?;\n\n let res2 = db.load_test_result(ex, &ex.toolchains[1], krate)?;\n\n let comparison = compare(config, krate, res1, res2);\n\n\n\n for tc in &ex.toolchains {\n", "file_path": "src/report/archives.rs", "rank": 26, "score": 164131.87373633616 }, { "content": "pub fn gen<DB: ReadResults, W: ReportWriter + Display>(\n\n db: &DB,\n\n ex: &Experiment,\n\n dest: &W,\n\n config: &Config,\n\n) -> Result<()> {\n\n let res = generate_report(db, config, ex)?;\n\n\n\n info!(\"writing results to {}\", dest);\n\n info!(\"writing metadata\");\n\n dest.write_string(\n\n \"results.json\",\n\n serde_json::to_string(&res)?.into(),\n\n 
&mime::APPLICATION_JSON,\n\n )?;\n\n dest.write_string(\n\n \"config.json\",\n\n serde_json::to_string(&ex)?.into(),\n\n &mime::APPLICATION_JSON,\n\n )?;\n", "file_path": "src/report/mod.rs", "rank": 27, "score": 163982.69749751338 }, { "content": "fn lockfile(ex_name: &str, krate: &Crate) -> Result<PathBuf> {\n\n let name = match *krate {\n\n Crate::Registry(ref details) => format!(\"reg-{}-{}.lock\", details.name, details.version),\n\n Crate::GitHub(ref repo) => format!(\"gh-{}-{}.lock\", repo.org, repo.name),\n\n Crate::Local(ref name) => format!(\"local-{}.lock\", name),\n\n };\n\n Ok(lockfile_dir(ex_name).join(name))\n\n}\n\n\n", "file_path": "src/ex_prepare.rs", "rank": 28, "score": 163773.33685581706 }, { "content": "#[inline]\n\nfn url_encode(input: &str) -> String {\n\n utf8_percent_encode(input, REPORT_ENCODE_SET).to_string()\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct TestResults {\n\n crates: Vec<CrateResult>,\n\n}\n\n\n", "file_path": "src/report/mod.rs", "rank": 29, "score": 162615.2763487649 }, { "content": "fn render_template<C: Serialize>(name: &str, context: &C) -> Result<Response<Body>> {\n\n let mut resp = Response::new(assets::render_template(name, context)?.into());\n\n resp.headers_mut()\n\n .insert(CONTENT_TYPE, HeaderValue::from_static(\"text/html\"));\n\n Ok(resp)\n\n}\n", "file_path": "src/server/routes/ui/mod.rs", "rank": 30, "score": 161734.3030645722 }, { "content": "fn init_toolchain_from_dist(toolchain: &str) -> Result<()> {\n\n info!(\"installing toolchain {}\", toolchain);\n\n utils::try_hard(|| {\n\n RunCommand::new(\n\n &installed_binary(\"rustup\"),\n\n &[\"toolchain\", \"install\", toolchain],\n\n ).local_rustup()\n\n .run()\n\n .chain_err(|| format!(\"unable to install toolchain {} via rustup\", toolchain))\n\n })\n\n}\n\n\n", "file_path": "src/toolchain.rs", "rank": 31, "score": 160202.42848275998 }, { "content": "pub fn run(config: Config) -> Result<()> {\n\n let db = Database::open()?;\n\n let tokens = tokens::Tokens::load()?;\n\n let github = GitHubApi::new(&tokens);\n\n let agents = Agents::new(db.clone(), &tokens)?;\n\n let bot_username = github.username()?;\n\n let acl = ACL::new(&config, &github)?;\n\n\n\n info!(\"bot username: {}\", bot_username);\n\n\n\n let data = Data {\n\n bot_username,\n\n config,\n\n github,\n\n tokens,\n\n agents,\n\n db: db.clone(),\n\n reports_worker: reports::ReportsWorker::new(),\n\n acl,\n\n };\n", "file_path": "src/server/mod.rs", "rank": 32, "score": 154834.80237390302 }, { "content": "pub fn run(host: &str, data: &Data, issue: &Issue, args: RunArgs) -> Result<()> {\n\n let name = get_name(&data.db, issue, args.name)?;\n\n\n\n ::actions::CreateExperiment {\n\n name: name.clone(),\n\n toolchains: [\n\n args.start.ok_or_else(|| \"missing start toolchain\")?,\n\n args.end.ok_or_else(|| \"missing end toolchain\")?,\n\n ],\n\n mode: args.mode.unwrap_or(Mode::BuildAndTest),\n\n crates: args.crates.unwrap_or(CrateSelect::Full),\n\n cap_lints: args.cap_lints.unwrap_or(CapLints::Forbid),\n\n priority: args.priority.unwrap_or(0),\n\n github_issue: Some(GitHubIssue {\n\n api_url: issue.url.clone(),\n\n html_url: issue.html_url.clone(),\n\n number: issue.number,\n\n }),\n\n }.apply(&data.db, &data.config)?;\n\n\n", "file_path": "src/server/routes/webhooks/commands.rs", "rank": 33, "score": 154810.6692492289 }, { "content": "pub fn dump_dot(ex: &Experiment, config: &Config, dest: &Path) -> Result<()> {\n\n info!(\"computing the tasks graph...\");\n\n let graph = build_graph(&ex, config);\n\n\n\n 
info!(\"dumping the tasks graph...\");\n\n ::std::fs::write(dest, format!(\"{:?}\", Dot::new(&graph.graph)).as_bytes())?;\n\n\n\n info!(\"tasks graph available in {}\", dest.to_string_lossy());\n\n\n\n Ok(())\n\n}\n", "file_path": "src/run_graph.rs", "rank": 34, "score": 153441.9698379726 }, { "content": "pub fn with_frobbed_toml(ex: &Experiment, krate: &Crate, path: &Path) -> Result<()> {\n\n let (crate_name, crate_vers) = match *krate {\n\n Crate::Registry(ref details) => (details.name.clone(), details.version.clone()),\n\n _ => return Ok(()),\n\n };\n\n let src_froml = &froml_path(&ex.name, &crate_name, &crate_vers);\n\n let dst_froml = &path.join(\"Cargo.toml\");\n\n if src_froml.exists() {\n\n info!(\"using frobbed toml {}\", src_froml.display());\n\n fs::copy(src_froml, dst_froml).chain_err(|| {\n\n format!(\n\n \"unable to copy frobbed toml from {} to {}\",\n\n src_froml.display(),\n\n dst_froml.display()\n\n )\n\n })?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/ex_prepare.rs", "rank": 35, "score": 153441.9698379726 }, { "content": "pub fn endpoint_list(data: Arc<Data>) -> Result<Response<Body>> {\n\n let mut agents = Vec::new();\n\n for agent in &data.agents.all()? {\n\n let (status_class, status_pretty, show_assigned) = match agent.status() {\n\n AgentStatus::Working => (\"orange\", \"Working\", true),\n\n AgentStatus::Idle => (\"green\", \"Online\", false),\n\n AgentStatus::Unreachable => (\"red\", \"Unreachable\", false),\n\n };\n\n\n\n agents.push(AgentData {\n\n name: agent.name().to_string(),\n\n status_class,\n\n status_pretty,\n\n last_heartbeat: agent\n\n .last_heartbeat()\n\n .map(|time| time.to_rfc3339_opts(SecondsFormat::Secs, true)),\n\n assigned_experiment: if show_assigned {\n\n agent.assigned_experiment().map(|ex| ex.name.clone())\n\n } else {\n\n None\n", "file_path": "src/server/routes/ui/agents.rs", "rank": 36, "score": 153222.1983527417 }, { "content": "pub fn endpoint_queue(data: Arc<Data>) -> Result<Response<Body>> {\n\n let mut queued = Vec::new();\n\n let mut running = Vec::new();\n\n let mut needs_report = Vec::new();\n\n let mut generating_report = Vec::new();\n\n let mut report_failed = Vec::new();\n\n\n\n for experiment in Experiment::all(&data.db)? 
{\n\n // Don't include completed experiments in the queue\n\n if experiment.status == Status::Completed {\n\n continue;\n\n }\n\n\n\n let ex = ExperimentData::new(&data, &experiment)?;\n\n\n\n match experiment.status {\n\n Status::Queued => queued.push(ex),\n\n Status::Running => running.push(ex),\n\n Status::NeedsReport => needs_report.push(ex),\n\n Status::GeneratingReport => generating_report.push(ex),\n", "file_path": "src/server/routes/ui/experiments.rs", "rank": 37, "score": 153049.1975581536 }, { "content": "fn handle_errors(err: Rejection) -> ::std::result::Result<Response<Body>, Rejection> {\n\n match err.status() {\n\n StatusCode::NOT_FOUND | StatusCode::METHOD_NOT_ALLOWED => {\n\n Ok(ApiResponse::not_found().into_response().unwrap())\n\n }\n\n StatusCode::FORBIDDEN => Ok(ApiResponse::unauthorized().into_response().unwrap()),\n\n _ => Err(err),\n\n }\n\n}\n", "file_path": "src/server/routes/agent.rs", "rank": 38, "score": 151324.08958348498 }, { "content": "fn parse_token(authorization: &str) -> Option<&str> {\n\n let mut segments = authorization.split(' ');\n\n if let Some(scope) = segments.next() {\n\n if scope == \"CraterToken\" {\n\n if let Some(token) = segments.next() {\n\n if segments.next().is_none() {\n\n return Some(token);\n\n }\n\n }\n\n }\n\n }\n\n\n\n None\n\n}\n\n\n", "file_path": "src/server/auth.rs", "rank": 39, "score": 149984.21523588727 }, { "content": "pub fn routes(\n\n data: Arc<Data>,\n\n) -> impl Filter<Extract = (Response<Body>,), Error = Rejection> + Clone {\n\n let data_cloned = data.clone();\n\n let data_filter = warp::any().map(move || data_cloned.clone());\n\n\n\n let config = warp::get2()\n\n .and(warp::path(\"config\"))\n\n .and(warp::path::index())\n\n .and(data_filter.clone())\n\n .and(auth_filter(data.clone(), TokenType::Agent))\n\n .map(endpoint_config);\n\n\n\n let next_experiment = warp::get2()\n\n .and(warp::path(\"next-experiment\"))\n\n .and(warp::path::index())\n\n .and(data_filter.clone())\n\n .and(auth_filter(data.clone(), TokenType::Agent))\n\n .map(endpoint_next_experiment);\n\n\n", "file_path": "src/server/routes/agent.rs", "rank": 40, "score": 149744.01069800122 }, { "content": "fn generate_report(data: &Data, ex: &Experiment, results: &DatabaseDB) -> Result<()> {\n\n let client = S3Client::new_with(\n\n HttpClient::new()?,\n\n data.tokens.reports_bucket.to_aws_credentials(),\n\n data.tokens.reports_bucket.region.to_region()?,\n\n );\n\n let dest = format!(\"s3://{}/{}\", data.tokens.reports_bucket.bucket, &ex.name);\n\n let writer = report::S3Writer::create(Box::new(client), dest.parse()?)?;\n\n\n\n report::gen(results, &ex, &writer, &data.config)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/server/reports.rs", "rank": 41, "score": 147611.63224015397 }, { "content": "struct Agent {\n\n api: AgentApi,\n\n config: Config,\n\n}\n\n\n\nimpl Agent {\n\n fn new(url: &str, token: &str) -> Result<Self> {\n\n info!(\"connecting to crater server {}...\", url);\n\n\n\n let api = AgentApi::new(url, token);\n\n let config = api.config()?;\n\n\n\n info!(\"connected to the crater server!\");\n\n info!(\"assigned agent name: {}\", config.agent_name);\n\n\n\n Ok(Agent {\n\n api,\n\n config: config.crater_config,\n\n })\n\n }\n\n\n\n fn experiment(&self) -> Result<Experiment> {\n\n info!(\"asking the server for a new experiment...\");\n\n Ok(self.api.next_experiment()?)\n\n }\n\n}\n\n\n", "file_path": "src/agent/mod.rs", "rank": 42, "score": 146768.70024543905 }, { "content": "fn parse_features(path: &Path) -> Result<Vec<String>> {\n\n let mut 
features = Vec::new();\n\n let contents = ::std::fs::read_to_string(path)?;\n\n for (hash_idx, _) in contents.match_indices('#') {\n\n let contents = &contents[hash_idx + 1..];\n\n let contents = eat_token(Some(contents), \"!\").or_else(|| Some(contents));\n\n let contents = eat_token(contents, \"[\");\n\n let contents = eat_token(contents, \"feature\");\n\n let new_features = parse_list(contents, \"(\", \")\");\n\n features.extend_from_slice(&new_features);\n\n }\n\n\n\n fn eat_token<'a>(s: Option<&'a str>, tok: &str) -> Option<&'a str> {\n\n eat_whitespace(s).and_then(|s| {\n\n if s.starts_with(tok) {\n\n Some(&s[tok.len()..])\n\n } else {\n\n None\n\n }\n\n })\n", "file_path": "src/ex_run.rs", "rank": 43, "score": 146260.81360670234 }, { "content": "fn init_toolchain_from_ci(alt: bool, sha: &str) -> Result<()> {\n\n // Ensure rustup-toolchain-install-master is installed\n\n let bin = installed_binary(\"rustup-toolchain-install-master\");\n\n if !Path::new(&bin).exists() {\n\n info!(\"installing rustup-toolchain-install-master\");\n\n utils::try_hard(|| {\n\n RunCommand::new(\n\n &installed_binary(\"cargo\"),\n\n &[\"install\", \"rustup-toolchain-install-master\"],\n\n ).local_rustup()\n\n .run()\n\n .chain_err(|| \"unable to install rustup-toolchain-install-master\")\n\n })?;\n\n }\n\n\n\n if alt {\n\n info!(\"installing toolchain {}-alt\", sha);\n\n } else {\n\n info!(\"installing toolchain {}\", sha);\n\n }\n", "file_path": "src/toolchain.rs", "rank": 44, "score": 145711.09348375647 }, { "content": "struct ExperimentDBRecord {\n\n name: String,\n\n mode: String,\n\n cap_lints: String,\n\n toolchain_start: String,\n\n toolchain_end: String,\n\n priority: i32,\n\n created_at: DateTime<Utc>,\n\n started_at: Option<DateTime<Utc>>,\n\n completed_at: Option<DateTime<Utc>>,\n\n github_issue: Option<String>,\n\n github_issue_url: Option<String>,\n\n github_issue_number: Option<i32>,\n\n status: String,\n\n assigned_to: Option<String>,\n\n report_url: Option<String>,\n\n}\n\n\n\nimpl ExperimentDBRecord {\n\n fn from_row(row: &Row) -> Self {\n", "file_path": "src/experiments.rs", "rank": 45, "score": 145388.4266083419 }, { "content": "fn crate_to_url(c: &Crate, shas: &HashMap<GitHubRepo, String>) -> Result<String> {\n\n Ok(match *c {\n\n Crate::Registry(ref details) => format!(\n\n \"https://crates.io/crates/{}/{}\",\n\n details.name, details.version\n\n ),\n\n Crate::GitHub(ref repo) => {\n\n if let Some(sha) = shas.get(repo) {\n\n format!(\"https://github.com/{}/{}/tree/{}\", repo.org, repo.name, sha)\n\n } else {\n\n format!(\"https://github.com/{}/{}\", repo.org, repo.name)\n\n }\n\n }\n\n Crate::Local(ref name) => {\n\n format!(\"{}/tree/master/local-crates/{}\", ::CRATER_REPO_URL, name)\n\n }\n\n })\n\n}\n\n\n", "file_path": "src/report/mod.rs", "rank": 46, "score": 144035.1851300145 }, { "content": "fn crate_work_dir(ex_name: &str, toolchain: &Toolchain) -> PathBuf {\n\n let mut dir = TEST_SOURCE_DIR.clone();\n\n if let Some(thread) = ::std::thread::current().name() {\n\n dir = dir.join(thread);\n\n }\n\n dir.join(ex_name).join(toolchain.to_string())\n\n}\n\n\n", "file_path": "src/ex_prepare.rs", "rank": 47, "score": 143396.51194469768 }, { "content": "fn process_command(host: &str, sender: &str, body: &str, issue: &Issue, data: &Data) -> Result<()> {\n\n let start = format!(\"@{} \", data.bot_username);\n\n for line in body.lines() {\n\n if !line.starts_with(&start) {\n\n continue;\n\n }\n\n\n\n let command = line[line.find(' ').unwrap()..].trim();\n\n if command == \"\" {\n\n 
continue;\n\n }\n\n\n\n if !data.acl.allowed(sender) {\n\n Message::new()\n\n .line(\n\n \"lock\",\n\n \"**Error:** you're not allowed to interact with this bot.\",\n\n ).note(\n\n \"key\",\n\n format!(\n", "file_path": "src/server/routes/webhooks/mod.rs", "rank": 48, "score": 141601.06075610858 }, { "content": "fn endpoint_assets(path: String) -> Result<Response<Body>> {\n\n if let Ok(asset) = assets::load(&path) {\n\n if let Ok(content) = asset.content() {\n\n let mut resp = Response::new(content.into_owned().into());\n\n resp.headers_mut().insert(\n\n CONTENT_TYPE,\n\n HeaderValue::from_str(asset.mime().as_ref()).unwrap(),\n\n );\n\n return Ok(resp);\n\n }\n\n }\n\n\n\n error_404()\n\n}\n\n\n", "file_path": "src/server/routes/ui/mod.rs", "rank": 49, "score": 140784.4566032206 }, { "content": "pub fn with_work_crate<F, R>(\n\n ex: &Experiment,\n\n toolchain: &Toolchain,\n\n krate: &Crate,\n\n f: F,\n\n) -> Result<R>\n\nwhere\n\n F: Fn(&Path) -> Result<R>,\n\n{\n\n let src_dir = krate.dir();\n\n let dest_dir = crate_work_dir(&ex.name, toolchain);\n\n info!(\n\n \"creating temporary build dir for {} in {}\",\n\n krate,\n\n dest_dir.display()\n\n );\n\n\n\n ::utils::fs::copy_dir(&src_dir, &dest_dir)?;\n\n let r = f(&dest_dir);\n\n ::utils::fs::remove_dir_all(&dest_dir)?;\n\n r\n\n}\n\n\n", "file_path": "src/ex_prepare.rs", "rank": 50, "score": 135950.4838014783 }, { "content": "pub fn ping(data: &Data, issue: &Issue) -> Result<()> {\n\n Message::new()\n\n .line(\"ping_pong\", \"**Pong!**\")\n\n .send(&issue.url, data)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/server/routes/webhooks/commands.rs", "rank": 51, "score": 135938.49956951686 }, { "content": "pub fn reload_acl(data: &Data, issue: &Issue) -> Result<()> {\n\n data.acl.refresh_cache(&data.github)?;\n\n\n\n Message::new()\n\n .line(\"hammer_and_wrench\", \"List of authorized users reloaded!\")\n\n .send(&issue.url, data)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/server/routes/webhooks/commands.rs", "rank": 52, "score": 133464.71571881976 }, { "content": "fn error_404() -> Result<Response<Body>> {\n\n let mut resp = render_template(\n\n \"ui/404.html\",\n\n &ErrorContext {\n\n layout: LayoutContext::new(),\n\n },\n\n )?;\n\n\n\n *resp.status_mut() = StatusCode::NOT_FOUND;\n\n Ok(resp)\n\n}\n\n\n", "file_path": "src/server/routes/ui/mod.rs", "rank": 53, "score": 131883.3165011797 }, { "content": "fn froml_dir(ex_name: &str) -> PathBuf {\n\n EXPERIMENT_DIR.join(ex_name).join(\"fromls\")\n\n}\n\n\n", "file_path": "src/ex_prepare.rs", "rank": 54, "score": 130546.86197121844 }, { "content": "fn lockfile_dir(ex_name: &str) -> PathBuf {\n\n EXPERIMENT_DIR.join(ex_name).join(\"lockfiles\")\n\n}\n\n\n", "file_path": "src/ex_prepare.rs", "rank": 55, "score": 130546.86197121844 }, { "content": "fn migrations() -> Vec<(&'static str, MigrationKind)> {\n\n let mut migrations = Vec::new();\n\n\n\n migrations.push((\n\n \"initial\",\n\n MigrationKind::SQL(\n\n \"\n\n CREATE TABLE experiments (\n\n name TEXT PRIMARY KEY,\n\n mode TEXT NOT NULL,\n\n cap_lints TEXT NOT NULL,\n\n\n\n toolchain_start TEXT NOT NULL,\n\n toolchain_end TEXT NOT NULL,\n\n\n\n priority INTEGER NOT NULL,\n\n created_at DATETIME NOT NULL,\n\n status TEXT NOT NULL,\n\n github_issue TEXT,\n\n github_issue_url TEXT,\n", "file_path": "src/db/migrations.rs", "rank": 56, "score": 130411.98889485675 }, { "content": "pub fn redirect<F, R>(path: &Path, f: F) -> Result<R>\n\nwhere\n\n F: FnOnce() -> Result<R>,\n\n{\n\n let file = file_drain(path);\n\n let term = 
TERM_DRAIN.clone();\n\n\n\n let drain = slog::Duplicate(term, file).fuse();\n\n slog_scope::scope(&slog::Logger::root(drain, slog_o!()), f)\n\n}\n\n\n\nlazy_static! {\n\n static ref START_TIME: Instant = Instant::now();\n\n}\n\n\n\nlazy_static! {\n\n static ref TERM_DRAIN: Arc<slog::Fuse<Mutex<slog_term::CompactFormat<slog_term::TermDecorator>>>> = {\n\n let plain = slog_term::TermDecorator::new().stdout().build();\n\n let term = Mutex::new(slog_term::CompactFormat::new(plain).build()).fuse();\n\n Arc::new(term)\n\n };\n\n}\n\n\n", "file_path": "src/log.rs", "rank": 57, "score": 130404.13335083105 }, { "content": "fn run_ex_inner<DB: WriteResults + Sync>(\n\n ex: &Experiment,\n\n db: &DB,\n\n threads_count: usize,\n\n config: &Config,\n\n) -> Result<()> {\n\n info!(\"computing the tasks graph...\");\n\n let graph = Mutex::new(build_graph(ex, config));\n\n\n\n info!(\"preparing the execution...\");\n\n for tc in &ex.toolchains {\n\n tc.prepare()?;\n\n }\n\n\n\n info!(\"running tasks in {} threads...\", threads_count);\n\n\n\n // An HashMap is used instead of an HashSet because Thread is not Eq+Hash\n\n let parked_threads: Mutex<HashMap<thread::ThreadId, thread::Thread>> =\n\n Mutex::new(HashMap::new());\n\n\n", "file_path": "src/run_graph.rs", "rank": 58, "score": 130157.34498181824 }, { "content": "fn endpoint_heartbeat(data: Arc<Data>, auth: AuthDetails) -> Result<Response<Body>> {\n\n if let Some(rev) = auth.git_revision {\n\n data.agents.set_git_revision(&auth.name, &rev)?;\n\n }\n\n\n\n data.agents.record_heartbeat(&auth.name)?;\n\n Ok(ApiResponse::Success { result: true }.into_response()?)\n\n}\n\n\n", "file_path": "src/server/routes/agent.rs", "rank": 59, "score": 129707.91923600958 }, { "content": "fn endpoint_next_experiment(data: Arc<Data>, auth: AuthDetails) -> Result<Response<Body>> {\n\n let next = Experiment::next(&data.db, &Assignee::Agent(auth.name.clone()))?;\n\n\n\n let result = if let Some((new, mut ex)) = next {\n\n if new {\n\n if let Some(ref github_issue) = ex.github_issue {\n\n Message::new()\n\n .line(\n\n \"construction\",\n\n format!(\n\n \"Experiment **`{}`** is now **running** on agent `{}`.\",\n\n ex.name, auth.name,\n\n ),\n\n ).send(&github_issue.api_url, &data)?;\n\n }\n\n }\n\n\n\n ex.remove_completed_crates(&data.db)?;\n\n Some(ex)\n\n } else {\n\n None\n\n };\n\n\n\n Ok(ApiResponse::Success { result }.into_response()?)\n\n}\n\n\n", "file_path": "src/server/routes/agent.rs", "rank": 60, "score": 126470.2877462122 }, { "content": "fn endpoint_complete_experiment(data: Arc<Data>, auth: AuthDetails) -> Result<Response<Body>> {\n\n let mut ex = Experiment::run_by(&data.db, &Assignee::Agent(auth.name.clone()))?\n\n .ok_or(\"no experiment run by this agent\")?;\n\n\n\n ex.set_status(&data.db, Status::NeedsReport)?;\n\n info!(\"experiment {} completed, marked as needs-report\", ex.name);\n\n data.reports_worker.wake(); // Ensure the reports worker is awake\n\n\n\n Ok(ApiResponse::Success { result: true }.into_response()?)\n\n}\n\n\n", "file_path": "src/server/routes/agent.rs", "rank": 61, "score": 126470.2877462122 }, { "content": "fn handle_results(resp: Result<Response<Body>>) -> Response<Body> {\n\n match resp {\n\n Ok(resp) => resp,\n\n Err(err) => ApiResponse::internal_error(err.to_string())\n\n .into_response()\n\n .unwrap(),\n\n }\n\n}\n\n\n", "file_path": "src/server/routes/agent.rs", "rank": 62, "score": 126396.084652649 }, { "content": "fn write_logs<DB: ReadResults, W: ReportWriter>(\n\n db: &DB,\n\n ex: &Experiment,\n\n dest: &W,\n\n config: 
&Config,\n\n) -> Result<()> {\n\n let num_crates = ex.crates.len();\n\n let progress_every = (num_crates / PROGRESS_FRACTION) + 1;\n\n for (i, krate) in ex.crates.iter().enumerate() {\n\n if i % progress_every == 0 {\n\n info!(\"wrote logs for {}/{} crates\", i, num_crates)\n\n }\n\n\n\n if config.should_skip(krate) {\n\n continue;\n\n }\n\n\n\n for tc in &ex.toolchains {\n\n let log_path = crate_to_path_fragment(tc, krate, false).join(\"log.txt\");\n\n let content = db\n", "file_path": "src/report/mod.rs", "rank": 63, "score": 124459.60132538999 }, { "content": "fn handle_errors(err: Rejection) -> ::std::result::Result<Response<Body>, Rejection> {\n\n match err.status() {\n\n StatusCode::NOT_FOUND | StatusCode::METHOD_NOT_ALLOWED => match error_404() {\n\n Ok(resp) => Ok(resp),\n\n Err(err) => {\n\n error!(\"failed to render 404 page!\");\n\n ::utils::report_error(&err);\n\n Ok(error_500())\n\n }\n\n },\n\n _ => Err(err),\n\n }\n\n}\n\n\n", "file_path": "src/server/routes/ui/mod.rs", "rank": 64, "score": 124009.05040860947 }, { "content": "pub fn abort(data: &Data, issue: &Issue, args: AbortArgs) -> Result<()> {\n\n let name = get_name(&data.db, issue, args.name)?;\n\n\n\n ::actions::DeleteExperiment { name: name.clone() }.apply(&data.db, &data.config)?;\n\n\n\n Message::new()\n\n .line(\"wastebasket\", format!(\"Experiment **`{}`** deleted!\", name))\n\n .set_label(Label::ExperimentCompleted)\n\n .send(&issue.url, data)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/server/routes/webhooks/commands.rs", "rank": 65, "score": 123230.21051523821 }, { "content": "pub fn edit(data: &Data, issue: &Issue, args: EditArgs) -> Result<()> {\n\n let name = get_name(&data.db, issue, args.name)?;\n\n\n\n let changed = ::actions::EditExperiment {\n\n name: name.clone(),\n\n toolchains: [args.start, args.end],\n\n crates: args.crates,\n\n mode: args.mode,\n\n cap_lints: args.cap_lints,\n\n priority: args.priority,\n\n }.apply(&data.db, &data.config)?;\n\n\n\n if changed {\n\n Message::new()\n\n .line(\n\n \"memo\",\n\n format!(\"Configuration of the **`{}`** experiment changed.\", name),\n\n ).send(&issue.url, data)?;\n\n } else {\n\n Message::new()\n\n .line(\"warning\", \"No changes requested.\")\n\n .send(&issue.url, data)?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/server/routes/webhooks/commands.rs", "rank": 66, "score": 123230.21051523821 }, { "content": "pub fn finish() {\n\n let duration = Instant::now().duration_since(*START_TIME).as_secs();\n\n let duration = if duration < 60 {\n\n format!(\"{}s\", duration)\n\n } else {\n\n let minutes = duration / 60;\n\n let seconds = duration % 60;\n\n format!(\"{}m {}s\", minutes, seconds)\n\n };\n\n info!(\"logs: {}\", global_log_name().display());\n\n info!(\"duration: {}\", duration);\n\n}\n", "file_path": "src/log.rs", "rank": 67, "score": 122320.06756172454 }, { "content": "fn main_() -> Result<()> {\n\n cli::Crater::from_args().run()\n\n}\n", "file_path": "src/main.rs", "rank": 68, "score": 121460.31411072699 }, { "content": "pub fn retry_report(data: &Data, issue: &Issue, args: RetryReportArgs) -> Result<()> {\n\n let name = get_name(&data.db, issue, args.name)?;\n\n\n\n if let Some(mut experiment) = Experiment::get(&data.db, &name)? 
{\n\n if experiment.status != Status::ReportFailed {\n\n bail!(\n\n \"generation of the report of the **`{}`** experiment didn't fail!\",\n\n name\n\n );\n\n }\n\n\n\n experiment.set_status(&data.db, Status::NeedsReport)?;\n\n data.reports_worker.wake();\n\n\n\n Message::new()\n\n .line(\n\n \"hammer_and_wrench\",\n\n format!(\"Generation of the report for **`{}`** queued again.\", name),\n\n ).set_label(Label::ExperimentQueued)\n\n .send(&issue.url, data)?;\n\n\n\n Ok(())\n\n } else {\n\n bail!(\"an experiment named **`{}`** doesn't exist!\", name);\n\n }\n\n}\n\n\n", "file_path": "src/server/routes/webhooks/commands.rs", "rank": 69, "score": 119220.2374306632 }, { "content": "fn update_rustup() -> Result<()> {\n\n info!(\"updating rustup\");\n\n utils::try_hard(|| {\n\n RunCommand::new(&installed_binary(\"rustup\"), &[\"self\", \"update\"])\n\n .local_rustup()\n\n .run()\n\n .chain_err(|| \"unable to run rustup self-update\")\n\n })\n\n}\n\n\n", "file_path": "src/toolchain.rs", "rank": 70, "score": 118515.01746295136 }, { "content": "fn init_rustup() -> Result<()> {\n\n fs::create_dir_all(&*CARGO_HOME)?;\n\n fs::create_dir_all(&*RUSTUP_HOME)?;\n\n if Path::new(&installed_binary(\"rustup\")).exists() {\n\n update_rustup()?;\n\n } else {\n\n install_rustup()?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/toolchain.rs", "rank": 71, "score": 118515.01746295136 }, { "content": "fn install_rustup() -> Result<()> {\n\n info!(\"installing rustup\");\n\n let rustup_url = &format!(\n\n \"{}/{}/rustup-init{}\",\n\n RUSTUP_BASE_URL,\n\n ::HOST_TARGET,\n\n EXE_SUFFIX\n\n );\n\n let mut response = ::utils::http::get(rustup_url).chain_err(|| \"unable to download rustup\")?;\n\n\n\n let tempdir = TempDir::new(\"crater\")?;\n\n let installer = &tempdir.path().join(format!(\"rustup-init{}\", EXE_SUFFIX));\n\n {\n\n let mut file = File::create(installer)?;\n\n io::copy(&mut response, &mut file)?;\n\n native::make_executable(installer)?;\n\n }\n\n\n\n utils::try_hard(|| {\n\n RunCommand::new(&installer.to_string_lossy(), &[\"-y\", \"--no-modify-path\"])\n\n .local_rustup()\n\n .run()\n\n .chain_err(|| \"unable to run rustup-init\")\n\n })\n\n}\n\n\n", "file_path": "src/toolchain.rs", "rank": 72, "score": 118515.01746295136 }, { "content": "fn unpack_without_first_dir<R: Read>(archive: &mut Archive<R>, path: &Path) -> Result<()> {\n\n let entries = archive.entries()?;\n\n for entry in entries {\n\n let mut entry = entry?;\n\n let relpath = {\n\n let path = entry.path();\n\n let path = path?;\n\n path.into_owned()\n\n };\n\n let mut components = relpath.components();\n\n // Throw away the first path component\n\n components.next();\n\n let full_path = path.join(&components.as_path());\n\n if let Some(parent) = full_path.parent() {\n\n fs::create_dir_all(parent)?;\n\n }\n\n entry.unpack(&full_path)?;\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "src/crates/sources/registry.rs", "rank": 73, "score": 118312.83078123111 }, { "content": "pub fn auth_filter(\n\n data: Arc<Data>,\n\n token_type: TokenType,\n\n) -> impl Filter<Extract = (AuthDetails,), Error = Rejection> + Clone {\n\n warp::header::headers_cloned().and_then(move |headers| {\n\n match check_auth(&data, &headers, token_type) {\n\n Some(details) => Ok(details),\n\n None => Err(warp::reject::forbidden()),\n\n }\n\n })\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct ACL {\n\n cached_usernames: Arc<RwLock<HashSet<String>>>,\n\n users: Vec<String>,\n\n teams: Vec<(String, String)>,\n\n}\n\n\n\nimpl ACL {\n", "file_path": "src/server/auth.rs", "rank": 
74, "score": 116620.87338344482 }, { "content": "pub fn with_captured_lockfile(\n\n config: &Config,\n\n ex: &Experiment,\n\n krate: &Crate,\n\n path: &Path,\n\n) -> Result<()> {\n\n let src_lockfile = &lockfile(&ex.name, krate)?;\n\n let dst_lockfile = &path.join(\"Cargo.lock\");\n\n\n\n // Only use the local lockfile if it wasn't overridden\n\n if !config.should_update_lockfile(krate) && krate.is_repo() && dst_lockfile.exists() {\n\n return Ok(());\n\n }\n\n\n\n if src_lockfile.exists() {\n\n info!(\"using lockfile {}\", src_lockfile.display());\n\n fs::copy(src_lockfile, dst_lockfile).chain_err(|| {\n\n format!(\n\n \"unable to copy lockfile from {} to {}\",\n\n src_lockfile.display(),\n\n dst_lockfile.display()\n\n )\n\n })?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/ex_prepare.rs", "rank": 75, "score": 116620.87338344482 }, { "content": "pub fn test_check_only(\n\n config: &Config,\n\n ex: &Experiment,\n\n source_path: &Path,\n\n toolchain: &Toolchain,\n\n quiet: bool,\n\n) -> Result<TestResult> {\n\n let r = toolchain.run_cargo(\n\n config,\n\n ex,\n\n source_path,\n\n &[\"check\", \"--frozen\", \"--all\", \"--all-targets\"],\n\n CargoState::Locked,\n\n quiet,\n\n false,\n\n );\n\n\n\n if r.is_ok() {\n\n Ok(TestResult::TestPass)\n\n } else {\n\n Ok(TestResult::BuildFail)\n\n }\n\n}\n\n\n", "file_path": "src/ex_run.rs", "rank": 76, "score": 116620.87338344482 }, { "content": "pub fn capture_lockfile(\n\n config: &Config,\n\n ex: &Experiment,\n\n krate: &Crate,\n\n toolchain: &Toolchain,\n\n) -> Result<()> {\n\n fs::create_dir_all(&lockfile_dir(&ex.name))?;\n\n\n\n if !config.should_update_lockfile(krate)\n\n && krate.is_repo()\n\n && krate.dir().join(\"Cargo.lock\").exists()\n\n {\n\n info!(\"crate {} has a lockfile. skipping\", krate);\n\n return Ok(());\n\n }\n\n\n\n with_work_crate(ex, toolchain, krate, |path| {\n\n with_frobbed_toml(ex, krate, path)?;\n\n capture_lockfile_inner(config, ex, krate, path, toolchain)\n\n }).chain_err(|| format!(\"failed to generate lockfile for {}\", krate))?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/ex_prepare.rs", "rank": 77, "score": 116620.87338344482 }, { "content": "pub fn test_build_only(\n\n config: &Config,\n\n ex: &Experiment,\n\n source_path: &Path,\n\n toolchain: &Toolchain,\n\n quiet: bool,\n\n) -> Result<TestResult> {\n\n let r = build(config, ex, source_path, toolchain, quiet);\n\n if r.is_ok() {\n\n Ok(TestResult::TestSkipped)\n\n } else {\n\n Ok(TestResult::BuildFail)\n\n }\n\n}\n\n\n", "file_path": "src/ex_run.rs", "rank": 78, "score": 116620.87338344482 }, { "content": "fn get_git_sha() -> Option<String> {\n\n if let Some(sha) = cmd(&[\"git\", \"rev-parse\", \"--short\", \"HEAD\"]) {\n\n let symbolic = cmd(&[\"git\", \"rev-parse\", \"--symbolic\", \"HEAD\"]).unwrap();\n\n let symbolic_full = cmd(&[\"git\", \"rev-parse\", \"--symbolic-full-name\", \"HEAD\"]).unwrap();\n\n\n\n println!(\"cargo:rerun-if-changed=.git/{}\", symbolic);\n\n if symbolic != symbolic_full {\n\n println!(\"cargo:rerun-if-changed=.git/{}\", symbolic_full);\n\n }\n\n\n\n Some(sha)\n\n } else {\n\n println!(\"cargo:warning=failed to get crater sha\");\n\n None\n\n }\n\n}\n\n\n", "file_path": "build.rs", "rank": 79, "score": 114729.76788575848 }, { "content": "pub fn fetch_crate_deps(\n\n config: &Config,\n\n ex: &Experiment,\n\n krate: &Crate,\n\n toolchain: &Toolchain,\n\n) -> Result<()> {\n\n with_work_crate(ex, toolchain, krate, |path| {\n\n with_frobbed_toml(ex, krate, path)?;\n\n with_captured_lockfile(config, ex, krate, path)?;\n\n\n\n let 
args = &[\"fetch\", \"--locked\", \"--manifest-path\", \"Cargo.toml\"];\n\n toolchain\n\n .run_cargo(config, ex, path, args, CargoState::Unlocked, false, true)\n\n .chain_err(|| format!(\"unable to fetch deps for {}\", krate))?;\n\n\n\n Ok(())\n\n })\n\n}\n", "file_path": "src/ex_prepare.rs", "rank": 80, "score": 114099.98629395213 }, { "content": "pub fn routes(\n\n data: Arc<Data>,\n\n) -> impl Filter<Extract = (Response<Body>,), Error = Rejection> + Clone {\n\n let data_filter = warp::any().map(move || data.clone());\n\n\n\n warp::post2()\n\n .and(warp::path::index())\n\n .and(data_filter)\n\n .and(warp::header::headers_cloned())\n\n .and(warp::body::concat())\n\n .map(|data: Arc<Data>, headers: HeaderMap, body: FullBody| {\n\n let mut resp: Response<Body>;\n\n match receive_endpoint(data, headers, body) {\n\n Ok(()) => resp = Response::new(\"OK\\n\".into()),\n\n Err(err) => {\n\n error!(\"error while processing webhook\");\n\n ::utils::report_error(&err);\n\n\n\n resp = Response::new(format!(\"Error: {}\\n\", err).into());\n\n *resp.status_mut() = StatusCode::INTERNAL_SERVER_ERROR;\n\n }\n\n }\n\n\n\n resp\n\n })\n\n}\n", "file_path": "src/server/routes/webhooks/mod.rs", "rank": 81, "score": 114099.98629395213 }, { "content": "pub fn routes(\n\n data: Arc<Data>,\n\n) -> impl Filter<Extract = (Response<Body>,), Error = Rejection> + Clone {\n\n let data_filter = warp::any().map(move || data.clone());\n\n\n\n let queue = warp::get2()\n\n .and(warp::path::index())\n\n .and(data_filter.clone())\n\n .map(experiments::endpoint_queue);\n\n\n\n let experiment = warp::get2()\n\n .and(warp::path(\"ex\"))\n\n .and(warp::path::param())\n\n .and(warp::path::index())\n\n .and(data_filter.clone())\n\n .map(experiments::endpoint_experiment);\n\n\n\n let agents = warp::get2()\n\n .and(warp::path(\"agents\"))\n\n .and(warp::path::index())\n", "file_path": "src/server/routes/ui/mod.rs", "rank": 82, "score": 114099.98629395213 }, { "content": "pub fn test_build_and_test(\n\n config: &Config,\n\n ex: &Experiment,\n\n source_path: &Path,\n\n toolchain: &Toolchain,\n\n quiet: bool,\n\n) -> Result<TestResult> {\n\n let build_r = build(config, ex, source_path, toolchain, quiet);\n\n let test_r = if build_r.is_ok() {\n\n Some(test(config, ex, source_path, toolchain, quiet))\n\n } else {\n\n None\n\n };\n\n\n\n Ok(match (build_r, test_r) {\n\n (Err(_), None) => TestResult::BuildFail,\n\n (Ok(_), Some(Err(_))) => TestResult::TestFail,\n\n (Ok(_), Some(Ok(_))) => TestResult::TestPass,\n\n (_, _) => unreachable!(),\n\n })\n\n}\n\n\n", "file_path": "src/ex_run.rs", "rank": 83, "score": 114099.98629395213 }, { "content": "pub fn test_find_unstable_features(\n\n _config: &Config,\n\n _ex: &Experiment,\n\n source_path: &Path,\n\n _toolchain: &Toolchain,\n\n _quiet: bool,\n\n) -> Result<TestResult> {\n\n use walkdir::*;\n\n\n\n fn is_hidden(entry: &DirEntry) -> bool {\n\n entry\n\n .file_name()\n\n .to_str()\n\n .map(|s| s.starts_with('.'))\n\n .unwrap_or(false)\n\n }\n\n\n\n let mut features = HashSet::new();\n\n\n\n for entry in WalkDir::new(source_path)\n", "file_path": "src/ex_run.rs", "rank": 84, "score": 111765.78254459519 }, { "content": "pub fn report_panic(e: &Any) {\n\n if let Some(e) = e.downcast_ref::<String>() {\n\n error!(\"panicked: {}\", e)\n\n } else if let Some(e) = e.downcast_ref::<&'static str>() {\n\n error!(\"panicked: {}\", e)\n\n } else {\n\n error!(\"panicked\")\n\n }\n\n}\n", "file_path": "src/utils/mod.rs", "rank": 85, "score": 109736.85154737864 }, { "content": "fn build_tera_cache() -> 
Result<Tera> {\n\n let mut templates = Vec::new();\n\n for (name, content) in TEMPLATES.iter() {\n\n templates.push((*name, String::from_utf8(content.load()?.into_owned())?));\n\n }\n\n\n\n let to_add = templates\n\n .iter()\n\n .map(|(n, c)| (*n, c as &str))\n\n .collect::<Vec<_>>();\n\n\n\n let mut tera = Tera::default();\n\n tera.add_raw_templates(to_add)?;\n\n Ok(tera)\n\n}\n\n\n", "file_path": "src/assets.rs", "rank": 86, "score": 109635.3459337828 }, { "content": "#[derive(Serialize)]\n\nstruct AgentData {\n\n name: String,\n\n status_class: &'static str,\n\n status_pretty: &'static str,\n\n last_heartbeat: Option<String>,\n\n assigned_experiment: Option<String>,\n\n git_revision: Option<String>,\n\n}\n\n\n", "file_path": "src/server/routes/ui/agents.rs", "rank": 87, "score": 106332.30406289776 }, { "content": "#[derive(Serialize)]\n\nstruct ExperimentData {\n\n name: String,\n\n status_class: &'static str,\n\n status_pretty: &'static str,\n\n mode: &'static str,\n\n assigned_to: Option<String>,\n\n progress: u8,\n\n priority: i32,\n\n}\n\n\n\nimpl ExperimentData {\n\n fn new(data: &Data, experiment: &Experiment) -> Result<Self> {\n\n let (status_class, status_pretty) = match experiment.status {\n\n Status::Queued => (\"\", \"Queued\"),\n\n Status::Running => (\"orange\", \"Running\"),\n\n Status::NeedsReport => (\"orange\", \"Needs report\"),\n\n Status::GeneratingReport => (\"orange\", \"Generating report\"),\n\n Status::ReportFailed => (\"red\", \"Report failed\"),\n\n Status::Completed => (\"green\", \"Completed\"),\n\n };\n", "file_path": "src/server/routes/ui/experiments.rs", "rank": 88, "score": 106040.1408251434 }, { "content": "#[derive(Serialize)]\n\nstruct ExperimentExt {\n\n #[serde(flatten)]\n\n common: ExperimentData,\n\n\n\n github_url: Option<String>,\n\n report_url: Option<String>,\n\n\n\n created_at: String,\n\n started_at: Option<String>,\n\n completed_at: Option<String>,\n\n\n\n total_jobs: u32,\n\n completed_jobs: u32,\n\n duration: Option<String>,\n\n estimated_end: Option<String>,\n\n average_job_duration: Option<String>,\n\n}\n\n\n", "file_path": "src/server/routes/ui/experiments.rs", "rank": 89, "score": 106040.1408251434 }, { "content": "#[derive(Serialize)]\n\nstruct ExperimentContext {\n\n experiment: ExperimentExt,\n\n layout: LayoutContext,\n\n}\n\n\n", "file_path": "src/server/routes/ui/experiments.rs", "rank": 90, "score": 106040.1408251434 }, { "content": "fn endpoint_config(data: Arc<Data>, auth: AuthDetails) -> Result<Response<Body>> {\n\n Ok(ApiResponse::Success {\n\n result: AgentConfig {\n\n agent_name: auth.name,\n\n crater_config: data.config.clone(),\n\n },\n\n }.into_response()?)\n\n}\n\n\n", "file_path": "src/server/routes/agent.rs", "rank": 91, "score": 105034.36396219293 }, { "content": "pub trait ReadResults {\n\n fn load_all_shas(&self, ex: &Experiment) -> Result<HashMap<GitHubRepo, String>>;\n\n fn load_log(\n\n &self,\n\n ex: &Experiment,\n\n toolchain: &Toolchain,\n\n krate: &Crate,\n\n ) -> Result<Option<Vec<u8>>>;\n\n fn load_test_result(\n\n &self,\n\n ex: &Experiment,\n\n toolchain: &Toolchain,\n\n krate: &Crate,\n\n ) -> Result<Option<TestResult>>;\n\n}\n\n\n", "file_path": "src/results/mod.rs", "rank": 92, "score": 104069.25229507682 }, { "content": "pub trait WriteResults {\n\n fn get_result(\n\n &self,\n\n ex: &Experiment,\n\n toolchain: &Toolchain,\n\n krate: &Crate,\n\n ) -> Result<Option<TestResult>>;\n\n fn record_sha(&self, ex: &Experiment, repo: &GitHubRepo, sha: &str) -> Result<()>;\n\n fn record_result<F>(\n\n 
&self,\n\n ex: &Experiment,\n\n toolchain: &Toolchain,\n\n krate: &Crate,\n\n f: F,\n\n ) -> Result<TestResult>\n\n where\n\n F: FnOnce() -> Result<TestResult>;\n\n}\n\n\n", "file_path": "src/results/mod.rs", "rank": 93, "score": 104069.25229507682 }, { "content": "pub trait DeleteResults {\n\n fn delete_all_results(&self, ex: &Experiment) -> Result<()>;\n\n fn delete_result(&self, ex: &Experiment, toolchain: &Toolchain, krate: &Crate) -> Result<()>;\n\n}\n\n\n\nstring_enum!(pub enum TestResult {\n\n BuildFail => \"build-fail\",\n\n TestFail => \"test-fail\",\n\n TestSkipped => \"test-skipped\",\n\n TestPass => \"test-pass\",\n\n Error => \"error\",\n\n});\n", "file_path": "src/results/mod.rs", "rank": 94, "score": 104069.25229507682 }, { "content": "fn verify_signature(secret: &str, payload: &[u8], raw_signature: &str) -> bool {\n\n // The signature must have a =\n\n if !raw_signature.contains('=') {\n\n return false;\n\n }\n\n\n\n // Split the raw signature to get the algorithm and the signature\n\n let splitted: Vec<&str> = raw_signature.split('=').collect();\n\n let algorithm = &splitted[0];\n\n let hex_signature = splitted\n\n .iter()\n\n .skip(1)\n\n .map(|i| *i)\n\n .collect::<Vec<&str>>()\n\n .join(\"=\");\n\n\n\n // Convert the signature from hex\n\n let signature = if let Ok(converted) = ::utils::hex::from_hex(&hex_signature) {\n\n converted\n\n } else {\n", "file_path": "src/server/routes/webhooks/mod.rs", "rank": 95, "score": 103200.86053368505 }, { "content": "pub fn init() -> slog_scope::GlobalLoggerGuard {\n\n let _ = START_TIME.deref();\n\n\n\n fs::create_dir_all(&*LOG_DIR).expect(\"Could create log directory.\");\n\n let file = file_drain(global_log_name());\n\n let term = TERM_DRAIN.clone();\n\n\n\n let drain = slog::Duplicate(term, file).fuse();\n\n let _guard = slog_scope::set_global_logger(slog::Logger::root(drain, slog_o!{}));\n\n\n\n info!(\n\n \"program args: {}\",\n\n env::args().skip(1).collect::<Vec<_>>().join(\" \")\n\n );\n\n\n\n _guard\n\n}\n\n\n", "file_path": "src/log.rs", "rank": 96, "score": 102714.24357789037 }, { "content": "enum MigrationKind {\n\n SQL(&'static str),\n\n Code(Box<Fn(&Transaction) -> ::rusqlite::Result<()>>),\n\n}\n\n\n", "file_path": "src/db/migrations.rs", "rank": 97, "score": 101892.72766630875 }, { "content": "#[derive(Debug)]\n\nstruct ConnectionCustomizer;\n\n\n\nimpl CustomizeConnection<Connection, ::rusqlite::Error> for ConnectionCustomizer {\n\n fn on_acquire(&self, conn: &mut Connection) -> ::std::result::Result<(), ::rusqlite::Error> {\n\n conn.execute(\"PRAGMA foreign_keys = ON;\", &[])?;\n\n Ok(())\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct Database {\n\n pool: Pool<SqliteConnectionManager>,\n\n // The tempfile is stored here to drop it after all the connections are closed\n\n tempfile: Option<Arc<NamedTempFile>>,\n\n}\n\n\n\nimpl Database {\n\n pub fn open() -> Result<Self> {\n\n let path = WORK_DIR.join(DATABASE_PATH);\n\n if !path.exists() {\n", "file_path": "src/db/mod.rs", "rank": 98, "score": 101030.65727244527 }, { "content": "pub fn write_html_report<W: ReportWriter>(\n\n ex: &Experiment,\n\n res: &TestResults,\n\n available_archives: Vec<Archive>,\n\n dest: &W,\n\n) -> Result<()> {\n\n let js_in = assets::load(\"report.js\")?;\n\n let css_in = assets::load(\"report.css\")?;\n\n write_report(ex, res, false, \"index.html\", dest)?;\n\n write_report(ex, res, true, \"full.html\", dest)?;\n\n write_downloads(ex, available_archives, dest)?;\n\n\n\n info!(\"copying static assets\");\n\n 
dest.write_bytes(\"report.js\", js_in.content()?.into_owned(), js_in.mime())?;\n\n dest.write_bytes(\"report.css\", css_in.content()?.into_owned(), css_in.mime())?;\n\n\n\n Ok(())\n\n}\n", "file_path": "src/report/html.rs", "rank": 99, "score": 100696.1738386552 } ]
Rust
src/main.rs
museun/tinge
aee80a24bcb7ba868934f21a7c5b0eb33c4d0243
use filetime::FileTime;
use std::fs::{self, File};
use std::path::Path;

const USAGE: &str = "
tinge. change file access and modification times

Usage: tinge [-acm] [-r <file>] <file>>

Options:
    -a         Change access time
    -c         Do not create file if it exists
    -m         Change modification time
    -r <file>  Use access and modification times from this file
";

fn error(msg: &str) -> ! {
    eprintln!("Error! {}", msg);
    eprintln!("{}", USAGE);
    std::process::exit(1);
}

#[derive(Debug)]
struct Args {
    access: bool,
    no_create: bool,
    modify: bool,
    source: Option<String>,
    file: String,
}

impl Args {
    pub fn parse() -> Args {
        let mut access = None;
        let mut no_create = None;
        let mut modify = None;
        let mut replacement = None;
        let mut source = None;
        let mut file = None;

        macro_rules! check {
            ($flag:expr, $data:expr, $err:expr) => {{
                if $flag.is_some() {
                    error($err)
                }
                $flag.replace($data);
            }};
        }

        for arg in std::env::args().skip(1) {
            if arg.starts_with('-') {
                for ch in arg[1..].chars() {
                    match ch {
                        'a' => check!(access, true, "-a flag already specified"),
                        'c' => check!(no_create, true, "-c flag already specified"),
                        'm' => check!(modify, true, "-m flag already specified"),
                        'r' => check!(replacement, true, "-r flag already specified"),
                        _ => {}
                    };
                }
                continue;
            }

            let s = arg
                .chars()
                .skip_while(|c| c.is_whitespace())
                .take_while(|c| !c.is_whitespace());

            if replacement.is_some() && source.is_none() {
                source.replace(s.collect::<String>());
                continue;
            }
            if file.is_none() {
                file.replace(s.collect::<String>());
            }
        }

        if file.is_none() || file.as_ref().map(|d| d.len()) == Some(0) {
            error("a filename must be provided")
        }

        Self {
            access: access.unwrap_or_default(),
            no_create: no_create.unwrap_or_default(),
            modify: modify.unwrap_or_default(),
            source,
            file: file.unwrap(),
        }
    }
}

struct TempFile<'a>(&'a str);

impl<'a> TempFile<'a> {
    pub fn create(p: &'a str) -> Self {
        let _ = File::create(p).unwrap();
        TempFile(p)
    }
}

impl<'a> Drop for TempFile<'a> {
    fn drop(&mut self) {
        let _ = fs::remove_file(self.0);
    }
}

fn main() {
    let Args {
        access,
        no_create,
        modify,
        source,
        file,
    } = Args::parse();

    const TEMP: &str = "___touch";
    let _temp = TempFile::create(TEMP);

    let path = Path::new(&file);
    if path.exists() && no_create {
        return;
    }
    if !path.exists() {
        let _ = File::create(&file);
    }

    let df = fs::metadata(&file).unwrap();
    let dt = fs::metadata(TEMP).unwrap();

    let mut tatime = FileTime::from_last_access_time(&dt);
    let mut tmtime = FileTime::from_last_modification_time(&dt);

    if let Some(source) = source {
        let p = Path::new(&source);
        if p.exists() {
            let fi = fs::metadata(&source).unwrap();
            tatime = FileTime::from_last_access_time(&fi);
            tmtime = FileTime::from_last_modification_time(&fi);
        } else {
            error("cannot access reference file");
        }
    }

    let (fatime, fmtime) = (
        FileTime::from_last_access_time(&df),
        FileTime::from_last_modification_time(&df),
    );

    let (access, modify) = match (access, modify) {
        (true, false) => (tatime, fmtime),
        (false, true) => (fatime, tmtime),
        (true, true) => (tatime, tmtime),
        _ => return,
    };

    let _ = filetime::set_file_times(file, access, modify);
}
function_block-function_prefixed
[]
Rust
day16/src/main.rs
obi1kenobi/advent-of-code-2020
51f1c462642dc59eae8fadf26cc635bce063678d
use std::{
    collections::{HashMap, HashSet},
    fs,
};

fn main() {
    let contents = fs::read_to_string(
        "/mnt/c/Users/predrag/Dropbox/Documents/Code/advent-of-code-2020/day16/input.txt",
    )
    .unwrap();

    let groups: Vec<_> = contents.trim().split("\n\n").collect();
    let fields_info = groups[0];
    let your_ticket_info = groups[1];
    let nearby_tickets_info = groups[2];

    let fields: Vec<_> = fields_info.trim().split("\n").collect();
    let field_valid_ranges: Vec<_> = fields
        .iter()
        .map(|&x| x.split(": ").last().unwrap())
        .flat_map(|x| x.split(" or "))
        .collect();
    let field_valid_rules: Vec<(i64, i64)> = field_valid_ranges
        .iter()
        .map(|&x| x.split("-").map(|y| y.parse().unwrap()))
        .map(|mut y| (y.next().unwrap(), y.last().unwrap()))
        .collect();

    let your_ticket_data: Vec<_> = your_ticket_info.trim().split("\n").collect();
    assert!(your_ticket_data[0] == "your ticket:");
    let your_ticket_numbers: Vec<i64> = your_ticket_data[1]
        .trim()
        .split(",")
        .map(|x| x.parse().unwrap())
        .collect();

    let nearby_tickets_data: Vec<_> = nearby_tickets_info.trim().split("\n").collect();
    assert!(nearby_tickets_data[0] == "nearby tickets:");
    let nearby_tickets: Vec<Vec<i64>> = nearby_tickets_data[1..nearby_tickets_data.len()]
        .iter()
        .map(|&ticket| {
            ticket
                .trim()
                .split(",")
                .map(|y| y.parse().unwrap())
                .collect()
        })
        .collect();

    let (part1_soln, mut valid_tickets) = solve_part1(&field_valid_rules, &nearby_tickets);
    println!("{}", part1_soln);

    valid_tickets.push(your_ticket_numbers.clone());
    println!(
        "{}",
        solve_part2(&fields, &valid_tickets, &your_ticket_numbers)
    );
}

fn solve_part1(
    field_valid_rules: &Vec<(i64, i64)>,
    nearby_tickets: &Vec<Vec<i64>>,
) -> (i64, Vec<Vec<i64>>) {
    let mut result: i64 = 0;
    let mut valid_tickets: Vec<Vec<i64>> = Vec::new();

    for nearby_ticket in nearby_tickets {
        let mut is_valid_ticket = true;
        for value in nearby_ticket {
            let is_valid_value: bool = field_valid_rules
                .iter()
                .filter(|(lower, upper)| lower <= value && upper >= value)
                .next()
                .is_some();

            if !is_valid_value {
                is_valid_ticket = false;
                result += value;
            }
        }

        if is_valid_ticket {
            valid_tickets.push(nearby_ticket.clone());
        }
    }

    (result, valid_tickets)
}

fn solve_part2(fields: &Vec<&str>, valid_tickets: &Vec<Vec<i64>>, your_ticket: &Vec<i64>) -> i64 {
    let field_names: Vec<_> = fields
        .iter()
        .map(|&x| x.split(": ").next().unwrap())
        .collect();
    let field_rule_elements: Vec<Vec<&str>> = fields
        .iter()
        .map(|&x| x.split(": ").last().unwrap().split(" or ").collect())
        .collect();
    let field_rule_ranges: Vec<Vec<(i64, i64)>> = field_rule_elements
        .iter()
        .map(|ranges| {
            ranges
                .iter()
                .map(|&range| range.split("-").map(|value| value.parse::<i64>().unwrap()))
                .map(|mut iter| (iter.next().unwrap(), iter.next().unwrap()))
                .collect()
        })
        .collect();

    let mut possible_ticket_index_to_field_index: Vec<HashSet<usize>> = Vec::new();
    for _ in 0..your_ticket.len() {
        possible_ticket_index_to_field_index.push((0..fields.len()).collect());
    }

    for ticket in valid_tickets {
        for (ticket_index, ticket_value) in ticket.iter().enumerate() {
            let valid_mappings: HashSet<_> = field_rule_ranges
                .iter()
                .enumerate()
                .filter(|(_, rule)| {
                    rule.iter()
                        .filter(|(lower, upper)| lower <= ticket_value && upper >= ticket_value)
                        .next()
                        .is_some()
                })
                .map(|(rule_index, _)| rule_index)
                .collect();

            let prior_options = &possible_ticket_index_to_field_index[ticket_index];
            let subsequent_options = prior_options
                .intersection(&valid_mappings)
                .copied()
                .collect();
            possible_ticket_index_to_field_index[ticket_index] = subsequent_options;
        }
    }

    let mut field_index_to_ticket_index: HashMap<usize, usize> = HashMap::new();
    while field_index_to_ticket_index.len() < fields.len() {
        for (ticket_index, possible_field_indexes) in
            possible_ticket_index_to_field_index.iter().enumerate()
        {
            let remaining_field_indexes: HashSet<_> = possible_field_indexes
                .difference(&field_index_to_ticket_index.keys().copied().collect())
                .copied()
                .collect();

            if remaining_field_indexes.len() == 1 {
                field_index_to_ticket_index.insert(
                    remaining_field_indexes.iter().next().unwrap().clone(),
                    ticket_index,
                );
            }
        }
    }

    let mut result: i64 = 1;
    for (field_index, field_name) in field_names.iter().enumerate() {
        if field_name.starts_with("departure") {
            let ticket_index = field_index_to_ticket_index[&field_index];
            result *= your_ticket[ticket_index];
        }
    }

    result
}
function_block-function_prefix_line
[ { "content": "fn main() {\n\n let contents = fs::read_to_string(\n\n \"/mnt/c/Users/predrag/Dropbox/Documents/Code/advent-of-code-2020/day24/input.txt\",\n\n )\n\n .unwrap();\n\n\n\n let all_directions: Vec<_> = contents.trim().split('\\n').map(parse_directions).collect();\n\n\n\n println!(\"{}\", solve_part1(&all_directions));\n\n println!(\"{}\", solve_part2(&all_directions));\n\n}\n\n\n", "file_path": "day24/src/main.rs", "rank": 0, "score": 78707.57334749348 }, { "content": "fn main() {\n\n let contents = fs::read_to_string(\n\n \"/mnt/c/Users/predrag/Dropbox/Documents/Code/advent-of-code-2020/day14/input.txt\",\n\n )\n\n .unwrap();\n\n\n\n let lines: Vec<_> = contents.trim().split(\"\\n\").collect();\n\n\n\n println!(\"{}\", solve_part1(&lines));\n\n println!(\"{}\", solve_part2(&lines));\n\n}\n\n\n", "file_path": "day14/src/main.rs", "rank": 1, "score": 78707.57334749348 }, { "content": "fn main() {\n\n let contents = fs::read_to_string(\n\n \"/mnt/c/Users/predrag/Dropbox/Documents/Code/advent-of-code-2020/day13/input.txt\",\n\n )\n\n .unwrap();\n\n\n\n let lines: Vec<_> = contents.trim().split(\"\\n\").collect();\n\n\n\n println!(\"{}\", solve_part2(\"7,13,x,x,59,x,31,19\"));\n\n println!(\"{}\", solve_part2(\"17,x,13,19\"));\n\n println!(\"{}\", solve_part2(\"67,7,59,61\"));\n\n\n\n println!(\"{}\", solve_part1(&lines));\n\n println!(\"{}\", solve_part2(&lines[1]));\n\n}\n\n\n", "file_path": "day13/src/main.rs", "rank": 2, "score": 78707.57334749348 }, { "content": "fn main() {\n\n let contents = fs::read_to_string(\n\n \"/mnt/c/Users/predrag/Dropbox/Documents/Code/advent-of-code-2020/day8/input.txt\",\n\n )\n\n .unwrap();\n\n\n\n let lines: Vec<_> = contents.trim().split(\"\\n\").collect();\n\n\n\n println!(\"{}\", solve_part1(&lines));\n\n println!(\"{}\", solve_part2(&lines));\n\n}\n\n\n", "file_path": "day8/src/main.rs", "rank": 3, "score": 78707.57334749348 }, { "content": "fn main() {\n\n let contents = fs::read_to_string(\n\n \"/mnt/c/Users/predrag/Dropbox/Documents/Code/advent-of-code-2020/day18/input.txt\",\n\n )\n\n .unwrap();\n\n\n\n let lines: Vec<_> = contents.trim().split(\"\\n\").collect();\n\n\n\n println!(\"{}\", solve_part1(&lines));\n\n println!(\"{}\", solve_part2(&lines));\n\n}\n\n\n", "file_path": "day18/src/main.rs", "rank": 4, "score": 78707.57334749348 }, { "content": "fn main() {\n\n let contents = fs::read_to_string(\n\n \"/mnt/c/Users/predrag/Dropbox/Documents/Code/advent-of-code-2020/day10/input.txt\",\n\n )\n\n .unwrap();\n\n\n\n let lines: Vec<i64> = contents\n\n .trim()\n\n .split(\"\\n\")\n\n .map(|x| x.parse().unwrap())\n\n .collect();\n\n\n\n let (one_diff, three_diff) = solve_part1(&lines);\n\n println!(\"{} * {} = {}\", one_diff, three_diff, one_diff * three_diff);\n\n println!(\"{}\", solve_part2(&lines));\n\n}\n\n\n", "file_path": "day10/src/main.rs", "rank": 5, "score": 78707.57334749348 }, { "content": "fn main() {\n\n let contents = fs::read_to_string(\n\n \"/mnt/c/Users/predrag/Dropbox/Documents/Code/advent-of-code-2020/day12/input.txt\",\n\n )\n\n .unwrap();\n\n\n\n let lines: Vec<_> = contents.trim().split(\"\\n\").collect();\n\n\n\n println!(\"{}\", solve_part1(&lines));\n\n println!(\"{}\", solve_part2(&lines));\n\n}\n\n\n", "file_path": "day12/src/main.rs", "rank": 6, "score": 78707.57334749348 }, { "content": "fn main() {\n\n let contents = fs::read_to_string(\n\n \"/mnt/c/Users/predrag/Dropbox/Documents/Code/advent-of-code-2020/day6/input.txt\",\n\n )\n\n .unwrap();\n\n\n\n let groups: Vec<Vec<_>> = contents\n\n .trim()\n\n 
.split(\"\\n\\n\")\n\n .map(|x| x.split(\"\\n\").collect())\n\n .collect();\n\n\n\n println!(\"{}\", solve_part1(&groups));\n\n println!(\"{}\", solve_part2(&groups));\n\n}\n\n\n", "file_path": "day6/src/main.rs", "rank": 7, "score": 78707.57334749348 }, { "content": "fn main() {\n\n let contents = fs::read_to_string(\n\n \"/mnt/c/Users/predrag/Dropbox/Documents/Code/advent-of-code-2020/day23/input.txt\",\n\n )\n\n .unwrap();\n\n\n\n let lines: Vec<_> = contents.trim().split('\\n').collect();\n\n assert_eq!(lines.len(), 1);\n\n let cups: VecDeque<i64> = lines[0]\n\n .chars()\n\n .map(|x| x.to_string().parse().unwrap())\n\n .collect();\n\n\n\n println!(\"{}\", solve_part1(&cups, 100));\n\n println!(\"{}\", solve_part2(&cups, 10000000));\n\n}\n\n\n", "file_path": "day23/src/main.rs", "rank": 8, "score": 78707.57334749348 }, { "content": "fn main() {\n\n let contents = fs::read_to_string(\n\n \"/mnt/c/Users/predrag/Dropbox/Documents/Code/advent-of-code-2020/day17/input.txt\",\n\n )\n\n .unwrap();\n\n\n\n let cubes: Vec<Vec<char>> = contents\n\n .trim()\n\n .split(\"\\n\")\n\n .map(|x| x.chars().collect())\n\n .collect();\n\n\n\n println!(\"{}\", solve_part1(&cubes));\n\n println!(\"{}\", solve_part2(&cubes));\n\n}\n\n\n", "file_path": "day17/src/main.rs", "rank": 9, "score": 78707.57334749348 }, { "content": "fn main() {\n\n let contents = fs::read_to_string(\n\n \"/mnt/c/Users/predrag/Dropbox/Documents/Code/advent-of-code-2020/day25/input.txt\",\n\n )\n\n .unwrap();\n\n\n\n let subject_number: i64 = 7;\n\n let modulus: i64 = 20201227;\n\n let lines: Vec<i64> = contents\n\n .trim()\n\n .split('\\n')\n\n .map(|x| x.parse().unwrap())\n\n .collect();\n\n let card_pubkey = lines[0];\n\n let door_pubkey = lines[1];\n\n\n\n println!(\n\n \"{}\",\n\n solve_part1(subject_number, modulus, card_pubkey, door_pubkey)\n\n );\n\n}\n\n\n", "file_path": "day25/src/main.rs", "rank": 10, "score": 78707.57334749348 }, { "content": "fn main() {\n\n let contents = fs::read_to_string(\n\n \"/mnt/c/Users/predrag/Dropbox/Documents/Code/advent-of-code-2020/day2/input.txt\",\n\n )\n\n .unwrap();\n\n\n\n solve_part1(&contents);\n\n solve_part2(&contents);\n\n}\n\n\n", "file_path": "day2/src/main.rs", "rank": 11, "score": 78707.57334749348 }, { "content": "fn main() {\n\n let (rules, patterns) = parse_file(\n\n \"/mnt/c/Users/predrag/Dropbox/Documents/Code/advent-of-code-2020/day19/input.txt\",\n\n );\n\n\n\n println!(\"{}\", solve_part1(&rules, &patterns));\n\n\n\n let mut cyclic_rules: HashMap<usize, Rule> = rules.clone();\n\n cyclic_rules.insert(8, Rule::OneOrMore(8, 42));\n\n cyclic_rules.insert(11, Rule::MatchingPrefixAndSuffix(11, 42, 31));\n\n println!(\"{}\", solve_part2(&cyclic_rules, &patterns));\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n fn test_avoid_false_positive_match_part1() {\n\n let path = \"/mnt/c/Users/predrag/Dropbox/Documents/Code/advent-of-code-2020/day19/sample_input5.txt\";\n\n let (mut rules, patterns) = crate::parse_file(path);\n\n\n", "file_path": "day19/src/main.rs", "rank": 12, "score": 78707.57334749348 }, { "content": "fn main() {\n\n let contents = fs::read_to_string(\n\n \"/mnt/c/Users/predrag/Dropbox/Documents/Code/advent-of-code-2020/day7/input.txt\",\n\n )\n\n .unwrap();\n\n\n\n let lines: Vec<_> = contents.trim().split(\"\\n\").collect();\n\n\n\n let edges: HashMap<&str, Vec<(usize, &str)>> = parse_rules(lines);\n\n\n\n println!(\"{}\", solve_part1(&edges));\n\n println!(\"{}\", solve_part2(&edges));\n\n}\n\n\n", "file_path": "day7/src/main.rs", "rank": 13, "score": 
78707.57334749348 }, { "content": "fn main() {\n\n let contents = fs::read_to_string(\n\n \"/mnt/c/Users/predrag/Dropbox/Documents/Code/advent-of-code-2020/day5/input.txt\",\n\n )\n\n .unwrap();\n\n\n\n let lines: Vec<_> = contents.trim().split(\"\\n\").collect();\n\n\n\n println!(\"{}\", solve_part1(&lines));\n\n println!(\"{}\", solve_part2(&lines));\n\n}\n\n\n", "file_path": "day5/src/main.rs", "rank": 14, "score": 78707.57334749348 }, { "content": "fn main() {\n\n let contents = fs::read_to_string(\"./input.txt\").unwrap();\n\n let target_sum = 2020;\n\n\n\n let numbers_in_file = contents\n\n .trim()\n\n .split(\"\\n\")\n\n .map(|x| x.parse::<i32>().unwrap())\n\n .collect::<Vec<i32>>();\n\n\n\n let (pair_num_a, pair_num_b) = find_pair_sum(target_sum, &numbers_in_file).unwrap();\n\n println!(\n\n \"{} * {} = {}\",\n\n pair_num_a,\n\n pair_num_b,\n\n pair_num_a * pair_num_b\n\n );\n\n\n\n let (triplet_num_a, triplet_num_b, triplet_num_c) =\n\n find_triplet_sum(target_sum, &numbers_in_file).unwrap();\n\n println!(\n\n \"{} * {} * {} = {}\",\n\n triplet_num_a,\n\n triplet_num_b,\n\n triplet_num_c,\n\n triplet_num_a * triplet_num_b * triplet_num_c\n\n );\n\n}\n\n\n", "file_path": "day1/src/main.rs", "rank": 15, "score": 78707.57334749348 }, { "content": "fn main() {\n\n let contents = fs::read_to_string(\n\n \"/mnt/c/Users/predrag/Dropbox/Documents/Code/advent-of-code-2020/day22/input.txt\",\n\n )\n\n .unwrap();\n\n\n\n let players: Vec<Vec<_>> = contents\n\n .trim()\n\n .split(\"\\n\\n\")\n\n .map(|x| x.trim().split(\"\\n\").collect())\n\n .collect();\n\n assert_eq!(players[0][0], \"Player 1:\");\n\n assert_eq!(players[1][0], \"Player 2:\");\n\n\n\n let player_decks: Vec<_> = players.iter().map(parse_player).collect();\n\n\n\n println!(\"{}\", solve_part1(&player_decks[0], &player_decks[1]));\n\n println!(\"{}\", solve_part2(&player_decks[0], &player_decks[1]));\n\n}\n\n\n", "file_path": "day22/src/main.rs", "rank": 16, "score": 78707.57334749348 }, { "content": "fn main() {\n\n let contents = fs::read_to_string(\n\n \"/mnt/c/Users/predrag/Dropbox/Documents/Code/advent-of-code-2020/day15/input.txt\",\n\n )\n\n .unwrap();\n\n\n\n let numbers: Vec<i64> = contents\n\n .trim()\n\n .split(\",\")\n\n .map(|x| x.parse().unwrap())\n\n .collect();\n\n\n\n println!(\"{}\", solve(&numbers, 2020));\n\n println!(\"{}\", solve(&numbers, 30000000));\n\n}\n\n\n", "file_path": "day15/src/main.rs", "rank": 17, "score": 78707.57334749348 }, { "content": "fn main() {\n\n let contents = fs::read_to_string(\n\n \"/mnt/c/Users/predrag/Dropbox/Documents/Code/advent-of-code-2020/day20/input.txt\",\n\n )\n\n .unwrap();\n\n\n\n let tiles: Vec<_> = contents.trim().split(\"\\n\\n\").map(parse_tile).collect();\n\n let tile_variants = get_all_variants_for_tile_set(&tiles);\n\n let tile_size = tiles[0].image.len();\n\n\n\n println!(\"{}\", solve_part1(&tiles, &tile_variants, tile_size));\n\n println!(\"{}\", solve_part2(&tiles, &tile_variants, tile_size));\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::{horizontal_flip_image, right_rotate_image};\n\n\n\n #[test]\n\n fn test_rotation() {\n", "file_path": "day20/src/main.rs", "rank": 18, "score": 78707.57334749348 }, { "content": "fn main() {\n\n let contents = fs::read_to_string(\n\n \"/mnt/c/Users/predrag/Dropbox/Documents/Code/advent-of-code-2020/day21/input.txt\",\n\n )\n\n .unwrap();\n\n\n\n let data: Vec<_> = contents.trim().split(\"\\n\").map(parse_line).collect();\n\n let (allergen_to_food, food_to_possible_allergens) = 
assemble_initial_allergen_data(&data);\n\n\n\n println!(\"{}\", solve_part1(&data, &food_to_possible_allergens));\n\n println!(\n\n \"{}\",\n\n solve_part2(&allergen_to_food, &food_to_possible_allergens)\n\n );\n\n}\n", "file_path": "day21/src/main.rs", "rank": 20, "score": 78707.57334749348 }, { "content": "fn main() {\n\n let contents = fs::read_to_string(\n\n \"/mnt/c/Users/predrag/Dropbox/Documents/Code/advent-of-code-2020/day11/input.txt\",\n\n )\n\n .unwrap();\n\n\n\n let seats: Vec<Vec<char>> = contents\n\n .trim()\n\n .split(\"\\n\")\n\n .map(|x| x.chars().collect())\n\n .collect();\n\n\n\n println!(\"{}\", solve_part1(&seats));\n\n println!(\"{}\", solve_part2(&seats));\n\n}\n\n\n", "file_path": "day11/src/main.rs", "rank": 21, "score": 78707.57334749348 }, { "content": "fn main() {\n\n let contents = fs::read_to_string(\n\n \"/mnt/c/Users/predrag/Dropbox/Documents/Code/advent-of-code-2020/day4/input.txt\",\n\n )\n\n .unwrap();\n\n\n\n let passports: Vec<Vec<_>> = contents\n\n .trim()\n\n .split(\"\\n\\n\")\n\n .map(|x| x.split_ascii_whitespace().map(|y| y.trim()).collect())\n\n .collect();\n\n\n\n println!(\"{}\", solve(&passports));\n\n}\n\n\n", "file_path": "day4/src/main.rs", "rank": 22, "score": 78707.57334749348 }, { "content": "fn main() {\n\n let contents = fs::read_to_string(\n\n \"/mnt/c/Users/predrag/Dropbox/Documents/Code/advent-of-code-2020/day3/input.txt\",\n\n )\n\n .unwrap();\n\n\n\n let map: Vec<Vec<char>> = contents\n\n .trim()\n\n .split(\"\\n\")\n\n .map(|x| x.chars().collect())\n\n .collect();\n\n\n\n println!(\"{}\", solve(&map, 1, 3));\n\n\n\n let slope_1_1 = solve(&map, 1, 1);\n\n let slope_1_3 = solve(&map, 1, 3);\n\n let slope_1_5 = solve(&map, 1, 5);\n\n let slope_1_7 = solve(&map, 1, 7);\n\n let slope_2_1 = solve(&map, 2, 1);\n\n println!(\n\n \"{} * {} * {} * {} * {} = {}\",\n\n slope_1_1,\n\n slope_1_3,\n\n slope_1_5,\n\n slope_1_7,\n\n slope_2_1,\n\n slope_1_1 * slope_1_3 * slope_1_5 * slope_1_7 * slope_2_1,\n\n )\n\n}\n\n\n", "file_path": "day3/src/main.rs", "rank": 23, "score": 78707.57334749348 }, { "content": "fn main() {\n\n let contents = fs::read_to_string(\n\n \"/mnt/c/Users/predrag/Dropbox/Documents/Code/advent-of-code-2020/day9/input.txt\",\n\n )\n\n .unwrap();\n\n\n\n let numbers: Vec<i64> = contents\n\n .trim()\n\n .split(\"\\n\")\n\n .map(|x| x.parse().unwrap())\n\n .collect();\n\n\n\n let preamble_length: usize = 25;\n\n let solution_part1 = solve_part1(&numbers, preamble_length);\n\n println!(\"{}\", solution_part1);\n\n println!(\"{}\", solve_part2(&numbers, solution_part1));\n\n}\n\n\n", "file_path": "day9/src/main.rs", "rank": 24, "score": 78707.57334749348 }, { "content": "fn solve_part1(contents: &String) {\n\n let mut valid_passwords = 0;\n\n for line in contents.trim().split(\"\\n\") {\n\n let components: Vec<&str> = line.split(\":\").map(|x| x.trim()).collect();\n\n assert!(components.len() == 2);\n\n let policy = components[0];\n\n let password = components[1];\n\n\n\n let elements: Vec<&str> = policy.split(\" \").collect();\n\n assert!(elements.len() == 2);\n\n let range_spec = elements[0];\n\n let character = elements[1].chars().next().unwrap();\n\n\n\n let range: Vec<i32> = range_spec.split(\"-\").map(|x| x.parse().unwrap()).collect();\n\n assert!(range.len() == 2);\n\n assert!(range[0] <= range[1]);\n\n\n\n let occurrences: i32 = password.chars().map(|c| (c == character) as i32).sum();\n\n if occurrences >= range[0] && occurrences <= range[1] {\n\n valid_passwords += 1;\n\n }\n\n }\n\n\n\n println!(\"{}\", 
valid_passwords);\n\n}\n\n\n", "file_path": "day2/src/main.rs", "rank": 25, "score": 71136.35891760633 }, { "content": "fn solve_part2(contents: &String) {\n\n let mut valid_passwords = 0;\n\n for line in contents.trim().split(\"\\n\") {\n\n let components: Vec<&str> = line.split(\":\").map(|x| x.trim()).collect();\n\n assert!(components.len() == 2);\n\n let policy = components[0];\n\n let password = components[1];\n\n\n\n let elements: Vec<&str> = policy.split(\" \").collect();\n\n assert!(elements.len() == 2);\n\n let positions_spec = elements[0];\n\n let character_spec = elements[1].chars().next().unwrap();\n\n\n\n let positions: Vec<i32> = positions_spec\n\n .split(\"-\")\n\n .map(|x| x.parse().unwrap())\n\n .collect();\n\n assert!(positions.len() == 2);\n\n\n\n let mut has_invalid_chars = false;\n", "file_path": "day2/src/main.rs", "rank": 26, "score": 71136.35891760633 }, { "content": "fn solve_part2(groups: &Vec<Vec<&str>>) -> usize {\n\n let mut total_positive_answers = 0;\n\n\n\n for group in groups.iter() {\n\n let mut initialized = false;\n\n let mut answers: HashSet<char> = HashSet::new();\n\n\n\n for person in group.iter().cloned() {\n\n if !initialized {\n\n answers = person.chars().collect();\n\n initialized = true;\n\n } else {\n\n answers = answers\n\n .intersection(&person.chars().collect())\n\n .cloned()\n\n .collect();\n\n }\n\n }\n\n\n\n total_positive_answers += answers.len();\n\n }\n\n\n\n total_positive_answers\n\n}\n", "file_path": "day6/src/main.rs", "rank": 27, "score": 63828.42830935285 }, { "content": "fn solve_part1(groups: &Vec<Vec<&str>>) -> usize {\n\n let mut total_positive_answers = 0;\n\n\n\n for group in groups.iter() {\n\n let mut answers: HashSet<char> = HashSet::new();\n\n\n\n for person in group.iter().cloned() {\n\n answers.extend(person.chars());\n\n }\n\n\n\n total_positive_answers += answers.len();\n\n }\n\n\n\n total_positive_answers\n\n}\n\n\n", "file_path": "day6/src/main.rs", "rank": 28, "score": 63828.42830935285 }, { "content": "fn is_valid_sum(current_number: i64, window: &HashSet<i64>) -> bool {\n\n for number in window.iter() {\n\n let remainder = current_number - number;\n\n if remainder != *number && window.contains(&remainder) {\n\n return true;\n\n }\n\n }\n\n\n\n false\n\n}\n\n\n", "file_path": "day9/src/main.rs", "rank": 29, "score": 56592.176921436636 }, { "content": "fn calculate_initial_flipped_tiles(all_directions: &[Vec<HexDirection>]) -> HashSet<(i64, i64)> {\n\n let mut flipped_tiles: HashSet<(i64, i64)> = HashSet::new();\n\n\n\n for directions in all_directions {\n\n let flipped_tile = get_coordinates_from_directions(directions);\n\n\n\n if !flipped_tiles.insert(flipped_tile) {\n\n let removed = flipped_tiles.remove(&flipped_tile);\n\n assert!(removed);\n\n }\n\n }\n\n\n\n flipped_tiles\n\n}\n\n\n", "file_path": "day24/src/main.rs", "rank": 30, "score": 55350.572323141474 }, { "content": "fn solve_part2(\n\n allergen_to_food: &HashMap<&str, HashSet<&str>>,\n\n food_to_allergens: &HashMap<&str, HashSet<&str>>,\n\n) -> String {\n\n let mut allergen_in_possible_foods = allergen_to_food.clone();\n\n let mut food_to_possible_allergens: HashMap<&str, HashSet<&str>> = food_to_allergens\n\n .iter()\n\n .filter_map(|(&food, allergens)| {\n\n if !allergens.is_empty() {\n\n Some((food, allergens.clone()))\n\n } else {\n\n None\n\n }\n\n })\n\n .collect();\n\n\n\n let allergen_to_food: BTreeMap<&str, &str> = loop {\n\n let mut converged = true;\n\n for (&_allergen, possibilities) in &allergen_in_possible_foods {\n\n 
assert!(possibilities.len() > 0);\n", "file_path": "day21/src/main.rs", "rank": 31, "score": 45764.627698391254 }, { "content": "fn solve_part2(\n\n tiles: &Vec<Tile>,\n\n tile_variants: &HashMap<(i64, usize), TileVariant>,\n\n tile_size: usize,\n\n) -> usize {\n\n let edge_index = make_edge_index(tile_variants);\n\n let mut tile_map = make_empty_tile_map(tiles);\n\n let corner_tiles = find_corner_tiles(tiles, tile_variants, &edge_index);\n\n\n\n fill_tile_map(tile_variants, &edge_index, &mut tile_map, &corner_tiles);\n\n\n\n let full_image = make_combined_image_from_tile_map(tile_variants, &tile_map, tile_size);\n\n\n\n let (monster_x_dim, monster_y_dim, monster_indexes) = get_monster_image_indexes();\n\n\n\n let mut rotated_full_image = full_image.clone();\n\n let mut flipped_full_image = horizontal_flip_image(&full_image);\n\n let mut rotations: usize = 0;\n\n loop {\n\n if let Some(solution) = find_sea_monster_data(\n", "file_path": "day20/src/main.rs", "rank": 33, "score": 45764.627698391254 }, { "content": "fn solve_part1(\n\n data: &Vec<(Vec<&str>, Vec<&str>)>,\n\n food_to_possible_allergens: &HashMap<&str, HashSet<&str>>,\n\n) -> usize {\n\n let no_allergen_foods: HashSet<_> = food_to_possible_allergens\n\n .iter()\n\n .filter_map(|(&food, possible_allergens)| {\n\n if possible_allergens.is_empty() {\n\n Some(food)\n\n } else {\n\n None\n\n }\n\n })\n\n .collect();\n\n\n\n let mut result = 0usize;\n\n for (foods, _) in data {\n\n result += foods\n\n .iter()\n\n .filter_map(|&x| -> Option<usize> { no_allergen_foods.get(x).map(|_| 1usize) })\n\n .sum::<usize>();\n\n }\n\n result\n\n}\n\n\n", "file_path": "day21/src/main.rs", "rank": 34, "score": 45764.627698391254 }, { "content": "fn match_rule(\n\n rules: &HashMap<usize, Rule>,\n\n rule_to_evaluate: &Rule,\n\n rule_stack: &mut Vec<usize>,\n\n pattern: &str,\n\n) -> bool {\n\n match rule_to_evaluate {\n\n Rule::Literal(_, target) => pattern.strip_prefix(target).map_or(false, |remainder| {\n\n match_pattern_with_cyclic_rules(rules, rule_stack, remainder)\n\n }),\n\n Rule::Compound(_, possibilities) => {\n\n let mut possibility_sets = possibilities.iter();\n\n loop {\n\n let original_stack_len = rule_stack.len();\n\n match possibility_sets.next() {\n\n None => break false,\n\n Some(possibility_set) => {\n\n rule_stack.extend(possibility_set.iter().rev());\n\n\n\n let current_stack_len = rule_stack.len();\n", "file_path": "day19/src/main.rs", "rank": 35, "score": 45764.627698391254 }, { "content": "fn reconstruct_board(\n\n clockwise_neighbor: &HashMap<i64, i64>,\n\n current_cup: i64,\n\n num_cups: usize,\n\n) -> (VecDeque<i64>, usize) {\n\n let mut current_cup_index: usize = 0;\n\n let mut final_cups = VecDeque::new();\n\n let mut starting_cup: i64 = 1;\n\n for index in 0..num_cups {\n\n if starting_cup == current_cup {\n\n current_cup_index = index;\n\n }\n\n final_cups.push_back(starting_cup);\n\n starting_cup = clockwise_neighbor[&starting_cup];\n\n }\n\n\n\n (final_cups, current_cup_index)\n\n}\n\n\n", "file_path": "day23/src/main.rs", "rank": 36, "score": 45764.627698391254 }, { "content": "fn solve_part1(\n\n tiles: &Vec<Tile>,\n\n tile_variants: &HashMap<(i64, usize), TileVariant>,\n\n _tile_size: usize,\n\n) -> i64 {\n\n let edge_index = make_edge_index(tile_variants);\n\n\n\n find_corner_tiles(tiles, tile_variants, &edge_index)\n\n .iter()\n\n .fold(1, |acc, elem| acc * elem)\n\n}\n\n\n", "file_path": "day20/src/main.rs", "rank": 37, "score": 45764.627698391254 }, { "content": "fn find_corner_tiles(\n\n tiles: &Vec<Tile>,\n\n 
tile_variants: &HashMap<(i64, usize), TileVariant>,\n\n edge_index: &HashMap<Vec<char>, Vec<((i64, usize), Edge)>>,\n\n) -> Vec<i64> {\n\n let mut result = vec![];\n\n\n\n for tile in tiles {\n\n let mut best_matched_variant_unmatched_directions: Vec<Edge> = Edge::variants().collect();\n\n\n\n for variant_id in 0..8usize {\n\n let key = (tile.id, variant_id);\n\n let variant = &tile_variants[&key];\n\n\n\n let mut unmatched_edge_directions: Vec<Edge> = Vec::new();\n\n for edge_dir in Edge::variants() {\n\n let possible_matches = get_other_tile_matches_for_variant_edge(\n\n edge_index,\n\n tile.id,\n\n &variant.get_edge(&edge_dir),\n", "file_path": "day20/src/main.rs", "rank": 38, "score": 44728.956985065626 }, { "content": "fn handle_operator_precedence(\n\n pre_operator_expr: Expression,\n\n op: Operator,\n\n post_operator_tokens: &[Token],\n\n) -> (Expression, &[Token]) {\n\n let (mut post_operator_expr, mut remaining_tokens) =\n\n parse_from_expression_start_tokens_part2(post_operator_tokens);\n\n let (next_expr, next_tokens) = match op {\n\n Operator::Add => (post_operator_expr, remaining_tokens),\n\n Operator::Multiply => {\n\n // coalesce right first, if possible\n\n match remaining_tokens.first() {\n\n None => (post_operator_expr, remaining_tokens),\n\n Some(token) => {\n\n let mut op_token = token;\n\n loop {\n\n let next_operator = match op_token {\n\n Token::Operator(oper) => oper.clone(),\n\n _ => unreachable!(),\n\n };\n", "file_path": "day18/src/main.rs", "rank": 39, "score": 44728.956985065626 }, { "content": "fn make_edge_index(\n\n tile_variants: &HashMap<(i64, usize), TileVariant>,\n\n) -> HashMap<Vec<char>, Vec<((i64, usize), Edge)>> {\n\n let mut result = HashMap::new();\n\n\n\n for (key, variant) in tile_variants {\n\n for edge_kind in Edge::variants() {\n\n let edge = variant.get_edge(&edge_kind);\n\n\n\n result\n\n .entry(edge)\n\n .or_insert(Vec::new())\n\n .push((*key, edge_kind));\n\n }\n\n }\n\n\n\n result\n\n}\n\n\n", "file_path": "day20/src/main.rs", "rank": 40, "score": 44728.956985065626 }, { "content": "fn fill_tile_map(\n\n tile_variants: &HashMap<(i64, usize), TileVariant>,\n\n edge_index: &HashMap<Vec<char>, Vec<((i64, usize), Edge)>>,\n\n mut tile_map: &mut Vec<Vec<(i64, usize)>>,\n\n corner_tiles: &Vec<i64>,\n\n) {\n\n // fill in the top left corner\n\n add_top_left_corner_to_tile_map(\n\n tile_variants,\n\n &edge_index,\n\n &mut tile_map,\n\n corner_tiles.first().unwrap().clone(),\n\n );\n\n\n\n // fill in the top row of the tile map\n\n for y in 1..tile_map[0].len() {\n\n let neighbor_tile_variant_key = &tile_map[0][y - 1];\n\n let neighbor_edge_direction = Edge::Right;\n\n let neighbor_edge = &tile_variants[neighbor_tile_variant_key].get_right_edge();\n\n tile_map[0][y] = find_tile_variant_with_neighbor(\n", "file_path": "day20/src/main.rs", "rank": 41, "score": 44728.956985065626 }, { "content": "fn match_pattern_with_cyclic_rules(\n\n rules: &HashMap<usize, Rule>,\n\n rule_stack: &mut Vec<usize>, // evaluate tail first\n\n pattern: &str,\n\n) -> bool {\n\n match rule_stack.pop() {\n\n None => pattern.is_empty(),\n\n Some(rule_to_evaluate) => {\n\n let current_rule = &rules[&rule_to_evaluate];\n\n\n\n let result = match_rule(rules, current_rule, rule_stack, pattern);\n\n\n\n rule_stack.push(rule_to_evaluate); // replace rule on stack before returning\n\n\n\n result\n\n }\n\n }\n\n}\n\n\n", "file_path": "day19/src/main.rs", "rank": 42, "score": 43760.93047779171 }, { "content": "fn find_sea_monster_data(\n\n full_image: &Vec<Vec<char>>,\n\n 
monster_x_dim: usize,\n\n monster_y_dim: usize,\n\n monster_indexes: &Vec<(usize, usize)>,\n\n) -> Option<usize> {\n\n let mut monsters_found = 0usize;\n\n for root_x in 0..(full_image.len() - monster_x_dim) {\n\n for root_y in 0..(full_image[0].len() - monster_y_dim) {\n\n let mut monster_found = true;\n\n for (monster_x, monster_y) in monster_indexes.iter().cloned() {\n\n let x = root_x + monster_x;\n\n let y = root_y + monster_y;\n\n\n\n if full_image[x][y] != '#' {\n\n monster_found = false;\n\n break;\n\n }\n\n }\n\n\n", "file_path": "day20/src/main.rs", "rank": 43, "score": 43760.93047779171 }, { "content": "fn match_rule_with_pattern_prefix(\n\n rule_to_match: usize,\n\n rules: &HashMap<usize, Rule>,\n\n pattern: &str,\n\n) -> Option<String> {\n\n // Some(\"\") = full match, None = not a match at all\n\n let current_rule = &rules[&rule_to_match];\n\n\n\n match current_rule {\n\n Rule::Literal(_, target) => pattern.strip_prefix(target).map(|x| x.to_string()),\n\n Rule::Compound(_, possibilities) => {\n\n let mut outcome: Option<String> = None;\n\n for possibility_set in possibilities {\n\n let mut rest_of_pattern = pattern.to_string();\n\n let mut possibility_set_success = true;\n\n for next_rule in possibility_set {\n\n if let Some(remainder) =\n\n match_rule_with_pattern_prefix(*next_rule, rules, &rest_of_pattern)\n\n {\n\n // continue matching\n", "file_path": "day19/src/main.rs", "rank": 44, "score": 43760.93047779171 }, { "content": "fn find_tile_variant_with_neighbor(\n\n edge_index: &HashMap<Vec<char>, Vec<((i64, usize), Edge)>>,\n\n neighbor_tile_variant_key: &(i64, usize),\n\n neighbor_edge_direction: &Edge,\n\n neighbor_edge: &Vec<char>,\n\n) -> (i64, usize) {\n\n let result_tile_edge_direction = neighbor_edge_direction.opposite();\n\n\n\n let possible_results: Vec<_> = get_other_tile_matches_for_variant_edge(\n\n edge_index,\n\n neighbor_tile_variant_key.0,\n\n neighbor_edge,\n\n neighbor_edge_direction,\n\n )\n\n .iter()\n\n .filter_map(|&(key, direction)| {\n\n if direction == result_tile_edge_direction {\n\n Some(key)\n\n } else {\n\n None\n\n }\n\n })\n\n .collect();\n\n\n\n assert!(possible_results.len() == 1);\n\n possible_results[0]\n\n}\n\n\n", "file_path": "day20/src/main.rs", "rank": 45, "score": 43760.93047779171 }, { "content": "fn transpose_map_of_sets<'a>(\n\n map_of_sets: &HashMap<&'a str, HashSet<&'a str>>,\n\n) -> HashMap<&'a str, HashSet<&'a str>> {\n\n let mut result = HashMap::new();\n\n for (&key, values) in map_of_sets.iter() {\n\n for &value in values {\n\n result.entry(value).or_insert(HashSet::new()).insert(key);\n\n }\n\n }\n\n result\n\n}\n\n\n", "file_path": "day21/src/main.rs", "rank": 46, "score": 43687.05115736165 }, { "content": "fn make_combined_image_from_tile_map(\n\n tile_variants: &HashMap<(i64, usize), TileVariant>,\n\n tile_map: &Vec<Vec<(i64, usize)>>,\n\n tile_size: usize,\n\n) -> Vec<Vec<char>> {\n\n let borderless_tile_size = tile_size - 2;\n\n\n\n let full_image_dimension = borderless_tile_size * tile_map.len();\n\n let mut full_image: Vec<Vec<char>> = Vec::new();\n\n let mut full_image_row: Vec<char> = Vec::new();\n\n full_image_row.resize(full_image_dimension, '?');\n\n full_image.resize(full_image_dimension, full_image_row);\n\n\n\n for tile_x in 0..tile_map.len() {\n\n let full_image_offset_x = tile_x * borderless_tile_size;\n\n for tile_y in 0..tile_map[0].len() {\n\n let full_image_offset_y = tile_y * borderless_tile_size;\n\n\n\n let tile_variant = &tile_variants[&tile_map[tile_x][tile_y]];\n\n for img_x in 1..(tile_size - 
1) {\n", "file_path": "day20/src/main.rs", "rank": 47, "score": 42854.13054775179 }, { "content": "fn get_other_tile_matches_for_variant_edge(\n\n edge_index: &HashMap<Vec<char>, Vec<((i64, usize), Edge)>>,\n\n tile_id: i64,\n\n edge: &Vec<char>,\n\n edge_direction: &Edge,\n\n) -> Vec<((i64, usize), Edge)> {\n\n let opposite_edge = edge_direction.opposite();\n\n\n\n edge_index[edge]\n\n .iter()\n\n .filter(|((id, _), edge_dir)| *id != tile_id && *edge_dir == opposite_edge)\n\n .cloned()\n\n .collect()\n\n}\n\n\n", "file_path": "day20/src/main.rs", "rank": 48, "score": 42854.13054775179 }, { "content": "fn assemble_initial_allergen_data<'a>(\n\n data: &Vec<(Vec<&'a str>, Vec<&'a str>)>,\n\n) -> (\n\n HashMap<&'a str, HashSet<&'a str>>,\n\n HashMap<&'a str, HashSet<&'a str>>,\n\n) {\n\n let mut allergen_to_food: HashMap<&str, HashSet<&str>> = HashMap::new();\n\n for (foods, allergens) in data {\n\n let mentioned_foods: HashSet<&str> = foods.iter().cloned().collect();\n\n for &allergen in allergens {\n\n let current_suspect_foods = allergen_to_food.get(allergen).unwrap_or(&mentioned_foods);\n\n let remaining_suspect_foods: HashSet<_> = current_suspect_foods\n\n .intersection(&mentioned_foods)\n\n .cloned()\n\n .collect();\n\n\n\n allergen_to_food.insert(allergen, remaining_suspect_foods);\n\n }\n\n }\n\n\n", "file_path": "day21/src/main.rs", "rank": 49, "score": 42719.02465008774 }, { "content": "fn add_top_left_corner_to_tile_map(\n\n tile_variants: &HashMap<(i64, usize), TileVariant>,\n\n edge_index: &HashMap<Vec<char>, Vec<((i64, usize), Edge)>>,\n\n tile_map: &mut Vec<Vec<(i64, usize)>>,\n\n corner_id: i64,\n\n) {\n\n for variant_id in 0..8 {\n\n let tile_variant_key = (corner_id, variant_id);\n\n let variant = &tile_variants[&tile_variant_key];\n\n\n\n let top_edge_matches = get_other_tile_matches_for_variant_edge(\n\n edge_index,\n\n corner_id,\n\n &variant.get_top_edge(),\n\n &Edge::Top,\n\n );\n\n let left_edge_matches = get_other_tile_matches_for_variant_edge(\n\n edge_index,\n\n corner_id,\n\n &variant.get_left_edge(),\n", "file_path": "day20/src/main.rs", "rank": 50, "score": 42002.926495405525 }, { "content": "fn evaluate_expression(expr: &Expression) -> i64 {\n\n match expr {\n\n Expression::Numeric(value) => *value,\n\n Expression::Paren(inner) => evaluate_expression(inner),\n\n Expression::Compound(left, operator, right) => {\n\n let left_value = evaluate_expression(left);\n\n let right_value = evaluate_expression(right);\n\n match operator {\n\n Operator::Add => left_value + right_value,\n\n Operator::Multiply => left_value * right_value,\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "day18/src/main.rs", "rank": 51, "score": 40107.74542032621 }, { "content": "fn int_sqrt(value: usize) -> usize {\n\n (0..=value).filter(|v| v * v == value).next().unwrap()\n\n}\n\n\n", "file_path": "day20/src/main.rs", "rank": 52, "score": 40107.74542032621 }, { "content": "fn solve_part2(line: &str) -> i64 {\n\n let depart_intervals: Vec<i64> = line.split(\",\").map(|x| x.parse().unwrap_or(-1)).collect();\n\n\n\n let mut sub: i64 = 0;\n\n let mut current_increment = depart_intervals[0];\n\n assert!(current_increment != -1);\n\n\n\n for (offset, next_id) in depart_intervals[1..depart_intervals.len()]\n\n .iter()\n\n .cloned()\n\n .enumerate()\n\n {\n\n if next_id == -1 {\n\n continue;\n\n }\n\n let mut mult: i64 = 1;\n\n let minutes_later = ((offset + 1) as i64 + next_id - (sub % next_id)) % next_id;\n\n\n\n loop {\n\n let remainder = (mult * current_increment) % next_id;\n", "file_path": 
"day13/src/main.rs", "rank": 53, "score": 40107.74542032621 }, { "content": "fn lcm(a: i64, b: i64) -> i64 {\n\n let common = gcd(a, b);\n\n return a / common * b;\n\n}\n", "file_path": "day13/src/main.rs", "rank": 54, "score": 39678.99897754618 }, { "content": "fn gcd(a: i64, b: i64) -> i64 {\n\n if a < b {\n\n return gcd(b, a);\n\n } else {\n\n let rem = a % b;\n\n if rem == 0 {\n\n b\n\n } else {\n\n gcd(b, rem)\n\n }\n\n }\n\n}\n\n\n", "file_path": "day13/src/main.rs", "rank": 55, "score": 39678.99897754618 }, { "content": "fn parse_tokens_part1(tokens: &[Token]) -> Expression {\n\n let (mut result, mut remainder_tokens) = parse_from_expression_start_tokens_part1(tokens);\n\n\n\n while !remainder_tokens.is_empty() {\n\n let operator = match remainder_tokens.first().unwrap() {\n\n Token::Operator(op) => op.clone(),\n\n _ => unreachable!(),\n\n };\n\n remainder_tokens = &remainder_tokens[1..remainder_tokens.len()];\n\n\n\n let (next_expression, post_expression_tokens) =\n\n parse_from_expression_start_tokens_part1(remainder_tokens);\n\n remainder_tokens = post_expression_tokens;\n\n result = Expression::Compound(Box::from(result), operator, Box::from(next_expression));\n\n }\n\n\n\n result\n\n}\n\n\n", "file_path": "day18/src/main.rs", "rank": 56, "score": 39256.541367979946 }, { "content": "fn parse_expression_part1(expr: &str) -> Expression {\n\n parse_tokens_part1(tokenize_expression(expr).as_slice())\n\n}\n\n\n", "file_path": "day18/src/main.rs", "rank": 57, "score": 39256.541367979946 }, { "content": "fn parse_tile(tile_data: &str) -> Tile {\n\n let mut data = tile_data.split(\"\\n\");\n\n\n\n let header = data.next().unwrap().trim();\n\n assert!(header.starts_with(\"Tile \"));\n\n assert!(header.ends_with(\":\"));\n\n\n\n let id: i64 = header[\"Tile \".len()..header.len() - 1].parse().unwrap();\n\n let image: Vec<Vec<char>> = data.map(|x| x.chars().collect()).collect();\n\n\n\n assert!(image.len() == image[0].len()); // tiles had better be square :)\n\n\n\n Tile { id, image }\n\n}\n\n\n", "file_path": "day20/src/main.rs", "rank": 58, "score": 39256.541367979946 }, { "content": "fn parse_tokens_part2(tokens: &[Token]) -> Expression {\n\n let (mut result, mut remainder_tokens) = parse_from_expression_start_tokens_part2(tokens);\n\n\n\n while !remainder_tokens.is_empty() {\n\n let operator = match remainder_tokens.first().unwrap() {\n\n Token::Operator(op) => op.clone(),\n\n _ => unreachable!(),\n\n };\n\n remainder_tokens = &remainder_tokens[1..remainder_tokens.len()];\n\n\n\n let (next_result, next_remainder) =\n\n handle_operator_precedence(result, operator, remainder_tokens);\n\n result = next_result;\n\n remainder_tokens = next_remainder;\n\n }\n\n\n\n result\n\n}\n\n\n", "file_path": "day18/src/main.rs", "rank": 59, "score": 39256.541367979946 }, { "content": "fn parse_expression_part2(expr: &str) -> Expression {\n\n parse_tokens_part2(tokenize_expression(expr).as_slice())\n\n}\n\n\n", "file_path": "day18/src/main.rs", "rank": 60, "score": 39256.541367979946 }, { "content": "fn solve_part1(lines: &Vec<&str>) -> i64 {\n\n lines\n\n .iter()\n\n .cloned()\n\n .map(parse_expression_part1)\n\n .map(|x| evaluate_expression(&x))\n\n .sum()\n\n}\n\n\n", "file_path": "day18/src/main.rs", "rank": 61, "score": 38550.56627166008 }, { "content": "fn solve_part2(adapters: &Vec<i64>) -> i64 {\n\n let mut all_adapters: Vec<i64> = adapters.clone();\n\n all_adapters.sort();\n\n\n\n let target_max_joltage = *all_adapters.last().unwrap();\n\n\n\n let mut dp: Vec<i64> = Vec::new();\n\n 
dp.resize((target_max_joltage + 1) as usize, 0);\n\n\n\n dp[0] = 1;\n\n\n\n for current_adapter in all_adapters.iter().cloned() {\n\n let current_joltage = current_adapter as usize;\n\n\n\n for acceptable_joltage in max(0, current_adapter - 3) as usize..current_joltage {\n\n dp[current_joltage] += dp[acceptable_joltage];\n\n }\n\n }\n\n\n\n dp[target_max_joltage as usize]\n\n}\n", "file_path": "day10/src/main.rs", "rank": 62, "score": 38550.56627166008 }, { "content": "fn solve_part1(lines: &Vec<&str>) -> i64 {\n\n let (origin_x, origin_y) = (0 as i64, 0 as i64);\n\n let mut cur_x = origin_x;\n\n let mut cur_y = origin_y;\n\n let mut facing: usize = 0;\n\n\n\n // east, south, west, north\n\n let dx: [i64; 4] = [0, 1, 0, -1];\n\n let dy: [i64; 4] = [1, 0, -1, 0];\n\n\n\n for line in lines.iter() {\n\n match line.split_at(1) {\n\n (\"F\", val) => {\n\n let dist: i64 = val.parse().unwrap();\n\n cur_x += dx[facing] * dist;\n\n cur_y += dy[facing] * dist;\n\n }\n\n (\"S\", val) => {\n\n let dist: i64 = val.parse().unwrap();\n\n cur_x += dist;\n", "file_path": "day12/src/main.rs", "rank": 63, "score": 38550.56627166008 }, { "content": "fn solve_part1(lines: &Vec<&str>) -> u64 {\n\n let mut current_mask = u64::MAX;\n\n let mut current_imprint: u64 = 0;\n\n let mut memory: HashMap<usize, u64> = HashMap::new();\n\n for &line in lines.iter() {\n\n let parts: Vec<&str> = line.split(\" = \").collect();\n\n if parts[0] == \"mask\" {\n\n current_mask = u64::MAX;\n\n current_imprint = 0;\n\n for (index, element) in parts[1].chars().rev().enumerate() {\n\n match element {\n\n 'X' => {}\n\n '0' => {\n\n current_mask &= !((1 as u64) << index);\n\n }\n\n '1' => {\n\n current_mask &= !((1 as u64) << index);\n\n current_imprint |= (1 as u64) << index;\n\n }\n\n _ => unreachable!(),\n", "file_path": "day14/src/main.rs", "rank": 64, "score": 38550.56627166008 }, { "content": "fn solve_part2(lines: &Vec<&str>) -> i64 {\n\n let (origin_x, origin_y) = (0 as i64, 0 as i64);\n\n let mut wayp_x: i64 = -1;\n\n let mut wayp_y: i64 = 10;\n\n let mut cur_x = origin_x;\n\n let mut cur_y = origin_y;\n\n\n\n #[rustfmt::skip]\n\n let rot_matrices: [[[i64; 2]; 2]; 4] = [\n\n [\n\n [1, 0],\n\n [0, 1],\n\n ], [\n\n [0, 1],\n\n [-1, 0],\n\n ], [\n\n [-1, 0],\n\n [0, -1],\n\n ], [\n\n [0, -1],\n", "file_path": "day12/src/main.rs", "rank": 65, "score": 38550.56627166008 }, { "content": "fn solve_part1(lines: &Vec<&str>) -> i64 {\n\n let start_time: i64 = lines[0].parse().unwrap();\n\n let depart_intervals: Vec<i64> = lines[1]\n\n .split(\",\")\n\n .filter(|x| *x != \"x\")\n\n .map(|x| x.parse().unwrap())\n\n .collect();\n\n\n\n let mut current_time = start_time;\n\n loop {\n\n for depart_interval in depart_intervals.iter() {\n\n if current_time % depart_interval == 0 {\n\n return (current_time - start_time) * depart_interval;\n\n }\n\n }\n\n current_time += 1;\n\n }\n\n}\n\n\n", "file_path": "day13/src/main.rs", "rank": 66, "score": 38550.56627166008 }, { "content": "fn solve_part2(lines: &Vec<&str>) -> i32 {\n\n let mut taken_seats: Vec<_> = lines.iter().cloned().map(get_seat_id).collect();\n\n taken_seats.sort();\n\n let min_seat = taken_seats.first().unwrap().clone();\n\n let max_seat = taken_seats.last().unwrap().clone();\n\n\n\n let mut free_seats: HashSet<i32> = (min_seat..=max_seat).collect();\n\n for taken_seat in taken_seats {\n\n free_seats.remove(&taken_seat);\n\n }\n\n\n\n assert!(free_seats.len() == 1);\n\n free_seats.iter().next().unwrap().clone()\n\n}\n\n\n", "file_path": "day5/src/main.rs", "rank": 67, "score": 
38550.56627166008 }, { "content": "fn solve_part2(lines: &Vec<&str>) -> u64 {\n\n let mut current_mask = usize::MAX;\n\n let mut current_imprint: usize = 0;\n\n let mut memory: HashMap<usize, u64> = HashMap::new();\n\n let mut floating_bit_positions: Vec<usize> = Vec::new();\n\n\n\n for &line in lines.iter() {\n\n let parts: Vec<&str> = line.split(\" = \").collect();\n\n if parts[0] == \"mask\" {\n\n current_mask = usize::MAX;\n\n current_imprint = 0;\n\n floating_bit_positions = Vec::new();\n\n for (index, element) in parts[1].chars().rev().enumerate() {\n\n match element {\n\n 'X' => {\n\n floating_bit_positions.push(index);\n\n current_mask &= !((1 as usize) << index);\n\n }\n\n '0' => {}\n\n '1' => {\n", "file_path": "day14/src/main.rs", "rank": 68, "score": 38550.56627166008 }, { "content": "fn tokenize_expression(expr: &str) -> Vec<Token> {\n\n expr.split_ascii_whitespace()\n\n .flat_map(|x| -> Vec<&str> {\n\n let mut result: Vec<&str> = Vec::new();\n\n let mut rest_x = x;\n\n while rest_x.starts_with(\"(\") {\n\n let (part, rest) = rest_x.split_at(1);\n\n result.push(part);\n\n rest_x = rest;\n\n }\n\n result.push(rest_x);\n\n result\n\n })\n\n .flat_map(|x| -> Vec<&str> {\n\n let mut result: Vec<&str> = Vec::new();\n\n let mut rest_x = x;\n\n while rest_x.ends_with(\")\") {\n\n let (rest, part) = rest_x.split_at(rest_x.len() - 1);\n\n result.push(part);\n\n rest_x = rest;\n", "file_path": "day18/src/main.rs", "rank": 69, "score": 38550.56627166008 }, { "content": "fn solve_part1(lines: &Vec<&str>) -> i32 {\n\n lines.iter().cloned().map(get_seat_id).max().unwrap()\n\n}\n\n\n", "file_path": "day5/src/main.rs", "rank": 70, "score": 38550.56627166008 }, { "content": "fn solve_part2(lines: &Vec<&str>) -> i64 {\n\n lines\n\n .iter()\n\n .cloned()\n\n .map(parse_expression_part2)\n\n .map(|x| evaluate_expression(&x))\n\n .sum()\n\n}\n", "file_path": "day18/src/main.rs", "rank": 71, "score": 38550.56627166008 }, { "content": "fn solve_part1(lines: &Vec<&str>) -> i32 {\n\n let mut accumulator = 0;\n\n let mut visited_instructions: HashSet<usize> = HashSet::new();\n\n let mut instruction_ptr: i32 = 0;\n\n\n\n loop {\n\n if visited_instructions.contains(&(instruction_ptr as usize)) {\n\n break;\n\n } else {\n\n visited_instructions.insert(instruction_ptr as usize);\n\n match lines[instruction_ptr as usize].split_at(3) {\n\n (\"nop\", _) => instruction_ptr += 1,\n\n (\"acc\", value) => {\n\n accumulator += value.trim().parse::<i32>().unwrap();\n\n instruction_ptr += 1;\n\n }\n\n (\"jmp\", value) => {\n\n instruction_ptr += value.trim().parse::<i32>().unwrap();\n\n }\n\n _ => unreachable!(),\n\n }\n\n }\n\n }\n\n\n\n accumulator\n\n}\n\n\n", "file_path": "day8/src/main.rs", "rank": 72, "score": 38550.56627166008 }, { "content": "fn solve_part2(lines: &Vec<&str>) -> i32 {\n\n for i in 0..lines.len() {\n\n match simulate_part2(lines, i) {\n\n Some(result) => return result,\n\n None => continue,\n\n }\n\n }\n\n unreachable!();\n\n}\n\n\n", "file_path": "day8/src/main.rs", "rank": 73, "score": 38550.56627166008 }, { "content": "fn get_seat_id(boarding_pass: &str) -> i32 {\n\n let mut seat_id = 0;\n\n\n\n for current_char in boarding_pass.chars() {\n\n seat_id <<= 1;\n\n seat_id += match current_char {\n\n 'F' | 'L' => 0,\n\n 'B' | 'R' => 1,\n\n _ => unreachable!(),\n\n }\n\n }\n\n\n\n seat_id\n\n}\n", "file_path": "day5/src/main.rs", "rank": 74, "score": 38455.972394058335 }, { "content": "fn solve(passports: &Vec<Vec<&str>>) -> i32 {\n\n let mut required_fields = HashSet::new();\n\n 
required_fields.insert(\"byr\"); // (Birth Year)\n\n required_fields.insert(\"iyr\"); // (Issue Year)\n\n required_fields.insert(\"eyr\"); // (Expiration Year)\n\n required_fields.insert(\"hgt\"); // (Height)\n\n required_fields.insert(\"hcl\"); // (Hair Color)\n\n required_fields.insert(\"ecl\"); // (Eye Color)\n\n required_fields.insert(\"pid\"); // (Passport ID)\n\n\n\n let mut optional_fields = HashSet::new();\n\n optional_fields.insert(\"cid\"); // (Country ID)\n\n\n\n let allowed_fields: HashSet<_> = required_fields.union(&optional_fields).cloned().collect();\n\n\n\n let allowed_eye_colors: HashSet<_> = vec![\"amb\", \"blu\", \"brn\", \"gry\", \"grn\", \"hzl\", \"oth\"]\n\n .iter()\n\n .cloned()\n\n .collect();\n\n\n", "file_path": "day4/src/main.rs", "rank": 75, "score": 37920.99499515999 }, { "content": "fn find_paren_expr(tokens: &[Token]) -> (&[Token], &[Token]) {\n\n let mut in_parens = 1;\n\n let tokens_minus_paren = &tokens[1..tokens.len()];\n\n for (sub_index, &token) in tokens_minus_paren.iter().enumerate() {\n\n in_parens += match token {\n\n Token::OpenParen => 1,\n\n Token::CloseParen => -1,\n\n _ => 0,\n\n };\n\n if in_parens == 0 {\n\n let contents_tokens = &tokens_minus_paren[0..sub_index];\n\n let remainder_tokens = &tokens_minus_paren[sub_index + 1..tokens_minus_paren.len()];\n\n\n\n return (contents_tokens, remainder_tokens);\n\n }\n\n }\n\n\n\n unreachable!();\n\n}\n\n\n", "file_path": "day18/src/main.rs", "rank": 76, "score": 37749.99729773846 }, { "content": "fn solve_part2(all_directions: &[Vec<HexDirection>]) -> usize {\n\n let initial_flips = calculate_initial_flipped_tiles(all_directions);\n\n\n\n let mut currently_black_tiles;\n\n let mut next_black_tiles = initial_flips;\n\n\n\n let days_to_simulate = 100;\n\n for _day in 1..=days_to_simulate {\n\n currently_black_tiles = next_black_tiles.clone();\n\n\n\n let possibly_active_tiles: HashSet<(i64, i64)> = currently_black_tiles\n\n .iter()\n\n .flat_map(|&(q, r)| {\n\n let mut result = vec![(q, r)];\n\n result.extend(\n\n HexDirection::variants()\n\n .iter()\n\n .map(HexDirection::to_coordinates)\n\n .map(|(dq, dr)| (q + dq, r + dr)),\n\n );\n", "file_path": "day24/src/main.rs", "rank": 77, "score": 37749.99729773846 }, { "content": "fn parse_directions(line: &str) -> Vec<HexDirection> {\n\n let mut result = Vec::new();\n\n let mut remaining_line = line;\n\n\n\n loop {\n\n let (next_direction, remainder) = parse_next_direction(remaining_line);\n\n remaining_line = remainder;\n\n match next_direction {\n\n None => break,\n\n Some(direction) => result.push(direction),\n\n }\n\n }\n\n\n\n result\n\n}\n\n\n", "file_path": "day24/src/main.rs", "rank": 78, "score": 37749.99729773846 }, { "content": "fn solve_part1(all_directions: &[Vec<HexDirection>]) -> usize {\n\n calculate_initial_flipped_tiles(all_directions).len()\n\n}\n\n\n", "file_path": "day24/src/main.rs", "rank": 79, "score": 37749.99729773846 }, { "content": "fn parse_rule(rule_text: &str) -> (usize, Rule) {\n\n let components: Vec<_> = rule_text.split(\":\").collect();\n\n let rule_id: usize = components[0].parse().unwrap();\n\n let rule_content = components[1].trim();\n\n\n\n if rule_content.starts_with(\"\\\"\") {\n\n let data = rule_content\n\n .strip_prefix(\"\\\"\")\n\n .unwrap()\n\n .strip_suffix(\"\\\"\")\n\n .unwrap();\n\n assert!(data.len() == 1);\n\n (rule_id, Rule::Literal(rule_id, data.to_string()))\n\n } else {\n\n let composite_of = rule_content\n\n .split(\"|\")\n\n .map(|x| x.trim().split(\" \").map(|y| y.parse().unwrap()).collect())\n\n 
.collect();\n\n (rule_id, Rule::Compound(rule_id, composite_of))\n\n }\n\n}\n\n\n", "file_path": "day19/src/main.rs", "rank": 80, "score": 37749.99729773846 }, { "content": "fn solve_part2(seats: &Vec<Vec<char>>) -> i64 {\n\n let mut local_seats = seats.clone();\n\n loop {\n\n let new_seats = simulate_step_part2(&local_seats);\n\n if new_seats == local_seats {\n\n break;\n\n }\n\n local_seats = new_seats;\n\n }\n\n\n\n let result = local_seats\n\n .iter()\n\n .map(|x| x.iter().map(|y| if *y == '#' { 1 } else { 0 }).sum::<i64>())\n\n .sum();\n\n\n\n result\n\n}\n\n\n", "file_path": "day11/src/main.rs", "rank": 81, "score": 37120.426021238374 }, { "content": "fn solve_part1(cubes: &Vec<Vec<char>>) -> i64 {\n\n let num_cycles: usize = 6;\n\n let start_x = cubes.len();\n\n let start_y = cubes[0].len();\n\n\n\n let max_bounds = 2 * num_cycles;\n\n let max_x = start_x + max_bounds;\n\n let max_y = start_y + max_bounds;\n\n let max_z = 1 + max_bounds;\n\n\n\n let inner_vec = vec!['.'; max_z];\n\n let mid_vec: Vec<Vec<char>> = vec![inner_vec.clone(); max_y];\n\n\n\n let mut sim_from: Vec<Vec<Vec<char>>> = vec![mid_vec.clone(); max_x];\n\n let mut sim_to: Vec<Vec<Vec<char>>> = vec![mid_vec.clone(); max_x];\n\n\n\n for (x, data) in cubes.iter().enumerate() {\n\n for (y, &c) in data.iter().enumerate() {\n\n sim_to[x + num_cycles][y + num_cycles][num_cycles] = c;\n\n }\n", "file_path": "day17/src/main.rs", "rank": 82, "score": 37120.426021238374 }, { "content": "fn solve_part1(seats: &Vec<Vec<char>>) -> i64 {\n\n let mut local_seats = seats.clone();\n\n loop {\n\n let new_seats = simulate_step_part1(&local_seats);\n\n\n\n if new_seats == local_seats {\n\n break;\n\n }\n\n local_seats = new_seats;\n\n }\n\n\n\n let result = local_seats\n\n .iter()\n\n .map(|x| x.iter().map(|y| if *y == '#' { 1 } else { 0 }).sum::<i64>())\n\n .sum();\n\n\n\n result\n\n}\n\n\n", "file_path": "day11/src/main.rs", "rank": 83, "score": 37120.426021238374 }, { "content": "fn solve_part1(adapters: &Vec<i64>) -> (i64, i64) {\n\n let mut all_adapters: Vec<i64> = adapters.clone();\n\n all_adapters.sort();\n\n all_adapters.insert(0, 0);\n\n all_adapters.push(all_adapters.last().unwrap() + 3);\n\n\n\n let mut one_diff: i64 = 0;\n\n let mut three_diff: i64 = 0;\n\n\n\n let mut current_adapter = all_adapters[0];\n\n for next_adapter in all_adapters[1..all_adapters.len()].iter().cloned() {\n\n match next_adapter - current_adapter {\n\n 3 => three_diff += 1,\n\n 2 => {}\n\n 1 => one_diff += 1,\n\n _ => unreachable!(),\n\n }\n\n current_adapter = next_adapter;\n\n }\n\n\n\n (one_diff, three_diff)\n\n}\n\n\n", "file_path": "day10/src/main.rs", "rank": 84, "score": 37120.426021238374 }, { "content": "fn solve_part2(cubes: &Vec<Vec<char>>) -> i64 {\n\n let num_cycles: usize = 6;\n\n let start_x = cubes.len();\n\n let start_y = cubes[0].len();\n\n\n\n let max_bounds = 2 * num_cycles;\n\n let max_x = start_x + max_bounds;\n\n let max_y = start_y + max_bounds;\n\n let max_z = 1 + max_bounds;\n\n let max_w = 1 + max_bounds;\n\n\n\n let inner_vec = vec!['.'; max_w];\n\n let mid_vec: Vec<Vec<char>> = vec![inner_vec.clone(); max_z];\n\n let outer_vec: Vec<Vec<Vec<char>>> = vec![mid_vec.clone(); max_y];\n\n\n\n let mut sim_from: Vec<Vec<Vec<Vec<char>>>> = vec![outer_vec.clone(); max_x];\n\n let mut sim_to: Vec<Vec<Vec<Vec<char>>>> = vec![outer_vec.clone(); max_x];\n\n\n\n for (x, data) in cubes.iter().enumerate() {\n\n for (y, &c) in data.iter().enumerate() {\n", "file_path": "day17/src/main.rs", "rank": 85, "score": 37120.426021238374 }, { 
"content": "fn get_coordinates_from_directions(directions: &[HexDirection]) -> (i64, i64) {\n\n // https://www.redblobgames.com/grids/hexagons/#coordinates-axial\n\n let mut axial_q: i64 = 0;\n\n let mut axial_r: i64 = 0;\n\n\n\n for direction in directions {\n\n let (off_q, off_r) = direction.to_coordinates();\n\n\n\n axial_q += off_q;\n\n axial_r += off_r;\n\n }\n\n\n\n (axial_q, axial_r)\n\n}\n\n\n", "file_path": "day24/src/main.rs", "rank": 86, "score": 36995.675755904376 }, { "content": "fn score_deck(winning_deck: &VecDeque<i64>) -> i64 {\n\n let mut total_score: i64 = 0;\n\n for (index, card) in winning_deck.iter().rev().enumerate() {\n\n total_score += *card * (index + 1) as i64;\n\n }\n\n\n\n total_score\n\n}\n\n\n", "file_path": "day22/src/main.rs", "rank": 87, "score": 36995.675755904376 }, { "content": "fn make_all_tile_variants(tile: &Tile) -> Vec<TileVariant> {\n\n let original_image = &tile.image;\n\n let mut result = vec![];\n\n\n\n let mut rotated_image = original_image.clone();\n\n for rotation_id in 0..=3 {\n\n result.push(TileVariant {\n\n id: tile.id,\n\n variant: rotation_id,\n\n image: rotated_image.clone(),\n\n });\n\n rotated_image = right_rotate_image(&rotated_image);\n\n }\n\n\n\n let mut flipped_image = horizontal_flip_image(&original_image);\n\n for rotation_id in 0..=3 {\n\n result.push(TileVariant {\n\n id: tile.id,\n\n variant: 4 + rotation_id,\n\n image: flipped_image.clone(),\n\n });\n\n flipped_image = right_rotate_image(&flipped_image);\n\n }\n\n\n\n result\n\n}\n\n\n", "file_path": "day20/src/main.rs", "rank": 88, "score": 36995.675755904376 }, { "content": "fn parse_player(player: &Vec<&str>) -> VecDeque<i64> {\n\n player[1..player.len()]\n\n .iter()\n\n .map(|&x| x.parse().unwrap())\n\n .collect()\n\n}\n\n\n", "file_path": "day22/src/main.rs", "rank": 89, "score": 36366.10447940429 }, { "content": "fn parse_from_expression_start_tokens_part2(tokens: &[Token]) -> (Expression, &[Token]) {\n\n match tokens.first().unwrap() {\n\n Token::OpenParen => {\n\n let (contents_tokens, remainder_tokens) = find_paren_expr(tokens);\n\n (\n\n Expression::Paren(Box::from(parse_tokens_part2(contents_tokens))),\n\n remainder_tokens,\n\n )\n\n }\n\n Token::Numeric(value) => {\n\n let remainder_tokens = &tokens[1..tokens.len()];\n\n (Expression::Numeric(*value), remainder_tokens)\n\n }\n\n _ => unreachable!(),\n\n }\n\n}\n\n\n", "file_path": "day18/src/main.rs", "rank": 90, "score": 36283.706701476236 }, { "content": "fn parse_from_expression_start_tokens_part1(tokens: &[Token]) -> (Expression, &[Token]) {\n\n match tokens.first().unwrap() {\n\n Token::OpenParen => {\n\n let (contents_tokens, remainder_tokens) = find_paren_expr(tokens);\n\n (\n\n Expression::Paren(Box::from(parse_tokens_part1(contents_tokens))),\n\n remainder_tokens,\n\n )\n\n }\n\n Token::Numeric(value) => {\n\n let remainder_tokens = &tokens[1..tokens.len()];\n\n (Expression::Numeric(*value), remainder_tokens)\n\n }\n\n _ => unreachable!(),\n\n }\n\n}\n\n\n", "file_path": "day18/src/main.rs", "rank": 91, "score": 36283.706701476236 }, { "content": "fn parse_line(line: &str) -> (Vec<&str>, Vec<&str>) {\n\n let components: Vec<_> = line\n\n .strip_suffix(\")\")\n\n .unwrap()\n\n .split(\" (contains\")\n\n .collect();\n\n\n\n assert_eq!(components.len(), 2);\n\n let foods = components[0].trim().split(\" \").map(|x| x.trim()).collect();\n\n let allergens = components[1].trim().split(\",\").map(|x| x.trim()).collect();\n\n\n\n (foods, allergens)\n\n}\n\n\n", "file_path": "day21/src/main.rs", "rank": 92, 
"score": 35801.17058080052 }, { "content": "fn solve(numbers: &Vec<i64>, number_at_position: usize) -> i64 {\n\n let mut position: HashMap<i64, usize> = HashMap::new();\n\n for i in 0..numbers.len() - 1 {\n\n position.insert(numbers[i], i + 1);\n\n }\n\n\n\n let mut last_number = numbers[numbers.len() - 2];\n\n let mut next_number = numbers.last().unwrap().clone();\n\n for i in numbers.len()..=number_at_position {\n\n if position.contains_key(&next_number) {\n\n let last_round = position[&next_number];\n\n last_number = next_number;\n\n next_number = (i - last_round) as i64;\n\n } else {\n\n last_number = next_number;\n\n next_number = 0;\n\n }\n\n position.insert(last_number, i);\n\n }\n\n\n\n last_number\n\n}\n", "file_path": "day15/src/main.rs", "rank": 93, "score": 35801.17058080052 }, { "content": "fn parse_next_direction(line: &str) -> (Option<HexDirection>, &str) {\n\n let mut char_iter = line.chars();\n\n\n\n match char_iter.next() {\n\n None => (None, line),\n\n Some('e') => (Some(HexDirection::East), &line[1..]),\n\n Some('w') => (Some(HexDirection::West), &line[1..]),\n\n Some('s') => match char_iter.next() {\n\n Some('e') => (Some(HexDirection::SouthEast), &line[2..]),\n\n Some('w') => (Some(HexDirection::SouthWest), &line[2..]),\n\n _ => unreachable!(),\n\n },\n\n Some('n') => match char_iter.next() {\n\n Some('e') => (Some(HexDirection::NorthEast), &line[2..]),\n\n Some('w') => (Some(HexDirection::NorthWest), &line[2..]),\n\n _ => unreachable!(),\n\n },\n\n _ => unreachable!(),\n\n }\n\n}\n\n\n", "file_path": "day24/src/main.rs", "rank": 94, "score": 35654.13542497615 }, { "content": "fn get_monster_image_indexes() -> (usize, usize, Vec<(usize, usize)>) {\n\n let monster = [\n\n \" # \",\n\n \"# ## ## ###\",\n\n \" # # # # # # \",\n\n ];\n\n\n\n let monster_map: Vec<Vec<char>> = monster.iter().map(|row| row.chars().collect()).collect();\n\n\n\n let monster_indexes = monster_map\n\n .iter()\n\n .enumerate()\n\n .flat_map(|(row_index, row)| {\n\n std::iter::repeat(row_index).zip(row.iter().enumerate().filter_map(|(col_index, c)| {\n\n if *c == '#' {\n\n Some(col_index)\n\n } else {\n\n None\n\n }\n\n }))\n\n })\n\n .collect();\n\n\n\n (monster_map.len(), monster_map[0].len(), monster_indexes)\n\n}\n\n\n", "file_path": "day20/src/main.rs", "rank": 95, "score": 35654.13542497615 }, { "content": "fn solve_part1(numbers: &Vec<i64>, preamble_length: usize) -> i64 {\n\n let mut valid_window: HashSet<i64> = HashSet::new();\n\n\n\n for i in 0..preamble_length {\n\n valid_window.insert(numbers[i]);\n\n }\n\n for i in preamble_length..numbers.len() {\n\n if !is_valid_sum(numbers[i], &valid_window) {\n\n return numbers[i];\n\n }\n\n\n\n valid_window.remove(&numbers[i - preamble_length]);\n\n valid_window.insert(numbers[i]);\n\n }\n\n\n\n unreachable!();\n\n}\n\n\n", "file_path": "day9/src/main.rs", "rank": 96, "score": 35089.20152637239 }, { "content": "fn solve_part2(numbers: &Vec<i64>, target_number: i64) -> i64 {\n\n let mut low_water_mark: usize = 0;\n\n let mut high_water_mark: usize = 0;\n\n let mut current_sum: i64 = 0;\n\n\n\n while high_water_mark < numbers.len() {\n\n assert!(low_water_mark <= high_water_mark);\n\n\n\n if current_sum == target_number {\n\n break;\n\n } else if current_sum < target_number {\n\n current_sum += numbers[high_water_mark];\n\n high_water_mark += 1;\n\n\n\n if current_sum == target_number {\n\n break;\n\n }\n\n } else if current_sum > target_number {\n\n current_sum -= numbers[low_water_mark];\n\n low_water_mark += 1;\n", "file_path": 
"day9/src/main.rs", "rank": 97, "score": 35089.20152637239 }, { "content": "fn calculate_powmod(base: i64, power: i64, modulus: i64) -> i64 {\n\n let mut value = 1i64;\n\n for _ in 0..power {\n\n value = (value * base) % modulus;\n\n }\n\n value\n\n}\n\n\n", "file_path": "day25/src/main.rs", "rank": 98, "score": 34579.43668576417 }, { "content": "fn solve_part1(initial_cups: &VecDeque<i64>, moves: usize) -> String {\n\n let (cups, _) = play_game_naively(initial_cups, moves);\n\n\n\n let one_cup_index = cups\n\n .iter()\n\n .enumerate()\n\n .filter_map(|(index, &cup)| if cup == 1 { Some(index) } else { None })\n\n .next()\n\n .unwrap();\n\n\n\n let cups_in_order: Vec<String> = (1..cups.len())\n\n .map(|offset| cups[(one_cup_index + offset) % cups.len()].to_string())\n\n .collect();\n\n\n\n cups_in_order.join(\"\")\n\n}\n\n\n", "file_path": "day23/src/main.rs", "rank": 99, "score": 34416.11544617507 } ]
Rust
benches/benchmarks/bench_curve.rs
algorand/pixel
7c90f8162d6b8c879f795cf1d2d29350abeb0a96
use super::ff::Field; use super::pairing::{bls12_381::*, CurveAffine, CurveProjective, Engine, SubgroupCheck}; use super::pixel::{PixelG1, PixelG2}; use super::rand_core::*; use super::rand_xorshift::XorShiftRng; use criterion::Criterion; #[allow(dead_code)] fn bench_group_multiplication(c: &mut Criterion) { const SAMPLES: usize = 100; let mut g1list: Vec<PixelG1> = vec![]; let mut g2list: Vec<PixelG2> = vec![]; let mut r1list: Vec<Fr> = vec![]; let mut rng = XorShiftRng::from_seed([ 0x59, 0x62, 0xbe, 0x5d, 0x76, 0x3d, 0x31, 0x8d, 0x17, 0xdb, 0x37, 0x32, 0x54, 0x06, 0xbc, 0xe5, ]); for _i in 0..SAMPLES { g1list.push(PixelG1::random(&mut rng)); g2list.push(PixelG2::random(&mut rng)); r1list.push(Fr::random(&mut rng)); } let r2list = r1list.clone(); let mut counter = 0; c.bench_function("Pixel G1 muliplication cost", move |b| { b.iter(|| { g1list[counter].mul_assign(r1list[counter]); counter = (counter + 1) % SAMPLES; }) }); let mut counter = 0; c.bench_function("Pixel G2 muliplication cost", move |b| { b.iter(|| { g2list[counter].mul_assign(r2list[counter]); counter = (counter + 1) % SAMPLES; }) }); } #[allow(dead_code)] fn bench_membership_testing(c: &mut Criterion) { const SAMPLES: usize = 100; let mut g1list: Vec<PixelG1> = vec![]; let mut g2list: Vec<PixelG2> = vec![]; let mut rng = XorShiftRng::from_seed([ 0x59, 0x62, 0xbe, 0x5d, 0x76, 0x3d, 0x31, 0x8d, 0x17, 0xdb, 0x37, 0x32, 0x54, 0x06, 0xbc, 0xe5, ]); for _i in 0..SAMPLES { g1list.push(PixelG1::random(&mut rng)); g2list.push(PixelG2::random(&mut rng)); } let mut counter = 0; c.bench_function("Pixel G1 membership testing cost", move |b| { b.iter(|| { g1list[counter].into_affine().in_subgroup(); counter = (counter + 1) % SAMPLES; }) }); let mut counter = 0; c.bench_function("Pixel G2 membership testing cost", move |b| { b.iter(|| { g2list[counter].into_affine().in_subgroup(); counter = (counter + 1) % SAMPLES; }) }); } #[allow(dead_code)] fn bench_pairing(c: &mut Criterion) { const SAMPLES: usize = 100; let mut g1list1: Vec<G1> = vec![]; let mut g1list2: Vec<G1> = vec![]; let mut g1list3: Vec<G1> = vec![]; let mut g2list1: Vec<G2> = vec![]; let mut g2list2: Vec<G2> = vec![]; let mut g2list3: Vec<G2> = vec![]; let mut rng = XorShiftRng::from_seed([ 0x59, 0x62, 0xbe, 0x5d, 0x76, 0x3d, 0x31, 0x8d, 0x17, 0xdb, 0x37, 0x32, 0x54, 0x06, 0xbc, 0xe5, ]); for _i in 0..SAMPLES { g1list1.push(G1::random(&mut rng)); g1list2.push(G1::random(&mut rng)); g1list3.push(G1::random(&mut rng)); g2list1.push(G2::random(&mut rng)); g2list2.push(G2::random(&mut rng)); g2list3.push(G2::random(&mut rng)); } let mut counter = 0; let g11 = g1list1.clone(); let g21 = g2list1.clone(); c.bench_function("Single pairing cost", move |b| { b.iter(|| { Bls12::final_exponentiation(&Bls12::miller_loop( [( &(g11[counter].into_affine().prepare()), &(g21[counter].into_affine().prepare()), )] .iter(), )) .unwrap(); counter = (counter + 1) % SAMPLES; }) }); let mut counter = 0; let g11 = g1list1.clone(); let g12 = g1list2.clone(); let g21 = g2list1.clone(); let g22 = g2list2.clone(); c.bench_function("Simutaneously 2 pairing cost", move |b| { b.iter(|| { Bls12::final_exponentiation(&Bls12::miller_loop( [ ( &(g11[counter].into_affine().prepare()), &(g21[counter].into_affine().prepare()), ), ( &(g12[counter].into_affine().prepare()), &(g22[counter].into_affine().prepare()), ), ] .iter(), )) .unwrap(); counter = (counter + 1) % SAMPLES; }) }); let mut counter = 0; c.bench_function("Simutaneously 3 pairing cost", move |b| { b.iter(|| { 
Bls12::final_exponentiation(&Bls12::miller_loop( [ ( &(g1list1[counter].into_affine().prepare()), &(g2list1[counter].into_affine().prepare()), ), ( &(g1list2[counter].into_affine().prepare()), &(g2list2[counter].into_affine().prepare()), ), ( &(g1list3[counter].into_affine().prepare()), &(g2list3[counter].into_affine().prepare()), ), ] .iter(), )) .unwrap(); counter = (counter + 1) % SAMPLES; }) }); } criterion_group!( group_ops, bench_group_multiplication, bench_membership_testing, bench_pairing );
use super::ff::Field; use super::pairing::{bls12_381::*, CurveAffine, CurveProjective, Engine, SubgroupCheck}; use super::pixel::{PixelG1, PixelG2}; use super::rand_core::*; use super::rand_xorshift::XorShiftRng; use criterion::Criterion; #[allow(dead_code)] fn bench_group_multiplication(c: &mut Criterion) { const SAMPLES: usize = 100; let mut g1list: Vec<PixelG1> = vec![]; let mut g2list: Vec<PixelG2> = vec![]; let mut r1list: Vec<Fr> = vec![]; let mut rng = XorShiftRng::from_seed([ 0x59, 0x62, 0xbe, 0x5d, 0x76, 0x3d, 0x31, 0x8d, 0x17, 0xdb, 0x37, 0x32, 0x54, 0x06, 0xbc, 0xe5, ]); for _i in 0..SAMPLES { g1list.push(PixelG1::random(&mut rng)); g2list.push(PixelG2::random(&mut rng)); r1list.push(Fr::random(&mut rng)); } let r2list = r1list.clone(); let mut counter = 0; c.bench_function("Pixel G1 muliplication cost", move |b| { b.ite
#[allow(dead_code)] fn bench_membership_testing(c: &mut Criterion) { const SAMPLES: usize = 100; let mut g1list: Vec<PixelG1> = vec![]; let mut g2list: Vec<PixelG2> = vec![]; let mut rng = XorShiftRng::from_seed([ 0x59, 0x62, 0xbe, 0x5d, 0x76, 0x3d, 0x31, 0x8d, 0x17, 0xdb, 0x37, 0x32, 0x54, 0x06, 0xbc, 0xe5, ]); for _i in 0..SAMPLES { g1list.push(PixelG1::random(&mut rng)); g2list.push(PixelG2::random(&mut rng)); } let mut counter = 0; c.bench_function("Pixel G1 membership testing cost", move |b| { b.iter(|| { g1list[counter].into_affine().in_subgroup(); counter = (counter + 1) % SAMPLES; }) }); let mut counter = 0; c.bench_function("Pixel G2 membership testing cost", move |b| { b.iter(|| { g2list[counter].into_affine().in_subgroup(); counter = (counter + 1) % SAMPLES; }) }); } #[allow(dead_code)] fn bench_pairing(c: &mut Criterion) { const SAMPLES: usize = 100; let mut g1list1: Vec<G1> = vec![]; let mut g1list2: Vec<G1> = vec![]; let mut g1list3: Vec<G1> = vec![]; let mut g2list1: Vec<G2> = vec![]; let mut g2list2: Vec<G2> = vec![]; let mut g2list3: Vec<G2> = vec![]; let mut rng = XorShiftRng::from_seed([ 0x59, 0x62, 0xbe, 0x5d, 0x76, 0x3d, 0x31, 0x8d, 0x17, 0xdb, 0x37, 0x32, 0x54, 0x06, 0xbc, 0xe5, ]); for _i in 0..SAMPLES { g1list1.push(G1::random(&mut rng)); g1list2.push(G1::random(&mut rng)); g1list3.push(G1::random(&mut rng)); g2list1.push(G2::random(&mut rng)); g2list2.push(G2::random(&mut rng)); g2list3.push(G2::random(&mut rng)); } let mut counter = 0; let g11 = g1list1.clone(); let g21 = g2list1.clone(); c.bench_function("Single pairing cost", move |b| { b.iter(|| { Bls12::final_exponentiation(&Bls12::miller_loop( [( &(g11[counter].into_affine().prepare()), &(g21[counter].into_affine().prepare()), )] .iter(), )) .unwrap(); counter = (counter + 1) % SAMPLES; }) }); let mut counter = 0; let g11 = g1list1.clone(); let g12 = g1list2.clone(); let g21 = g2list1.clone(); let g22 = g2list2.clone(); c.bench_function("Simutaneously 2 pairing cost", move |b| { b.iter(|| { Bls12::final_exponentiation(&Bls12::miller_loop( [ ( &(g11[counter].into_affine().prepare()), &(g21[counter].into_affine().prepare()), ), ( &(g12[counter].into_affine().prepare()), &(g22[counter].into_affine().prepare()), ), ] .iter(), )) .unwrap(); counter = (counter + 1) % SAMPLES; }) }); let mut counter = 0; c.bench_function("Simutaneously 3 pairing cost", move |b| { b.iter(|| { Bls12::final_exponentiation(&Bls12::miller_loop( [ ( &(g1list1[counter].into_affine().prepare()), &(g2list1[counter].into_affine().prepare()), ), ( &(g1list2[counter].into_affine().prepare()), &(g2list2[counter].into_affine().prepare()), ), ( &(g1list3[counter].into_affine().prepare()), &(g2list3[counter].into_affine().prepare()), ), ] .iter(), )) .unwrap(); counter = (counter + 1) % SAMPLES; }) }); } criterion_group!( group_ops, bench_group_multiplication, bench_membership_testing, bench_pairing );
r(|| { g1list[counter].mul_assign(r1list[counter]); counter = (counter + 1) % SAMPLES; }) }); let mut counter = 0; c.bench_function("Pixel G2 muliplication cost", move |b| { b.iter(|| { g2list[counter].mul_assign(r2list[counter]); counter = (counter + 1) % SAMPLES; }) }); }
function_block-function_prefixed
[ { "content": "// Convert a vector back to time.\n\n// Returns an error if time depth is invalid.\n\nfn vec_to_time(mut t_vec: Vec<u64>, d: usize) -> Result<u64, String> {\n\n // python code:\n\n // if tvec == []:\n\n // return 1\n\n // else:\n\n // ti = tvec.pop(0)\n\n // return 1 + (ti-1) * (pow(2,D-1)-1) + vec2time(tvec,D-1)\n\n\n\n // requires D >=1 and t in {1,2,...,2^D-1}\n\n if d == 0 {\n\n #[cfg(debug_assertions)]\n\n println!(\"Error in vec_to_time: {}\", ERR_TIME_DEPTH);\n\n return Err(ERR_TIME_DEPTH.to_owned());\n\n }\n\n if t_vec == [] {\n\n // an empty list is 1\n\n Ok(1)\n\n } else {\n\n // process t_vec[0] recursively\n\n let tmp: Vec<u64> = t_vec.drain(0..1).collect();\n\n // if t_vec[0] == 1 => proceed to the left child, so we add the time by 1\n\n // if t_vec[0] == 1 => proceed to the right child, so we add the time by 2^(d-1)\n\n Ok(1 + (tmp[0] - 1) * ((1u64 << (d - 1)) - 1) + vec_to_time(t_vec, d - 1)?)\n\n }\n\n}\n", "file_path": "src/time.rs", "rank": 0, "score": 136012.2617496765 }, { "content": "#[allow(dead_code)]\n\nfn bench_verify(c: &mut Criterion) {\n\n const SAMPLES: usize = 100;\n\n\n\n // this benchmark uses a same set of parameter\n\n let param = Pixel::param_default();\n\n\n\n // get a list of public keys\n\n let mut pklist: Vec<PublicKey> = vec![];\n\n let mut poplist: Vec<ProofOfPossession> = vec![];\n\n let mut siglist: Vec<Signature> = vec![];\n\n let msg = \"the message to be signed in benchmarking\";\n\n let max_time = (1 << param.depth()) - 1;\n\n let rngseed = \"\";\n\n for _i in 0..SAMPLES {\n\n let seed = rand::thread_rng()\n\n .gen_ascii_chars()\n\n .take(32)\n\n .collect::<String>();\n\n // generate a sk\n\n let (pk, mut sk, pop) = Pixel::key_gen(&seed, &param).unwrap();\n", "file_path": "benches/benchmarks/bench_api.rs", "rank": 2, "score": 128108.93783025231 }, { "content": "#[allow(dead_code)]\n\nfn bench_keygen(c: &mut Criterion) {\n\n // this benchmark uses a same set of parameter\n\n let param = Pixel::param_default();\n\n\n\n // benchmarking\n\n c.bench_function(\"key generation\", move |b| {\n\n b.iter(|| {\n\n // a new seed for each new key gen\n\n let seed = rand::thread_rng()\n\n .gen_ascii_chars()\n\n .take(32)\n\n .collect::<String>();\n\n let res = Pixel::key_gen(&seed, &param);\n\n assert!(res.is_ok(), res.err());\n\n })\n\n });\n\n}\n\n\n\n/// benchmark key update: update to the next time stamp\n", "file_path": "benches/benchmarks/bench_api.rs", "rank": 3, "score": 128108.93783025231 }, { "content": "#[allow(dead_code)]\n\nfn bench_param(c: &mut Criterion) {\n\n // benchmarking\n\n c.bench_function(\"param generation\", move |b| {\n\n b.iter(|| {\n\n // get a new of seeds for parameter gen\n\n let seed = rand::thread_rng()\n\n .gen_ascii_chars()\n\n .take(32)\n\n .collect::<String>();\n\n let res = Pixel::param_gen(&seed, 0);\n\n assert!(res.is_ok(), res.err());\n\n })\n\n });\n\n}\n\n\n\n/// benchmark key generation\n", "file_path": "benches/benchmarks/bench_api.rs", "rank": 4, "score": 128108.93783025231 }, { "content": "#[allow(dead_code)]\n\nfn bench_aggregation(c: &mut Criterion) {\n\n const SAMPLES: usize = 3000;\n\n\n\n // this benchmark uses the default parameter\n\n let param = Pixel::param_default();\n\n\n\n // get a list of public keys\n\n let mut pklist: Vec<PublicKey> = vec![];\n\n let mut siglist: Vec<Signature> = vec![];\n\n let msg = \"the message to be signed in benchmarking\";\n\n let rngseed = \"\";\n\n // sign at time 1 for all signatures, for fast benchmarking\n\n // let max_time = (1 << param.depth()) - 
1;\n\n // let time = rand::thread_rng().gen_range(0u64, max_time - 2);\n\n let time = 1;\n\n for _i in 0..SAMPLES {\n\n let seed = rand::thread_rng()\n\n .gen_ascii_chars()\n\n .take(32)\n\n .collect::<String>();\n", "file_path": "benches/benchmarks/bench_api.rs", "rank": 5, "score": 128108.93783025231 }, { "content": "#[allow(dead_code)]\n\nfn bench_sign(c: &mut Criterion) {\n\n const SAMPLES: usize = 100;\n\n\n\n // this benchmark uses a same set of parameter\n\n let param = Pixel::param_default();\n\n\n\n // get a list of secret keys, as random time\n\n let mut sklist: Vec<SecretKey> = vec![];\n\n\n\n let msg = \"the message to be signed in benchmarking\";\n\n let max_time = (1 << param.depth()) - 1;\n\n let rngseed = \"\";\n\n for _i in 0..SAMPLES {\n\n let seed = rand::thread_rng()\n\n .gen_ascii_chars()\n\n .take(32)\n\n .collect::<String>();\n\n // generate a sk\n\n let (_, mut sk, _) = Pixel::key_gen(&seed, &param).unwrap();\n\n // delegate it to a random time\n", "file_path": "benches/benchmarks/bench_api.rs", "rank": 6, "score": 128108.93783025231 }, { "content": "#[allow(dead_code)]\n\nfn bench_gamma_list(c: &mut Criterion) {\n\n for time in 1..64 {\n\n let time_vec = TimeVec::init(time, 32).unwrap();\n\n let message = format!(\"gamma list for time {}\", time,);\n\n\n\n // benchmark gamma list generation\n\n c.bench_function(&message, move |b| {\n\n b.iter(|| {\n\n let res = time_vec.gamma_list(32);\n\n assert!(res.is_ok(), res.err());\n\n })\n\n });\n\n }\n\n}\n\n\n\ncriterion_group!(time, bench_gamma_list);\n", "file_path": "benches/benchmarks/bench_time.rs", "rank": 7, "score": 124892.36345817024 }, { "content": "#[allow(dead_code)]\n\nfn bench_sign_present(c: &mut Criterion) {\n\n const SAMPLES: usize = 100;\n\n\n\n // this benchmark uses a same set of parameter\n\n let param = Pixel::param_default();\n\n\n\n // get a list of secret keys, as random time\n\n let mut sklist: Vec<SecretKey> = vec![];\n\n let msg = \"the message to be signed in benchmarking\";\n\n let max_time = (1 << param.depth()) - 1;\n\n let rngseed = b\"\";\n\n for _i in 0..SAMPLES {\n\n let seed = rand::thread_rng()\n\n .gen_ascii_chars()\n\n .take(32)\n\n .collect::<String>();\n\n // generate a sk\n\n let (_, mut sk, _) = Pixel::key_gen(&seed, &param).unwrap();\n\n // delegate it to a random time\n\n let time = rand::thread_rng().gen_range(0u64, max_time - 2);\n", "file_path": "benches/benchmarks/bench_api.rs", "rank": 10, "score": 124892.36345817024 }, { "content": "#[allow(dead_code)]\n\nfn bench_sign_then_update(c: &mut Criterion) {\n\n const SAMPLES: usize = 100;\n\n\n\n // this benchmark uses a same set of parameter\n\n let param = Pixel::param_default();\n\n\n\n // get a list of secret keys, as random time\n\n let mut sklist: Vec<SecretKey> = vec![];\n\n let msg = \"the message to be signed in benchmarking\";\n\n let max_time = (1 << param.depth()) - 1;\n\n let rngseed = \"\";\n\n for _i in 0..SAMPLES {\n\n let seed = rand::thread_rng()\n\n .gen_ascii_chars()\n\n .take(32)\n\n .collect::<String>();\n\n // generate a sk\n\n let (_, mut sk, _) = Pixel::key_gen(&seed, &param).unwrap();\n\n // delegate it to a random time\n\n let time = rand::thread_rng().gen_range(0u64, max_time - 2);\n", "file_path": "benches/benchmarks/bench_api.rs", "rank": 11, "score": 124892.36345817024 }, { "content": "#[allow(dead_code)]\n\nfn bench_key_update_random(c: &mut Criterion) {\n\n const SAMPLES: usize = 100;\n\n\n\n // this benchmark uses a same set of parameter\n\n let param = Pixel::param_default();\n\n\n\n // get a 
list of secret keys, as random time\n\n let mut sklist: Vec<SecretKey> = vec![];\n\n\n\n let max_time = (1 << param.depth()) - 1;\n\n let rngseed = \"\";\n\n for _i in 0..SAMPLES {\n\n let seed = rand::thread_rng()\n\n .gen_ascii_chars()\n\n .take(32)\n\n .collect::<String>();\n\n // generate a sk\n\n let (_, mut sk, _) = Pixel::key_gen(&seed, &param).unwrap();\n\n // delegate it to a random time\n\n let time = rand::thread_rng().gen_range(0u64, max_time - 2);\n", "file_path": "benches/benchmarks/bench_api.rs", "rank": 12, "score": 121932.17335822384 }, { "content": "#[allow(dead_code)]\n\nfn bench_key_update_next(c: &mut Criterion) {\n\n const SAMPLES: usize = 1000;\n\n\n\n // this benchmark uses a same set of parameter\n\n let param = Pixel::param_default();\n\n\n\n // get a list of secret keys, as random time\n\n let mut sklist: Vec<SecretKey> = vec![];\n\n\n\n let max_time = (1 << param.depth()) - 1;\n\n let rngseed = \"\";\n\n for _i in 0..SAMPLES {\n\n let seed = rand::thread_rng()\n\n .gen_ascii_chars()\n\n .take(32)\n\n .collect::<String>();\n\n // generate a sk\n\n let (_, mut sk, _) = Pixel::key_gen(&seed, &param).unwrap();\n\n // delegate it to a random time\n\n let time = rand::thread_rng().gen_range(0u64, max_time - 1);\n", "file_path": "benches/benchmarks/bench_api.rs", "rank": 13, "score": 121932.17335822384 }, { "content": "#[allow(dead_code)]\n\nfn bench_ssk_delegate(c: &mut Criterion) {\n\n const SAMPLES: usize = 100;\n\n\n\n // this benchmark uses a same set of parameter\n\n let seed = rand::thread_rng()\n\n .gen_ascii_chars()\n\n .take(32)\n\n .collect::<String>();\n\n let param = Pixel::param_gen(&seed, 0).unwrap();\n\n\n\n // ssklist at time 1\n\n let mut ssklist: Vec<SubSecretKey> = vec![];\n\n for _i in 0..SAMPLES {\n\n let seed = rand::thread_rng()\n\n .gen_ascii_chars()\n\n .take(32)\n\n .collect::<String>();\n\n // generate a sk and store the first ssk\n\n let (_, sk, _) = Pixel::key_gen(&seed, &param).unwrap();\n\n ssklist.push(sk.first_ssk().unwrap());\n", "file_path": "benches/benchmarks/bench_ssk_ops.rs", "rank": 14, "score": 121932.17335822384 }, { "content": "#[allow(dead_code)]\n\nfn bench_sk_update_seq(c: &mut Criterion) {\n\n const SAMPLES: usize = 100;\n\n\n\n // this benchmark uses a same set of parameter\n\n let seed = rand::thread_rng()\n\n .gen_ascii_chars()\n\n .take(32)\n\n .collect::<String>();\n\n let param = Pixel::param_gen(&seed, 0).unwrap();\n\n\n\n // sklist1 at time 1\n\n let mut sklist: Vec<SecretKey> = vec![];\n\n for _i in 0..SAMPLES {\n\n let seed = rand::thread_rng()\n\n .gen_ascii_chars()\n\n .take(32)\n\n .collect::<String>();\n\n // generate a sk\n\n let (_, sk, _) = Pixel::key_gen(&seed, &param).unwrap();\n\n sklist.push(sk);\n", "file_path": "benches/benchmarks/bench_sk_ops.rs", "rank": 15, "score": 119198.8888381763 }, { "content": "#[allow(dead_code)]\n\nfn bench_sk_update_leaf(c: &mut Criterion) {\n\n const SAMPLES: usize = 100;\n\n\n\n // this benchmark uses a same set of parameter\n\n let seed = rand::thread_rng()\n\n .gen_ascii_chars()\n\n .take(32)\n\n .collect::<String>();\n\n let param = Pixel::param_gen(&seed, 0).unwrap();\n\n let rngseed = \"\";\n\n // sklist at time 31 -- one level above the leaf nodes\n\n let mut sklist: Vec<SecretKey> = vec![];\n\n for _i in 0..SAMPLES {\n\n let seed = rand::thread_rng()\n\n .gen_ascii_chars()\n\n .take(32)\n\n .collect::<String>();\n\n // generate a sk and update to time 31\n\n let (_, mut sk, _) = Pixel::key_gen(&seed, &param).unwrap();\n\n let res = Pixel::sk_update(&mut sk, 31, 
&param, rngseed);\n", "file_path": "benches/benchmarks/bench_sk_ops.rs", "rank": 16, "score": 119198.8888381763 }, { "content": "#[allow(dead_code)]\n\nfn bench_ssk_leveled_randomization(c: &mut Criterion) {\n\n // this benchmark uses a same set of parameter\n\n let seed = rand::thread_rng()\n\n .gen_ascii_chars()\n\n .take(32)\n\n .collect::<String>();\n\n let param = Pixel::param_gen(&seed, 0).unwrap();\n\n\n\n // ssk at time 1\n\n\n\n let seed = rand::thread_rng()\n\n .gen_ascii_chars()\n\n .take(32)\n\n .collect::<String>();\n\n // generate a sk and store the first ssk\n\n let (_, sk, _) = Pixel::key_gen(&seed, &param).unwrap();\n\n let mut ssk = sk.first_ssk().unwrap();\n\n\n\n // from root to the leaf we can delegate d - 1 times\n\n for _ in 0..param.depth() - 1 {\n", "file_path": "benches/benchmarks/bench_ssk_ops.rs", "rank": 17, "score": 119198.8888381763 }, { "content": "#[allow(dead_code)]\n\nfn bench_ssk_leaf_randomization(c: &mut Criterion) {\n\n // this benchmark uses a same set of parameter\n\n let seed = rand::thread_rng()\n\n .gen_ascii_chars()\n\n .take(32)\n\n .collect::<String>();\n\n let param = Pixel::param_gen(&seed, 0).unwrap();\n\n\n\n // ssk at time 1\n\n let seed = rand::thread_rng()\n\n .gen_ascii_chars()\n\n .take(32)\n\n .collect::<String>();\n\n // generate a sk and store the first ssk\n\n let (_, sk, _) = Pixel::key_gen(&seed, &param).unwrap();\n\n let mut ssk = sk.first_ssk().unwrap();\n\n // update ssk to a leaf node\n\n let tar_time = param.depth() as u64;\n\n let res = ssk.delegate(tar_time, param.depth());\n\n assert!(res.is_ok(), res.err());\n", "file_path": "benches/benchmarks/bench_ssk_ops.rs", "rank": 18, "score": 119198.8888381763 }, { "content": "// Convert time into a vector.\n\n// Returns an error if the time stamp or the time depth is invalid.\n\nfn time_to_vec(time: TimeStamp, d: usize) -> Result<Vec<u64>, String> {\n\n // requires D >=1 and t in {1,2,...,2^D-1}\n\n if d == 0 {\n\n #[cfg(debug_assertions)]\n\n println!(\"Error in time_to_vec: {}\", ERR_TIME_DEPTH);\n\n return Err(ERR_TIME_DEPTH.to_owned());\n\n }\n\n let max_t = 1 << d;\n\n if time > max_t || time == 0 {\n\n #[cfg(debug_assertions)]\n\n println!(\"Error in time_to_vec: {}\", ERR_TIME_STAMP);\n\n return Err(ERR_TIME_STAMP.to_owned());\n\n }\n\n\n\n // python code:\n\n // if t==1:\n\n // return []\n\n // if D>0 and t > pow(2,D-1):\n\n // return [2] + time2vec(t-pow(2,D-1),D-1)\n\n // else:\n", "file_path": "src/time.rs", "rank": 19, "score": 98591.12249888186 }, { "content": "#[cfg(test)]\n\nfn foo_prng(prng: PRNG) -> *const PRNG {\n\n &prng as *const PRNG\n\n}\n\n\n", "file_path": "src/test/pixel_zeroize.rs", "rank": 20, "score": 77835.70918416174 }, { "content": "#[test]\n\nfn test_g1_serialization_rand() {\n\n // PixelG1::zero, compressed\n\n let g1_zero = PixelG1::zero();\n\n let mut buf: Vec<u8> = vec![];\n\n // serialize a PixelG1 element into buffer\n\n assert!(g1_zero.serialize(&mut buf, true).is_ok());\n\n assert_eq!(buf.len(), 96, \"length of blob is incorrect\");\n\n let g1_zero_recover = PixelG1::deserialize(&mut buf[..].as_ref(), true).unwrap();\n\n\n\n assert_eq!(g1_zero, g1_zero_recover);\n\n\n\n // PixelG1::one, compressed\n\n let g1_one = PixelG1::one();\n\n let mut buf: Vec<u8> = vec![];\n\n // serialize a PixelG1 element into buffer\n\n assert!(g1_one.serialize(&mut buf, true).is_ok());\n\n assert_eq!(buf.len(), 96, \"length of blob is incorrect\");\n\n let g1_one_recover = PixelG1::deserialize(&mut buf[..].as_ref(), true).unwrap();\n\n\n\n assert_eq!(g1_one, 
g1_one_recover);\n", "file_path": "src/test/serdes.rs", "rank": 21, "score": 76546.9676717261 }, { "content": "#[cfg(test)]\n\nfn foo_sk(sk: SecretKey) -> *const SecretKey {\n\n &sk as *const SecretKey\n\n}\n", "file_path": "src/test/pixel_zeroize.rs", "rank": 22, "score": 74328.94666702929 }, { "content": "#[test]\n\nfn test_sig_serialization_kat_invalid_const() {\n\n // mix-match the compressness\n\n for &csid in &VALID_CIPHERSUITE {\n\n let val1 = VALID_G1_ZERO_COM;\n\n let val2 = VALID_G2_ZERO_UNCOM;\n\n let time: [u8; 4] = [1, 2, 3, 4];\n\n let tmp = [[csid].as_ref(), time.as_ref(), val1, val2].concat();\n\n let res = Signature::deserialize(&mut Cursor::new(tmp), true);\n\n assert!(res.is_err(), \"expected Err, got Ok: {:?}\", res.ok());\n\n }\n\n for &csid in &VALID_CIPHERSUITE {\n\n let val1 = VALID_G1_ZERO_UNCOM;\n\n let val2 = VALID_G2_ZERO_COM;\n\n let time: [u8; 4] = [1, 2, 3, 4];\n\n let tmp = [[csid].as_ref(), time.as_ref(), val1, val2].concat();\n\n let res = Signature::deserialize(&mut Cursor::new(tmp), true);\n\n assert!(res.is_err(), \"expected Err, got Ok: {:?}\", res.ok());\n\n }\n\n\n\n // incorrect csid\n", "file_path": "src/test/serdes.rs", "rank": 23, "score": 72044.6368308524 }, { "content": "#[cfg(test)]\n\nfn foo_ssk(ssk: SubSecretKey) -> *const SubSecretKey {\n\n &ssk as *const SubSecretKey\n\n}\n\n\n", "file_path": "src/test/pixel_zeroize.rs", "rank": 24, "score": 71304.34885058185 }, { "content": "/// This function generate a proof of possesion of the master secret.\n\n/// This function is a subroutine of the key generation function, and\n\n/// should not be called anywhere else -- the master secret key is\n\n/// destroyed after key generation.\n\nfn proof_of_possession(msk: Fr, pk: PixelG2, ciphersuite: u8) -> Result<PixelG1, String> {\n\n // buf = DOM_SEP_POP | serial (PK)\n\n let mut buf = domain_sep::DOM_SEP_POP.as_bytes().to_vec();\n\n if pk.serialize(&mut buf, true).is_err() {\n\n return Err(ERR_SERIAL.to_owned());\n\n };\n\n // the pop is a signature on the buf\n\n let sig = BLSSigCore::core_sign(msk, buf, &[ciphersuite]);\n\n Ok(sig)\n\n}\n\n\n\n/// This function tests if a public key and a master secret key has a same exponent.\n\n/// This function is private, and test only, since by default no one shall have the master secret key.\n\n#[cfg(test)]\n\npub(crate) fn validate_master_key(pk: &PixelG2, sk: &PixelG1, pp: &PubParam) -> bool {\n\n use ff::Field;\n\n use pairing::{bls12_381::*, CurveAffine, Engine};\n\n let mut g2 = pp.g2();\n\n g2.negate();\n\n let h = pp.h();\n", "file_path": "src/key_pair.rs", "rank": 25, "score": 58977.83262636418 }, { "content": "#[test]\n\nfn test_delegate() {\n\n let pp = PubParam::init_without_seed();\n\n let depth = pp.depth();\n\n // a random field element\n\n let r = Fr::from_str(\n\n \"5902757315117623225217061455046442114914317855835382236847240262163311537283\",\n\n )\n\n .unwrap();\n\n\n\n // a random master secret key\n\n let mut alpha = pp.h();\n\n let msk = Fr::from_str(\n\n \"8010751325124863419913799848205334820481433752958938231164954555440305541353\",\n\n )\n\n .unwrap();\n\n alpha.mul_assign(msk);\n\n\n\n // a random public key\n\n let mut pke = pp.g2();\n\n pke.mul_assign(msk);\n", "file_path": "src/test/keys.rs", "rank": 26, "score": 51787.17739956912 }, { "content": "#[test]\n\nfn test_randomization() {\n\n use ff::PrimeField;\n\n let pp = PubParam::init_without_seed();\n\n // a random field element\n\n let r = Fr::from_str(\n\n 
\"5902757315117623225217061455046442114914317855835382236847240262163311537283\",\n\n )\n\n .unwrap();\n\n\n\n // a random master secret key\n\n let mut alpha = pp.h();\n\n let msk = Fr::from_str(\n\n \"8010751325124863419913799848205334820481433752958938231164954555440305541353\",\n\n )\n\n .unwrap();\n\n alpha.mul_assign(msk);\n\n\n\n // a random public key\n\n let mut pke = pp.g2();\n\n pke.mul_assign(msk);\n", "file_path": "src/test/keys.rs", "rank": 27, "score": 51787.17739956912 }, { "content": "#[test]\n\nfn test_prng() {\n\n // prng default\n\n assert_eq!(PRNG::default(), PRNG::new([0u8; 64]));\n\n\n\n // test sample then update function\n\n let mut prng = PRNG::init(\"seed\", \"salt\");\n\n println!(\"unit test: prng debug {:?}\", prng);\n\n assert_eq!(\n\n prng.seed().as_ref(),\n\n [\n\n 0xde, 0x8, 0xde, 0xe9, 0xf2, 0x71, 0xa6, 0xa0, 0x61, 0xf0, 0xc7, 0x6b, 0x10, 0xb0,\n\n 0xe4, 0xa7, 0x10, 0x7d, 0xa1, 0xeb, 0x84, 0x9f, 0x7e, 0x46, 0xd4, 0x80, 0xf3, 0xab,\n\n 0x93, 0x5b, 0xd5, 0x63, 0x29, 0x75, 0x16, 0x34, 0x8f, 0x3a, 0x4, 0x43, 0x7e, 0x99,\n\n 0x84, 0x80, 0x8a, 0xde, 0xab, 0xc5, 0x40, 0x8f, 0x78, 0xc0, 0x66, 0x7d, 0xd0, 0x15,\n\n 0x7c, 0x6e, 0xcb, 0xf7, 0xa7, 0x4b, 0x69, 0xb7,\n\n ]\n\n .as_ref()\n\n );\n\n\n\n let r = prng.sample_then_update(\"info\");\n", "file_path": "src/test/prng.rs", "rank": 28, "score": 51787.17739956912 }, { "content": "#[test]\n\nfn test_os2ip() {\n\n assert_eq!(Fr::from_str(\"0\").unwrap(), os2ip_mod_p(&[0u8, 0u8]));\n\n assert_eq!(Fr::from_str(\"1\").unwrap(), os2ip_mod_p(&[0u8, 1u8]));\n\n assert_eq!(Fr::from_str(\"255\").unwrap(), os2ip_mod_p(&[0u8, 0xffu8]));\n\n assert_eq!(Fr::from_str(\"256\").unwrap(), os2ip_mod_p(&[1u8, 0u8]));\n\n assert_eq!(\n\n Fr::from_str(\"65535\").unwrap(),\n\n os2ip_mod_p(&[0xffu8, 0xffu8])\n\n );\n\n // 2^128\n\n assert_eq!(\n\n Fr::from_str(\"340282366920938463463374607431768211456\").unwrap(),\n\n // 1 followed by 128/8 = 16 zeros\n\n os2ip_mod_p(&[1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])\n\n );\n\n // 2^256 % p\n\n assert_eq!(\n\n Fr::from_str(\n\n \"10920338887063814464675503992315976177888879664585288394250266608035967270910\"\n\n )\n", "file_path": "src/test/prng.rs", "rank": 29, "score": 51787.17739956912 }, { "content": "#[test]\n\nfn test_pixel_api() {\n\n let res = Pixel::param_gen(\"this is a very very long seed for parameter testing\", 0);\n\n assert!(res.is_ok(), \"pixel param gen failed\");\n\n let pp = res.unwrap();\n\n\n\n let res = Pixel::key_gen(\"this is a very very long seed for key gen testing\", &pp);\n\n assert!(res.is_ok(), \"pixel key gen failed\");\n\n let (pk, mut sk, pop) = res.unwrap();\n\n assert!(Pixel::verify_pop(&pk, &pop), \"pop verification failed\");\n\n\n\n let sk2 = sk.clone();\n\n\n\n // testing basic signings\n\n let msg = \"message to sign\";\n\n let seed = \"\";\n\n let res = Pixel::sign(&mut sk, 1, &pp, msg, seed);\n\n assert!(res.is_ok(), \"error in signing algorithm\");\n\n let sig = res.unwrap();\n\n assert!(Pixel::verify(&pk, &pp, msg, &sig), \"verification failed\");\n\n // testing update-then-sign for present\n", "file_path": "src/test/api.rs", "rank": 30, "score": 50401.96978624024 }, { "content": "#[test]\n\nfn test_sig_getter() {\n\n let sig = Signature::new(0, 1, PixelG2::one(), PixelG1::one());\n\n assert_eq!(sig.ciphersuite(), 0);\n\n assert_eq!(sig.time(), 1);\n\n assert_eq!(sig.sigma1(), PixelG2::one());\n\n assert_eq!(sig.sigma2(), PixelG1::one());\n\n}\n\n\n", "file_path": "src/test/getter.rs", "rank": 31, "score": 50401.96978624024 }, { "content": 
"#[test]\n\nfn test_pk_getter() {\n\n let pk = PublicKey::new(1, PixelG2::one());\n\n assert_eq!(pk.size(), PK_LEN);\n\n assert_eq!(pk.ciphersuite(), 1);\n\n assert_eq!(pk.pk(), PixelG2::one());\n\n}\n\n\n", "file_path": "src/test/getter.rs", "rank": 32, "score": 50401.96978624024 }, { "content": "#[test]\n\nfn test_ssk_getter() {\n\n let pp = PubParam::default();\n\n let res = KeyPair::keygen(b\"this is a very very long seed for testing\", &pp);\n\n\n\n let (_pk, sk, _pop) = res.unwrap();\n\n let ssk = sk.first_ssk().unwrap();\n\n\n\n assert_eq!(ssk.time(), 1);\n\n ssk.g2r();\n\n ssk.hpoly();\n\n}\n\n\n", "file_path": "src/test/getter.rs", "rank": 33, "score": 50401.96978624024 }, { "content": "#[test]\n\nfn test_pop_getter() {\n\n let pop = ProofOfPossession::new(0, PixelG1::one());\n\n assert_eq!(pop.ciphersuite(), 0);\n\n assert_eq!(pop.pop(), PixelG1::one());\n\n}\n\n\n", "file_path": "src/test/getter.rs", "rank": 34, "score": 50401.96978624024 }, { "content": "#[test]\n\nfn negative_test_sk() {\n\n let pp = PubParam::default();\n\n let res = KeyPair::keygen(b\"this is a very very long seed for testing\", &pp);\n\n assert!(\n\n res.is_ok(),\n\n \"key gen failed\\n\\\n\n error message {:?}\",\n\n res.err()\n\n );\n\n let (_pk, sk, _pop) = res.unwrap();\n\n let pk = PublicKey::new(1, PixelG2::zero());\n\n assert!(!sk.validate(&pk, &pp));\n\n}\n\n\n", "file_path": "src/test/keys.rs", "rank": 35, "score": 50401.96978624024 }, { "content": "#[test]\n\nfn test_master_key() {\n\n let pp = PubParam::init_without_seed();\n\n let res = crate::key_pair::master_key_gen(b\"this is a very very long seed for testing\", &pp);\n\n assert!(res.is_ok(), \"master key gen failed\");\n\n let (pk, sk, _pop, _seed) = res.unwrap();\n\n assert!(\n\n crate::key_pair::validate_master_key(&pk, &sk, &pp),\n\n \"master key is invalid\"\n\n )\n\n}\n\n\n\n/// This test does the following:\n\n/// 1. generate a paring of public/secret keys, and the pop\n\n/// 2. verify the pop against the public key\n\n/// 3. 
for j in 2..16\n\n/// * update sk1 from time 1 to time j, and check the correctness\n\n/// * update from sk_{j-1} to sk_j, and check the correctness\n", "file_path": "src/test/keys.rs", "rank": 36, "score": 50401.96978624024 }, { "content": "#[test]\n\nfn test_time_getter() {\n\n //TBD\n\n}\n", "file_path": "src/test/getter.rs", "rank": 37, "score": 50401.96978624024 }, { "content": "#[test]\n\nfn test_key_gen() {\n\n // a random field element\n\n let r = Fr::from_str(\n\n \"5902757315117623225217061455046442114914317855835382236847240262163311537283\",\n\n )\n\n .unwrap();\n\n let pp = PubParam::init_without_seed();\n\n // a random master secret key\n\n let mut alpha = pp.h();\n\n let msk = Fr::from_str(\n\n \"8010751325124863419913799848205334820481433752958938231164954555440305541353\",\n\n )\n\n .unwrap();\n\n alpha.mul_assign(msk);\n\n\n\n let t = SubSecretKey::init(&pp, alpha, r);\n\n let res = SubSecretKey::init_from_randomization(&pp, alpha, r);\n\n assert!(\n\n res.is_ok(),\n\n \"ssk initiation from randomization failed\\n\\\n", "file_path": "src/test/keys.rs", "rank": 38, "score": 50401.96978624024 }, { "content": "#[test]\n\nfn test_zeroize() {\n\n let tmp_prng = PRNG::new([1; 64]);\n\n let tmp_ssk = SubSecretKey::new(1, PixelG2::one(), PixelG1::one(), vec![PixelG1::one(); 2]);\n\n let tmp_sk = SecretKey::new(0, 1, vec![tmp_ssk.clone()], tmp_prng.clone());\n\n\n\n let t = foo_prng(tmp_prng);\n\n unsafe {\n\n assert_eq!(*t, PRNG::default());\n\n }\n\n\n\n let t = foo_ssk(tmp_ssk);\n\n unsafe {\n\n assert_eq!(*t, SubSecretKey::default());\n\n }\n\n\n\n let t = foo_sk(tmp_sk);\n\n unsafe {\n\n assert_eq!(*t, SecretKey::default());\n\n }\n\n}\n\n\n", "file_path": "src/test/pixel_zeroize.rs", "rank": 39, "score": 50401.96978624024 }, { "content": "#[test]\n\nfn test_sk_validation() {\n\n let pp = PubParam::init_without_seed();\n\n let res = KeyPair::keygen(b\"this is a very very long seed for testing\", &pp);\n\n assert!(\n\n res.is_ok(),\n\n \"key gen failed\\n\\\n\n error message {:?}\",\n\n res.err()\n\n );\n\n let (pk, sk, pop) = res.unwrap();\n\n assert!(pk.validate(&pop));\n\n assert!(sk.validate(&pk, &pp), \"invalid sk\");\n\n\n\n let seed = \"\";\n\n for j in 2..16 {\n\n let mut sk2 = sk.clone();\n\n let res = sk2.update(&pp, j, seed.as_ref());\n\n assert!(\n\n res.is_ok(),\n\n \"update failed\\n\\\n\n error message {:?}\",\n\n res.err()\n\n );\n\n assert!(sk2.validate(&pk, &pp), \"invalid sk\");\n\n }\n\n}\n\n\n\n/// this test takes quite some time to finish\n\n/// enable this test with `cargo test -- --ignored`\n", "file_path": "src/test/keys.rs", "rank": 40, "score": 50401.96978624024 }, { "content": "#[test]\n\nfn test_sk_getter() {\n\n let pp = PubParam::default();\n\n let res = KeyPair::keygen(b\"this is a very very long seed for testing\", &pp);\n\n\n\n let (_pk, sk, _pop) = res.unwrap();\n\n assert_eq!(sk.ciphersuite(), 0);\n\n assert_eq!(sk.time(), 1);\n\n assert!(sk.digest().is_ok());\n\n}\n\n\n", "file_path": "src/test/getter.rs", "rank": 41, "score": 50401.96978624024 }, { "content": "#[test]\n\nfn negative_test_pop() {\n\n let pk = PublicKey::new(1, PixelG2::one());\n\n let pop = ProofOfPossession::new(0, PixelG1::one());\n\n assert!(!pk.validate(&pop));\n\n\n\n let pk = PublicKey::new(0, PixelG2::one());\n\n let pop = ProofOfPossession::new(1, PixelG1::one());\n\n assert!(!pk.validate(&pop));\n\n\n\n let pk = PublicKey::new(0, PixelG2::one());\n\n let pop = ProofOfPossession::new(0, PixelG1::one());\n\n assert!(!pk.validate(&pop));\n\n}\n\n\n", "file_path": 
"src/test/keys.rs", "rank": 42, "score": 50401.96978624024 }, { "content": "#[test]\n\nfn test_group_is_correct() {\n\n use pairing::CurveProjective;\n\n use PixelG1;\n\n let a = PixelG1::one();\n\n assert_eq!(a, pairing::bls12_381::G2::one());\n\n}\n", "file_path": "src/test/api.rs", "rank": 43, "score": 50401.96978624024 }, { "content": "#[test]\n\nfn test_prng_getter() {\n\n let prng = PRNG::default();\n\n assert_eq!(prng.seed().to_vec(), vec![0u8; 64]);\n\n}\n\n\n", "file_path": "src/test/getter.rs", "rank": 44, "score": 50401.96978624024 }, { "content": "#[test]\n\nfn test_quick_signature_tests() {\n\n let pp = PubParam::init_without_seed();\n\n let res = KeyPair::keygen(b\"this is a very very long seed for testing\", &pp);\n\n assert!(res.is_ok(), \"key gen failed\");\n\n let (pk, sk, pop) = res.unwrap();\n\n assert!(pk.validate(&pop));\n\n\n\n let msg = b\"message to sign\";\n\n let res = Signature::sign_bytes(&sk, 1, &pp, msg);\n\n assert!(res.is_ok(), \"signing failed\");\n\n let sig = res.unwrap();\n\n assert!(sig.verify_bytes(&pk, &pp, msg), \"verification failed\");\n\n println!(\"{:?}\", sig);\n\n let seed = \"\";\n\n\n\n for j in 2..16 {\n\n let mut sk2 = sk.clone();\n\n let res = sk2.update(&pp, j, seed.as_ref());\n\n assert!(res.is_ok(), \"updating failed\");\n\n let res = Signature::sign_bytes(&sk2, sk2.time(), &pp, msg);\n", "file_path": "src/test/sig.rs", "rank": 45, "score": 49142.166728362994 }, { "content": "#[ignore]\n\n#[test]\n\nfn test_long_key_update() {\n\n let pp = PubParam::init_without_seed();\n\n let res = KeyPair::keygen(b\"this is a very very long seed for testing\", &pp);\n\n assert!(\n\n res.is_ok(),\n\n \"key gen failed\\n\\\n\n error message {:?}\",\n\n res.err()\n\n );\n\n let (pk, sk, pop) = res.unwrap();\n\n assert!(pk.validate(&pop));\n\n\n\n let seed = \"\";\n\n\n\n // this double loop\n\n // 1. performs key updates with all possible `start_time` and `finish_time`\n\n // 2. for each updated key, checks the validity of its subkeys\n\n for j in 2..16 {\n\n println!(\"delegate to time {}\", j);\n\n let mut sk2 = sk.clone();\n", "file_path": "src/test/keys.rs", "rank": 46, "score": 49142.166728362994 }, { "content": "#[test]\n\nfn test_pk_serialization_rand() {\n\n use PK_LEN;\n\n\n\n let pp = PubParam::init_without_seed();\n\n let res = KeyPair::keygen(b\"this is a very very long seed for testing\", &pp);\n\n assert!(\n\n res.is_ok(),\n\n \"key gen failed\\n\\\n\n error message {:?}\",\n\n res.err()\n\n );\n\n let (pk, _sk, pop) = res.unwrap();\n\n assert!(pk.validate(&pop));\n\n\n\n // buffer space\n\n let mut buf: Vec<u8> = vec![];\n\n // serialize a ssk into buffer\n\n assert!(pk.serialize(&mut buf, true).is_ok());\n\n assert_eq!(buf.len(), PK_LEN, \"length of blob is incorrect\");\n\n\n\n // deserialize a buffer into ssk\n\n let pk_recover = PublicKey::deserialize(&mut buf[..].as_ref(), true).unwrap();\n\n\n\n // makes sure that the keys match\n\n assert_eq!(pk, pk_recover);\n\n}\n\n\n", "file_path": "src/test/serdes.rs", "rank": 47, "score": 49142.166728362994 }, { "content": "#[ignore]\n\n#[test]\n\nfn test_long_sk_validation() {\n\n let pp = PubParam::init_without_seed();\n\n let res = KeyPair::keygen(b\"this is a very very long seed for testing\", &pp);\n\n assert!(\n\n res.is_ok(),\n\n \"key gen failed\\n\\\n\n error message {:?}\",\n\n res.err()\n\n );\n\n let (pk, sk, pop) = res.unwrap();\n\n assert!(pk.validate(&pop));\n\n assert!(sk.validate(&pk, &pp), \"invalid sk\");\n\n\n\n let seed = \"\";\n\n // this double loop\n\n // 1. 
performs key updates with all possible `start_time` and `finish_time`\n\n // 2. for each updated key, checks the validity\n\n for j in 2..16 {\n\n println!(\"validate key for time {}\", j);\n\n let mut sk2 = sk.clone();\n", "file_path": "src/test/keys.rs", "rank": 48, "score": 49142.166728362994 }, { "content": "#[test]\n\nfn test_pixel_ffi_basic() {\n\n let seed = \"This is a very very long seed for testing\";\n\n let msg = \"message to sign\";\n\n\n\n let _t = c_estimate_sk_size(1, c_get_depth());\n\n unsafe {\n\n // generate key pair\n\n let kp = c_keygen(seed.as_ptr(), seed.len());\n\n\n\n // check pop\n\n assert!(c_verify_pop(kp.pk, kp.pop));\n\n\n\n // sign\n\n let sig = c_sign_present(kp.sk, msg.as_ptr(), msg.len(), 1);\n\n\n\n assert!(c_verify(kp.pk, msg.as_ptr(), msg.len(), sig));\n\n\n\n // testing update-then-sign for present\n\n for j in 2..16 {\n\n let sk_new = c_sk_update(kp.sk, seed.as_ptr(), seed.len(), j);\n\n let sig = c_sign_present(sk_new, msg.as_ptr(), msg.len(), j);\n\n assert!(c_verify(kp.pk, msg.as_ptr(), msg.len(), sig));\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/test/ffi.rs", "rank": 49, "score": 49142.166728362994 }, { "content": "#[test]\n\nfn test_ssk_serialization_rand() {\n\n // a random field element\n\n let r = Fr::from_str(\n\n \"5902757315117623225217061455046442114914317855835382236847240262163311537283\",\n\n )\n\n .unwrap();\n\n let pp = PubParam::init_without_seed();\n\n // a random master secret key\n\n let mut alpha = pp.h();\n\n let msk = Fr::from_str(\n\n \"8010751325124863419913799848205334820481433752958938231164954555440305541353\",\n\n )\n\n .unwrap();\n\n alpha.mul_assign(msk);\n\n\n\n // generate a sub secret key\n\n let t = SubSecretKey::init(&pp, alpha, r);\n\n let bufsize = t.size();\n\n\n\n // buffer space\n", "file_path": "src/test/serdes.rs", "rank": 50, "score": 49142.166728362994 }, { "content": "#[test]\n\nfn test_quick_key_update() {\n\n let pp = PubParam::init_without_seed();\n\n let res = KeyPair::keygen(b\"this is a very very long seed for testing\", &pp);\n\n assert!(\n\n res.is_ok(),\n\n \"key gen failed\\n\\\n\n error message {:?}\",\n\n res.err()\n\n );\n\n let (pk, mut sk, pop) = res.unwrap();\n\n assert!(pk.validate(&pop));\n\n\n\n let seed = \"\";\n\n // update from 1 to j\n\n for j in 2..16 {\n\n let mut sk2 = sk.clone();\n\n let res = sk2.update(&pp, j, seed.as_ref());\n\n assert!(\n\n res.is_ok(),\n\n \"update failed\\n\\\n", "file_path": "src/test/keys.rs", "rank": 51, "score": 49142.166728362994 }, { "content": "#[test]\n\nfn test_signature_serialization_rand() {\n\n use SIG_LEN;\n\n let pp = PubParam::init_without_seed();\n\n let res = KeyPair::keygen(b\"this is a very very long seed for testing\", &pp);\n\n assert!(res.is_ok(), \"key gen failed\");\n\n let (pk, sk, pop) = res.unwrap();\n\n assert!(pk.validate(&pop));\n\n\n\n let msg = b\"message to sign\";\n\n let res = Signature::sign_bytes(&sk, 1, &pp, msg);\n\n assert!(res.is_ok(), \"signing failed\");\n\n let sig = res.unwrap();\n\n assert!(sig.verify_bytes(&pk, &pp, msg), \"verification failed\");\n\n\n\n // buffer space\n\n let mut buf: Vec<u8> = vec![];\n\n // serialize a ssk into buffer\n\n assert!(sig.serialize(&mut buf, true).is_ok());\n\n assert_eq!(buf.len(), SIG_LEN, \"length of blob is incorrect\");\n\n // deserialize a buffer into ssk\n\n let sig_recover = Signature::deserialize(&mut buf[..].as_ref(), true).unwrap();\n\n\n\n // makes sure that the keys match\n\n assert_eq!(sig, sig_recover);\n\n}\n\n\n", "file_path": "src/test/serdes.rs", "rank": 
52, "score": 49142.166728362994 }, { "content": "#[test]\n\nfn negative_test_key_gen() {\n\n let pp = PubParam::default();\n\n assert!(KeyPair::keygen(\"\".as_ref(), &pp).is_err());\n\n}\n\n\n", "file_path": "src/test/keys.rs", "rank": 53, "score": 49142.166728362994 }, { "content": "#[test]\n\nfn test_must_fails_seed() {\n\n let res = PubParam::init(b\"seed\", 0);\n\n res.expect_err(pixel_err::ERR_SEED_TOO_SHORT);\n\n\n\n let pp = PubParam::default();\n\n let res = KeyPair::keygen(b\"\", &pp);\n\n res.expect_err(pixel_err::ERR_SEED_TOO_SHORT);\n\n}\n\n\n", "file_path": "src/test/sig.rs", "rank": 54, "score": 49142.166728362994 }, { "content": "#[test]\n\nfn test_sk_serialization_rand() {\n\n let pp = PubParam::init_without_seed();\n\n let res = KeyPair::keygen(b\"this is a very very long seed for testing\", &pp);\n\n assert!(\n\n res.is_ok(),\n\n \"key gen failed\\n\\\n\n error message {:?}\",\n\n res.err()\n\n );\n\n let (pk, sk, pop) = res.unwrap();\n\n assert!(pk.validate(&pop));\n\n\n\n let bufsize = sk.size();\n\n let estsize = SecretKey::estimate_size(1, pp.depth());\n\n assert_eq!(\n\n bufsize,\n\n estsize.unwrap(),\n\n \"estimated size doesn't match the actual size\"\n\n );\n\n\n", "file_path": "src/test/serdes.rs", "rank": 55, "score": 49142.166728362994 }, { "content": "#[test]\n\nfn test_pixel_ffi_aggregation() {\n\n let seed1 = \"This is a very very long seed for testing\";\n\n let seed2 = \"This is another very very long seed for testing\";\n\n let msg = \"message to sign\";\n\n\n\n unsafe {\n\n // generate key pairs\n\n let kp1 = c_keygen(seed1.as_ptr(), seed1.len());\n\n let kp2 = c_keygen(seed2.as_ptr(), seed2.len());\n\n // generate signatures\n\n let sig1 = c_sign_present(kp1.sk, msg.as_ptr(), msg.len(), 1);\n\n let sig2 = c_sign_present(kp2.sk, msg.as_ptr(), msg.len(), 1);\n\n\n\n // aggregate then verify\n\n let agg_sig = c_aggregation([sig1, sig2].as_mut_ptr(), 2);\n\n assert!(c_verify_agg(\n\n [kp1.pk, kp2.pk].as_mut_ptr(),\n\n 2,\n\n msg.as_ptr(),\n\n msg.len(),\n\n agg_sig\n\n ));\n\n }\n\n}\n", "file_path": "src/test/ffi.rs", "rank": 56, "score": 49142.166728362994 }, { "content": "#[test]\n\nfn test_must_fails_mismatch() {\n\n let pp1 = PubParam::default();\n\n let pp2 = pp1.clone();\n\n let pp3 = PubParam::init(b\"this is a very very long seed for testing\", 0).unwrap();\n\n\n\n let msg1 = b\"message to sign\";\n\n let msg2 = b\"anther message to sign\";\n\n\n\n let (pk1, sk1, pop1) =\n\n KeyPair::keygen(b\"this is a very very long seed for testing\", &pp1).unwrap();\n\n\n\n let (pk2, sk2, pop2) =\n\n KeyPair::keygen(b\"this is another very very long seed for testing\", &pp2).unwrap();\n\n\n\n let (pk3, _sk3, pop3) =\n\n KeyPair::keygen(b\"this is a third very very long seed for testing\", &pp3).unwrap();\n\n\n\n // use popi to validate pkj with i != j\n\n assert!(pk1.validate(&pop1));\n\n assert!(!pk1.validate(&pop2));\n", "file_path": "src/test/sig.rs", "rank": 57, "score": 49142.166728362994 }, { "content": "#[test]\n\nfn test_g2_serialization_rand() {\n\n // PixelG2::zero, compressed\n\n let g2_zero = PixelG2::zero();\n\n let mut buf: Vec<u8> = vec![];\n\n // serialize a PixelG2 element into buffer\n\n assert!(g2_zero.serialize(&mut buf, true).is_ok());\n\n assert_eq!(buf.len(), 48, \"length of blob is incorrect\");\n\n let g2_zero_recover = PixelG2::deserialize(&mut buf[..].as_ref(), true).unwrap();\n\n assert_eq!(g2_zero, g2_zero_recover);\n\n\n\n // PixelG2::one, compressed\n\n let g2_one = PixelG2::one();\n\n let mut buf: Vec<u8> = vec![];\n\n // 
serialize a PixelG2 element into buffer\n\n assert!(g2_one.serialize(&mut buf, true).is_ok());\n\n assert_eq!(buf.len(), 48, \"length of blob is incorrect\");\n\n let g2_one_recover = PixelG2::deserialize(&mut buf[..].as_ref(), true).unwrap();\n\n assert_eq!(g2_one, g2_one_recover);\n\n\n\n // PixelG2::zero, uncompressed\n", "file_path": "src/test/serdes.rs", "rank": 58, "score": 49142.166728362994 }, { "content": "#[test]\n\nfn test_pop_serialization_kat() {\n\n // correct format of pops\n\n for &val in &VALID_G2_POINTS[0..2] {\n\n for &csid in &VALID_CIPHERSUITE {\n\n let tmp = [[csid].as_ref(), val].concat();\n\n let res = ProofOfPossession::deserialize(&mut Cursor::new(tmp), true);\n\n assert!(res.is_ok(), \"expected Ok, got Err: {:?}\", res.err());\n\n }\n\n }\n\n\n\n // incorrect compressness of pops\n\n for &csid in &VALID_CIPHERSUITE {\n\n let tmp = [[csid].as_ref(), &VALID_G2_POINTS[2]].concat();\n\n let res = ProofOfPossession::deserialize(&mut Cursor::new(tmp), true);\n\n assert!(res.is_err(), \"expected Err, got Ok: {:?}\", res.ok());\n\n }\n\n\n\n // incorrect format\n\n for &inval in &INVALID_G2_POINTS[..] {\n\n for &csid in &VALID_CIPHERSUITE {\n", "file_path": "src/test/serdes.rs", "rank": 59, "score": 49142.166728362994 }, { "content": "#[ignore]\n\n#[test]\n\nfn test_long_signature_tests() {\n\n let pp = PubParam::init_without_seed();\n\n let res = KeyPair::keygen(b\"this is a very very long seed for testing\", &pp);\n\n assert!(res.is_ok(), \"key gen failed\");\n\n let (pk, sk, pop) = res.unwrap();\n\n assert!(pk.validate(&pop));\n\n\n\n let msg = b\"message to sign\";\n\n let res = Signature::sign_bytes(&sk, 1, &pp, msg);\n\n assert!(res.is_ok(), \"signing failed\");\n\n let sig = res.unwrap();\n\n assert!(sig.verify_bytes(&pk, &pp, msg), \"verification failed\");\n\n\n\n let seed = \"\";\n\n // this double loop\n\n // 1. performs key updates with all possible `start_time` and `finish_time`\n\n // 2. for each updated key, check the validity of its subkeys (with --long_tests flag)\n\n // 3. 
check that the signature generated from dedicated keys can be verified\n\n for j in 2..16 {\n\n println!(\"delegate to time {}\", j);\n", "file_path": "src/test/sig.rs", "rank": 60, "score": 49142.166728362994 }, { "content": "#[test]\n\nfn test_pk_serialization_kat() {\n\n // correct format of pks\n\n for &val in &[VALID_G1_ZERO_COM, VALID_G1_COM] {\n\n for &csid in &VALID_CIPHERSUITE {\n\n let tmp = [[csid].as_ref(), val].concat();\n\n let res = PublicKey::deserialize(&mut Cursor::new(tmp), true);\n\n assert!(res.is_ok(), \"expected Ok, got Err: {:?}\", res.err());\n\n }\n\n }\n\n for &csid in &VALID_CIPHERSUITE {\n\n let tmp = [[csid].as_ref(), VALID_G1_ZERO_UNCOM].concat();\n\n let res = PublicKey::deserialize(&mut Cursor::new(tmp), false);\n\n assert!(res.is_ok(), \"expected Ok, got Err: {:?}\", res.err());\n\n }\n\n\n\n // incorrect compressness\n\n for &val in &[VALID_G1_ZERO_COM, VALID_G1_COM] {\n\n for &csid in &VALID_CIPHERSUITE {\n\n let tmp = [[csid].as_ref(), val].concat();\n\n let res = PublicKey::deserialize(&mut Cursor::new(tmp), false);\n", "file_path": "src/test/serdes.rs", "rank": 61, "score": 49142.166728362994 }, { "content": "#[test]\n\nfn test_quick_aggregated_signature_tests() {\n\n let pp = PubParam::init_without_seed();\n\n\n\n let mut sklist: Vec<SecretKey> = vec![];\n\n let mut pklist: Vec<PublicKey> = vec![];\n\n let mut siglist: Vec<Signature> = vec![];\n\n for i in 0..10 {\n\n let key_gen_seed = format!(\"this is a very very long seed for testing #{}\", i);\n\n\n\n let res = KeyPair::keygen(key_gen_seed.as_ref(), &pp);\n\n assert!(res.is_ok(), \"key gen failed\");\n\n let (pk, sk, pop) = res.unwrap();\n\n assert!(pk.validate(&pop));\n\n sklist.push(sk);\n\n pklist.push(pk);\n\n }\n\n\n\n let msg = b\"message to sign\";\n\n\n\n // generate 10 signatures on a same message\n", "file_path": "src/test/sig.rs", "rank": 62, "score": 47991.4761528341 }, { "content": "#[test]\n\n#[ignore]\n\nfn test_long_aggregated_signature_tests() {\n\n let pp = PubParam::init_without_seed();\n\n\n\n let mut sklist: Vec<SecretKey> = vec![];\n\n let mut pklist: Vec<PublicKey> = vec![];\n\n let mut siglist: Vec<Signature> = vec![];\n\n for i in 0..10 {\n\n let key_gen_seed = format!(\"this is a very very long seed for testing #{}\", i);\n\n\n\n let res = KeyPair::keygen(key_gen_seed.as_ref(), &pp);\n\n assert!(res.is_ok(), \"key gen failed\");\n\n let (pk, sk, pop) = res.unwrap();\n\n assert!(pk.validate(&pop));\n\n sklist.push(sk);\n\n pklist.push(pk);\n\n }\n\n\n\n let msg = b\"message to sign\";\n\n\n\n // generate 10 signatures on a same message\n", "file_path": "src/test/sig.rs", "rank": 63, "score": 47991.4761528341 }, { "content": "#[test]\n\nfn test_must_fails_time_stamp() {\n\n let pp = PubParam::default();\n\n\n\n let msg = b\"message to sign\";\n\n\n\n let (_pk, mut sk, _pop) =\n\n KeyPair::keygen(b\"this is a very very long seed for testing\", &pp).unwrap();\n\n let mut sk2 = sk.clone();\n\n\n\n // sign for an invalid time stamp\n\n let res = Signature::sign_present(&mut sk, 2, &pp, msg);\n\n res.expect_err(pixel_err::ERR_TIME_STAMP);\n\n\n\n assert!(sk2.update(&pp, 10, b\"\").is_ok());\n\n\n\n // sign for present\n\n let res = Signature::sign_present(&mut sk2, 0, &pp, msg);\n\n res.expect_err(pixel_err::ERR_TIME_STAMP);\n\n\n\n let res = Signature::sign_present(&mut sk2, 9, &pp, msg);\n", "file_path": "src/test/sig.rs", "rank": 64, "score": 47991.4761528341 }, { "content": "#[test]\n\nfn test_sig_serialization_kat_valid() {\n\n // correct format of signatures\n\n for 
&val1 in &VALID_G1_POINTS[0..2] {\n\n for &val2 in &VALID_G2_POINTS[0..2] {\n\n for &csid in &VALID_CIPHERSUITE {\n\n let time: [u8; 4] = [1, 2, 3, 4];\n\n let tmp = [[csid].as_ref(), time.as_ref(), val1, val2].concat();\n\n let res = Signature::deserialize(&mut Cursor::new(tmp), true);\n\n assert!(res.is_ok(), \"expected Ok, got Err: {:?}\", res.err());\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/test/serdes.rs", "rank": 65, "score": 47991.4761528341 }, { "content": "#[test]\n\nfn test_sig_serialization_kat_invalid_points() {\n\n // incorrect G1 points\n\n for &inval1 in &INVALID_G1_POINTS[..] {\n\n for &val2 in &VALID_G2_POINTS[..] {\n\n for &csid in &VALID_CIPHERSUITE {\n\n let time: [u8; 4] = [1, 2, 3, 4];\n\n let tmp = [[csid].as_ref(), time.as_ref(), inval1, val2].concat();\n\n let res = Signature::deserialize(&mut Cursor::new(tmp), true);\n\n assert!(res.is_err(), \"expected Err, got Ok: {:?}\", res.ok())\n\n }\n\n }\n\n }\n\n\n\n // incorrect G2 points\n\n for &val1 in &VALID_G1_POINTS[..] {\n\n for &inval2 in &INVALID_G2_POINTS[..] {\n\n for &csid in &VALID_CIPHERSUITE {\n\n let time: [u8; 4] = [1, 2, 3, 4];\n\n let tmp = [[csid].as_ref(), time.as_ref(), val1, inval2].concat();\n\n let res = Signature::deserialize(&mut Cursor::new(tmp), true);\n", "file_path": "src/test/serdes.rs", "rank": 66, "score": 46936.31098287842 }, { "content": "// This function generates test vectors for pixel signature scheme.\n\n// * data in _plain.txt files are stored in plain mode\n\n// * data in _bin.txt files are stored in serialized mode\n\nfn main() -> std::io::Result<()> {\n\n // default parameter\n\n let pp = Pixel::param_default();\n\n let mut file = File::create(\"test_vector/param_plain.txt\")?;\n\n file.write_all(format!(\"{:?}\", pp).as_ref())?;\n\n let mut file = File::create(\"test_vector/param_bin.txt\")?;\n\n pp.serialize(&mut file, false)?;\n\n\n\n // the default seed to generate the keys is\n\n // \"this is a very long seed for pixel tests\"\n\n let seed = \"this is a very long seed for pixel tests\";\n\n let rngseed = \"\";\n\n let timestamp = 1;\n\n let (pk, mut sk, pop) = Pixel::key_gen(seed, &pp).unwrap();\n\n let sk2 = sk.clone();\n\n\n\n let mut file = File::create(\"test_vector/pk_plain.txt\")?;\n\n file.write_all(format!(\"{:?}\", pk).as_ref())?;\n\n let mut file = File::create(\"test_vector/pk_bin.txt\")?;\n\n pk.serialize(&mut file, true)?;\n", "file_path": "test_vector/src/main.rs", "rank": 67, "score": 40912.55997982012 }, { "content": "/// this is pixel's Octect String to Integer Primitive (os2ip) function\n\n/// https://tools.ietf.org/html/rfc8017#section-4\n\n/// the input is a 64 bytes array, and the output is between 0 and p-1\n\n/// i.e., it performs mod operation by default.\n\npub fn os2ip_mod_p(oct_str: &[u8]) -> Fr {\n\n // \"For the purposes of this document, and consistent with ASN.1 syntax,\n\n // an octet string is an ordered sequence of octets (eight-bit bytes).\n\n // The sequence is indexed from first (conventionally, leftmost) to last\n\n // (rightmost). For purposes of conversion to and from integers, the\n\n // first octet is considered the most significant in the following\n\n // conversion primitives.\n\n //\n\n // OS2IP converts an octet string to a nonnegative integer.\n\n // OS2IP (X)\n\n // Input: X octet string to be converted\n\n // Output: x corresponding nonnegative integer\n\n // Steps:\n\n // 1. Let X_1 X_2 ... 
X_xLen be the octets of X from first to last,\n\n // and let x_(xLen-i) be the integer value of the octet X_i for 1\n\n // <= i <= xLen.\n\n // 2. Let x = x_(xLen-1) 256^(xLen-1) + x_(xLen-2) 256^(xLen-2) +\n\n // ... + x_1 256 + x_0.\n\n // 3. Output x. \"\n\n\n", "file_path": "src/prng.rs", "rank": 68, "score": 38391.11128537562 }, { "content": "/// This function hashes a message into a field element\n\n/// by returning sha512(DOM_SEP_HASH_TO_MSG | ciphersuite | msg) % p\n\nfn hash_msg_into_fr(msg: &[u8], ciphersuite: u8) -> Fr {\n\n use domain_sep::DOM_SEP_HASH_TO_MSG;\n\n // output sha512(DOM_SEP_HASH_TO_MSG | ciphersuite | msg) % p\n\n // DOM_SEP_HASH_TO_MSG: domain seperator\n\n // msg: input message\n\n let m = [DOM_SEP_HASH_TO_MSG.as_bytes(), [ciphersuite].as_ref(), msg].concat();\n\n let mut hasher = sha2::Sha512::new();\n\n hasher.input(m);\n\n // obtain the output\n\n let hashresult = hasher.result();\n\n prng::os2ip_mod_p(&hashresult)\n\n}\n\n\n\nimpl fmt::Debug for Signature {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(\n\n f,\n\n \"================================\\n\\\n\n ==========Signature======\\n\\\n\n sigma1 : {:#?}\\n\\\n", "file_path": "src/sig.rs", "rank": 69, "score": 36207.33317786807 }, { "content": "def prng_sample(prng_seed, info):\n\n hashinput = hkdf.hkdf_expand(prng_seed, info, 64)\n\n\n\n # Riad:\n\n # \"The issue is that the Python interface is slightly different than the\n\n # Rust one. In particular, the Python hash_to_field function does not\n\n # automatically inject a ciphersuite string, whereas the Rust interface\n\n # you're using does.\"\n\n\n\n # Inject \\0 for ciphersuite so that the Hr function matches rust's\n\n # hash_to_field\n\n r = OS2IP(hashinput) % q\n", "file_path": "pixel-python/prng.py", "rank": 70, "score": 26218.025819864335 }, { "content": "def time_to_vec(time, depth):\n\n if time == 1:\n\n return []\n\n if depth > 0 and time > pow(2, depth - 1):\n\n return [2] + time_to_vec(time - pow(2,depth - 1), depth - 1)\n\n else:\n", "file_path": "pixel-python/keyupdate.py", "rank": 71, "score": 26195.651261161955 }, { "content": "def vec_to_time(tvec, depth):\n\n if tvec == []:\n\n return 1\n\n else:\n\n ti = tvec.pop(0)\n", "file_path": "pixel-python/keyupdate.py", "rank": 72, "score": 26195.651261161955 }, { "content": "def prng_sample_then_update(prng_seed, info):\n\n key = hkdf.hkdf_expand(prng_seed, info, 128)\n\n hashinput = key[:64]\n\n new_prng_seed = key[64:]\n\n\n\n # Riad:\n\n # \"The issue is that the Python interface is slightly different than the\n\n # Rust one. 
In particular, the Python hash_to_field function does not\n\n # automatically inject a ciphersuite string, whereas the Rust interface\n\n # you're using does.\"\n\n\n\n # Inject \\0 for ciphersuite so that the Hr function matches rust's\n\n # hash_to_field\n\n\n\n\n\n r = OS2IP(hashinput) % q\n", "file_path": "pixel-python/prng.py", "rank": 73, "score": 25129.771686636395 }, { "content": "\n\n // benchmark sk update to next time slot\n\n let message = format!(\n\n \"sk update from {} to {}\",\n\n sklist[0].time(),\n\n sklist[0].time() + 1\n\n );\n\n let rngseed = \"\";\n\n c.bench_function(&message, move |b| {\n\n let mut counter = 0;\n\n b.iter(|| {\n\n let mut sknew = sklist[counter].clone();\n\n let tar_time = sknew.time() + 1;\n\n let res = Pixel::sk_update(&mut sknew, tar_time, &param, rngseed);\n\n assert!(res.is_ok(), res.err());\n\n counter = (counter + 1) % SAMPLES;\n\n })\n\n });\n\n}\n\n\n\ncriterion_group!(sk_ops, bench_sk_update_leaf);\n\ncriterion_group!(sk_ops_slow, bench_sk_update_seq);\n", "file_path": "benches/benchmarks/bench_sk_ops.rs", "rank": 79, "score": 14.719346543420457 }, { "content": " let time = rand::thread_rng().gen_range(0u64, max_time - 2);\n\n assert!(Pixel::sk_update(&mut sk, time, &param, rngseed).is_ok());\n\n sklist.push(sk);\n\n }\n\n\n\n // benchmarking\n\n let mut counter = 0;\n\n c.bench_function(\"sk at random time, sign a random future\", move |b| {\n\n b.iter(|| {\n\n let mut sknew = sklist[counter].clone();\n\n // the target time will be random between current time + 1 and max time\n\n let tar_time = rand::thread_rng().gen_range(sknew.time() + 1, max_time - 1);\n\n assert!(Pixel::sign(&mut sknew, tar_time, &param, msg, rngseed).is_ok());\n\n counter = (counter + 1) % SAMPLES;\n\n })\n\n });\n\n}\n\n\n\n/// benchmark sign at a random present time\n", "file_path": "benches/benchmarks/bench_api.rs", "rank": 80, "score": 14.144828251703553 }, { "content": "## SubSecretKey\n\n* Structure\n\n ``` rust\n\n struct SubSecretKey {\n\n\n\n time: TimeStamp, // timestamp for the current subkey\n\n g2r: PixelG2, // randomization on g2: g2^r\n\n hpoly: PixelG1, // h^{alpha + f(x) r}\n\n hvector: Vec<PixelG1>, // the randomization of the public parameter hlist\n\n // excluding 0 elements\n\n }\n\n ```\n\n* Construct a sub secret key object from some input:\n\n ``` rust\n\n fn new(time: TimeStamp, g2r: PixelG2, hpoly: PixelG1, hvector: Vec<PixelG1>) -> SubSecretKey;\n\n ```\n\n* Get various elements from the sub secret key:\n\n ``` rust\n\n fn time(&self) -> TimeStamp;\n\n // Returns the time vector associated with the time stamp.\n\n fn time_vec(&self, depth: usize) -> Result<TimeVec, String>; \n\n fn g2r(&self) -> PixelG2;\n\n fn hpoly(&self) -> PixelG1;\n\n fn hvector(&self) -> Vec<PixelG1>;\n\n // Returns the last coefficient of the h_vector.\n\n fn last_hvector_coeff(&self) -> Result<PixelG1, String>;\n\n ```\n\n\n\n* Serialization:\n\n * Each ssk is a blob:\n\n`| time stamp | hv_length | serial(g2r) | serial(hpoly) | serial(h0) ... 
| serial(ht) |`\n\n ``` rust\n\n fn size(&self) -> usize; // get the storage requirement\n\n fn serialize<W: Write>(&self, writer: &mut W, compressed: bool) -> Result<()>;\n\n fn deserialize<R: Read>(reader: &mut R) -> Result<(SubSecretKey, bool)>;\n\n ```\n\n * The `reader` and `writer` is assumed\n\n to have allocated sufficient memory, or an error will be returned.\n\n * `Deserialize` function will also return a flag that wether the `reader` is\n\n in compressed format or not.\n\n The compressed flag can be `true` or `false`. However, the flag needs to\n", "file_path": "spec.md", "rank": 81, "score": 13.839990972356038 }, { "content": " // let mut counter = 0;\n\n // b.iter(|| {\n\n // let sknew = sklist_clone[counter].clone();\n\n // let _res = sknew.to_bytes();\n\n // counter = (counter + 1) % SAMPLES;\n\n // })\n\n // });\n\n\n\n // benchmark time to generate a digest\n\n let message = format!(\"sk digest for sk at time {}\", sklist[0].time(),);\n\n let sklist_clone = sklist.clone();\n\n c.bench_function(&message, move |b| {\n\n let mut counter = 0;\n\n b.iter(|| {\n\n let sknew = sklist_clone[counter].clone();\n\n let res = sknew.digest();\n\n assert!(res.is_ok(), res.err());\n\n counter = (counter + 1) % SAMPLES;\n\n })\n\n });\n", "file_path": "benches/benchmarks/bench_sk_ops.rs", "rank": 82, "score": 13.673704198427279 }, { "content": "\n\n // benchmarking\n\n let mut counter = 0;\n\n c.bench_function(\"verifying signature\", move |b| {\n\n b.iter(|| {\n\n let res = Pixel::verify(&pklist[counter], &param, msg, &siglist[counter]);\n\n assert!(res, \"verification failed\");\n\n counter = (counter + 1) % SAMPLES;\n\n })\n\n });\n\n}\n\n\n\n/// benchmark aggregation and batch verification\n", "file_path": "benches/benchmarks/bench_api.rs", "rank": 83, "score": 13.62208724039076 }, { "content": " assert!(Pixel::sk_update(&mut sk, time, &param, rngseed).is_ok());\n\n sklist.push(sk);\n\n }\n\n\n\n // benchmarking\n\n let mut counter = 0;\n\n c.bench_function(\"sk update to random future\", move |b| {\n\n b.iter(|| {\n\n let mut sknew = sklist[counter].clone();\n\n // the target time will be random between current time + 1 and max time\n\n let tar_time = rand::thread_rng().gen_range(sknew.time() + 1, max_time - 1);\n\n let res = Pixel::sk_update(&mut sknew, tar_time, &param, rngseed);\n\n assert!(res.is_ok(), res.err());\n\n counter = (counter + 1) % SAMPLES;\n\n })\n\n });\n\n}\n\n\n\n/// benchmark sign at a random present/future time\n", "file_path": "benches/benchmarks/bench_api.rs", "rank": 84, "score": 13.605153678904895 }, { "content": " // sign at a random time\n\n let time = rand::thread_rng().gen_range(0u64, max_time - 2);\n\n let res = Pixel::sign(&mut sk, time, &param, msg, rngseed);\n\n assert!(res.is_ok(), res.err());\n\n // pack the signature, time, and public key\n\n pklist.push(pk);\n\n siglist.push(res.unwrap());\n\n poplist.push(pop);\n\n }\n\n\n\n // benchmarking\n\n let pklist_clone = pklist.clone();\n\n let mut counter = 0;\n\n c.bench_function(\"verifying POP\", move |b| {\n\n b.iter(|| {\n\n let res = pklist_clone[counter].validate(&poplist[counter]);\n\n assert!(res, \"verification failed\");\n\n counter = (counter + 1) % SAMPLES;\n\n })\n\n });\n", "file_path": "benches/benchmarks/bench_api.rs", "rank": 85, "score": 13.379774704429927 }, { "content": "\n\n/// This module defines interfaces to C.\n\npub(crate) mod ffi;\n\n\n\n/// The size of pk is 49 when PK is in G1. 
1 byte for ciphersuite ID\n\n/// and 48 byte for group element.\n\npub(crate) const PK_LEN: usize = 49;\n\n\n\n/// The Signature size is 149.\n\n/// 1 byte for ciphersuite ID, 4 bytes for time stamp,\n\n/// 48+96 bytes for two group elements.\n\npub(crate) const SIG_LEN: usize = 149;\n\n\n\n/// The size of pop is 97: 1 byte for ciphersuite ID\n\n/// and 96 byte for bls signature in G2.\n\npub(crate) const POP_LEN: usize = 97;\n\n\n\n// Expose this constant.\n\npub use param::{PixelG1, PixelG2, PubParam, SerDes, CONST_D};\n\npub use pop::ProofOfPossession;\n\npub use public_key::PublicKey;\n\npub use secret_key::SecretKey;\n\npub use sig::Signature;\n\npub use subkeys::SubSecretKey;\n\npub use time::{TimeStamp, TimeVec};\n\n\n\n/// Pixel is a trait that implements the algorithms within the pixel signature scheme.\n", "file_path": "src/lib.rs", "rank": 86, "score": 12.700543796705123 }, { "content": " assert!(Pixel::sk_update(&mut sk, time, &param, rngseed).is_ok());\n\n sklist.push(sk);\n\n }\n\n\n\n // benchmarking\n\n let mut counter = 0;\n\n c.bench_function(\"sk update to next time stamp\", move |b| {\n\n b.iter(|| {\n\n let mut sknew = sklist[counter].clone();\n\n let tar_time = sknew.time() + 1;\n\n let res = Pixel::sk_update(&mut sknew, tar_time, &param, rngseed);\n\n assert!(res.is_ok(), res.err());\n\n counter = (counter + 1) % SAMPLES;\n\n })\n\n });\n\n}\n\n\n\n/// benchmark key update: update to the a random time stamp\n", "file_path": "benches/benchmarks/bench_api.rs", "rank": 87, "score": 12.420409156746468 }, { "content": " assert!(Pixel::sk_update(&mut sk, time, &param, rngseed).is_ok());\n\n sklist.push(sk);\n\n }\n\n\n\n // benchmarking\n\n let mut counter = 0;\n\n c.bench_function(\"sk at random time, sign for present\", move |b| {\n\n b.iter(|| {\n\n let mut sknew = sklist[counter].clone();\n\n let tar_time = sknew.time();\n\n let res = Pixel::sign_present(&mut sknew, tar_time, &param, msg);\n\n assert!(res.is_ok(), res.err());\n\n counter = (counter + 1) % SAMPLES;\n\n })\n\n });\n\n}\n\n\n\n/// benchmark sign at a random present time then update to next time stamp\n", "file_path": "benches/benchmarks/bench_api.rs", "rank": 88, "score": 12.283500080199918 }, { "content": "\n\n // PixelG1::zero, uncompressed\n\n let mut buf: Vec<u8> = vec![];\n\n // serialize a PixelG1 element into buffer\n\n assert!(g1_zero.serialize(&mut buf, false).is_ok());\n\n assert_eq!(buf.len(), 192, \"length of blob is incorrect\");\n\n let g1_zero_recover = PixelG1::deserialize(&mut buf[..].as_ref(), false).unwrap();\n\n\n\n assert_eq!(g1_zero, g1_zero_recover);\n\n\n\n // PixelG1::one, uncompressed\n\n let mut buf: Vec<u8> = vec![];\n\n // serialize a PixelG1 element into buffer\n\n assert!(g1_one.serialize(&mut buf, false).is_ok());\n\n assert_eq!(buf.len(), 192, \"length of blob is incorrect\");\n\n let g1_one_recover = PixelG1::deserialize(&mut buf[..].as_ref(), false).unwrap();\n\n\n\n assert_eq!(g1_one, g1_one_recover);\n\n}\n\n\n", "file_path": "src/test/serdes.rs", "rank": 89, "score": 12.271346137066029 }, { "content": "use super::pixel::TimeVec;\n\nuse criterion::Criterion;\n\n\n\n/// benchmark gamma list generation\n\n#[allow(dead_code)]\n", "file_path": "benches/benchmarks/bench_time.rs", "rank": 90, "score": 12.196902028004189 }, { "content": " assert!(Pixel::sk_update(&mut sk, time, &param, rngseed).is_ok());\n\n sklist.push(sk);\n\n }\n\n\n\n // benchmarking\n\n let mut counter = 0;\n\n c.bench_function(\"sk at random time, sign then update\", move |b| {\n\n b.iter(|| {\n\n let 
mut sknew = sklist[counter].clone();\n\n let tar_time = sknew.time();\n\n let res = Pixel::sign_then_update(&mut sknew, tar_time, &param, msg, rngseed);\n\n assert!(res.is_ok(), res.err());\n\n counter = (counter + 1) % SAMPLES;\n\n // check that the time stamp has advanced by 1\n\n assert_eq!(sknew.time(), tar_time + 1);\n\n })\n\n });\n\n}\n\n\n\n/// benchmark verification at a random time\n", "file_path": "benches/benchmarks/bench_api.rs", "rank": 92, "score": 11.892731518314442 }, { "content": " }\n\n let rngseed = \"\";\n\n for i in 0..64 {\n\n // clone sk and param for benchmarking\n\n let sklist_clone = sklist.clone();\n\n let param_clone = param.clone();\n\n let message = format!(\n\n \"sk update from {} to {}\",\n\n sklist_clone[i].time(),\n\n sklist_clone[i].time() + 1\n\n );\n\n // benchmark sk update\n\n c.bench_function(&message, move |b| {\n\n let mut counter = 0;\n\n b.iter(|| {\n\n let mut sknew = sklist_clone[counter].clone();\n\n let tar_time = sknew.time() + 1;\n\n let res = Pixel::sk_update(&mut sknew, tar_time, &param_clone, rngseed);\n\n assert!(res.is_ok(), res.err());\n\n counter = (counter + 1) % SAMPLES;\n", "file_path": "benches/benchmarks/bench_sk_ops.rs", "rank": 93, "score": 11.877542896258888 }, { "content": "use super::pixel::Pixel;\n\nuse super::pixel::PixelSignature;\n\nuse super::pixel::SecretKey;\n\nuse super::rand::Rng;\n\nuse criterion::Criterion;\n\n\n\n/// benchmark secret key update sequentially\n\n#[allow(dead_code)]\n", "file_path": "benches/benchmarks/bench_sk_ops.rs", "rank": 94, "score": 11.746243747944291 }, { "content": "// a module for sub secret keys and related functions\n\n\n\nuse ff::Field;\n\nuse pairing::{bls12_381::*, CurveAffine, CurveProjective, Engine};\n\nuse param::PubParam;\n\nuse pixel_err::*;\n\nuse public_key::PublicKey;\n\nuse std::fmt;\n\nuse time::{TimeStamp, TimeVec};\n\nuse zeroize::*;\n\nuse PixelG1;\n\nuse PixelG2;\n\n\n\n/// Each SubSecretKey consists of ...\n\n/// * time: the time stamp for the current key\n\n/// * g1r: the randomization on G1\n\n/// * h0poly: h0^{alpha + f(x) r}\n\n/// * hlist: the randomization of the public parameter hlist\n\n#[derive(Clone, PartialEq, Default, Zeroize)]\n\n#[zeroize(drop)]\n", "file_path": "src/subkeys.rs", "rank": 95, "score": 11.731666868763021 }, { "content": "## Time\n\n* TimeStamp is a wrapper of `u64`.\n\n ``` Rust\n\n type TimeStamp = u64;\n\n ```\n\n* Structure \n\n ``` rust\n\n struct TimeVec {\n\n time: TimeStamp, // the actual time stamp, for example 2\n\n vec: Vec<u64>, // the path to this time stamp, for example [1, 1]\n\n }\n\n ```\n\n* Get various elements from the TimeVec\n\n ``` rust\n\n fn time(&self) -> TimeStamp ;\n\n fn vector(&self) -> Vec<u64> ;\n\n fn vector_len(&self) -> usize ;\n\n ```\n\n* Additional functionalities\n\n ``` Rust\n\n fn gamma_list(&self, depth: usize) -> Result<Vec<TimeVec>, String> ;\n\n ```\n\n It converts a time vector to a list of time vectors,\n\n so that any future time stamp greater than self is either with in the gamma list,\n\n or is a posterity of the elements in the list.\n\n And propagates error messages if the conversion fails.\n\n\n\n Example: for time vector `[1, 1, 1]` and `depth = 4`, the list consists\n\n `[1,1,1], [1,1,2], [1,2], [2]`.\n\n\n\n\n\n## Pseudo random generators\n\nNOTE: this part of the spec is NOT documented in the paper.\n\nIt is the result of a serial of internal discussion.\n\n\n\n* Structure\n\n ``` Rust\n\n struct PRNG([u8; 64]); // PRNG is a wrapper of a 64 bytes array\n\n ```\n\n\n\n* 
Initialization\n\n ``` Rust\n\n // initialize the prng with a seed and a salt\n\n fn init<Blob: AsRef<[u8]>>(seed: Blob, salt: Blob) -> PRNG;\n\n\n\n ```\n\n * Input: a seed of adequate length\n\n * Input: a (public) salt\n\n * Output: a PRNG\n\n * Steps:\n\n 1. `m = HKDF-SHA512-extract(seed, salt)`\n\n 2. `return PRNG(m)`\n\n\n\n* Sample and update\n\n ``` rust\n\n // sample a field element from PRNG, and update the internal state\n\n fn sample_then_update<Blob: AsRef<[u8]>>(&mut self, info: Blob) -> Fr;\n\n ```\n\n * Input: the prng\n\n * Input: public info\n\n * Output: a field element\n\n * Output: update self's state\n\n * Steps:\n\n 1. `tmp = HKDF-SHA512-expand(prng, info, 128)`\n\n 2. update self: `prng = PRNG(tmp[64:128])`\n", "file_path": "spec.md", "rank": 96, "score": 11.670657087408994 }, { "content": "use super::ff::Field;\n\nuse super::pairing::bls12_381::Fr;\n\nuse super::pixel::Pixel;\n\nuse super::pixel::PixelSignature;\n\nuse super::pixel::SubSecretKey;\n\nuse super::rand::Rng;\n\nuse super::rand_core::*;\n\nuse super::rand_xorshift::XorShiftRng;\n\nuse criterion::Criterion;\n\n\n\n/// benchmark sub secret key delegation - without randomization\n\n#[allow(dead_code)]\n", "file_path": "benches/benchmarks/bench_ssk_ops.rs", "rank": 97, "score": 11.647584466882872 }, { "content": "use super::pixel::Pixel;\n\nuse super::pixel::PixelSignature;\n\nuse super::pixel::{ProofOfPossession, PublicKey, SecretKey, Signature};\n\nuse super::rand::Rng;\n\nuse criterion::Criterion;\n\n\n\n/// benchmark parameter generation\n\n#[allow(dead_code)]\n", "file_path": "benches/benchmarks/bench_api.rs", "rank": 98, "score": 11.506049809245463 }, { "content": "## Secret Key\n\n\n\n* Structure\n\n ``` rust\n\n struct SecretKey {\n\n ciphersuite: u8, // ciphersuite id\n\n time: TimeStamp, // smallest timestamp for all subkeys\n\n ssk: Vec<SubSecretKey>, // the list of the subsecretkeys that are\n\n // stored chronologically based on time stamp\n\n rngseed: [u8; 64], // a seed that is used to generate the randomness\n\n // during key updating\n\n }\n\n ```\n\n* Construct a secret key object from some input:\n\n ``` rust\n\n fn new(ciphersuite: u8, time: TimeStamp, ssk: Vec<SubSecretKey>, prng: PRNG) -> SecretKey\n\n ```\n\n* Get various elements from the secret key:\n\n ``` rust\n\n fn ciphersuite(&self) -> u8;\n\n fn time(&self) -> TimeStamp;\n\n fn ssk_number(&self) -> usize; // the number of subsecretkeys\n\n fn first_ssk(&self) -> Result<SubSecretKey, String>; // the first ssk\n\n fn ssk_vec(&self) -> Vec<SubSecretKey>; // the whole ssk vector\n\n fn prng(&self) -> PRNG; // the seed\n\n ```\n\n* Serialization: \n\n * Each SecretKey is a blob of `|ciphersuite id| number_of_ssk-s | prng | serial(first ssk) | serial(second ssk)| ...`\n\n\n\n ``` rust\n\n fn size(&self) -> usize; // get the storage requirement\n\n fn serialize<W: Write>(&self, writer: &mut W, compressed: bool) -> Result<()>;\n\n fn deserialize<R: Read>(reader: &mut R) -> Result<(SecretKey, bool)>;\n\n ```\n\n * The `reader` and `writer` is assumed\n\n to have allocated sufficient memory, or an error will be returned.\n\n * `Deserialize` function will also return a flag that wether the `reader` is\n\n in compressed format or not.\n\n The compressed flag can be either `true` or `false`. However, the flag needs to\n\n be consistent for all its components.\n\n\n", "file_path": "spec.md", "rank": 99, "score": 11.304224622130935 } ]
Rust
src/signal.rs
Emilgardis/fundsp
213f5ae89ac454c791c731075c323b939dcc476c
use super::math::*; use num_complex::Complex64; use tinyvec::TinyVec; #[derive(Clone, Copy)] pub enum Signal { Unknown, Value(f64), Latency(f64), Response(Complex64, f64), } impl Default for Signal { fn default() -> Signal { Signal::Unknown } } impl Signal { pub fn filter(&self, latency: f64, filter: impl Fn(Complex64) -> Complex64) -> Signal { match self { Signal::Latency(l) => Signal::Latency(l + latency), Signal::Response(response, l) => Signal::Response(filter(*response), l + latency), _ => Signal::Unknown, } } pub fn distort(&self, latency: f64) -> Signal { match self { Signal::Latency(l) => Signal::Latency(l + latency), Signal::Response(_, l) => Signal::Latency(l + latency), _ => Signal::Unknown, } } pub fn delay(&self, latency: f64) -> Signal { match self { Signal::Latency(l) => Signal::Latency(l + latency), Signal::Response(response, l) => Signal::Response(*response, l + latency), x => *x, } } pub fn scale(&self, factor: f64) -> Signal { match self { Signal::Value(x) => Signal::Value(x * factor), Signal::Response(response, latency) => Signal::Response(response * factor, *latency), x => *x, } } pub fn combine_nonlinear(&self, other: Signal, latency: f64) -> Signal { match (self.distort(0.0), other.distort(0.0)) { (Signal::Latency(lx), Signal::Latency(ly)) => Signal::Latency(min(lx, ly) + latency), (Signal::Latency(lx), _) => Signal::Latency(lx + latency), (_, Signal::Latency(ly)) => Signal::Latency(ly + latency), _ => Signal::Unknown, } } pub fn combine_linear( &self, other: Signal, latency: f64, value: impl Fn(f64, f64) -> f64, response: impl Fn(Complex64, Complex64) -> Complex64, ) -> Signal { match (*self, other) { (Signal::Value(vx), Signal::Value(vy)) => Signal::Value(value(vx, vy)), (Signal::Latency(lx), Signal::Latency(ly)) => Signal::Latency(min(lx, ly) + latency), (Signal::Response(rx, lx), Signal::Response(ry, ly)) => { Signal::Response(response(rx, ry), min(lx, ly) + latency) } (Signal::Response(rx, lx), Signal::Value(_)) => { Signal::Response(response(rx, Complex64::new(0.0, 0.0)), lx + latency) } (Signal::Value(_), Signal::Response(ry, ly)) => { Signal::Response(response(Complex64::new(0.0, 0.0), ry), ly + latency) } (Signal::Response(_, lx), Signal::Latency(ly)) => { Signal::Latency(min(lx, ly) + latency) } (Signal::Latency(lx), Signal::Response(_, ly)) => { Signal::Latency(min(lx, ly) + latency) } (Signal::Latency(lx), _) => Signal::Latency(lx + latency), (Signal::Response(_, lx), _) => Signal::Latency(lx + latency), (_, Signal::Latency(ly)) => Signal::Latency(ly + latency), (_, Signal::Response(_, ly)) => Signal::Latency(ly + latency), _ => Signal::Unknown, } } } pub type SignalFrame = TinyVec<[Signal; 32]>; pub fn new_signal_frame(size: usize) -> SignalFrame { let mut frame = TinyVec::with_capacity(size); frame.resize(size, Signal::Unknown); frame } pub fn copy_signal_frame(source: &SignalFrame, i: usize, n: usize) -> SignalFrame { let mut frame = new_signal_frame(n); frame[0..n].copy_from_slice(&source[i..i + n]); frame } pub enum Routing { Arbitrary, Split, Join, } impl Routing { pub fn propagate(&self, input: &SignalFrame, outputs: usize) -> SignalFrame { let mut output = new_signal_frame(outputs); if input.is_empty() { return output; } match self { Routing::Arbitrary => { let mut combo = input[0].distort(0.0); for i in 1..input.len() { combo = combo.combine_nonlinear(input[i], 0.0); } output.fill(combo); } Routing::Split => { for i in 0..outputs { output[i] = input[i % input.len()]; } } Routing::Join => { let bundle = input.len() / output.len(); for i in 
0..outputs { let mut combo = input[i]; for j in 1..bundle { combo = combo.combine_linear( input[i + j * outputs], 0.0, |x, y| x + y, |x, y| x + y, ); } output[i] = combo.scale(output.len() as f64 / input.len() as f64); } } } output } }
use super::math::*; use num_complex::Complex64; use tinyvec::TinyVec; #[derive(Clone, Copy)] pub enum Signal { Unknown, Value(f64), Latency(f64), Response(Complex64, f64), } impl Default for Signal { fn default() -> Signal { Signal::Unknown } } impl Signal { pub fn filter(&self, latency: f64, filter: impl Fn(Complex64) -> Complex64) -> Signal { match self { Signal::Latency(l) => Signal::Latency(l + latency), Signal::Response(response, l) => Signal::Response(filter(*response), l + latency), _ => Signal::Unknown, } } pub fn distort(&self, latency: f64) -> S
pub fn delay(&self, latency: f64) -> Signal { match self { Signal::Latency(l) => Signal::Latency(l + latency), Signal::Response(response, l) => Signal::Response(*response, l + latency), x => *x, } } pub fn scale(&self, factor: f64) -> Signal { match self { Signal::Value(x) => Signal::Value(x * factor), Signal::Response(response, latency) => Signal::Response(response * factor, *latency), x => *x, } } pub fn combine_nonlinear(&self, other: Signal, latency: f64) -> Signal { match (self.distort(0.0), other.distort(0.0)) { (Signal::Latency(lx), Signal::Latency(ly)) => Signal::Latency(min(lx, ly) + latency), (Signal::Latency(lx), _) => Signal::Latency(lx + latency), (_, Signal::Latency(ly)) => Signal::Latency(ly + latency), _ => Signal::Unknown, } } pub fn combine_linear( &self, other: Signal, latency: f64, value: impl Fn(f64, f64) -> f64, response: impl Fn(Complex64, Complex64) -> Complex64, ) -> Signal { match (*self, other) { (Signal::Value(vx), Signal::Value(vy)) => Signal::Value(value(vx, vy)), (Signal::Latency(lx), Signal::Latency(ly)) => Signal::Latency(min(lx, ly) + latency), (Signal::Response(rx, lx), Signal::Response(ry, ly)) => { Signal::Response(response(rx, ry), min(lx, ly) + latency) } (Signal::Response(rx, lx), Signal::Value(_)) => { Signal::Response(response(rx, Complex64::new(0.0, 0.0)), lx + latency) } (Signal::Value(_), Signal::Response(ry, ly)) => { Signal::Response(response(Complex64::new(0.0, 0.0), ry), ly + latency) } (Signal::Response(_, lx), Signal::Latency(ly)) => { Signal::Latency(min(lx, ly) + latency) } (Signal::Latency(lx), Signal::Response(_, ly)) => { Signal::Latency(min(lx, ly) + latency) } (Signal::Latency(lx), _) => Signal::Latency(lx + latency), (Signal::Response(_, lx), _) => Signal::Latency(lx + latency), (_, Signal::Latency(ly)) => Signal::Latency(ly + latency), (_, Signal::Response(_, ly)) => Signal::Latency(ly + latency), _ => Signal::Unknown, } } } pub type SignalFrame = TinyVec<[Signal; 32]>; pub fn new_signal_frame(size: usize) -> SignalFrame { let mut frame = TinyVec::with_capacity(size); frame.resize(size, Signal::Unknown); frame } pub fn copy_signal_frame(source: &SignalFrame, i: usize, n: usize) -> SignalFrame { let mut frame = new_signal_frame(n); frame[0..n].copy_from_slice(&source[i..i + n]); frame } pub enum Routing { Arbitrary, Split, Join, } impl Routing { pub fn propagate(&self, input: &SignalFrame, outputs: usize) -> SignalFrame { let mut output = new_signal_frame(outputs); if input.is_empty() { return output; } match self { Routing::Arbitrary => { let mut combo = input[0].distort(0.0); for i in 1..input.len() { combo = combo.combine_nonlinear(input[i], 0.0); } output.fill(combo); } Routing::Split => { for i in 0..outputs { output[i] = input[i % input.len()]; } } Routing::Join => { let bundle = input.len() / output.len(); for i in 0..outputs { let mut combo = input[i]; for j in 1..bundle { combo = combo.combine_linear( input[i + j * outputs], 0.0, |x, y| x + y, |x, y| x + y, ); } output[i] = combo.scale(output.len() as f64 / input.len() as f64); } } } output } }
ignal { match self { Signal::Latency(l) => Signal::Latency(l + latency), Signal::Response(_, l) => Signal::Latency(l + latency), _ => Signal::Unknown, } }
function_block-function_prefixed
[ { "content": "#[inline]\n\npub fn shape_fn<S: Fn(f64) -> f64>(f: S) -> An<ShaperFn<f64, S>> {\n\n super::prelude::shape_fn(f)\n\n}\n\n\n\n/// Shape signal.\n", "file_path": "src/hacker.rs", "rank": 0, "score": 177753.0561621905 }, { "content": "#[inline]\n\npub fn declick_s(t: f64) -> An<Declick<f64, f64>> {\n\n super::prelude::declick_s(t)\n\n}\n\n\n\n/// Shape signal with a waveshaper function.\n", "file_path": "src/hacker.rs", "rank": 1, "score": 176387.5781320854 }, { "content": "#[inline]\n\npub fn pinkpass() -> An<Pinkpass<f64, f64>> {\n\n An(Pinkpass::new(DEFAULT_SR))\n\n}\n\n\n\n/// Pink noise.\n", "file_path": "src/hacker.rs", "rank": 2, "score": 174485.52776890155 }, { "content": "#[inline]\n\npub fn goertzel() -> An<Goertzel<f64, f64>> {\n\n An(Goertzel::new(DEFAULT_SR))\n\n}\n\n\n\n/// Frequency detector of frequency `f` Hz.\n", "file_path": "src/hacker.rs", "rank": 3, "score": 174485.52776890155 }, { "content": "#[inline]\n\npub fn declick() -> An<Declick<f64, f64>> {\n\n super::prelude::declick()\n\n}\n\n\n\n/// Apply `t` seconds of fade-in to signal at time zero.\n", "file_path": "src/hacker.rs", "rank": 4, "score": 174485.52776890155 }, { "content": "#[inline]\n\npub fn delay(t: f64) -> An<Delay<f64>> {\n\n An(Delay::new(t, DEFAULT_SR))\n\n}\n\n\n", "file_path": "src/hacker.rs", "rank": 5, "score": 171795.99184569682 }, { "content": "#[inline]\n\npub fn dcblock() -> An<DCBlock<f64, f64>> {\n\n dcblock_hz(10.0)\n\n}\n\n\n\n/// Apply 10 ms of fade-in to signal at time zero.\n", "file_path": "src/hacker.rs", "rank": 6, "score": 170855.25381169107 }, { "content": "#[inline]\n\npub fn pink() -> An<Pipe<f64, Noise<f64>, Pinkpass<f64, f64>>> {\n\n super::prelude::pink()\n\n}\n\n\n\n/// Brown noise.\n", "file_path": "src/hacker.rs", "rank": 7, "score": 170204.19990675763 }, { "content": "#[inline]\n\npub fn lowshelf() -> An<Svf<f64, f64, LowshelfMode<f64>>> {\n\n super::prelude::lowshelf()\n\n}\n\n\n\n/// Low shelf filter centered at `f` Hz with Q value `q` and amplitude gain `gain`.\n\n/// - Input 0: audio\n\n/// - Output 0: filtered audio\n", "file_path": "src/hacker.rs", "rank": 8, "score": 170123.54070124062 }, { "content": "#[inline]\n\npub fn dcblock_hz(c: f64) -> An<DCBlock<f64, f64>> {\n\n An(DCBlock::new(DEFAULT_SR, c))\n\n}\n\n\n\n/// Keeps a signal zero centered.\n", "file_path": "src/hacker.rs", "rank": 9, "score": 170123.54070124062 }, { "content": "#[inline]\n\npub fn highpass() -> An<Svf<f64, f64, HighpassMode<f64>>> {\n\n super::prelude::highpass()\n\n}\n\n\n\n/// Highpass filter with cutoff frequency `f` Hz with Q value `q`.\n\n/// - Input 0: audio\n\n/// - Output 0: filtered audio\n", "file_path": "src/hacker.rs", "rank": 10, "score": 170123.54070124062 }, { "content": "#[inline]\n\npub fn highshelf() -> An<Svf<f64, f64, HighshelfMode<f64>>> {\n\n super::prelude::highshelf()\n\n}\n\n\n\n/// High shelf filter centered at `cutoff` Hz with Q value `q` and amplitude gain `gain`.\n\n/// - Input 0: audio\n\n/// - Output 0: filtered audio\n", "file_path": "src/hacker.rs", "rank": 11, "score": 170123.54070124062 }, { "content": "#[inline]\n\npub fn lowpass() -> An<Svf<f64, f64, LowpassMode<f64>>> {\n\n super::prelude::lowpass()\n\n}\n\n\n\n/// Lowpass filter with cutoff frequency `f` Hz with Q value `q`.\n\n/// - Input 0: audio\n\n/// - Output 0: filtered audio\n", "file_path": "src/hacker.rs", "rank": 12, "score": 170123.54070124062 }, { "content": "#[inline]\n\npub fn bandpass() -> An<Svf<f64, f64, BandpassMode<f64>>> {\n\n super::prelude::bandpass()\n\n}\n\n\n\n/// 
Bandpass filter centered at `f` Hz with Q value `q`.\n\n/// - Input 0: audio\n\n/// - Output 0: filtered audio\n", "file_path": "src/hacker.rs", "rank": 13, "score": 170123.54070124062 }, { "content": "#[inline]\n\npub fn peak() -> An<Svf<f64, f64, PeakMode<f64>>> {\n\n super::prelude::peak()\n\n}\n\n\n\n/// Peaking filter centered at `f` Hz with Q value `q`.\n\n/// - Input 0: audio\n\n/// - Output 0: filtered audio\n", "file_path": "src/hacker.rs", "rank": 14, "score": 170123.54070124062 }, { "content": "#[inline]\n\npub fn allpass() -> An<Svf<f64, f64, AllpassMode<f64>>> {\n\n super::prelude::allpass()\n\n}\n\n\n\n/// Allpass filter centered at `f` Hz with Q value `q`.\n\n/// - Input 0: audio\n\n/// - Output 0: filtered audio\n", "file_path": "src/hacker.rs", "rank": 15, "score": 170123.54070124062 }, { "content": "#[inline]\n\npub fn notch() -> An<Svf<f64, f64, NotchMode<f64>>> {\n\n super::prelude::notch()\n\n}\n\n\n\n/// Notch filter centered at `f` Hz with Q value `q`.\n\n/// - Input 0: audio\n\n/// - Output 0: filtered audio\n", "file_path": "src/hacker.rs", "rank": 16, "score": 170123.54070124062 }, { "content": "#[inline]\n\npub fn bell() -> An<Svf<f64, f64, BellMode<f64>>> {\n\n super::prelude::bell()\n\n}\n\n\n\n/// Bell filter centered at `f` Hz with Q value `q` and amplitude gain `gain`.\n\n/// - Input 0: audio\n\n/// - Output 0: filtered audio\n", "file_path": "src/hacker.rs", "rank": 17, "score": 170123.54070124062 }, { "content": "#[inline]\n\npub fn resonator() -> An<Resonator<f64, f64, U3>> {\n\n An(Resonator::new(DEFAULT_SR, 440.0, 110.0))\n\n}\n\n\n\n/// Constant-gain bandpass resonator with fixed `center` frequency (Hz) and `bandwidth` (Hz).\n\n/// - Input 0: audio\n\n/// - Output 0: filtered audio\n", "file_path": "src/hacker.rs", "rank": 18, "score": 168165.7178884863 }, { "content": "#[inline]\n\npub fn lowpole() -> An<Lowpole<f64, f64, U2>> {\n\n An(Lowpole::new(DEFAULT_SR, 440.0))\n\n}\n\n\n\n/// One-pole lowpass filter (1st order) with fixed cutoff frequency `f` Hz.\n\n/// - Input 0: audio\n\n/// - Output 0: filtered audio\n", "file_path": "src/hacker.rs", "rank": 19, "score": 168165.7178884863 }, { "content": "#[inline]\n\npub fn highpole() -> An<Highpole<f64, f64, U2>> {\n\n An(Highpole::new(DEFAULT_SR, 440.0))\n\n}\n\n\n\n/// One-pole, one-zero highpass filter (1st order) with fixed cutoff frequency f.\n\n/// - Input 0: audio\n\n/// - Output 0: filtered audio\n", "file_path": "src/hacker.rs", "rank": 20, "score": 168165.7178884863 }, { "content": "#[inline]\n\npub fn allpole() -> An<Allpole<f64, f64, U2>> {\n\n An(Allpole::new(DEFAULT_SR, 1.0))\n\n}\n\n\n\n/// Allpole filter with delay (delay > 0) in samples at DC.\n\n/// - Input 0: audio\n\n/// - Output 0: filtered audio\n", "file_path": "src/hacker.rs", "rank": 21, "score": 168165.7178884863 }, { "content": "#[inline]\n\npub fn lowpole_hz(f: f64) -> An<Lowpole<f64, f64, U1>> {\n\n super::prelude::lowpole_hz(f)\n\n}\n\n\n\n/// Allpole filter with adjustable delay (delay > 0) in samples at DC.\n\n/// - Input 0: audio\n\n/// - Input 1: delay in samples\n\n/// - Output 0: filtered audio\n", "file_path": "src/hacker.rs", "rank": 22, "score": 167951.8550069646 }, { "content": "#[inline]\n\npub fn highpole_hz(f: f64) -> An<Highpole<f64, f64, U1>> {\n\n An(Highpole::new(DEFAULT_SR, f))\n\n}\n\n\n\n/// Constant-gain bandpass resonator.\n\n/// - Input 0: audio\n\n/// - Input 1: cutoff frequency (Hz)\n\n/// - Input 2: bandwidth (Hz)\n\n/// - Output 0: filtered audio\n", "file_path": "src/hacker.rs", "rank": 23, 
"score": 167951.8550069646 }, { "content": "#[inline]\n\npub fn clip_to(minimum: f64, maximum: f64) -> An<Shaper<f64>> {\n\n super::prelude::clip_to(minimum, maximum)\n\n}\n\n\n\n/// Equal power mono-to-stereo panner.\n", "file_path": "src/hacker.rs", "rank": 24, "score": 167951.8550069646 }, { "content": "#[inline]\n\npub fn butterpass_hz(f: f64) -> An<ButterLowpass<f64, f64, U1>> {\n\n super::prelude::butterpass_hz(f)\n\n}\n\n\n\n/// One-pole lowpass filter (1st order).\n\n/// - Input 0: audio\n\n/// - Input 1: cutoff frequency (Hz)\n\n/// - Output 0: filtered audio\n", "file_path": "src/hacker.rs", "rank": 25, "score": 165109.51368597726 }, { "content": "#[inline]\n\npub fn butterpass() -> An<ButterLowpass<f64, f64, U2>> {\n\n An(ButterLowpass::new(DEFAULT_SR, 440.0))\n\n}\n\n\n\n/// Butterworth lowpass filter (2nd order) with fixed cutoff frequency `f` Hz.\n\n/// - Input 0: audio\n\n/// - Output 0: filtered audio\n", "file_path": "src/hacker.rs", "rank": 26, "score": 164787.16242247148 }, { "content": "#[inline]\n\npub fn notch_hz(f: f64, q: f64) -> An<FixedSvf<f64, f64, NotchMode<f64>>> {\n\n super::prelude::notch_hz::<f64, f64>(f, q)\n\n}\n\n\n\n/// Notch filter with Q value `q`.\n\n/// - Input 0: audio\n\n/// - Input 1: center frequency (Hz)\n\n/// - Output 0: filtered audio\n", "file_path": "src/hacker.rs", "rank": 27, "score": 164657.81407725537 }, { "content": "#[inline]\n\npub fn peak_hz(f: f64, q: f64) -> An<FixedSvf<f64, f64, PeakMode<f64>>> {\n\n super::prelude::peak_hz::<f64, f64>(f, q)\n\n}\n\n\n\n/// Peaking filter with Q value `q`.\n\n/// - Input 0: audio\n\n/// - Input 1: center frequency (Hz)\n\n/// - Output 0: filtered audio\n", "file_path": "src/hacker.rs", "rank": 28, "score": 164657.81407725537 }, { "content": "#[inline]\n\npub fn bandpass_hz(f: f64, q: f64) -> An<FixedSvf<f64, f64, BandpassMode<f64>>> {\n\n super::prelude::bandpass_hz::<f64, f64>(f, q)\n\n}\n\n\n\n/// Bandpass filter with Q value `q`.\n\n/// - Input 0: audio\n\n/// - Input 1: center frequency (Hz)\n\n/// - Output 0: filtered audio\n", "file_path": "src/hacker.rs", "rank": 29, "score": 164657.81407725537 }, { "content": "#[inline]\n\npub fn highpass_hz(f: f64, q: f64) -> An<FixedSvf<f64, f64, HighpassMode<f64>>> {\n\n super::prelude::highpass_hz::<f64, f64>(f, q)\n\n}\n\n\n\n/// Highpass filter with Q value `q`.\n\n/// - Input 0: audio\n\n/// - Input 1: cutoff frequency (Hz)\n\n/// - Output 0: filtered audio\n", "file_path": "src/hacker.rs", "rank": 30, "score": 164657.81407725537 }, { "content": "#[inline]\n\npub fn lowpass_hz(f: f64, q: f64) -> An<FixedSvf<f64, f64, LowpassMode<f64>>> {\n\n super::prelude::lowpass_hz::<f64, f64>(f, q)\n\n}\n\n\n\n/// Lowpass filter with Q value `q`.\n\n/// - Input 0: audio\n\n/// - Input 1: cutoff frequency (Hz)\n\n/// - Output 0: filtered audio\n", "file_path": "src/hacker.rs", "rank": 31, "score": 164657.81407725537 }, { "content": "#[inline]\n\npub fn allpass_hz(f: f64, q: f64) -> An<FixedSvf<f64, f64, AllpassMode<f64>>> {\n\n super::prelude::allpass_hz::<f64, f64>(f, q)\n\n}\n\n\n\n/// Allpass filter with Q value `q`.\n\n/// - Input 0: audio\n\n/// - Input 1: center frequency (Hz)\n\n/// - Output 0: filtered audio\n", "file_path": "src/hacker.rs", "rank": 32, "score": 164657.81407725537 }, { "content": "#[inline]\n\npub fn resonator_hz(center: f64, bandwidth: f64) -> An<Resonator<f64, f64, U1>> {\n\n super::prelude::resonator_hz(center, bandwidth)\n\n}\n\n\n\n/// Control envelope from time-varying function `f(t)` with `t` in seconds.\n\n/// Spaces samples 
using pseudorandom jittering.\n\n/// Synonymous with `lfo`.\n\n/// - Output(s): envelope linearly interpolated from samples at 2 ms intervals (average).\n", "file_path": "src/hacker.rs", "rank": 33, "score": 163384.47384812875 }, { "content": "#[inline]\n\npub fn allpole_delay(delay_in_samples: f64) -> An<Allpole<f64, f64, U1>> {\n\n An(Allpole::new(DEFAULT_SR, delay_in_samples))\n\n}\n\n\n\n/// One-pole, one-zero highpass filter (1st order).\n\n/// - Input 0: audio\n\n/// - Input 1: cutoff frequency (Hz)\n\n/// - Output 0: filtered audio\n", "file_path": "src/hacker.rs", "rank": 34, "score": 162435.91232260998 }, { "content": "#[inline]\n\npub fn shape(mode: Shape<f64>) -> An<Shaper<f64>> {\n\n super::prelude::shape(mode)\n\n}\n\n\n\n/// Clip signal to -1...1.\n", "file_path": "src/hacker.rs", "rank": 35, "score": 162303.03516283794 }, { "content": "#[inline]\n\npub fn pan(pan: f64) -> An<Panner<f64, U1>> {\n\n An(Panner::new(pan))\n\n}\n\n\n\n/// Parameter follower filter with halfway response time `t` seconds.\n", "file_path": "src/hacker.rs", "rank": 36, "score": 162303.03516283794 }, { "content": "#[inline]\n\npub fn sine_hz(f: f64) -> An<Pipe<f64, Constant<U1, f64>, Sine<f64>>> {\n\n super::prelude::sine_hz(f)\n\n}\n\n\n\n/// Add constant to signal.\n", "file_path": "src/hacker.rs", "rank": 37, "score": 161684.079075542 }, { "content": "#[inline]\n\npub fn lowshelf_hz(f: f64, q: f64, gain: f64) -> An<FixedSvf<f64, f64, LowshelfMode<f64>>> {\n\n super::prelude::lowshelf_hz::<f64, f64>(f, q, gain)\n\n}\n\n\n\n/// Low shelf filter with Q value `q` and amplitude gain `gain`.\n\n/// - Input 0: audio\n\n/// - Input 1: cutoff frequency\n\n/// - Output 0: filtered audio\n", "file_path": "src/hacker.rs", "rank": 38, "score": 161655.35221705894 }, { "content": "#[inline]\n\npub fn bell_hz(f: f64, q: f64, gain: f64) -> An<FixedSvf<f64, f64, BellMode<f64>>> {\n\n super::prelude::bell_hz::<f64, f64>(f, q, gain)\n\n}\n\n\n\n/// Bell filter with Q value `q` and amplitude gain `gain`.\n\n/// - Input 0: audio\n\n/// - Input 1: center frequency\n\n/// - Output 0: filtered audio\n", "file_path": "src/hacker.rs", "rank": 39, "score": 161655.35221705894 }, { "content": "#[inline]\n\npub fn highshelf_hz(f: f64, q: f64, gain: f64) -> An<FixedSvf<f64, f64, HighshelfMode<f64>>> {\n\n super::prelude::highshelf_hz::<f64, f64>(f, q, gain)\n\n}\n\n\n\n/// High shelf filter with Q value `q` and amplitude gain `gain`.\n\n/// - Input 0: audio\n\n/// - Input 1: cutoff frequency\n\n/// - Output 0: filtered audio\n", "file_path": "src/hacker.rs", "rank": 40, "score": 161655.35221705894 }, { "content": "#[inline]\n\npub fn sine() -> An<Sine<f64>> {\n\n An(Sine::new(DEFAULT_SR))\n\n}\n\n\n\n/// Fixed sine oscillator at `f` Hz.\n\n/// - Output 0: sine wave\n", "file_path": "src/hacker.rs", "rank": 41, "score": 161376.7739648165 }, { "content": "#[inline]\n\npub fn white() -> An<Noise<f64>> {\n\n An(Noise::new())\n\n}\n\n\n\n/// FIR filter.\n\n/// - Input 0: signal.\n\n/// - Output 0: filtered signal.\n", "file_path": "src/hacker.rs", "rank": 42, "score": 161376.7739648165 }, { "content": "#[inline]\n\npub fn clip() -> An<Shaper<f64>> {\n\n super::prelude::clip()\n\n}\n\n\n\n/// Clip signal to min...max.\n", "file_path": "src/hacker.rs", "rank": 43, "score": 161376.7739648165 }, { "content": "#[inline]\n\npub fn rnd(x: i64) -> f64 {\n\n let x = x as u64 ^ 0x5555555555555555;\n\n let x = x.wrapping_mul(0x9e3779b97f4a7c15);\n\n let x = (x ^ (x >> 30)).wrapping_mul(0xbf58476d1ce4e5b9);\n\n let x = (x ^ (x >> 
27)).wrapping_mul(0x94d049bb133111eb);\n\n let x = x ^ (x >> 31);\n\n (x >> 11) as f64 / (1u64 << 53) as f64\n\n}\n\n\n\n/// 64-bit hash function.\n\n/// This hash is a pseudorandom permutation.\n", "file_path": "src/math.rs", "rank": 44, "score": 161376.7739648165 }, { "content": "#[inline]\n\npub fn noise() -> An<Noise<f64>> {\n\n An(Noise::new())\n\n}\n\n\n\n/// White noise generator.\n\n/// Synonymous with `noise`.\n\n/// - Output 0: white noise.\n", "file_path": "src/hacker.rs", "rank": 45, "score": 161376.7739648165 }, { "content": "#[inline]\n\npub fn swap() -> An<Swap<f64>> {\n\n An(Swap::new())\n\n}\n\n\n\n/// Sine oscillator.\n\n/// - Input 0: frequency (Hz)\n\n/// - Output 0: sine wave\n", "file_path": "src/hacker.rs", "rank": 46, "score": 161376.7739648165 }, { "content": "#[inline]\n\npub fn mls() -> An<Mls<f64>> {\n\n mls_bits(29)\n\n}\n\n\n\n/// White noise generator.\n\n/// Synonymous with `white`.\n\n/// - Output 0: white noise.\n", "file_path": "src/hacker.rs", "rank": 47, "score": 161376.7739648165 }, { "content": "#[inline]\n\npub fn pass() -> An<Pass<f64>> {\n\n An(Pass::new())\n\n}\n\n\n\n/// Multichannel pass-through.\n", "file_path": "src/hacker.rs", "rank": 48, "score": 161376.7739648165 }, { "content": "fn is_equal_response(x: Complex64, y: Complex64) -> bool {\n\n let abs_tolerance = 1.0e-9;\n\n let amp_tolerance = db_amp(0.05);\n\n let phase_tolerance = 5.0e-4 * TAU;\n\n let x_norm = x.norm();\n\n let y_norm = y.norm();\n\n let x_phase = x.arg();\n\n let y_phase = y.arg();\n\n x_norm / amp_tolerance - abs_tolerance <= y_norm\n\n && x_norm * amp_tolerance + abs_tolerance >= y_norm\n\n && min(\n\n abs(x_phase - y_phase),\n\n min(abs(x_phase - y_phase + TAU), abs(x_phase - y_phase - TAU)),\n\n ) <= phase_tolerance\n\n}\n\n\n", "file_path": "tests/filter.rs", "rank": 49, "score": 160757.04075290915 }, { "content": "/// Tapped delay line with cubic interpolation.\n\n/// Minimum and maximum delay times are in seconds.\n\n/// - Input 0: signal.\n\n/// - Input 1: delay time in seconds.\n\n/// - Output 0: delayed signal.\n\npub fn tap(min_delay: f64, max_delay: f64) -> An<Tap<U1, f64>> {\n\n An(Tap::new(DEFAULT_SR, min_delay, max_delay))\n\n}\n\n\n", "file_path": "src/hacker.rs", "rank": 50, "score": 157891.74232081062 }, { "content": "/// Multichannel single sample delay.\n\n/// - Inputs: signal.\n\n/// - Outputs: delayed signal.\n\npub fn multitick<N: Size<f64>>() -> An<Tick<N, f64>> {\n\n An(Tick::new(convert(DEFAULT_SR)))\n\n}\n\n\n\n/// Fixed delay of `t` seconds.\n\n/// Delay time is rounded to the nearest sample.\n\n/// - Input 0: signal.\n\n/// - Output 0: delayed signal.\n", "file_path": "src/hacker.rs", "rank": 51, "score": 156853.3051782543 }, { "content": "#[inline]\n\npub fn multisink<N: Size<f64>>() -> An<Sink<N, f64>> {\n\n An(Sink::new())\n\n}\n\n\n\n/// Swap stereo channels.\n\n/// - Input 0: left channel.\n\n/// - Input 1: right channel.\n\n/// - Output 0: right channel input.\n\n/// - Output 1: left channel input.\n", "file_path": "src/hacker.rs", "rank": 52, "score": 156848.21154877084 }, { "content": "#[inline]\n\npub fn tag(tag: Tag, value: f64) -> An<Tagged<f64>> {\n\n An(Tagged::new(tag, value))\n\n}\n\n\n\n/// Zero generator.\n\n/// - Output 0: zero\n", "file_path": "src/hacker.rs", "rank": 53, "score": 156848.21154877084 }, { "content": "#[inline]\n\npub fn multizero<N: Size<f64>>() -> An<Constant<N, f64>> {\n\n An(Constant::new(Frame::splat(0.0)))\n\n}\n\n\n\n/// Mono pass-through.\n", "file_path": "src/hacker.rs", "rank": 54, "score": 
156848.21154877084 }, { "content": "#[inline]\n\npub fn zero() -> An<Constant<U1, f64>> {\n\n constant(0.0)\n\n}\n\n\n\n/// Multichannel zero generator.\n\n/// - Output(s): zero\n", "file_path": "src/hacker.rs", "rank": 55, "score": 154210.77098041636 }, { "content": "#[inline]\n\npub fn tick() -> An<Tick<U1, f64>> {\n\n An(Tick::new(DEFAULT_SR))\n\n}\n\n\n", "file_path": "src/hacker.rs", "rank": 56, "score": 154210.77098041636 }, { "content": "#[inline]\n\npub fn sink() -> An<Sink<U1, f64>> {\n\n An(Sink::new())\n\n}\n\n\n\n/// Multichannel sink. Inputs are discarded.\n", "file_path": "src/hacker.rs", "rank": 57, "score": 154210.77098041636 }, { "content": "#[inline]\n\npub fn panner() -> An<Panner<f64, U2>> {\n\n An(Panner::new(0.0))\n\n}\n\n\n\n/// Fixed equal power mono-to-stereo panner with pan value in -1...1.\n", "file_path": "src/hacker.rs", "rank": 58, "score": 154210.77098041636 }, { "content": "#[inline]\n\npub fn multipass<N: Size<f64>>() -> An<MultiPass<N, f64>> {\n\n An(MultiPass::new())\n\n}\n\n\n\n/// Timer node. An empty node that presents time as a parameter.\n", "file_path": "src/hacker.rs", "rank": 59, "score": 153898.86089180503 }, { "content": "/// Tapped delay line with cubic interpolation.\n\n/// The number of taps is `N`.\n\n/// Minimum and maximum delay times are in seconds.\n\n/// - Input 0: signal.\n\n/// - Inputs 1...N: delay time in seconds.\n\n/// - Output 0: delayed signal.\n\npub fn multitap<N>(min_delay: f64, max_delay: f64) -> An<Tap<N, f64>>\n\nwhere\n\n N: Size<f64> + Add<U1>,\n\n <N as Add<U1>>::Output: Size<f64>,\n\n{\n\n An(Tap::new(DEFAULT_SR, min_delay, max_delay))\n\n}\n\n\n", "file_path": "src/hacker.rs", "rank": 60, "score": 153611.20688911818 }, { "content": "#[inline]\n\npub fn triangle_hz(f: f64) -> An<Pipe<f64, Constant<U1, f64>, WaveSynth<'static, f64, U1>>> {\n\n super::prelude::triangle_hz(f)\n\n}\n\n\n\n/// Lowpass filter.\n\n/// - Input 0: audio\n\n/// - Input 1: cutoff frequency (Hz)\n\n/// - Input 2: Q\n\n/// - Output 0: filtered audio\n", "file_path": "src/hacker.rs", "rank": 62, "score": 152008.19980806077 }, { "content": "#[inline]\n\npub fn square_hz(f: f64) -> An<Pipe<f64, Constant<U1, f64>, WaveSynth<'static, f64, U1>>> {\n\n super::prelude::square_hz(f)\n\n}\n\n\n\n/// Fixed triangle wave oscillator at `f` Hz.\n\n/// - Output 0: triangle wave\n", "file_path": "src/hacker.rs", "rank": 63, "score": 152008.19980806077 }, { "content": "#[inline]\n\npub fn saw_hz(f: f64) -> An<Pipe<f64, Constant<U1, f64>, WaveSynth<'static, f64, U1>>> {\n\n super::prelude::saw_hz(f)\n\n}\n\n\n\n/// Fixed square wave oscillator at `f` Hz.\n\n/// - Output 0: square wave\n", "file_path": "src/hacker.rs", "rank": 64, "score": 152008.19980806077 }, { "content": "#[inline]\n\npub fn follow<S: ScalarOrPair<Sample = f64>>(t: S) -> An<AFollow<f64, f64, S>> {\n\n An(AFollow::new(DEFAULT_SR, t))\n\n}\n\n\n\n/// Look-ahead limiter with `(attack, release)` times in seconds.\n\n/// Look-ahead is equal to the attack time.\n", "file_path": "src/hacker.rs", "rank": 65, "score": 150143.55394219368 }, { "content": "#[inline]\n\npub fn split<N>() -> An<Split<N, f64>>\n\nwhere\n\n N: Size<f64>,\n\n{\n\n super::prelude::split::<N, f64>()\n\n}\n\n\n\n/// Split M channels into N branches. The output has M * N channels.\n", "file_path": "src/hacker.rs", "rank": 66, "score": 147657.13626228974 }, { "content": "#[inline]\n\npub fn timer(tag: Tag) -> An<Timer<f64>> {\n\n An(Timer::new(DEFAULT_SR, tag))\n\n}\n\n\n\n/// Monitor node. 
Passes through input and retains the latest input as a parameter.\n", "file_path": "src/hacker.rs", "rank": 67, "score": 147657.13626228974 }, { "content": "#[inline]\n\npub fn join<N>() -> An<Join<N, f64>>\n\nwhere\n\n N: Size<f64>,\n\n{\n\n super::prelude::join::<N, f64>()\n\n}\n\n\n\n/// Average `N` branches of `M` channels into one branch with `M` channels.\n\n/// The input has `M` * `N` channels. Inverse of `multisplit::<M, N>`.\n", "file_path": "src/hacker.rs", "rank": 68, "score": 147657.13626228974 }, { "content": "#[inline]\n\npub fn monitor(tag: Tag) -> An<Monitor<f64>> {\n\n An(Monitor::new(tag))\n\n}\n\n\n\n/// Mono sink. Input is discarded.\n", "file_path": "src/hacker.rs", "rank": 69, "score": 147657.13626228974 }, { "content": "#[inline]\n\npub fn mls_bits(n: i64) -> An<Mls<f64>> {\n\n An(Mls::new(MlsState::new(n as u32)))\n\n}\n\n\n\n/// Default Maximum Length Sequence noise generator.\n\n/// - Output 0: repeating white noise sequence of only -1 and 1 values.\n", "file_path": "src/hacker.rs", "rank": 70, "score": 147657.13626228974 }, { "content": "#[inline]\n\npub fn mul<X: ConstantFrame<Sample = f64>>(\n\n x: X,\n\n) -> An<Binop<f64, FrameMul<X::Size, f64>, MultiPass<X::Size, f64>, Constant<X::Size, f64>>>\n\nwhere\n\n X::Size: Size<f64> + Add<U0>,\n\n <X::Size as Add<U0>>::Output: Size<f64>,\n\n{\n\n An(MultiPass::<X::Size, f64>::new()) * dc(x)\n\n}\n\n\n\n/// Butterworth lowpass filter (2nd order).\n\n/// - Input 0: audio\n\n/// - Input 1: cutoff frequency (Hz)\n\n/// - Output 0: filtered audio\n", "file_path": "src/hacker.rs", "rank": 71, "score": 147044.83237361113 }, { "content": "#[inline]\n\npub fn sub<X: ConstantFrame<Sample = f64>>(\n\n x: X,\n\n) -> An<Binop<f64, FrameSub<X::Size, f64>, MultiPass<X::Size, f64>, Constant<X::Size, f64>>>\n\nwhere\n\n X::Size: Size<f64> + Add<U0>,\n\n <X::Size as Add<U0>>::Output: Size<f64>,\n\n{\n\n An(MultiPass::<X::Size, f64>::new()) - dc(x)\n\n}\n\n\n\n/// Multiply signal with constant.\n", "file_path": "src/hacker.rs", "rank": 72, "score": 147044.83237361113 }, { "content": "#[inline]\n\npub fn add<X: ConstantFrame<Sample = f64>>(\n\n x: X,\n\n) -> An<Binop<f64, FrameAdd<X::Size, f64>, MultiPass<X::Size, f64>, Constant<X::Size, f64>>>\n\nwhere\n\n X::Size: Size<f64> + Add<U0>,\n\n <X::Size as Add<U0>>::Output: Size<f64>,\n\n{\n\n An(MultiPass::<X::Size, f64>::new()) + dc(x)\n\n}\n\n\n\n/// Subtract constant from signal.\n", "file_path": "src/hacker.rs", "rank": 73, "score": 147044.83237361113 }, { "content": "#[inline]\n\npub fn lfo<E, R>(f: E) -> An<Envelope<f64, f64, E, R>>\n\nwhere\n\n E: Fn(f64) -> R,\n\n R: ConstantFrame<Sample = f64>,\n\n R::Size: Size<f64>,\n\n{\n\n An(Envelope::new(0.002, DEFAULT_SR, f))\n\n}\n\n\n\n/// Control envelope from time-varying, input dependent function `f(t, input)` with `t` in seconds.\n\n/// Spaces samples using pseudorandom jittering.\n\n/// Synonymous with `lfo2`.\n\n/// - Output(s): envelope linearly interpolated from samples at 2 ms intervals (average).\n", "file_path": "src/hacker.rs", "rank": 74, "score": 145134.65487343242 }, { "content": "#[inline]\n\npub fn envelope<E, R>(f: E) -> An<Envelope<f64, f64, E, R>>\n\nwhere\n\n E: Fn(f64) -> R,\n\n R: ConstantFrame<Sample = f64>,\n\n R::Size: Size<f64>,\n\n{\n\n An(Envelope::new(0.002, DEFAULT_SR, f))\n\n}\n\n\n\n/// Control envelope from time-varying function `f(t)` with `t` in seconds.\n\n/// Spaces samples using pseudorandom jittering.\n\n/// Synonymous with `envelope`.\n\n/// - Output(s): envelope linearly interpolated from samples 
at 2 ms intervals (average).\n", "file_path": "src/hacker.rs", "rank": 75, "score": 145134.65487343242 }, { "content": "#[inline]\n\npub fn envelope2<E, R>(f: E) -> An<Envelope2<f64, f64, E, R>>\n\nwhere\n\n E: Fn(f64, f64) -> R,\n\n R: ConstantFrame<Sample = f64>,\n\n R::Size: Size<f64>,\n\n{\n\n An(Envelope2::new(0.002, DEFAULT_SR, f))\n\n}\n\n\n\n/// Control envelope from time-varying, input dependent function `f(t, value)` with `t` in seconds.\n\n/// Spaces samples using pseudorandom jittering.\n\n/// Synonymous with `envelope2`.\n\n/// - Output(s): envelope linearly interpolated from samples at 2 ms intervals (average).\n", "file_path": "src/hacker.rs", "rank": 76, "score": 145134.65487343242 }, { "content": "#[inline]\n\npub fn lfo2<E, R>(f: E) -> An<Envelope2<f64, f64, E, R>>\n\nwhere\n\n E: Fn(f64, f64) -> R,\n\n R: ConstantFrame<Sample = f64>,\n\n R::Size: Size<f64>,\n\n{\n\n An(Envelope2::new(0.002, DEFAULT_SR, f))\n\n}\n\n\n\n/// Maximum Length Sequence noise generator from an `n`-bit sequence.\n\n/// - Output 0: repeating white noise sequence of only -1 and 1 values.\n", "file_path": "src/hacker.rs", "rank": 77, "score": 145134.65487343242 }, { "content": "#[inline]\n\npub fn square() -> An<WaveSynth<'static, f64, U1>> {\n\n An(WaveSynth::new(DEFAULT_SR, &SQUARE_TABLE))\n\n}\n\n\n\n/// Triangle wave oscillator.\n", "file_path": "src/hacker.rs", "rank": 78, "score": 144217.76170892117 }, { "content": "#[inline]\n\npub fn pulse() -> An<super::prelude::PulseWave<f64>> {\n\n super::prelude::pulse()\n\n}\n", "file_path": "src/hacker.rs", "rank": 79, "score": 144217.76170892117 }, { "content": "#[inline]\n\npub fn saw() -> An<WaveSynth<'static, f64, U1>> {\n\n An(WaveSynth::new(DEFAULT_SR, &SAW_TABLE))\n\n}\n\n\n\n/// Square wave oscillator.\n", "file_path": "src/hacker.rs", "rank": 80, "score": 144217.76170892117 }, { "content": "#[inline]\n\npub fn triangle() -> An<WaveSynth<'static, f64, U1>> {\n\n An(WaveSynth::new(DEFAULT_SR, &TRIANGLE_TABLE))\n\n}\n\n\n\n/// Fixed saw wave oscillator at `f` Hz.\n\n/// - Output 0: saw wave\n", "file_path": "src/hacker.rs", "rank": 81, "score": 144217.76170892117 }, { "content": "/// Complex64 with real component `x` and imaginary component zero.\n\nfn re<T: Float>(x: T) -> Complex64 {\n\n Complex64::new(x.to_f64(), 0.0)\n\n}\n\n\n", "file_path": "tests/filter.rs", "rank": 82, "score": 139654.20030348958 }, { "content": "/// Complex64 with real component `x` and imaginary component zero.\n\nfn re<T: Float>(x: T) -> Complex64 {\n\n Complex64::new(x.to_f64(), 0.0)\n\n}\n\n\n\n#[derive(Copy, Clone, Debug, Default)]\n\npub struct BiquadCoefs<F> {\n\n pub a1: F,\n\n pub a2: F,\n\n pub b0: F,\n\n pub b1: F,\n\n pub b2: F,\n\n}\n\n\n\nimpl<F: Real> BiquadCoefs<F> {\n\n /// Returns settings for a Butterworth lowpass filter.\n\n /// Cutoff is the -3 dB point of the filter in Hz.\n\n pub fn butter_lowpass(sample_rate: F, cutoff: F) -> BiquadCoefs<F> {\n\n let c = F::from_f64;\n\n let f: F = tan(cutoff * c(PI) / sample_rate);\n\n let a0r: F = c(1.0) / (c(1.0) + c(SQRT_2) * f + f * f);\n", "file_path": "src/filter.rs", "rank": 83, "score": 139654.20030348958 }, { "content": "#[inline]\n\npub fn delay<T: Float>(t: f64) -> An<Delay<T>> {\n\n An(Delay::new(t, DEFAULT_SR))\n\n}\n\n\n", "file_path": "src/prelude.rs", "rank": 84, "score": 139280.63694626666 }, { "content": "#[inline]\n\npub fn dc<X: ConstantFrame<Sample = f64>>(x: X) -> An<Constant<X::Size, f64>>\n\nwhere\n\n X::Size: Size<f64>,\n\n{\n\n An(Constant::new(x.convert()))\n\n}\n\n\n\n/// Tagged 
constant. Outputs the (scalar) value of the tag.\n\n/// - Output 0: value\n", "file_path": "src/hacker.rs", "rank": 85, "score": 138343.08243440592 }, { "content": "#[inline]\n\npub fn constant<X: ConstantFrame<Sample = f64>>(x: X) -> An<Constant<X::Size, f64>>\n\nwhere\n\n X::Size: Size<f64>,\n\n{\n\n An(Constant::new(x.convert()))\n\n}\n\n\n\n/// Constant node.\n\n/// Synonymous with `constant`.\n\n/// (DC stands for \"direct current\", which is an electrical engineering term used with signals.)\n", "file_path": "src/hacker.rs", "rank": 86, "score": 138343.08243440592 }, { "content": "/// Tapped delay line with cubic interpolation.\n\n/// Minimum and maximum delay times are in seconds.\n\n/// - Input 0: signal.\n\n/// - Input 1: delay time in seconds.\n\n/// - Output 0: delayed signal.\n\npub fn tap<T: Float>(min_delay: f64, max_delay: f64) -> An<Tap<U1, T>> {\n\n An(Tap::new(DEFAULT_SR, min_delay, max_delay))\n\n}\n\n\n", "file_path": "src/prelude.rs", "rank": 87, "score": 137786.05893094835 }, { "content": "/// Tapped delay line with cubic interpolation.\n\n/// The number of taps is `N`.\n\n/// Minimum and maximum delay times are in seconds.\n\n/// - Input 0: signal.\n\n/// - Inputs 1...N: delay time in seconds.\n\n/// - Output 0: delayed signal.\n\npub fn multitap<N, T>(min_delay: f64, max_delay: f64) -> An<Tap<N, T>>\n\nwhere\n\n T: Float,\n\n N: Size<T> + Add<U1>,\n\n <N as Add<U1>>::Output: Size<T>,\n\n{\n\n An(Tap::new(DEFAULT_SR, min_delay, max_delay))\n\n}\n\n\n", "file_path": "src/prelude.rs", "rank": 88, "score": 137785.8681942405 }, { "content": "/// New nodes can be defined with the following return signature.\n\n/// Declaring the full arity in the signature enables use of the node\n\n/// in further combinations, as does the full type name.\n\n/// Signatures with generic number of channels can be challenging to write.\n\nfn split_quad() -> An<impl AudioNode<Sample = f64, Inputs = U1, Outputs = U4>> {\n\n pass() ^ pass() ^ pass() ^ pass()\n\n}\n\n\n", "file_path": "tests/basic.rs", "rank": 89, "score": 136364.03105786868 }, { "content": "#[inline]\n\npub fn limiter<S: ScalarOrPair<Sample = f64>>(time: S) -> An<Limiter<f64, U1, S>> {\n\n An(Limiter::new(DEFAULT_SR, time))\n\n}\n\n\n\n/// Stereo look-ahead limiter with `(attack, release)` times in seconds.\n\n/// Look-ahead is equal to the attack time.\n", "file_path": "src/hacker.rs", "rank": 91, "score": 136035.16111994605 }, { "content": "#[inline]\n\npub fn fir<X: ConstantFrame<Sample = f64>>(weights: X) -> An<Fir<f64, X::Size>> {\n\n An(Fir::new(weights))\n\n}\n\n\n\n/// Single sample delay.\n\n/// - Input 0: signal.\n\n/// - Output 0: delayed signal.\n", "file_path": "src/hacker.rs", "rank": 92, "score": 136035.16111994605 }, { "content": "/// 2x oversample enclosed `node`.\n\n/// - Inputs and outputs: from `node`.\n\npub fn oversample<X>(node: An<X>) -> An<Oversampler<f64, X>>\n\nwhere\n\n X: AudioNode<Sample = f64>,\n\n X::Inputs: Size<f64>,\n\n X::Outputs: Size<f64>,\n\n X::Inputs: Size<Frame<f64, U128>>,\n\n X::Outputs: Size<Frame<f64, U128>>,\n\n{\n\n An(Oversampler::new(DEFAULT_SR, node.0))\n\n}\n\n\n\n/// Mix output of enclosed circuit `node` back to its input.\n\n/// Feedback circuit `node` must have an equal number of inputs and outputs.\n\n/// - Inputs: input signal.\n\n/// - Outputs: `node` output signal.\n", "file_path": "src/hacker.rs", "rank": 93, "score": 133928.27157172104 }, { "content": "#[inline]\n\npub fn limiter_stereo<S: ScalarOrPair<Sample = f64>>(time: S) -> An<Limiter<f64, U2, S>> {\n\n 
An(Limiter::new(DEFAULT_SR, time))\n\n}\n\n\n\n/// Pinking filter.\n", "file_path": "src/hacker.rs", "rank": 94, "score": 133854.27789619082 }, { "content": "#[inline]\n\npub fn multijoin<M, N>() -> An<MultiJoin<M, N, f64>>\n\nwhere\n\n M: Size<f64> + Mul<N>,\n\n N: Size<f64>,\n\n <M as Mul<N>>::Output: Size<f64>,\n\n{\n\n super::prelude::multijoin::<M, N, f64>()\n\n}\n\n\n", "file_path": "src/hacker.rs", "rank": 95, "score": 133140.1635547885 }, { "content": "#[inline]\n\npub fn multisplit<M, N>() -> An<MultiSplit<M, N, f64>>\n\nwhere\n\n M: Size<f64> + Mul<N>,\n\n N: Size<f64>,\n\n <M as Mul<N>>::Output: Size<f64>,\n\n{\n\n super::prelude::multisplit::<M, N, f64>()\n\n}\n\n\n\n/// Average N channels into one. Inverse of `split`.\n", "file_path": "src/hacker.rs", "rank": 96, "score": 133140.1635547885 }, { "content": "#[inline]\n\npub fn fdn<N, X>(x: An<X>) -> An<Feedback<N, f64, X, FrameHadamard<N, f64>>>\n\nwhere\n\n X: AudioNode<Sample = f64, Inputs = N, Outputs = N>,\n\n X::Inputs: Size<f64>,\n\n X::Outputs: Size<f64>,\n\n N: Size<f64>,\n\n{\n\n An(Feedback::new(x.0, FrameHadamard::new()))\n\n}\n\n\n\n/// Bus `N` similar nodes from indexed generator `f`.\n", "file_path": "src/hacker.rs", "rank": 97, "score": 132852.94356361846 }, { "content": "#[inline]\n\npub fn feedback<N, X>(node: An<X>) -> An<Feedback<N, f64, X, FrameId<N, f64>>>\n\nwhere\n\n X: AudioNode<Sample = f64, Inputs = N, Outputs = N>,\n\n X::Inputs: Size<f64>,\n\n X::Outputs: Size<f64>,\n\n N: Size<f64>,\n\n{\n\n An(Feedback::new(node.0, FrameId::new()))\n\n}\n\n\n\n/// Transform channels freely. Accounted as non-linear processing for signal flow.\n\n///\n\n/// # Example\n\n/// ```\n\n/// # use fundsp::hacker::*;\n\n/// let my_sum = map(|i: &Frame<f64, U2>| Frame::<f64, U1>::splat(i[0] + i[1]));\n\n/// ```\n", "file_path": "src/hacker.rs", "rank": 98, "score": 130672.06033986324 }, { "content": "#[inline]\n\npub fn sum<N, X, F>(f: F) -> An<Reduce<N, f64, X, FrameAdd<X::Outputs, f64>>>\n\nwhere\n\n N: Size<f64>,\n\n N: Size<X>,\n\n X: AudioNode<Sample = f64>,\n\n X::Inputs: Size<f64> + Mul<N>,\n\n X::Outputs: Size<f64>,\n\n <X::Inputs as Mul<N>>::Output: Size<f64>,\n\n F: Fn(i64) -> An<X>,\n\n{\n\n super::prelude::sum(f)\n\n}\n\n\n\n/// Mix together `N` similar nodes from fractional generator `f`.\n\n/// The fractional generator is given values in the range 0...1.\n", "file_path": "src/hacker.rs", "rank": 99, "score": 127153.72609060227 } ]
Rust
src/connection/peer_provider/k8s.rs
chenfisher/c19-1
f7f22aadb325d218ff24b754c5858aa567ec097b
use crate::connection::peer_provider::{PeerProvider, Peer}; use futures::{StreamExt, TryStreamExt}; use k8s_openapi::api::core::v1::Pod; use kube::{ api::{Api, ListParams, Meta, WatchEvent}, Client, }; use log::error; use serde::{Deserialize, Serialize}; use std::collections::HashMap; use std::sync::{Arc, RwLock}; use std::error::Error as StdError; type Result<T> = std::result::Result<T, Box<dyn StdError + Send + Sync>>; #[derive(Serialize, Deserialize, Debug)] #[serde(default)] pub struct K8s { selector: HashMap<String, String>, namespace: String, #[serde(skip_serializing, skip_deserializing)] peers: Arc<RwLock<HashMap<String, Peer>>>, } impl std::default::Default for K8s { fn default() -> Self { K8s { selector: Default::default(), namespace: "default".to_string(), peers: Default::default(), } } } impl K8s { fn selector(&self) -> String { self.selector .iter() .fold(String::new(), |s, (k, v)| format!("{},{}={}", s, k, v)) .strip_prefix(",") .unwrap_or("") .to_string() } fn ip(pod: &Pod) -> Option<Peer> { pod.status.as_ref().and_then(|status| { status .pod_ip .as_ref() .and_then(|ip| ip.parse().ok()) }) } } #[typetag::serde] impl PeerProvider for K8s { fn init(&self) -> Result<()> { let selector = self.selector(); let peers = self.peers.clone(); let namespace = self.namespace.clone(); tokio::spawn(async move { let client = Client::try_default().await?; let pods: Api<Pod> = if namespace == ":all" { Api::all(client) } else { Api::namespaced(client, namespace.as_ref()) }; let lp = ListParams::default().labels(&selector); let mut events = pods.watch(&lp, "0").await?.boxed(); while let Some(event) = events.try_next().await? { let event = &event; match event { WatchEvent::Added(pod) | WatchEvent::Modified(pod) => { if let Some(ip) = K8s::ip(pod) { peers.write().unwrap().insert( Meta::meta(pod).uid.as_ref().unwrap().clone(), ip, ); } } WatchEvent::Deleted(pod) => { peers .write() .unwrap() .remove(Meta::meta(pod).uid.as_ref().unwrap()); } _ => error!("Some error occured while receiving pod event"), } } Ok::<_, kube::Error>(()) }); Ok(()) } fn get(&self) -> Vec<Peer> { self.peers .read() .unwrap() .values() .map(|value| value.clone()) .collect() } }
use crate::connection::peer_provider::{PeerProvider, Peer}; use futures::{StreamExt, TryStreamExt}; use k8s_openapi::api::core::v1::Pod; use kube::{ api::{Api, ListParams, Meta, WatchEvent}, Client, }; use log::error; use serde::{Deserialize, Serialize}; use std::collections::HashMap; use std::sync::{Arc, RwLock}; use std::error::Error as StdError; type Result<T> = std::result::Result<T, Box<dyn StdError + Send + Sync>>; #[derive(Serialize, Deserialize, Debug)] #[serde(default)] pub struct K8s { selector: HashMap<String, String>, namespace: String, #[serde(skip_serializing, skip_deserializing)] peers: Arc<RwLock<HashMap<String, Peer>>>, } impl std::default::Default for K8s { fn default() -> Self { K8s { selector: Default::default(), namespace: "default".to_string(), peers: Default::default(), } } } impl K8s { fn selector(&self) -> String { self.selector .iter() .fold(String::new(), |s, (k, v)| format!("{},{}={}", s, k, v)) .strip_prefix(",") .unwrap_or("") .to_string() } fn ip(pod: &Pod) -> Option<Peer> { pod.status.as_ref().and_then(|status| { status .pod_ip .as_ref() .and_then(|ip| ip.parse().ok()) }) } } #[typetag::serde] impl PeerProvider for K8s { fn init(&self) -> Result<()> { let selector = self.selector(); let peers = self.pee
.values() .map(|value| value.clone()) .collect() } }
rs.clone(); let namespace = self.namespace.clone(); tokio::spawn(async move { let client = Client::try_default().await?; let pods: Api<Pod> = if namespace == ":all" { Api::all(client) } else { Api::namespaced(client, namespace.as_ref()) }; let lp = ListParams::default().labels(&selector); let mut events = pods.watch(&lp, "0").await?.boxed(); while let Some(event) = events.try_next().await? { let event = &event; match event { WatchEvent::Added(pod) | WatchEvent::Modified(pod) => { if let Some(ip) = K8s::ip(pod) { peers.write().unwrap().insert( Meta::meta(pod).uid.as_ref().unwrap().clone(), ip, ); } } WatchEvent::Deleted(pod) => { peers .write() .unwrap() .remove(Meta::meta(pod).uid.as_ref().unwrap()); } _ => error!("Some error occured while receiving pod event"), } } Ok::<_, kube::Error>(()) }); Ok(()) } fn get(&self) -> Vec<Peer> { self.peers .read() .unwrap()
random
[ { "content": "/// Initializes the state and runs the connection and agent layers.\n\n///\n\n/// The state is given a chance to be initialized by running state::init\n\n/// on the instance. The connection and agent layers are then started while\n\n/// given the initialized state.\n\n///\n\n/// The instances for the state, connection and agent are the ones\n\n/// initialized by the configuration.\n\n///\n\n/// The connection and agents layers are expected to return a future\n\n/// which is then being waited on until completion (mostly indfefinately)./\n\npub fn run(config: config::Config) -> impl Future<Output = Result<(), Box<dyn StdError + Send + Sync>>> {\n\n let state = config.spec.state.init();\n\n let conn = Arc::new(config.spec.connection).clone();\n\n let agent = Arc::new(config.spec.agent).clone();\n\n\n\n let mut futures = FuturesUnordered::new();\n\n let state1 = state.clone();\n\n futures.push(tokio::spawn(async move { conn.start(state1).await }));\n\n\n\n let state2 = state.clone();\n\n futures.push(tokio::spawn(async move { agent.start(state2).await }));\n\n\n\n async move {\n\n let mut iter = futures.iter_mut();\n\n while let Ok(result) = iter.next().unwrap().await {\n\n if let Err(e) = result {\n\n return Err(e);\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 0, "score": 178401.01264153374 }, { "content": "type Result<T> = std::result::Result<T, Box<dyn StdError + Send + Sync>>;\n\n\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct Static {\n\n peers: Vec<Peer>,\n\n}\n\n\n\n#[typetag::serde]\n\nimpl PeerProvider for Static {\n\n fn init(&self) -> Result<()> {\n\n Ok(())\n\n }\n\n\n\n fn get(&self) -> Vec<Peer> {\n\n self.peers.clone()\n\n }\n\n}\n", "file_path": "src/connection/peer_provider/static_peer_provider.rs", "rank": 1, "score": 169559.70215480827 }, { "content": "type Result<T> = std::result::Result<T, Box<dyn StdError + Send + Sync>>;\n\n\n\n#[derive(Serialize, Deserialize, Debug)]\n\n#[serde(default)]\n\npub struct Default {\n\n /// The port to bind to and listen for connection from other peers.\n\n /// Default port: 4097.\n\n port: u16,\n\n\n\n /// An optional port to use as a target when sending the state to peers. If ommited,\n\n /// then the `port` field will be used both for accepting connections and when connecting to\n\n /// other peers. Default is null (use the `port` field).\n\n target_port: Option<u16>,\n\n\n\n /// The publish interval in milliseconds.\n\n /// Default value is 1 second (1000 milliseconds).\n\n ///\n\n /// The connection layer will publish a new state `push_interval` after it has \n\n /// finished publishing the previous state. 
This means that if preparing a state for publishing \n\n /// takes a few seconds, then only after those few seconds the counter will start counting\n", "file_path": "src/connection/default.rs", "rank": 2, "score": 169435.6857316441 }, { "content": "type Result<T> = std::result::Result<T, Box<dyn StdError + Send + Sync>>;\n\n\n\n/// The Default struct.\n\n///\n\n/// This struct holds information loaded from the agent configuration.\n\n#[derive(Serialize, Deserialize, Debug)]\n\n#[serde(default)]\n\npub struct Default {\n\n /// Binds and accepts connections on this port.\n\n /// Default port: 3097\n\n port: u16,\n\n}\n\n\n\n/// Default values for this implementation.\n\nimpl std::default::Default for Default {\n\n fn default() -> Self {\n\n Default { port: 3097 }\n\n }\n\n}\n\n\n", "file_path": "src/agent/default.rs", "rank": 3, "score": 169435.68573164413 }, { "content": "#[typetag::serde(tag = \"kind\")]\n\npub trait PeerProvider: std::fmt::Debug + Send + Sync {\n\n /// Initializes the peer provider.\n\n fn init(&self) -> Result<(), Box<dyn StdError + Send + Sync>>;\n\n\n\n /// Returns a vector of available peers.\n\n fn get(&self) -> Vec<Peer>;\n\n}\n\n\n\nimpl FromStr for Peer {\n\n type Err = std::net::AddrParseError;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n SocketAddrV4::from_str(s).and_then(|socket| Ok(Peer::SocketAddrV4(socket))).or_else(|_| {\n\n Ipv4Addr::from_str(s).and_then(|ip| Ok(Peer::Ipv4Addr(ip)))\n\n })\n\n }\n\n}\n\n\n\nimpl Peer {\n\n pub fn ip(&self) -> &Ipv4Addr {\n", "file_path": "src/connection/peer_provider.rs", "rank": 5, "score": 161973.944822559 }, { "content": "type Result<T> = std::result::Result<T, Box<dyn StdError + Send + Sync>>;\n\n\n", "file_path": "src/helpers/middlewares/json.rs", "rank": 6, "score": 151956.98782486486 }, { "content": "#[typetag::serde(tag = \"kind\")]\n\npub trait Agent: std::fmt::Debug + Send + Sync {\n\n fn start<'a>(\n\n &'a self,\n\n state: state::SafeState,\n\n ) -> BoxFuture<'a, Result<(), Box<dyn StdError + Send + Sync>>>;\n\n}\n", "file_path": "src/agent.rs", "rank": 7, "score": 143494.9514182326 }, { "content": "#[typetag::serde(tag = \"kind\")]\n\npub trait Connection: std::fmt::Debug + Send + Sync {\n\n fn start<'a>(\n\n &'a self,\n\n state: state::SafeState,\n\n ) -> BoxFuture<'a, Result<(), Box<dyn StdError + Send + Sync>>>;\n\n}\n", "file_path": "src/connection.rs", "rank": 8, "score": 143494.9514182326 }, { "content": "#[typetag::serde(tag = \"kind\")]\n\npub trait State: std::fmt::Debug + Send + Sync + CloneState {\n\n /// Initializes the state and returns a state that is safe to be shared across threads.\n\n ///\n\n /// A state object is already loaded by the configuration. The implementor can use this\n\n /// function to add or initialize any other relevant data and then return a SafeState object\n\n /// which is shared with the connection and agent layers.\n\n fn init(&self) -> SafeState;\n\n\n\n /// Returns the version of the current state.\n\n ///\n\n /// An implementor can use this function to keep a version for each \"state\" of the sate. For\n\n /// example, the default state implementation sets this value to \n\n /// a random string whenever the state changes. It is then saves that version in a version history which \n\n /// allows for the connection layer to compute diff between two versions. \n\n fn version(&self) -> String;\n\n\n\n /// Sets a value to the state.\n\n ///\n\n /// There's no assumption about the value itself. 
It can be anything the implementor wishes.\n\n /// The default state implementation, for example, treats this value as a map of key/value\n", "file_path": "src/state.rs", "rank": 9, "score": 135184.6009825761 }, { "content": "#[typetag::serde(tag = \"kind\")]\n\npub trait DataSeeder: std::fmt::Debug + Send + Sync {\n\n fn load(&self) -> Result<Box<dyn StateValue>, Box<dyn StdError>>;\n\n}\n\n\n", "file_path": "src/state/data_seeder.rs", "rank": 10, "score": 135060.24431099358 }, { "content": "/// Purges expired keys at a specficied interval.\n\nfn purge(state: Arc<Default>) -> impl futures::future::Future<Output = ()> + Send {\n\n let purge_interval = state.purge_interval;\n\n\n\n async move {\n\n let start = Instant::now() + Duration::from_millis(purge_interval);\n\n let mut interval = interval_at(start, Duration::from_millis(purge_interval));\n\n\n\n loop {\n\n interval.tick().await;\n\n state.purge();\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/state/default.rs", "rank": 11, "score": 133703.55516689998 }, { "content": "/// A trait used as a protocol between different components that use the state.\n\n///\n\n/// Both the Connection and Agent layers use the state to get and set values. They do not know\n\n/// anything about the implementation of one another. This trait allows them to use the same state\n\n/// without assuming anything about the structure of a state value.\n\n///\n\n/// The trait offers functions to serialize to different formats. The default connection layer for\n\n/// example, serializes a state value to json to be exchanged with other peers.\n\n///\n\n/// The functions in this trait have a default implementation that always return `None` so that\n\n/// implementors don't have to bother and implement all serialization functions that are irrelevant\n\n/// to them. If an implementor of an Agent layer, for example, tries to serialize a state value that the State layer does not\n\n/// support, it will get a `None` value in response. This might indicate an incompatible usage of\n\n/// an Agent and State layers.\n\n///\n\n/// When a user of the c19 protocol chooses their state, agent and connection layers, they should\n\n/// ensure that each component is compatible with one another.\n\n///\n\n/// An implementor of layer might choose to run different compatability tests at startup and notify\n\n/// the user of incompatabilities.\n\npub trait StateValue: Send + Sync {\n\n fn as_bytes(&self) -> Option<Vec<u8>>;\n\n}\n\n\n\n/// The State trait.\n\n///\n\n/// Every state implementor must implement this trait. It is first auto-loaded by the configuration\n\n/// deserializer and then initialized by the library by calling the `init` function. 
The `init`\n\n/// function should return a [SafeState] which is then passed to the connection and agent layers.\n", "file_path": "src/state.rs", "rank": 12, "score": 117683.75474480187 }, { "content": "type Result<T> = std::result::Result<T, Box<dyn StdError>>;\n\n\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct File {\n\n filename: String,\n\n}\n\n\n\n#[typetag::serde]\n\nimpl data_seeder::DataSeeder for File {\n\n fn load(&self) -> Result<Box<dyn StateValue>> {\n\n let data: Box<dyn StateValue> = Box::new(fs::read(&self.filename)?);\n\n Ok(data)\n\n }\n\n}\n", "file_path": "src/state/data_seeder/file.rs", "rank": 13, "score": 98371.884410625 }, { "content": "/// Returns the configuration after dynanmically loading all layers.\n\n///\n\n/// Takes command line arguments, loads the YAML configuration file and initializes all layer\n\n/// objects.\n\npub fn new(matches: &ArgMatches) -> Result<Config, Box<dyn Error>> {\n\n let config_file = matches.value_of(\"config\").unwrap_or(DEFAULT_CONFIG_FILE);\n\n let config = fs::read_to_string(config_file)?;\n\n\n\n let config: Config = serde_yaml::from_str(&config)?;\n\n\n\n Ok(config)\n\n}\n\n\n\nimpl fmt::Display for Config {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n let s = serde_yaml::to_string(self).unwrap_or(\"Failed to parse config to yaml\".to_string());\n\n\n\n f.write_str(s.as_ref())\n\n }\n\n}\n", "file_path": "src/config.rs", "rank": 14, "score": 97961.53749700724 }, { "content": "fn hash(hm: &HashMap<String, Box<Value>>) -> u64 {\n\n let mut h: u64 = 0;\n\n\n\n for (k, v) in hm.iter() {\n\n let mut hasher = XxHash64::default();\n\n k.hash(&mut hasher);\n\n v.ts.hash(&mut hasher);\n\n h ^= hasher.finish();\n\n }\n\n\n\n h\n\n}\n\n\n\nimpl Hash for Default {\n\n fn hash<H: Hasher>(&self, state: &mut H) {\n\n state.write_u64(hash(&self.storage.read().unwrap().clone()));\n\n }\n\n}\n\n\n\n/// An implementation of the StateValue trait.\n\n///\n\n/// This value is a serde_json::Value, has a timestamp to resolve conflicts and supports a TTL. 
See the module\n\n/// documentation for more information.\n", "file_path": "src/state/default.rs", "rank": 15, "score": 88786.17702801607 }, { "content": "/// Returns the current time in seconds since epoch.\n\npub fn epoch() -> u64 {\n\n u64::try_from(\n\n SystemTime::now()\n\n .duration_since(UNIX_EPOCH)\n\n .unwrap_or(std::time::Duration::default())\n\n .as_millis(),\n\n )\n\n .unwrap_or(0)\n\n}\n\n\n", "file_path": "src/helpers/utils.rs", "rank": 16, "score": 82750.47698653134 }, { "content": "#[derive(Serialize, Debug, Clone, Deserialize)]\n\nstruct Value {\n\n /// A serde_json::Value to hold any value that can be serialized into JSON format.\n\n value: serde_json::Value,\n\n\n\n /// The timestamp when this value was first created.\n\n #[serde(default = \"epoch\")]\n\n ts: u64,\n\n\n\n /// An optional TTL (resolved to an absolute epoch time) when this value will be expired.\n\n ttl: Option<u64>,\n\n}\n\n\n\nimpl Value {\n\n /// Returns true if the value was expired.\n\n fn is_expired(&self) -> bool {\n\n match self.ttl {\n\n Some(ttl) => ttl + self.ts < epoch(),\n\n _ => false,\n\n }\n\n }\n", "file_path": "src/state/default.rs", "rank": 17, "score": 72138.33317720296 }, { "content": "#[derive(Debug, Clone)]\n\nstruct Version {\n\n ts: u64,\n\n storage: HashMap<String, Value>,\n\n}\n\n\n\n/// The default state struct.\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\n#[serde(default)]\n\npub struct Default {\n\n /// The default TTL (in milliseconds) to use if none is specified when setting a new value.\n\n ttl: Option<u64>,\n\n\n\n /// The interval in milliseconds in which to purge expired values.\n\n ///\n\n /// Default value is 1 minute (60000 milliseconds).\n\n purge_interval: u64,\n\n\n\n /// The [DataSeeder] to use for seeding the data on initialization.\n\n data_seeder: Option<Arc<RwLock<Box<dyn DataSeeder>>>>,\n\n\n", "file_path": "src/state/default.rs", "rank": 18, "score": 72132.01014579952 }, { "content": "pub fn wrap_json_response<F, S: 'static>(\n\n f: F,\n\n) -> impl Fn(state::SafeState, Request<Body>) -> BoxFuture<'static, Result<Response<Body>>>\n\nwhere\n\n F: Fn(state::SafeState, Request<Body>) -> S,\n\n S: Future<Output = Result<Response<Body>>> + Send,\n\n{\n\n move |state: state::SafeState, req: Request<Body>| {\n\n {\n\n f(state, req).and_then(|mut response: Response<Body>| async {\n\n response\n\n .headers_mut()\n\n .insert(\"Content-Type\", HeaderValue::from_static(\"application/json\"));\n\n\n\n Ok(response)\n\n })\n\n }\n\n .boxed()\n\n }\n\n}\n", "file_path": "src/helpers/middlewares/json.rs", "rank": 19, "score": 71168.17992860256 }, { "content": "/// Async set thread.\n\n///\n\n/// Listens on the receiver channel for values to be commited to the state.\n\nfn async_set(state: Arc<Default>, rx: mpsc::Receiver<Vec<u8>>) {\n\n for value in rx.iter() {\n\n let value = serde_json::from_slice(value.as_slice());\n\n if let Ok(value) = value {\n\n Default::set(&state, &value);\n\n }\n\n }\n\n}\n\n\n\nimpl From<&dyn StateValue> for Result<HashMap<String, Box<Value>>, Box<dyn StdError>> {\n\n fn from(value: &dyn StateValue) -> Self {\n\n let value: Vec<u8> = value\n\n .as_bytes()\n\n .ok_or(Vec::from(\"\")).unwrap();\n\n\n\n serde_json::from_slice(value.as_slice()).map_err(|e| e.into())\n\n }\n\n}\n\n\n\nimpl From<Box<Value>> for Box<dyn StateValue> {\n", "file_path": "src/state/default.rs", "rank": 20, "score": 70630.35591620186 }, { "content": "/// Sets the key and value specified in the request.\n\n///\n\n/// `PUT /`\n\n///\n\n/// The agent does not assume 
anything about the format of the value in the body of the message.\n\n/// The value is passed to the state as-is and it is up to the app to make sure it conforms to the \n\n/// expected format by the state.\n\n///\n\n/// For example, using the `Default` state, an app would send a `PUT` request like so:\n\n///\n\n/// A JSON body of the followibng format:\n\n///\n\n/// ```\n\n/// {\"cat\": {\"value\": \"garfield\", \"ttl\": 60000}}\n\n/// ```\n\n///\n\n/// According to the `Default` state documentation, the value can be anything JSON.\n\n/// `ttl` is optional and another field `ts` can be specified to override the automatic `ts` \n\n/// set by the state.\n\n///\n\n/// Please refer to the [Default] state implementation for more information. \n\n///\n\n/// [Default]: crate::state::default::Default\n\nfn set_handler(\n\n state: state::SafeState,\n\n req: Request<Body>,\n\n) -> impl FutureExt<Output = Result<Response<Body>>> {\n\n hyper::body::to_bytes(req.into_body()).and_then(move |body| async move {\n\n let result = state.set(&body as &dyn StateValue);\n\n\n\n Ok(match result {\n\n Ok(_) => Responses::no_content(),\n\n _ => Responses::unprocessable(None),\n\n })\n\n }).map_err(|e| e.into())\n\n}\n\n\n\n/// Accepts a request and dynamically dispatches the handler based on the method of the request.\n\n///\n\n/// Returns whatever the get and set handlers return or 404 (not found) if method is invalid.\n\nasync fn handler(state: state::SafeState, req: Request<Body>) -> Result<Response<Body>> {\n\n Ok(match req.method() {\n\n &Method::GET => get_handler(state, &req),\n", "file_path": "src/agent/default.rs", "rank": 21, "score": 66877.58209397673 }, { "content": "/// Accepts a JSON body that represents a state. Merges it with its own state.\n\n///\n\n/// PUT /\n\nfn set_handler<'a>(\n\n state: state::SafeState,\n\n req: Request<Body>,\n\n) -> impl FutureExt<Output = Result<Response<Body>>> + 'a {\n\n hyper::body::to_bytes(req.into_body())\n\n .and_then(move |body| async move {\n\n let body = &body as &dyn state::StateValue;\n\n let result = state.set(body);\n\n\n\n Ok(match result {\n\n Ok(_) => Responses::no_content(),\n\n _ => Responses::unprocessable(None),\n\n })\n\n })\n\n .map_err(|e| e.into())\n\n}\n\n\n\nasync fn handler(state: state::SafeState, req: Request<Body>) -> Result<Response<Body>> {\n\n Ok(match req.method() {\n\n &Method::GET => get_handler(state, &req),\n", "file_path": "src/connection/default.rs", "rank": 22, "score": 65735.08032435749 }, { "content": "/// Returns the value associated with the given key.\n\n///\n\n/// `GET /<key>`\n\n///\n\n/// Expects key to be a String. Returns the value as-is from the state.\n\n///\n\n/// # Example\n\n///\n\n/// Assuming usage of the `Default` state, a value might look like this:\n\n///\n\n/// ```\n\n/// GET /cat\n\n///\n\n/// {\"ts\":1601241450390,\"ttl\":null,\"value\":\"garfield\"}\n\n/// ```\n\n///\n\n/// As you can see, the value holds more than the value itself. 
Included are the ttl (can be null)\n\n/// which is an absolute value of when this key will expire, and the timestamp (ts) that this key\n\n/// was created.\n\n///\n\n/// A different form of the value might be returned, depending on which state layer is being used.\n\n/// In any case, this agent implementation does not assume anything about the format of the values\n\n/// returned by the state.\n\nfn get_handler(state: state::SafeState, req: &Request<Body>) -> Response<Body> {\n\n let result = req.uri().path().split('/').last().and_then(|key| {\n\n state\n\n .get(&key.to_string() as &dyn state::StateValue)\n\n .and_then(|value| Some(value.as_bytes()))\n\n });\n\n\n\n match result {\n\n Some(value) => {\n\n if let Some(value) = value {\n\n Responses::ok(value.into())\n\n } else {\n\n Responses::bad_request(None)\n\n }\n\n }\n\n _ => Responses::not_found(None),\n\n }\n\n}\n\n\n", "file_path": "src/agent/default.rs", "rank": 23, "score": 47234.776172726895 }, { "content": "/// Returns an HTTP response with the full state as JSON.\n\n///\n\n/// GET /\n\nfn get_handler(state: state::SafeState, req: &Request<Body>) -> Response<Body> {\n\n let versions_match = req.uri().path().split('/').last().and_then(|version| {\n\n (version.is_empty() || version != state.version()).into()\n\n }).unwrap();\n\n\n\n if versions_match {\n\n Responses::ok(state.get_root().unwrap_or(\"\".into()).into())\n\n } else {\n\n Responses::no_content()\n\n }\n\n}\n\n\n", "file_path": "src/connection/default.rs", "rank": 24, "score": 47228.11025570466 }, { "content": "/// A trait to be implemented on iterators to allow conveniently sampling of set of random elements.\n\npub trait Sample {\n\n type Item;\n\n\n\n /// Returns a sample of `n` elements from the collection.\n\n fn sample(self, n: usize) -> Vec<Self::Item>;\n\n}\n\n\n\n/// Blanket implementation for all Iterators.\n\nimpl<T, I> Sample for T\n\nwhere\n\n T: Iterator<Item = I>,\n\n{\n\n type Item = I;\n\n\n\n /// Returns a random set of elements from this iterator.\n\n fn sample(self, n: usize) -> Vec<Self::Item> {\n\n let mut rng = SmallRng::from_entropy();\n\n self.choose_multiple(&mut rng, n)\n\n }\n\n}\n", "file_path": "src/helpers/utils.rs", "rank": 25, "score": 43208.70605692093 }, { "content": "pub trait CloneState {\n\n fn clone_state(&self) -> Box<dyn State>;\n\n}\n\n\n\nimpl<T> CloneState for T\n\nwhere\n\n T: State + Clone + 'static,\n\n{\n\n fn clone_state(&self) -> Box<dyn State> {\n\n Box::new(self.clone())\n\n }\n\n}\n\n\n\nimpl Clone for Box<dyn State> {\n\n fn clone(&self) -> Self {\n\n self.clone_state()\n\n }\n\n}\n\n\n\nimpl StateValue for &'static str {\n", "file_path": "src/state.rs", "rank": 26, "score": 43205.629547859615 }, { "content": "//! An implementation of the peer provider.\n\n//!\n\n//! This implementation uses a static list of peers specified directly in the \n\n//! configuration file.\n\n//!\n\n//! It's helpful when developing and running the agent locally.\n\n//!\n\n//! # Example:\n\n//!\n\n//! ```yaml\n\n//! peer_provider:\n\n//! kind: Static\n\n//! peers:\n\n//! - 127.0.0.1\n\n//! ```\n\n//!\n\nuse crate::connection::peer_provider::{PeerProvider, Peer};\n\nuse serde::{Deserialize, Serialize};\n\nuse std::error::Error as StdError;\n\n\n", "file_path": "src/connection/peer_provider/static_peer_provider.rs", "rank": 27, "score": 30513.321494329866 }, { "content": "//! Peer provider.\n\n//!\n\n//! The peer provider is responsible for retreiving peers that are available for exchanging states.\n\n//!\n\n//! 
The peer provider is specific to the default connection implementation, although it can serve\n\n//! other implemntations in the future. It is implemented as a trait to allow the default\n\n//! connection implementation to use different kind of providers, not only the [K8s](crate::connection::peer_provider::k8s), which\n\n//! is the default one.\n\n//!\n\n//! The provider is first loaded by the configuration and then initialized by the default\n\n//! connection itself. It is then used by the default connection to get the list of available peers\n\n//! to choose from.\n\n\n\npub mod k8s;\n\npub mod static_peer_provider;\n\nuse std::error::Error as StdError;\n\nuse std::net::{Ipv4Addr, SocketAddrV4};\n\nuse serde::{Deserialize, Serialize};\n\nuse std::str::FromStr;\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\n#[serde(untagged)]\n\npub enum Peer {\n\n Ipv4Addr(Ipv4Addr),\n\n SocketAddrV4(SocketAddrV4),\n\n}\n\n\n\n#[typetag::serde(tag = \"kind\")]\n", "file_path": "src/connection/peer_provider.rs", "rank": 28, "score": 25882.439374554822 }, { "content": " match self {\n\n Peer::Ipv4Addr(s) => s,\n\n Peer::SocketAddrV4(s) => s.ip(),\n\n }\n\n }\n\n\n\n pub fn port(&self) -> Option<u16> {\n\n match self {\n\n Peer::Ipv4Addr(_) => None,\n\n Peer::SocketAddrV4(s) => Some(s.port()),\n\n }\n\n }\n\n}\n", "file_path": "src/connection/peer_provider.rs", "rank": 29, "score": 25874.574437053954 }, { "content": " let res = stream::iter(peers)\n\n .map(|peer| {\n\n let state_to_publish = state_to_publish.clone();\n\n let url = format!(\"http://{}:{}/\", peer.ip(), peer.port().unwrap_or(self.target_port.unwrap_or(self.port)));\n\n let timeout = self.timeout;\n\n\n\n tokio::spawn(async move {\n\n let client = Client::builder()\n\n .connect_timeout(Duration::from_millis(timeout))\n\n .build().unwrap();\n\n\n\n debug!(\"Publishing state to {}\", url);\n\n let result = client\n\n .put(&url.to_string())\n\n .body(state_to_publish)\n\n .send()\n\n .await?\n\n .bytes()\n\n .await;\n\n\n", "file_path": "src/connection/default.rs", "rank": 36, "score": 24243.66329313556 }, { "content": " let timeout = self.timeout;\n\n\n\n tokio::spawn(async move {\n\n let client = Client::builder()\n\n .connect_timeout(Duration::from_millis(timeout))\n\n .build().unwrap();\n\n\n\n let result = client\n\n .get(&url.to_string())\n\n .send()\n\n .await?\n\n .bytes()\n\n .await;\n\n\n\n result\n\n })\n\n })\n\n .buffer_unordered(4);\n\n\n\n res.collect::<Vec<_>>().await.iter().for_each(|result| {\n", "file_path": "src/connection/default.rs", "rank": 37, "score": 24241.629963708983 }, { "content": " /// peers to choose from.\n\n ///\n\n /// Default value: k8s.\n\n pub peer_provider: Box<dyn peer_provider::PeerProvider>,\n\n}\n\n\n\nimpl std::default::Default for Default {\n\n fn default() -> Self {\n\n Default {\n\n port: 4097,\n\n target_port: None,\n\n push_interval: 1000,\n\n pull_interval: 60000,\n\n r0: 3,\n\n timeout: 1000,\n\n peer_provider: Box::new(peer_provider::k8s::K8s::default()),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/connection/default.rs", "rank": 38, "score": 24241.322616962625 }, { "content": "use crate::connection;\n\nuse crate::connection::peer_provider;\n\nuse crate::helpers::http::responses::Responses;\n\nuse crate::helpers::middlewares::json::wrap_json_response;\n\nuse crate::helpers::utils::Sample;\n\nuse crate::state;\n\nuse futures::future::{self, BoxFuture, FutureExt, TryFutureExt};\n\nuse futures::{stream, StreamExt};\n\nuse hyper::{\n\n http::Method, service::make_service_fn, 
service::service_fn, Body, Request, Response, Server,\n\n};\n\nuse log::{debug, warn};\n\nuse reqwest::Client;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::convert::Infallible;\n\nuse std::time::Duration;\n\nuse tokio;\n\nuse tokio::time;\n\nuse std::error::Error as StdError;\n\n\n", "file_path": "src/connection/default.rs", "rank": 39, "score": 24241.124481708088 }, { "content": " /// The version of the current state.\n\n ///\n\n /// This is set to a random unique string on every state change.\n\n #[serde(skip_serializing, skip_deserializing)]\n\n version: Arc<RwLock<String>>,\n\n\n\n /// The SyncSender channel to use for async set operations\n\n ///\n\n /// When a set operation is being commited to the state, the state \n\n /// will pass the operation to an async handler which will then commit the \n\n /// changes to the state.\n\n #[serde(skip_serializing, skip_deserializing)]\n\n tx: Option<mpsc::SyncSender<Vec<u8>>>,\n\n\n\n /// The data storage in the form of a Key/Value hashmap.\n\n #[serde(skip_serializing, skip_deserializing)]\n\n storage: Arc<RwLock<HashMap<String, Box<Value>>>>,\n\n\n\n /// Calculating the version is a bit expensive so we use \n\n /// the dirty flag to lazily calculate the verison on-demand.\n", "file_path": "src/state/default.rs", "rank": 40, "score": 24240.409529697834 }, { "content": " result\n\n })\n\n })\n\n .buffer_unordered(4);\n\n\n\n res.collect::<Vec<_>>().await.iter().for_each(|result| {\n\n if let Ok(Err(e)) = result {\n\n warn!(\"Failed to publish to peer; {}\", e);\n\n }\n\n });\n\n\n\n time::delay_for(time::Duration::from_millis(self.push_interval)).await;\n\n }\n\n }\n\n}\n\n\n\n#[typetag::serde]\n\nimpl connection::Connection for Default {\n\n /// Starts the connection layer.\n\n ///\n", "file_path": "src/connection/default.rs", "rank": 41, "score": 24240.35401369363 }, { "content": " &Method::PUT => set_handler(state, req).await.unwrap(),\n\n _ => Responses::not_found(None),\n\n })\n\n}\n\n\n\nimpl Default {\n\n async fn server(&self, state: state::SafeState) -> Result<()> {\n\n let service = make_service_fn(move |_| {\n\n let state = state.clone();\n\n async move {\n\n Ok::<_, Box<dyn StdError + Send + Sync>>(service_fn(move |req| {\n\n handler(state.clone(), req)\n\n }))\n\n }\n\n });\n\n\n\n let server = Server::try_bind(&([0, 0, 0, 0], self.port).into())?;\n\n server.serve(service).await?;\n\n\n\n Ok(())\n", "file_path": "src/agent/default.rs", "rank": 42, "score": 24240.193051857845 }, { "content": "//! The agent does not assume anything about the format of the value in the body of the message.\n\n//! The value is passed to the state as-is and it is up to the app to make sure it conforms to the \n\n//! expected format by the state.\n\n//!\n\n//! For example, using the [Default] state, an app would send a `PUT` request like so:\n\n//!\n\n//! A JSON body of the followibng format:\n\n//!\n\n//! ```\n\n//! {\"cat\": {\"value\": \"garfield\", \"ttl\": 60000}}\n\n//! ```\n\n//!\n\n//! According to the [Default] state documentation, the value can be anything JSON.\n\n//! `ttl` is optional and another field `ts` can be specified to override the automatic `ts` \n\n//! set by the state.\n\n//!\n\n//! Please refer to the [Default] state implementation for more information. \n\n//!\n\n//! 
[Default]: state::default\n\n\n\nuse crate::agent;\n\nuse crate::helpers::http::responses::Responses;\n\nuse crate::state::{self, StateValue};\n\nuse futures::future::{BoxFuture, FutureExt, TryFutureExt};\n\nuse http::{Request, Response};\n\nuse hyper::{http::Method, service::make_service_fn, service::service_fn, Body, Server};\n\nuse serde::{Deserialize, Serialize};\n\nuse std::error::Error as StdError;\n\n\n", "file_path": "src/agent/default.rs", "rank": 43, "score": 24239.567298485676 }, { "content": "//!\n\n//! # Conflicts\n\n//! Since this is a distributed system, the state might be updated by different peers that are not\n\n//! yet in sync. To resolve a conflict where a key is being updated by more than one peer, a\n\n//! timestamp is used. Only a newer key can override an older one. The timestamp should be the\n\n//! timestamp when the key was first created (by the source).\n\n//!\n\n//! # Version History\n\n//! The state records version history for every change that is made to the state.\n\n//! To make sure the version history doesn't get bloated it is being purged on every \n\n//! `version_ttl` milliseconds.\n\n//!\n\n//! See the [struct@Default] state struct for details on the different fields and configurations. \n\n\n\nuse crate::helpers::utils::epoch;\n\nuse crate::state::{self, data_seeder::DataSeeder};\n\nuse crate::state::StateValue;\n\nuse im::hashmap::HashMap;\n\nuse serde::{Deserialize, Serialize};\n\nuse serde_json;\n", "file_path": "src/state/default.rs", "rank": 44, "score": 24239.04088179771 }, { "content": "impl std::default::Default for Default {\n\n fn default() -> Self {\n\n Default {\n\n ttl: None,\n\n purge_interval: 60000,\n\n version: Arc::new(RwLock::new(String::default())),\n\n storage: std::default::Default::default(),\n\n data_seeder: None,\n\n tx: None,\n\n is_dirty: Arc::new(RwLock::new(false)),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/state/default.rs", "rank": 45, "score": 24237.971455485807 }, { "content": " &Method::PUT => set_handler(state, req).await.unwrap_or(Responses::internal_error(Some(\"Failed to commit state sent by remote peer.\".into()))),\n\n _ => Responses::not_found(None),\n\n })\n\n}\n\n\n\nimpl Default {\n\n async fn server(&self, state: state::SafeState) -> Result<()> {\n\n let service = make_service_fn(move |_| {\n\n let state = state.clone();\n\n async move {\n\n Ok::<_, Infallible>(service_fn(move |req| {\n\n wrap_json_response(handler)(state.clone(), req)\n\n }))\n\n }\n\n });\n\n\n\n let server = Server::try_bind(&([0, 0, 0, 0], self.port).into())?;\n\n server.serve(service).await?;\n\n\n\n Ok(())\n", "file_path": "src/connection/default.rs", "rank": 46, "score": 24237.616978257585 }, { "content": " }\n\n}\n\n\n\n#[typetag::serde]\n\nimpl agent::Agent for Default {\n\n /// Starts the server while passing the current state to be used by the handlers.\n\n fn start<'a>(&'a self, state: state::SafeState) -> BoxFuture<'a, Result<()>> {\n\n self.server(state).map_err(|e| e.into()).boxed()\n\n }\n\n}\n", "file_path": "src/agent/default.rs", "rank": 47, "score": 24237.491538729726 }, { "content": "//! Default implementation of the Agent trait.\n\n//!\n\n//! This is a simple yet decent implementation of the Agent trait and should answer most use\n\n//! cases. \n\n//!\n\n//! This agent exposes a GET and a PUT endpoints to allow an app to get and set values from and to the\n\n//! state.\n\n//!\n\n//! The format of the keys are dependent on the `State` layer being used. For example, when using\n\n//! 
the [Default] state, the keys are expected to\n\n//! be a String and the values are expected to be in the format specified in the documentation of\n\n//! the state layer.\n\n//!\n\n//! # GET /`<key>`\n\n//! To get a value, the app can send a `GET` request with a `key` that represents a key in the\n\n//! state. \n\n//!\n\n//! Expects key to be a String. Returns the value as-is from the state.\n\n//!\n\n//! # Example\n", "file_path": "src/agent/default.rs", "rank": 48, "score": 24237.238893343125 }, { "content": " fn from(value: Box<Value>) -> Self {\n\n Box::new(*value)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::state::State;\n\n\n\n #[test]\n\n fn state_versions_should_be_equal() {\n\n let value = HashMap::unit(\"cat\".to_string(), Value {value: \"garfield\".into(), ts: 0, ttl: None}.into());\n\n\n\n let first = Default::default();\n\n let second = Default::default();\n\n\n\n first.set(&value);\n\n second.set(&value.clone());\n\n\n", "file_path": "src/state/default.rs", "rank": 49, "score": 24237.195804374052 }, { "content": " #[serde(skip_serializing, skip_deserializing)]\n\n is_dirty: Arc<RwLock<bool>>,\n\n}\n\n\n\nimpl Default {\n\n /// Merges the two maps while resolving conflicts.\n\n ///\n\n /// A value from the other map will be commited to the state only \n\n /// if it has a newer timestamp.\n\n ///\n\n /// If there was a change to the sate, the version will be recorded \n\n /// in the version history.\n\n fn set(&self, map: &HashMap<String, Box<Value>>) {\n\n let map = map.clone();\n\n let mut is_dirty = false;\n\n\n\n // merge the maps\n\n let mut storage = self.storage.write().unwrap();\n\n for (key, mut right) in map {\n\n if right.is_expired() {\n", "file_path": "src/state/default.rs", "rank": 50, "score": 24237.044700727747 }, { "content": " /// `push_interval`.\n\n push_interval: u64,\n\n\n\n /// The pull interval in milliseconds.\n\n /// Default value is 60 seconds (60000 milliseconds).\n\n ///\n\n /// The connection layer will connect and pull the state from peers every `pull_interval`.\n\n pull_interval: u64,\n\n\n\n /// The number of peers to connect to on each interval and exchange the state with.\n\n /// Default value: 3.\n\n r0: usize,\n\n\n\n /// The connection timeout when connecting and exchanging data with a peer.\n\n /// Default value: 1000ms. \n\n timeout: u64,\n\n\n\n /// The peer provider to use.\n\n ///\n\n /// The connection layer will reach out to the peer provider to get the full list of available\n", "file_path": "src/connection/default.rs", "rank": 51, "score": 24235.781838162166 }, { "content": " }\n\n\n\n /// Receiver thread.\n\n /// Connects to `r0` peers at random and pulls their state into its own state.\n\n ///\n\n /// When pulling the state, the version of the current state is specified in the request \n\n /// path so the other peer would determine whether to respond with its full state or nothing.\n\n ///\n\n /// If the current version matches the one of the peer's version then the peer will respond \n\n /// with 204 (no content). 
The full state will be returned by the peer if the versions do not\n\n /// match.\n\n async fn receiver(&self, state: state::SafeState) -> Result<()> {\n\n loop {\n\n // sample r0 peers\n\n let peers = self.peer_provider.get();\n\n let peers = peers.into_iter().sample(self.r0);\n\n \n\n let res = stream::iter(peers)\n\n .map(|peer| {\n\n let url = format!(\"http://{}:{}/{}\", peer.ip(), peer.port().unwrap_or(self.target_port.unwrap_or(self.port)), state.version());\n", "file_path": "src/connection/default.rs", "rank": 52, "score": 24235.59317122601 }, { "content": "use std::error::Error as StdError;\n\nuse std::sync::{Arc, RwLock};\n\nuse tokio::time::{interval_at, Duration, Instant};\n\nuse log::{info, warn};\n\nuse std::sync::mpsc;\n\nuse std::hash::{Hash, Hasher};\n\nuse twox_hash::XxHash64;\n\n\n\n/// The maximum number of pending async set operations.\n\n///\n\n/// When the channel for async set operations reaches this maximum, \n\n/// all subsequent set operations will be blocked until pending ops are \n\n/// dealt with.\n\nconst MAX_SET_OPS: usize = 64000;\n\n\n\n/// Version information.\n\n///\n\n/// Holds the timestamp where the version was recorded and \n\n/// the content itself.\n\n#[derive(Debug, Clone)]\n", "file_path": "src/state/default.rs", "rank": 53, "score": 24235.400081555777 }, { "content": " Ok(diff.as_bytes().unwrap())\n\n }).or::<Vec<u8>>(Ok(root.clone())).unwrap();\n\n\n\n Some((state_to_publish, root))\n\n }).await?;\n\n\n\n if res.is_none() {\n\n time::delay_for(time::Duration::from_millis(self.push_interval)).await;\n\n continue;\n\n }\n\n\n\n let (state_to_publish, last) = res.unwrap();\n\n last_published = last;\n\n last_published_version = state.version();\n\n\n\n // sample r0 peers\n\n let peers = self.peer_provider.get();\n\n let peers = peers.into_iter().sample(self.r0);\n\n\n\n // start sending to peers in parallel\n", "file_path": "src/connection/default.rs", "rank": 54, "score": 24235.275361715845 }, { "content": " let state = Default::default();\n\n\n\n state.set(&value1);\n\n state.set(&value2);\n\n\n\n let hm: Result<HashMap<String, Box<Value>>, Box<dyn StdError>> = (&*state.get_root().unwrap()).into();\n\n let mut keys: Vec<String> = hm.unwrap().keys().cloned().collect();\n\n\n\n assert_eq!(vec!(\"cat\", \"dog\").sort(), keys.sort());\n\n }\n\n\n\n #[test]\n\n fn should_return_diff() {\n\n let value1 = HashMap::unit(\"cat\".to_string(), Value {value: \"garfield\".into(), ts: 0, ttl: None}.into());\n\n let value2 = HashMap::unit(\"dog\".to_string(), Value {value: \"snoopy\".into(), ts: 0, ttl: None}.into());\n\n\n\n let state = Default::default();\n\n\n\n state.set(&value1);\n\n state.set(&value2);\n\n\n\n let diff: Result<HashMap<String, Box<Value>>, Box<dyn StdError>> = (&*state.diff(&value2).unwrap()).into();\n\n let keys: Vec<String> = diff.unwrap().keys().cloned().collect();\n\n\n\n assert_eq!(vec!(\"cat\"), keys);\n\n }\n\n}\n", "file_path": "src/state/default.rs", "rank": 55, "score": 24235.142589782492 }, { "content": "//! The default connection layer implementation\n\n//!\n\n//! This is an implementation of the Connection trait.\n\n//!\n\n//! It offers a simple and powerful way of exchanging data with other peers.\n\n//!\n\n//! # Choosing peers\n\n//! The connection chooses peers by using a [peer provider]. A peer provider is an abstration over a\n\n//! vector of peers. A peer provider is responsible for retrieving the full list of peers this\n\n//! connection layer can connect with. 
See the [peer provider] documentation for more information.\n\n//!\n\n//! If not provided in the configuration, this connection will choose the `k8s` peer provider to\n\n//! get the full list of available peers to connect to. The `k8s` peer provider queries Kubernetes\n\n//! server api for endpoints of other peers by using the selector field in the configuration.\n\n//!\n\n//! The default connection will then randomly select a subset of the peers and will exchange the\n\n//! state with each one of them in parallel. The number of peers sampled from the full list is\n\n//! determined by the `r0` configuration field.\n\n//!\n\n//! # Exchanging data\n", "file_path": "src/connection/default.rs", "rank": 56, "score": 24235.10988768475 }, { "content": " /// Initialize the peer provider, the server and the publisher thread and returns a future that\n\n /// runs them all.\n\n fn start<'a>(&'a self, state: state::SafeState) -> BoxFuture<'a, Result<()>> {\n\n let init: Result<()> = self.peer_provider.init().map_err(|e| e.into());\n\n let init = async { Ok(init) };\n\n\n\n let server = self.server(state.clone());\n\n let publisher = self.publisher(state.clone());\n\n let receiver = self.receiver(state.clone());\n\n\n\n future::try_join4(init, server, publisher, receiver)\n\n .map_ok(|_| ())\n\n .boxed()\n\n }\n\n}\n\n\n\nimpl state::StateValue for Vec<u8> {\n\n fn as_bytes(&self) -> Option<Vec<u8>> {\n\n Some(self.to_owned())\n\n }\n", "file_path": "src/connection/default.rs", "rank": 57, "score": 24234.81821258169 }, { "content": " *self.version.write().unwrap() = hash(&self.storage.read().unwrap()).to_string();\n\n *self.is_dirty.write().unwrap() = false;\n\n }\n\n\n\n self.version.read().unwrap().clone()\n\n }\n\n\n\n /// Sets a new value to the state.\n\n ///\n\n /// The new value is expected to be in a form of a key/value hashmap.\n\n /// The value (hashmap) is then filtered to include only values that are new or newer than the current\n\n /// values in store. This is to resolve conflicts of updating items that were already updated\n\n /// by another peer. See the module documentation for more information on conflict resolution.\n\n fn set(&self, value: &dyn StateValue) -> Result<(), Box<dyn StdError>> {\n\n value.as_bytes().and_then(|value| {\n\n self.tx.as_ref().and_then(|tx| tx.send(value).ok())\n\n });\n\n\n\n Ok(())\n\n }\n", "file_path": "src/state/default.rs", "rank": 58, "score": 24234.52499501112 }, { "content": "//! The connection uses HTTP to push and pull (PUT and GET) the state to and from other peers.\n\n//!\n\n//! ## Pushing\n\n//! Only the changes from the last publish time will be pushed to other peers.\n\n//!\n\n//! ## Pulling\n\n//! When pulling, the connection layer specifies its own state version and if it matches the one \n\n//! that the other peer has then nothing will be exchanged. If the versions do not match then \n\n//! the other peer will return its own full state.\n\n//!\n\n//! The connection layer does not assume anything about the content of the data being exchanged.\n\n//! The data will be passed as-in to other peers.\n\n//!\n\n//! The interval in which the data will be exchanged is set in the `push_interval` and `pull_interval` configuration flags.\n\n//!\n\n//! See more about the default implementation and the different options it provides in the [struct documentation].\n\n//!\n\n//! [peer provider]: connection::peer_provider\n\n//! 
[struct documentation]: struct@Default\n\n\n", "file_path": "src/connection/default.rs", "rank": 59, "score": 24234.414567494634 }, { "content": " if let Ok(Ok(result)) = result {\n\n if let Err(e) = state.set(result as &dyn state::StateValue) {\n\n warn!(\"Failed to set peer response to state; {}\", e);\n\n }\n\n }\n\n });\n\n\n\n time::delay_for(time::Duration::from_millis(self.pull_interval)).await;\n\n }\n\n }\n\n\n\n /// Publisher thread.\n\n /// Publishes the state at an `interval` time.\n\n ///\n\n /// This thread will loop forever, waiting for `interval` time to pass.\n\n /// It will then reach out to the peer provider to get the full list of available peers and\n\n /// will randomly pick `r0` peers to exchange the state with.\n\n ///\n\n /// It connects to other peers in parallel and publishes the changes since last publish.\n\n async fn publisher(&self, state: state::SafeState) -> Result<()> {\n", "file_path": "src/connection/default.rs", "rank": 60, "score": 24234.294047115207 }, { "content": " }\n\n\n\n /// Purges expired keys.\n\n fn purge(&self) {\n\n self.storage.write().unwrap().retain(|_, v| !v.is_expired());\n\n }\n\n\n\n /// Seeds the state with the data from the DataSeeder.\n\n fn seed(&self, data_seeder: Arc<RwLock<Box<dyn DataSeeder>>>) -> Result<(), Box<dyn StdError>> {\n\n data_seeder.read().unwrap().load().and_then(|data| {\n\n let data: Result<HashMap<String, Box<Value>>, Box<dyn StdError>> = (&*data).into();\n\n data.and_then(|data| {\n\n self.set(&data);\n\n Ok(())\n\n })\n\n })\n\n }\n\n}\n\n\n\n/// Default values for the state struct.\n", "file_path": "src/state/default.rs", "rank": 61, "score": 24234.22471899168 }, { "content": "}\n\n\n\nimpl StateValue for Value {\n\n fn as_bytes(&self) -> Option<Vec<u8>> {\n\n serde_json::to_vec(self).ok()\n\n }\n\n}\n\n\n\nimpl StateValue for HashMap<String, Box<Value>> {\n\n fn as_bytes(&self) -> Option<Vec<u8>> {\n\n serde_json::to_vec(self).ok()\n\n }\n\n}\n\n\n\nimpl StateValue for (String, Value) {\n\n fn as_bytes(&self) -> Option<Vec<u8>> {\n\n serde_json::to_vec(self).ok()\n\n }\n\n}\n\n\n", "file_path": "src/state/default.rs", "rank": 62, "score": 24234.05066415379 }, { "content": "}\n\n\n\nimpl state::StateValue for hyper::body::Bytes {\n\n fn as_bytes(&self) -> Option<Vec<u8>> {\n\n Some(self.to_vec())\n\n }\n\n}\n\n\n\nimpl From<Box<dyn state::StateValue>> for hyper::Body {\n\n fn from(value: Box<dyn state::StateValue>) -> Self {\n\n value.as_bytes().unwrap_or(\"{}\".into()).into()\n\n }\n\n}\n\n\n\nimpl From<Box<dyn state::StateValue>> for reqwest::Body {\n\n fn from(value: Box<dyn state::StateValue>) -> Self {\n\n value.as_bytes().unwrap_or(\"{}\".into()).into()\n\n }\n\n}\n", "file_path": "src/connection/default.rs", "rank": 63, "score": 24233.947104106515 }, { "content": "//! The default implementation of the State layer.\n\n//!\n\n//! This implementation is of a Key/Value state where a value can be anything that can be serialized\n\n//! into a JSON representation.\n\n//!\n\n//! The default state has the following behavior:\n\n//! - It's a Key/Value store\n\n//! - It supports TTL\n\n//! - It resolves conflicts using a timestamp\n\n//! - It records version history\n\n//!\n\n//! # Keys\n\n//! This state expects keys to be of a string type.\n\n//!\n\n//! # Values\n\n//! The values are expected to be a JSON of the following form:\n\n//! ```json\n\n//! {\"value\": <anything JSON>, \"ttl\": <optional ttl in milliseconds>, \"ts\": <optional, manual\n\n//! setting of the timestamp of the value>}\n\n//! 
```\n", "file_path": "src/state/default.rs", "rank": 64, "score": 24233.34930823829 }, { "content": " assert_eq!(first.version(), second.version());\n\n }\n\n\n\n #[test]\n\n fn state_versions_should_be_different() {\n\n let value1 = HashMap::unit(\"cat\".to_string(), Value {value: \"garfield\".into(), ts: 0, ttl: None}.into());\n\n let value2 = HashMap::unit(\"cat\".to_string(), Value {value: \"garfield\".into(), ts: 1, ttl: None}.into());\n\n\n\n let first = Default::default();\n\n let second = Default::default();\n\n\n\n first.set(&value1);\n\n second.set(&value2);\n\n\n\n assert_ne!(first.version(), second.version());\n\n }\n\n\n\n #[test]\n\n fn should_purge_items() {\n\n let value = HashMap::unit(\"dog\".to_string(), Value {value: \"snoopy\".into(), ts: 0, ttl: None}.into());\n", "file_path": "src/state/default.rs", "rank": 65, "score": 24233.188375088524 }, { "content": "\n\n let state = Default::default();\n\n\n\n // Force insersion of expired vlaue\n\n state.storage.write().unwrap().insert(\"cat\".to_string(), Value {value: \"garfield\".into(), ts: 0, ttl: Some(1)}.into());\n\n state.set(&value);\n\n\n\n assert_eq!(state.storage.read().unwrap().len(), 2);\n\n state.purge();\n\n assert_eq!(state.storage.read().unwrap().len(), 1);\n\n }\n\n\n\n #[test]\n\n fn should_not_return_expired_values() {\n\n let value = HashMap::unit(\"dog\".to_string(), Value {value: \"snoopy\".into(), ts: 0, ttl: None}.into());\n\n\n\n let state = Default::default();\n\n\n\n // Force insersion of expired vlaue\n\n state.storage.write().unwrap().insert(\"cat\".to_string(), Value {value: \"garfield\".into(), ts: 0, ttl: Some(1)}.into());\n", "file_path": "src/state/default.rs", "rank": 66, "score": 24233.040168655323 }, { "content": "//!\n\n//! Assuming usage of the [Default] state, a value might look like this:\n\n//!\n\n//! ```\n\n//! GET /cat\n\n//!\n\n//! {\"ts\":1601241450390,\"ttl\":null,\"value\":\"garfield\"}\n\n//! ```\n\n//!\n\n//! As you can see, the value holds more than the value itself. Included are the ttl (can be null)\n\n//! which is an absolute value of when this key will expire, and the timestamp (ts) that this key\n\n//! was created.\n\n//!\n\n//! A different form of the value might be returned, depending on which state layer is being used.\n\n//! In any case, this agent implementation does not assume anything about the format of the values\n\n//! returned by the state.\n\n//! # PUT /\n\n//! To set a value to the state, the app can send a `PUT` request with a body that conforms to the\n\n//! state expected value.\n\n//!\n", "file_path": "src/agent/default.rs", "rank": 67, "score": 24232.93420644582 }, { "content": "#[typetag::serde]\n\nimpl state::State for Default {\n\n /// Initializes the state.\n\n ///\n\n /// Seeds the state with the specified DataSeeder (if one was specified).\n\n ///\n\n /// Spawns an async set thread which will perform async commits to the state.\n\n ///\n\n /// Spawns a new thread to purge expired values and versions at a certain interval and returns a safe state\n\n /// to be shared with the connection and agent layers.\n\n fn init(&self) -> state::SafeState {\n\n let mut this = self.clone();\n\n\n\n // if we have a data seeder then use it to seed the data\n\n this.data_seeder.clone().and_then(|data_seeder| {\n\n info!(\"Seeding data...\");\n\n if let Err(e) = this.seed(data_seeder) {\n\n warn!(\"Failed to seed data; ({})\", e);\n\n }\n\n\n", "file_path": "src/state/default.rs", "rank": 68, "score": 24232.871368751552 }, { "content": "//!\n\n//! 
`value`\n\n//!\n\n//! The only required field is the `value` which can be anything JSON. Even a JSON object.\n\n//!\n\n//! `ttl`\n\n//!\n\n//! The `ttl` field is optional. If not used then the default ttl from the state configuration \n\n//! will be used, if one specified.\n\n//!\n\n//! `ts`\n\n//!\n\n//! The `ts` field is optional and can be used to override the timestamp that is automatically \n\n//! set for every new value.\n\n//!\n\n//! # TTL\n\n//! The state returns `None` for expired keys. Expired keys are filtered out when the\n\n//! storage is iterated and are purged by a thread running in the background.\n\n//!\n\n//! The purger thread uses the interval settings from the configuration of the state.\n", "file_path": "src/state/default.rs", "rank": 69, "score": 24232.851372426565 }, { "content": " state.set(&value);\n\n\n\n assert!(state.get(&\"dog\".to_string() as &dyn StateValue).is_some());\n\n assert!(state.get(&\"cat\".to_string() as &dyn StateValue).is_none());\n\n }\n\n\n\n #[test]\n\n fn should_be_marked_as_dirty() {\n\n let value = HashMap::unit(\"dog\".to_string(), Value {value: \"snoopy\".into(), ts: 0, ttl: None}.into());\n\n let state = Default::default();\n\n\n\n assert_eq!(*state.is_dirty.read().unwrap(), false);\n\n state.set(&value);\n\n assert_eq!(*state.is_dirty.read().unwrap(), true);\n\n }\n\n\n\n #[test]\n\n fn should_return_the_whole_state() {\n\n let value1 = HashMap::unit(\"cat\".to_string(), Value {value: \"garfield\".into(), ts: 0, ttl: None}.into());\n\n let value2 = HashMap::unit(\"dog\".to_string(), Value {value: \"snoopy\".into(), ts: 0, ttl: None}.into());\n", "file_path": "src/state/default.rs", "rank": 70, "score": 24232.627578234446 }, { "content": "\n\n /// Returns the difference between the current state and `other`.\n\n ///\n\n /// If a key is present in both the current state and `other`, it will check if \n\n /// the timestamps are equal and if not then it'll include either the current value or \n\n /// the one from `other`, based on who's value has the most recent timestamp.\n\n fn diff(&self, other: &dyn StateValue) -> Result<Box<dyn StateValue>, Box<dyn StdError>> {\n\n let other: Result<HashMap<String, Box<Value>>, Box<dyn StdError>> = other.into();\n\n let other = other?;\n\n\n\n let d = self.storage.read().unwrap().clone().difference_with(other, |left, right| {\n\n if left.ts == right.ts {\n\n None\n\n } else {\n\n Some(if left.ts < right.ts { left } else { right })\n\n }\n\n });\n\n\n\n Ok(Box::new(d))\n\n }\n\n}\n\n\n", "file_path": "src/state/default.rs", "rank": 71, "score": 24232.124784785723 }, { "content": " Some(())\n\n });\n\n\n\n // start the async_set consumer thread\n\n let (tx, rx) = mpsc::sync_channel(MAX_SET_OPS);\n\n this.tx = Some(tx);\n\n\n\n let this = Arc::new(this);\n\n let t = this.clone();\n\n tokio::task::spawn_blocking(move || {async_set(t, rx)});\n\n \n\n // start the purger thread\n\n tokio::spawn(purge(this.clone()));\n\n\n\n this\n\n }\n\n\n\n /// Returns the current state version.\n\n fn version(&self) -> String {\n\n if self.is_dirty.read().unwrap().clone() {\n", "file_path": "src/state/default.rs", "rank": 72, "score": 24232.11450129717 }, { "content": "\n\n /// Returns the value associated with the specified key.\n\n ///\n\n /// `key` is expected to resolve to a string.\n\n fn get(&self, key: &dyn StateValue) -> Option<Box<dyn StateValue>> {\n\n let key: String = String::from_utf8(key.as_bytes().unwrap_or(Vec::new())).unwrap();\n\n\n\n let storage = self.storage.read().unwrap().clone();\n\n storage\n\n 
.get(&key)\n\n .cloned()\n\n .filter(|v| !v.is_expired())\n\n .map(|v| v.into())\n\n }\n\n\n\n /// Returns the whole state (root).\n\n fn get_root(&self) -> Option<Box<dyn StateValue>> {\n\n let value: HashMap<String, Box<Value>> = self.storage.read().unwrap().clone();\n\n Some(value.into())\n\n }\n", "file_path": "src/state/default.rs", "rank": 73, "score": 24231.12699886438 }, { "content": " let mut last_published: Vec<u8> = Vec::<u8>::default();\n\n let mut last_published_version = String::default();\n\n\n\n loop {\n\n if last_published_version == state.version() {\n\n time::delay_for(time::Duration::from_millis(self.push_interval)).await;\n\n continue;\n\n }\n\n\n\n let last = last_published.clone();\n\n let state_clone = state.clone();\n\n let res = tokio::task::spawn_blocking(move || {\n\n // get the recent state\n\n let root = state_clone.get_root().unwrap_or(\"\".into()).as_bytes().unwrap();\n\n\n\n if root == last {\n\n return None;\n\n }\n\n\n\n let state_to_publish = state_clone.diff(&last).and_then(|diff| {\n", "file_path": "src/connection/default.rs", "rank": 74, "score": 24229.672560954972 }, { "content": " continue;\n\n }\n\n\n\n if self.ttl.is_some() && right.ttl.is_none() {\n\n right.ttl = self.ttl;\n\n }\n\n\n\n storage.entry(key)\n\n .and_modify(|v| {\n\n if v.ts < right.ts {\n\n *v = right.clone().into();\n\n is_dirty = true;\n\n }})\n\n .or_insert({\n\n is_dirty = true;\n\n right.into()\n\n }); \n\n }\n\n\n\n *self.is_dirty.write().unwrap() = is_dirty;\n", "file_path": "src/state/default.rs", "rank": 75, "score": 24227.975822247543 }, { "content": "\n\nconst DEFAULT_CONFIG_FILE: &str = \"config.yml\";\n\n\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct Config {\n\n pub version: String,\n\n pub spec: Spec,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug)]\n\npub struct Spec {\n\n pub agent: Box<dyn agent::Agent>,\n\n pub state: Box<dyn state::State>,\n\n pub connection: Box<dyn connection::Connection>,\n\n}\n\n\n\n/// Returns the configuration after dynanmically loading all layers.\n\n///\n\n/// Takes command line arguments, loads the YAML configuration file and initializes all layer\n\n/// objects.\n", "file_path": "src/config.rs", "rank": 76, "score": 15.153237686407534 }, { "content": "//! - 127.0.0.1\n\n//! ```\n\n//!\n\n//! The configuration file is devided into three parts, one for each layer. The configuration\n\n//! for each layer is specific to that layer based on its kind. In this example you can see that\n\n//! the configuration will load the Default agent, Default state and Default connection and within\n\n//! the connection layer it will load the static peer provider.\n\n//!\n\n//! See the documentation for each layer implementation to find out what available configuration\n\n//! settings there are.\n\n\n\nuse crate::agent;\n\nuse crate::connection;\n\nuse crate::state;\n\n\n\nuse clap::ArgMatches;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::error::Error;\n\nuse std::fmt;\n\nuse std::fs;\n", "file_path": "src/config.rs", "rank": 77, "score": 11.103775191159027 }, { "content": "pub mod data_seeder;\n\n\n\nuse std::error::Error as StdError;\n\nuse std::sync::Arc;\n\n\n\n/// An atomic reference to a state.\n\n///\n\n/// This type makes it so that we can share the state across threads.\n\npub type SafeState = Arc<dyn State>;\n\n\n\n/// A trait used as a protocol between different components that use the state.\n\n///\n\n/// Both the Connection and Agent layers use the state to get and set values. 
They do not know\n\n/// anything about the implementation of one another. This trait allows them to use the same state\n\n/// without assuming anything about the structure of a state value.\n\n///\n\n/// The trait offers functions to serialize to different formats. The default connection layer for\n\n/// example, serializes a state value to json to be exchanged with other peers.\n\n///\n\n/// The functions in this trait have a default implementation that always return `None` so that\n", "file_path": "src/state.rs", "rank": 78, "score": 10.716307656881728 }, { "content": "use std::error::Error as StdError;\n\nuse crate::state::StateValue;\n\nuse crate::state::data_seeder;\n\nuse std::fs;\n\nuse serde::{Deserialize, Serialize};\n\n\n", "file_path": "src/state/data_seeder/file.rs", "rank": 79, "score": 8.628437449475145 }, { "content": " fn as_bytes(&self) -> Option<Vec<u8>> {\n\n Some(Vec::from(*self))\n\n }\n\n}\n\n\n\nimpl StateValue for String {\n\n fn as_bytes(&self) -> Option<Vec<u8>> {\n\n Some(Vec::from(self.as_str()))\n\n }\n\n}\n\n\n\nimpl<T> From<T> for Box<dyn StateValue> \n\nwhere T: StateValue + 'static\n\n{\n\n fn from(t: T) -> Self {\n\n Box::new(t)\n\n }\n\n}\n", "file_path": "src/state.rs", "rank": 80, "score": 7.811274133415692 }, { "content": "//! The Connection layer.\n\n//!\n\n//! The connection layer is responsible for exchaging the state with other peers.\n\n//!\n\n//! There can be many implementations for the connection layer. One could choose the protocol, the\n\n//! transport layer, how to choose peers to exchange the state with, the rate in which the state is\n\n//! being exchanged, compression, etc...\n\n//!\n\n//! See the [default connection] layer implementation for some ideas.\n\n//!\n\n//! [default connection]: default\n\n\n\npub mod default;\n\npub mod peer_provider;\n\n\n\nuse crate::state;\n\nuse futures::future::BoxFuture;\n\nuse std::error::Error as StdError;\n\n\n\n/// The Connection Trait.\n\n///\n\n/// The only required method is the `start` method where the state is passed to the implementation\n\n/// to should be used to set and get values to and from the state.\n\n///\n\n/// See the default connection implementation for an idea on how to implement a connection layer.\n\n#[typetag::serde(tag = \"kind\")]\n", "file_path": "src/connection.rs", "rank": 81, "score": 7.573683729173931 }, { "content": "use http::Response;\n\nuse hyper::{Body, StatusCode};\n\n\n\npub struct Responses {}\n\n\n\nimpl Responses {\n\n pub fn response(status: StatusCode, body: Body) -> Response<Body> {\n\n Response::builder().status(status).body(body).unwrap()\n\n }\n\n\n\n pub fn ok(body: Body) -> Response<Body> {\n\n Responses::response(StatusCode::OK, body)\n\n }\n\n\n\n pub fn not_found(body: Option<Body>) -> Response<Body> {\n\n Responses::response(StatusCode::NOT_FOUND, body.unwrap_or(\"not found\".into()))\n\n }\n\n\n\n pub fn bad_request(body: Option<Body>) -> Response<Body> {\n\n Responses::response(\n", "file_path": "src/helpers/http/responses.rs", "rank": 82, "score": 7.378127068633493 }, { "content": "### 1. 
Configmap\n\n```yaml\n\napiVersion: v1\n\nkind: ConfigMap\n\nmetadata:\n\n name: c19-getting-started-config\n\nimmutable: true\n\ndata:\n\n config.yaml: |\n\n version: 0.1\n\n spec:\n\n agent:\n\n kind: Default\n\n port: 3097\n\n state:\n\n kind: Default\n\n ttl: null\n\n purge_interval: 60000\n\n connection:\n\n kind: Default\n\n port: 4097\n\n push_interval: 1000\n\n pull_interval: 60000\n\n r0: 3\n\n timeout: 1000\n\n peer_provider:\n\n kind: K8s\n\n selector:\n\n c19: getting-started\n\n namespace: default\n\n```\n\n\n\nSave this as `configmap.yaml` file and apply it to the cluster like so:\n\n```shell\n\nkubectl apply -f configmap.yaml\n\n```\n\n\n\n[Configuring the Agent]: getting-started-configuration.md\n\n\n\n### 2. Deployment File\n\n```yaml\n\napiVersion: apps/v1\n\nkind: Deployment\n\nmetadata:\n\n name: nginx-deployment\n\n labels:\n\n app: nginx\n\nspec:\n\n replicas: 3\n\n selector:\n\n matchLabels:\n\n app: nginx\n\n template:\n\n metadata:\n\n labels:\n\n app: nginx\n\n c19: getting-started\n\n spec:\n\n containers: \n\n - name: nginx \n\n image: nginx:1.14.2\n\n ports:\n\n - containerPort: 80\n\n - name: c19\n\n image: c19p/c19:0.1.0\n\n args: [\"/usr/local/bin/c19\", \"--config\", \"/etc/c19/config.yaml\"]\n\n ports:\n\n - name: agent\n\n containerPort: 3097\n\n protocol: TCP\n\n - name: connection\n\n containerPort: 4097\n\n protocol: TCP\n\n volumeMounts:\n\n - name: c19\n\n mountPath: /etc/c19\n\n volumes:\n\n - name: c19\n\n configMap:\n\n name: c19-getting-started-config\n\n```\n\n\n\nSave this as `deployment.yaml` file and apply it to the cluster like so:\n\n```shell\n\nkubectl apply -f deployment.yaml\n\n```\n\n\n\n**That's it! Your C19 powered Nginx is now deployed to Kubernetes and is ready to be used.**\n\n\n", "file_path": "README.md", "rank": 83, "score": 6.57842854182975 }, { "content": "//!\n\n//! Accompaning this documentation is the book about the high level design and some ideas on how to\n\n//! extend the protocol. Read more about it here. [FIXME: link to the developer guide]\n\n//!\n\n//! # Kubernetes\n\n//! While the C19 protocol can be used anywhere, it was design to be Kubernetes first. This means\n\n//! that you will find different deployment strategies, peer providers and an all-in-all mindset of\n\n//! Kubernetes. One of the goals of the project is to \"Just work\" and to allow a user of the\n\n//! project a fast and easy-to-reason-about deployment to a Kubernetes cluster.\n\n//!\n\nmod agent;\n\nmod connection;\n\nmod helpers;\n\nmod state;\n\n\n\npub mod config;\n\n\n\nuse futures::future::Future;\n\nuse futures::stream::FuturesUnordered;\n\nuse std::clone::Clone;\n", "file_path": "src/lib.rs", "rank": 84, "score": 6.057192699058332 }, { "content": "//! A collection of middlewares to be used with Hyper HTTP servers.\n\n//!\n\npub mod json;\n", "file_path": "src/helpers/middlewares.rs", "rank": 85, "score": 5.7752563996010196 }, { "content": "pub mod file;\n\n\n\nuse std::error::Error as StdError;\n\nuse crate::state::StateValue;\n\n\n\n#[typetag::serde(tag = \"kind\")]\n", "file_path": "src/state/data_seeder.rs", "rank": 86, "score": 5.755159597270545 }, { "content": "//! port: 3097\n\n//! state:\n\n//! kind: Default\n\n//! ttl: null\n\n//! purge_interval: 10000\n\n//! data_seeder:\n\n//! kind: File\n\n//! filename: data.json\n\n//! connection:\n\n//! kind: Default\n\n//! push_interval: 1000\n\n//! pull_interval: 60000\n\n//! force_publish: 0.1\n\n//! port: 4097\n\n//! target_port: 4098\n\n//! r0: 6\n\n//! timeout: 5000\n\n//! 
peer_provider:\n\n//! kind: Static\n\n//! peers:\n", "file_path": "src/config.rs", "rank": 87, "score": 5.747202454387337 }, { "content": "//! A collection of helpful utility functions.\n\n\n\nuse rand::rngs::SmallRng;\n\nuse rand::seq::IteratorRandom;\n\nuse rand::SeedableRng;\n\nuse std::convert::TryFrom;\n\nuse std::time::{SystemTime, UNIX_EPOCH};\n\n\n\n/// Returns the current time in seconds since epoch.\n", "file_path": "src/helpers/utils.rs", "rank": 88, "score": 5.658496999247241 }, { "content": "# The C19 Protocol\n\n\n\nThe C19 protocol is a variant of the [Gossip protocol]. It allows a group of services to agree on a service-wide state. It is a replicated cache \n\nand can be embedded right into your Kubernetes pods.\n\n\n\nThe C19 protocol can help you share a state between your Kubernetes pods and have that state available to your application locally, without worrying \n\nabout fetching the data.\n\n\n\n![Sharing state use case](resources/sharing-state.png)\n\n\n\nC19 attaches to your pods and decouples the process of fetching the data from using it. Using a label selector you can easily create a group of pods \n\nyou wish to share a state.\n\n\n\nC19 is a simple, powerful and extensible system and can reduce the complexities by taking care of fetching the data and making it available locally to your services. It can \n\nrun within a Kubernetes cluster or without one.\n\n\n\n## The Books\n\nThe best and most extensive source of information is the [User Guide]. Please read it!\n\nIt has anything from a step by step guide for running the C19 protocol to a drill down on architecture.\n\n\n\n[The User Guide]\n\n\n\nAnd we have a second book ready if you wish to contribute to the C19 project.\n\n\n\n[The Developer Guide]\n\n\n\n## A Very Quick Start\n\nThe following Kubernetes configuration files will deploy a cluster of Nginx pods with an attached c19 agent for each one. You can then explore the way data is shared across \n\nyour pods and how it becomes available locally to the Nginx service. Nginx in this example simulates your application.\n\n\n\nTo attach a C19 agent to your Nginx deployment, you will need two files:\n\n1. A configmap that will hold your C19 configuration. You can read about the C19 configuration [here].\n\n2. A Kubernetes deployment file.\n\n\n", "file_path": "README.md", "rank": 89, "score": 5.653588360916864 }, { "content": "//! The State layer.\n\n//!\n\n//! The state is responsible for holding the data and for exposing a way to get and set values to\n\n//! and from it.\n\n//!\n\n//! The [State] trait does its best to assume nothing about the data an implementor might hold. It\n\n//! can be a Key/Value store like the [default State] implementation, a blob of binary data or anything else an implementor wishes for.\n\n//!\n\n//! It does so by using a [StateValue] trait with default implementations of serializing the data to\n\n//! different formats. For example, the default state implementation implements a serialization of\n\n//! StateValue to JSON.\n\n//!\n\n//! The state layer can choose to implement other mechanism related to the state, like TTL,\n\n//! data compression, etc.\n\n//!\n\n//! [State]: State\n\n//! [default State]: default\n\n//! [StateValue]: StateValue\n\n\n\npub mod default;\n", "file_path": "src/state.rs", "rank": 90, "score": 5.232982404264018 }, { "content": "//!\n\n//! C19 should answer most use-cases as mostly systems can tolerate data that is near-realtime or stale.\n\n//!\n\n//! #### 2. 
When your data is very large and changes very often.\n\n//!\n\n//! There's no limit set in the code. It's up to you to decide the limit of your data. \n\n//! The data is held in memory and being exchanged over the network between peers. You\n\n//! should consider those two parameters when you decide on your data limits and the different\n\n//! layers you choose to use.\n\n//!\n\n//! # Who This Documentation Is For?\n\n//! C19 has been designed to be easily extensible. One could implement different strategies for\n\n//! exhanging state between peers, for holding data in memory, for applying different algorithms\n\n//! for state management, etc.\n\n//!\n\n//! This documentation is about that. It's a walkthrough of the code so that you can find your way\n\n//! around and extend the protocol as needed.\n\n//!\n\n//! If you'd like to learn more about using C19 as it is, please read the user-guide book. [FIXME: link to\n\n//! user-guide book]\n", "file_path": "src/lib.rs", "rank": 91, "score": 4.612333557267004 }, { "content": "use std::sync::Arc;\n\nuse std::error::Error as StdError;\n\n\n\n/// Initializes the state and runs the connection and agent layers.\n\n///\n\n/// The state is given a chance to be initialized by running state::init\n\n/// on the instance. The connection and agent layers are then started while\n\n/// given the initialized state.\n\n///\n\n/// The instances for the state, connection and agent are the ones\n\n/// initialized by the configuration.\n\n///\n\n/// The connection and agents layers are expected to return a future\n\n/// which is then being waited on until completion (mostly indfefinately)./\n", "file_path": "src/lib.rs", "rank": 92, "score": 4.541070862409646 }, { "content": " /// pairs where the key is a String and the value conforms to a serde_json::Value value.\n\n fn set(&self, value: &dyn StateValue) -> Result<(), Box<dyn StdError>>;\n\n\n\n /// Gets the value associated with the specified key.\n\n ///\n\n /// To allow maximum flexibility, the key itself is a StateValue, which in effect means it can\n\n /// be anything desired by the implementor.\n\n fn get(&self, key: &dyn StateValue) -> Option<Box<dyn StateValue>>;\n\n\n\n /// Returns the value associated with the specified key or the default if the key was not found \n\n /// in the state.\n\n fn get_or(&self, key: &dyn StateValue, default: Box<dyn StateValue>) -> Box<dyn StateValue> {\n\n self.get(key).unwrap_or(default)\n\n }\n\n\n\n /// Returns the difference between this and the `other` state.\n\n fn diff(&self, other: &dyn StateValue) -> Result<Box<dyn StateValue>, Box<dyn StdError>>;\n\n\n\n /// Returns the whole state as a StateValue.\n\n ///\n\n /// This is helpful when the connection layer wishes to publish the whole state to its peers.\n\n fn get_root(&self) -> Option<Box<dyn StateValue>>;\n\n}\n\n\n", "file_path": "src/state.rs", "rank": 93, "score": 4.486810143838149 }, { "content": "//! The Agent layer.\n\n//!\n\n//! The Agent is responsible for communicating with the application layer.\n\n//! It exposes endpoints to allow an application to get and set data from and to the state.\n\n//!\n\n//! The Agent trait assumes nothing about the state and the agent implementation. Agent implementors\n\n//! should consider how they wish to expose the agent to the app. Which protocol to use (HTTP for\n\n//! example) and what endpoints to expose (get and set for example).\n\n//!\n\n//! The [default agent] implementation exposes an HTTP GET and PUT endpoints to allow an app to get\n\n//! 
and set key/value pairs to the state. See the [default agent] implementation documentation below for more details.\n\n//!\n\n//! [default agent]: crate::agent::default\n\n\n\npub mod default;\n\n\n\nuse crate::state;\n\nuse futures::future::BoxFuture;\n\nuse std::error::Error as StdError;\n\n\n\n/// The Agent trait.\n\n///\n\n/// The only required method is start.\n\n/// The start method accepts a reference to the current state. Agent implementors should hold on\n\n/// that reference for their use. The run method will make sure to\n\n/// start the agent with an initialized state.\n\n#[typetag::serde(tag = \"kind\")]\n", "file_path": "src/agent.rs", "rank": 94, "score": 3.986200081836608 }, { "content": "//! A collection of helpful functions.\n\n\n\npub mod http;\n\npub mod middlewares;\n\npub mod utils;\n", "file_path": "src/helpers.rs", "rank": 95, "score": 3.8036786615421425 }, { "content": "layer implementations can be used and configured and this affects the consistency, durability and performance of the system.\n\n\n\nAt the time of writing there are only the default implementations for each layer. The `Default` layers guarantee the following:\n\n- `Consistency` - The data spreads across the system in a [Gossip] like way. Some call is convergence consistency.\n\n- `Durability` - The data is shared (duplicated) across the c19 agents so you have full redundancy.\n\n- `Performance` - This one is hard to tell since it depends on how you configure your `Connection` layer. Your configuration and size \n\nof data affects the rate in which the data is shared across the system. To get the most reliable metrics you will have to benchmark this yourself.\n\n- `Conflict Resolution` - The `Default` state layer implementation resolves conflicts by considering the time (in milliseconds resolution) for when a key \n\nwas created. If two c19 agents try to update a third one with different versions of a key, the one that is newer will be chosen.\n\n\n\nYou are encouraged to read more about the how the default layer implementations work in the [Architecture] section.\n\n\n\n[Appendix I] lists the available layer implementations you can choose from. You are welcome to visit this page from time to time as we expect it \n\nto grow and offer different implementations to answer a wide range of use cases.\n\n\n", "file_path": "README.md", "rank": 96, "score": 3.797090569350904 }, { "content": "//! The C19 configuration.\n\n//!\n\n//! The configuration automatically initializes and holds the instances of all layers (Agent, State\n\n//! and Connection).\n\n//!\n\n//! A YAML formatted configuration file is automatically loaded when the c19 process\n\n//! starts. The path to the configuration file is specified using the --config flag when running\n\n//! the process.\n\n//!\n\n//! # Examples:\n\n//!\n\n//! The configuration it based on the different layers chosen. Evey layer implementation has \n\n//! its own configuration.\n\n//!\n\n//! ## Here's a small example for a configuration file:\n\n//! ```\n\n//! version: 0.1\n\n//! spec:\n\n//! agent:\n\n//! 
kind: Default\n", "file_path": "src/config.rs", "rank": 97, "score": 3.691877035957305 }, { "content": "use crate::state;\n\nuse futures::future::{BoxFuture, Future, FutureExt, TryFutureExt};\n\nuse http::{Request, Response};\n\nuse hyper::{http::header::HeaderValue, Body};\n\nuse std::error::Error as StdError;\n\n\n", "file_path": "src/helpers/middlewares/json.rs", "rank": 98, "score": 3.4288083557581515 }, { "content": "use c19::config;\n\nuse clap::App;\n\nuse log::{error, info};\n\nuse std::process;\n\n\n\nuse c19;\n\n\n\n#[tokio::main]\n\nasync fn main() {\n\n env_logger::init();\n\n\n\n let args = App::new(\"The C19 Protocol\")\n\n .version(\"0.1.0\")\n\n .author(\"Chen Fisher\")\n\n .about(\"A variant of the gossip protocol. Allows a group of servies to agree on a service-wide state\")\n\n .arg(\"-c, --config=[FILE] 'Set the path to a c19 config file'\")\n\n .get_matches();\n\n\n\n // load config\n\n let config = config::new(&args).unwrap_or_else(|err| {\n", "file_path": "src/main.rs", "rank": 99, "score": 3.1223139667314133 } ]
Rust
Usermode/fileviewer/src/main.rs
ids1024/rust_os
236dfdca5660372ea6d4c3f1eea463dbd2c2945d
extern crate wtk; #[macro_use(kernel_log)] extern crate syscalls; mod hexview; mod textview; struct Viewer<'a> { dims: ::std::cell::RefCell<(u32, u32)>, file: ::std::cell::RefCell<&'a mut ::syscalls::vfs::File>, mode: ViewerMode, vscroll: ::wtk::ScrollbarV, hscroll: ::wtk::ScrollbarH, hex: ::hexview::Widget, text: ::textview::Widget, toggle_button: ::wtk::ButtonBcb<'static, ::wtk::Colour>, } enum ViewerMode { Hex, Text, } fn main() { ::wtk::initialise(); let mut file: ::syscalls::vfs::File = match ::syscalls::threads::S_THIS_PROCESS.receive_object("file") { Ok(v) => v, Err(e) => { kernel_log!("TOOD: Handle open error in fileviewer - {:?}", e); return ; }, }; for a in ::std::env::args_os() { kernel_log!("arg = {:?}", a); } let mut args = ::std::env::args_os().skip(0); let path = args.next(); let path: Option<&::std::ffi::OsStr> = path.as_ref().map(|x| x.as_ref()); let path = path.unwrap_or( ::std::ffi::OsStr::new(b"-") ); let use_hex = true; let root = Viewer::new(&mut file, use_hex); let mut window = ::wtk::Window::new_def("File viewer", &root).unwrap(); window.set_title( format!("File Viewer - {:?}", path) ); window.focus(&root); window.set_dims(root.min_width(), 150); window.set_pos(150, 100); window.show(); window.idle_loop(); } impl<'a> Viewer<'a> { fn new(file: &'a mut ::syscalls::vfs::File, init_use_hex: bool) -> Viewer<'a> { let rv = Viewer { dims: ::std::cell::RefCell::new( (0,0) ), file: ::std::cell::RefCell::new(file), mode: if init_use_hex { ViewerMode::Hex } else { ViewerMode::Text }, hex: ::hexview::Widget::new(), text: ::textview::Widget::new(), vscroll: ::wtk::ScrollbarV::new(), hscroll: ::wtk::ScrollbarH::new(), toggle_button: ::wtk::Button::new_boxfn( ::wtk::Colour::theme_body_bg(), |_,_| {} ), }; if init_use_hex { let mut file = rv.file.borrow_mut(); file.set_cursor(0); let _ = rv.hex.populate(&mut *file); } else { /* let mut file = rv.file.borrow_mut(); let mut n_lines = 0; let mut max_len = 0; for line in file.split(b'\n') { max_len = ::std::cmp::max(max_len, line.len()); n_lines += 1; } */ } rv } pub fn min_width(&self) -> u32 { SCROLL_SIZE + self.hex.min_width() + 2*2 } } const SCROLL_SIZE: u32 = 16; impl<'a> ::wtk::Element for Viewer<'a> { fn resize(&self, width: u32, height: u32) { *self.dims.borrow_mut() = (width, height); let body_width = width - SCROLL_SIZE; let body_height = height - SCROLL_SIZE; self.vscroll.resize(SCROLL_SIZE, body_height); match self.mode { ViewerMode::Hex => { use std::io::Seek; self.hex.resize(body_width, body_height); let ofs = self.hex.get_start(); let mut file = self.file.borrow_mut(); let _ = file.seek(::std::io::SeekFrom::Start(ofs)).and_then(|_| self.hex.populate(&mut *file)); }, ViewerMode::Text => { self.text.resize(body_width, body_height); let mut file = self.file.borrow_mut(); let _ = self.text.populate(&mut *file); }, } self.hscroll.resize(body_width, SCROLL_SIZE); let file = self.file.borrow(); let filesize = file.get_size(); if filesize > usize::max_value() as u64 { self.vscroll.set_bar( None ); } else if filesize <= self.hex.get_capacity() as u64 { self.vscroll.set_bar( Some( (0,0) ) ); } else { self.vscroll.set_bar( Some( (filesize as usize, self.hex.get_capacity() as usize) ) ); } self.vscroll.set_pos( 0 ); self.hscroll.set_bar( None ); } fn render(&self, surface: ::wtk::surface::SurfaceView, force: bool) { use wtk::geom::Rect; let (width, height) = (surface.width(), surface.height()); assert_eq!( (width,height), *self.dims.borrow() ); let body_width = width - SCROLL_SIZE; let body_height = height - SCROLL_SIZE; 
self.vscroll.render(surface.slice(Rect::new(body_width, 0, SCROLL_SIZE, body_height)), force); let body_view = surface.slice(Rect::new(0, 0, body_width, body_height)); match self.mode { ViewerMode::Hex => self.hex.render(body_view, force), ViewerMode::Text => self.text.render(body_view, force), } self.hscroll.render(surface.slice(Rect::new(0, height - SCROLL_SIZE, body_width, SCROLL_SIZE)), force); } fn with_element_at_pos(&self, pos: ::wtk::geom::PxPos, dims: ::wtk::geom::PxDims, f: ::wtk::WithEleAtPosCb) -> bool { let x = pos.x.0; let y = pos.y.0; let (width, height) = (dims.w.0, dims.h.0); let body_dims = ::wtk::geom::PxDims::new( width - SCROLL_SIZE, height - SCROLL_SIZE ); let vscroll_pos = ::wtk::geom::PxPos::new(body_dims.w.0, 0); let hscroll_pos = ::wtk::geom::PxPos::new(0, body_dims.h.0); if y < hscroll_pos.y.0 { if x > vscroll_pos.x.0 { self.vscroll.with_element_at_pos(pos - vscroll_pos, ::wtk::geom::PxDims::new(SCROLL_SIZE, body_dims.h.0), f) } else { match self.mode { ViewerMode::Hex => self.hex.with_element_at_pos(pos, body_dims, f), ViewerMode::Text => self.text.with_element_at_pos(pos, body_dims, f), } } } else { if x > body_dims.w.0 { self.toggle_button.with_element_at_pos(pos - body_dims.bottomright(), ::wtk::geom::PxDims::new(SCROLL_SIZE, SCROLL_SIZE), f) } else { self.hscroll.with_element_at_pos(pos - hscroll_pos, ::wtk::geom::PxDims::new(body_dims.w.0, SCROLL_SIZE), f) } } } }
extern crate wtk; #[macro_use(kernel_log)] extern crate syscalls; mod hexview; mod textview; struct Viewer<'a> { dims: ::std::cell::RefCell<(u32, u32)>, file: ::std::cell::RefCell<&'a mut ::syscalls::vfs::File>, mode: ViewerMode, vscroll: ::wtk::ScrollbarV, hscroll: ::wtk::ScrollbarH, hex: ::hexview::Widget, text: ::textview::Widget, toggle_button: ::wtk::ButtonBcb<'static, ::wtk::Colour>, } enum ViewerMode { Hex, Text, } fn main() { ::wtk::initialise(); let mut file: ::syscalls::vfs::File = match ::syscalls::threads::S_THIS_PROCESS.receive_object("file") { Ok(v) => v, Err(e) => { kernel_log!("TOOD: Handle open error in fileviewer - {:?}", e); return ; }, }; for a in ::std::env::args_os() { kernel_log!("arg = {:?}", a); } let mut args = ::std::env::args_os().skip(0); let path = args.next(); let path: Option<&::std::ffi::OsStr> = path.as_ref().map(|x| x.as_ref()); let path = path.unwrap_or( ::std::ffi::OsStr::new(b"-") ); let use_hex = true; let root = Viewer::new(&mut file, use_hex); let mut window = ::wtk::Window::new_def("File viewer", &root).unwrap(); window.set_title( format!("File Viewer - {:?}", path) ); window.focus(&root); window.set_dims(root.min_width(), 150); window.set_pos(150, 100); window.show(); window.idle_loop(); } impl<'a> Viewer<'a> { fn new(file: &'a mut ::syscalls::vfs::File, init_use_hex: bool) -> Viewer<'a> { let rv = Viewer { dims: ::std::cell::RefCell::new( (0,0) ), file: ::std::cell::RefCell::new(file), mode: if init_use_hex { ViewerMode::Hex } else { ViewerMode::Text }, hex: ::hexview::Widget::new(), text: ::textview::Widget::new(), vscroll: ::wtk::ScrollbarV::new(), hscroll: ::wtk::ScrollbarH::new(), toggle_button: ::wtk::Button::new_boxfn( ::wtk::Colour::theme_body_bg(), |_,_| {} ), }; if init_use_hex { let mut file = rv.file.borrow_mut(); file.set_cursor(0); let _ = rv.hex.populate(&mut *file); } else { /* let mut file = rv.file.borrow_mut(); let mut n_lines = 0; let mut max_len = 0; for line in file.split(b'\n') { max_len = ::std::cmp::max(max_len, line.len()); n_lines += 1; } */ } rv } pub fn min_width(&self) -> u32 { SCROLL_SIZE + self.hex.min_width() + 2*2 } } const SCROLL_SIZE: u32 = 16; impl<'a> ::wtk::Element for Viewer<'a> { fn resize(&self, width: u32, height: u32) { *self.dims.borrow_mut() = (width, height); let body_width = width - SCROLL_SIZE; let body_height = height - SCROLL_SIZE; self.vscroll.resize(SCROLL_SIZE, body_height); match self.mode { ViewerMode::Hex => { use std::io::Seek; self.hex.resize(body_width, body_height); let ofs = self.hex.get_start(); let mut file = self.file.borrow_mut(); let _ = file.seek(::std::io::SeekFrom::Start(ofs)).and_then(|_| self.hex.populate(&mut *file)); }, ViewerMode::Text => { self.text.resize(body_width, body_height); let mut file = self.file.borrow_mut(); let _ = self.text.populate(&mut *file); }, } self.hscroll.resize(body_width, SCROLL_SIZE); let file = self.file.borrow(); let filesize = file.get_size(); if filesize > usize::max_value() as u64 { self.vscroll.set_bar( None ); } else if filesize <= self.hex.get_capacity() as u64 { self.vscroll.set_bar( Some( (0,0) ) ); } else { self.vscroll.set_bar( Some( (filesize as usize, self.hex.get_capacity() as usize) ) ); } self.vscroll.set_pos( 0 ); self.hscroll.set_bar( None ); } fn render(&self, surface: ::wtk::surface::SurfaceView, force: bool) { use wtk::geom::Rect; let (width, height) = (surface.width(), surface.height()); assert_eq!( (width,height), *self.dims.borrow() ); let body_width = width - SCROLL_SIZE; let body_height = height - SCROLL_SIZE; 
self.vscroll.render(surface.slice(Rect::new(body_width, 0, SCROLL_SIZE, body_height)), force); let body_view = surface.slice(Rect::new(0, 0, body_width, body_height)); match self.mode { ViewerMode::Hex => self.hex.render(body_view, force), ViewerMode::Text => self.text.render(body_view, force), } self.hscroll.render(surface.slice(Rect::new(0, height - SCROLL_SIZE, body_width, SCROLL_SIZE)), force); } fn with_element_at_pos(&self, pos: ::wtk::geom::PxPos, dims: ::wtk::geom::PxDims, f: ::wtk::WithEleAtPosCb) -> bool { let x = pos.x.0; let y = pos.y.0; let (width, height) = (dims.w.0, dims.h.0); let body_dims = ::wtk::geom::PxDims::new( width - SCROLL_SIZE, height - SCROLL_SIZE ); let vscroll_pos = ::wtk::geom::PxPos::new(body_dims.w.0, 0); let hscroll_pos = ::wtk::geom::PxPos::new(0, body_dims.h.0); if y < hscroll_pos.y.0 {
}
if x > vscroll_pos.x.0 { self.vscroll.with_element_at_pos(pos - vscroll_pos, ::wtk::geom::PxDims::new(SCROLL_SIZE, body_dims.h.0), f) } else { match self.mode { ViewerMode::Hex => self.hex.with_element_at_pos(pos, body_dims, f), ViewerMode::Text => self.text.with_element_at_pos(pos, body_dims, f), } } } else { if x > body_dims.w.0 { self.toggle_button.with_element_at_pos(pos - body_dims.bottomright(), ::wtk::geom::PxDims::new(SCROLL_SIZE, SCROLL_SIZE), f) } else { self.hscroll.with_element_at_pos(pos - hscroll_pos, ::wtk::geom::PxDims::new(body_dims.w.0, SCROLL_SIZE), f) } } }
function_block-function_prefix_line
[ { "content": "#[inline(never)]\n\npub fn call_object_ref(handle: u32, call: u16, args: &mut Args) -> Result<u64,super::Error>\n\n{\n\n\t// Obtain reference/borrow to object (individually locked), and call the syscall on it\n\n\tget_process_local::<ProcessObjects>().with_object(handle, |obj| {\n\n\t\t//log_trace!(\"#{} {} Call Ref {} - args={:?}\", handle, obj.type_name(), call, args);\n\n\t\tobj.handle_syscall_ref(call, args)\n\n\t\t})\n\n}\n", "file_path": "Kernel/Modules/syscalls/objects.rs", "rank": 0, "score": 452745.9359077991 }, { "content": "#[inline(never)]\n\npub fn call_object_val(handle: u32, call: u16, args: &mut Args) -> Result<u64,super::Error>\n\n{\n\n\t// Obtain reference/borrow to object (individually locked), and call the syscall on it\n\n\tget_process_local::<ProcessObjects>().with_object_val(handle, |obj| {\n\n\t\t//log_trace!(\"#{} {} Call Val {} - args={:?}\", handle, obj.type_name(), call-0x400, args);\n\n\t\tobj.handle_syscall_val(call, args)\n\n\t\t})\n\n}\n", "file_path": "Kernel/Modules/syscalls/objects.rs", "rank": 1, "score": 452745.9359077991 }, { "content": "#[inline(never)]\n\npub fn bind_group(object_handle: u32) -> Result<bool,Error> {\n\n\tlet wgh = ::kernel::threads::get_process_local::<PLWindowGroup>();\n\n\tlet mut h = wgh.0.lock();\n\n\tif h.is_none() {\n\n\t\tlet group: Group = try!(::objects::take_object(object_handle));\n\n\t\t*h = Some(group.0);\n\n\t\tOk(true)\n\n\t}\n\n\telse {\n\n\t\tOk(false)\n\n\t}\n\n}\n\n\n", "file_path": "Kernel/Modules/syscalls/gui.rs", "rank": 2, "score": 419140.6234386824 }, { "content": "fn open_exe(path: &str) -> Result<::syscalls::vfs::File, ::syscalls::vfs::Error> {\n\n\tmatch ::syscalls::vfs::ROOT.open_child_path(path.as_bytes())\n\n\t{\n\n\tOk(v) => v.into_file(::syscalls::vfs::FileOpenMode::Execute),\n\n\tErr(e) => Err(e),\n\n\t}\n\n}\n\n\n", "file_path": "Usermode/login/src/main.rs", "rank": 3, "score": 413180.2772210646 }, { "content": "/// Validates that a buffer points to accessible memory\n\npub fn buf_valid(ptr: *const (), mut size: usize) -> bool\n\n{\n\n\tlet mut addr = ptr as VAddr;\n\n\tif size == 0 {\n\n\t\tif addr == 0 {\n\n\t\t\t// HACK: Strictly speaking, NULL would be valid according to this method\n\n\t\t\t// but checking it here makes life easier in the slice methods\n\n\t\t\treturn false;\n\n\t\t}\n\n\t\telse {\n\n\t\t\treturn true;\n\n\t\t}\n\n\t}\n\n\telse if ! 
::arch::memory::virt::is_reserved(ptr) {\n\n\t\treturn false;\n\n\t}\n\n\tlet rem_ofs = ::PAGE_SIZE - addr % ::PAGE_SIZE;\n\n\t\n\n\tif size > rem_ofs\n\n\t{\n", "file_path": "Kernel/Core/memory/mod.rs", "rank": 4, "score": 411642.4044907689 }, { "content": "#[inline(never)]\n\npub fn get_class(handle: u32) -> Result<u64, super::Error>\n\n{\n\n\tget_process_local::<ProcessObjects>().with_object(handle, |obj| Ok(obj.class() as u64))\n\n}\n", "file_path": "Kernel/Modules/syscalls/objects.rs", "rank": 5, "score": 411562.2301586275 }, { "content": "pub fn clone_object(handle: u32) -> Result<u64, super::Error> {\n\n\tget_process_local::<ProcessObjects>().with_object(handle, |obj| {\n\n\t\tmatch obj.try_clone()\n\n\t\t{\n\n\t\tSome(v) => Ok(v as u64),\n\n\t\tNone => Ok(!0),\n\n\t\t}\n\n\t\t})\n\n}\n\n\n", "file_path": "Kernel/Modules/syscalls/objects.rs", "rank": 6, "score": 411562.2301586274 }, { "content": "#[inline(never)]\n\nfn invoke_int(call_id: u32, args: &mut Args) -> Result<u64,Error>\n\n{\n\n\tif call_id & 1 << 31 == 0\n\n\t{\n\n\t\t// Unbound system call\n\n\t\t// - Split using 15/16 into subsystems\n\n\t\tOk(match call_id\n\n\t\t{\n\n\t\t// === 0: Threads and core\n\n\t\t// - 0/0: Userland log\n\n\t\tCORE_LOGWRITE => {\n\n\t\t\tlet msg: Freeze<[u8]> = try!(args.get());\n\n\t\t\tsyscall_core_log(&msg); 0\n\n\t\t\t},\n\n\t\t// - Userland debug\n\n\t\tCORE_DBGVALUE => {\n\n\t\t\tlet msg: Freeze<[u8]> = try!(args.get());\n\n\t\t\tlet val: usize = try!(args.get());\n\n\t\t\tsyscall_core_dbgvalue(&msg, val); 0\n\n\t\t\t},\n", "file_path": "Kernel/Modules/syscalls/lib.rs", "rank": 7, "score": 407905.3319939684 }, { "content": "// Reads and applies a CRC32 to the file\n\nfn dump_file(level: usize, mut handle: File)\n\n{\n\n\tlet mut buffer = [0; 8*4096];\n\n\n\n\tlet mut crc = ::crc::Crc32::new();\n\n\tloop\n\n\t{\n\n\t\tlet len = match handle.read(&mut buffer)\n\n\t\t\t{\n\n\t\t\tOk(0) => break,\n\n\t\t\tOk(v) => v,\n\n\t\t\tErr(e) => {\n\n\t\t\t\tkernel_log!(\">> ERROR {:?}\", e);\n\n\t\t\t\treturn;\n\n\t\t\t\t},\n\n\t\t\t};\n\n\n\n\t\tcrc.update( &buffer[..len] );\n\n\t}\n\n\tkernel_log!(\"{}> CRC32={:#x}\", Repeat(level,\" \"), crc.finalise());\n\n}\n\n\n\n\n\n\n\n\n", "file_path": "Usermode/vfs_test/src/main.rs", "rank": 8, "score": 399990.15286280675 }, { "content": "#[inline(never)]\n\npub fn wait(events: &mut [values::WaitItem], wake_time_mono: u64) -> Result<u32,Error>\n\n{\n\n\t::kernel::threads::SleepObject::with_new(\"wait\", |waiter: &mut _| {\n\n\t\tlet mut num_bound = 0;\n\n\t\tfor ev in events.iter() {\n\n\t\t\tnum_bound += try!(::objects::wait_on_object(ev.object, ev.flags, waiter));\n\n\t\t}\n\n\n\n\t\tif num_bound == 0 && wake_time_mono == !0 {\n\n\t\t\t// Attempting to sleep on no events with an infinite timeout! 
Would sleep forever\n\n\t\t\tlog_error!(\"TODO: What to do when a thread tries to sleep forever\");\n\n\t\t\twaiter.wait();\n\n\t\t}\n\n\n\n\t\t// A wake time of 0 means to not sleep at all, just check the status of the events\n\n\t\t// TODO: There should be a more efficient way of doing this, than binding only to unbind again\n\n\t\tif wake_time_mono > 0 {\n\n\t\t\t// !0 indicates an unbounded wait (no need to set a wakeup time)\n\n\t\t\tif wake_time_mono != !0 {\n\n\t\t\t\ttodo!(\"Set a wakeup timer at {}\", wake_time_mono);\n", "file_path": "Kernel/Modules/syscalls/threads.rs", "rank": 9, "score": 387981.04648601107 }, { "content": "fn open_exec(path: &str) -> ::syscalls::vfs::File\n\n{\n\n\tmatch ::syscalls::vfs::ROOT.open_child_path(path.as_bytes())\n\n\t{\n\n\tOk(v) => match v.into_file(::syscalls::vfs::FileOpenMode::Execute)\n\n\t\t{\n\n\t\tOk(v) => v,\n\n\t\tErr(e) => panic!(\"Couldn't open '{}' as an executable file - {:?}\", path, e),\n\n\t\t},\n\n\tErr(e) => panic!(\"Couldn't open executable '{}' - {:?}\", path, e),\n\n\t}\n\n}\n\n\n", "file_path": "Usermode/init/src/main.rs", "rank": 10, "score": 380428.2604116706 }, { "content": "fn open_exec(path: &str) -> ::syscalls::vfs::File\n\n{\n\n\tmatch ::syscalls::vfs::ROOT.open_child_path(path.as_bytes())\n\n\t{\n\n\tOk(v) => match v.into_file(::syscalls::vfs::FileOpenMode::Execute)\n\n\t\t{\n\n\t\tOk(v) => v,\n\n\t\tErr(e) => panic!(\"Couldn't open '{}' as an executable file - {:?}\", path, e),\n\n\t\t},\n\n\tErr(e) => panic!(\"Couldn't open executable '{}' - {:?}\", path, e),\n\n\t}\n\n}\n", "file_path": "Usermode/shell/src/main.rs", "rank": 11, "score": 380428.26041167055 }, { "content": "pub fn clear_wait(handle: u32, mask: u32, sleeper: &mut ::kernel::threads::SleepObject) -> Result<u32,super::Error> {\n\n\tget_process_local::<ProcessObjects>().with_object(handle, |obj| {\n\n\t\tOk( obj.clear_wait(mask, sleeper) )\n\n\t\t})\n\n}\n\n\n", "file_path": "Kernel/Modules/syscalls/objects.rs", "rank": 12, "score": 369391.0889778837 }, { "content": "pub fn wait_on_object(handle: u32, mask: u32, sleeper: &mut ::kernel::threads::SleepObject) -> Result<u32,super::Error> {\n\n\tget_process_local::<ProcessObjects>().with_object(handle, |obj| {\n\n\t\tOk( obj.bind_wait(mask, sleeper) )\n\n\t\t})\n\n}\n", "file_path": "Kernel/Modules/syscalls/objects.rs", "rank": 13, "score": 369391.0889778837 }, { "content": "/// Handle a page fault in whatever way is suitable\n\npub fn handle_page_fault(accessed_address: usize, error_code: u32) -> bool\n\n{\n\n\t// Check clobbered bits first\n\n\tif error_code & FAULT_RESVD != 0 {\n\n\t\t// Reserved bits of the page directory were clobbered, this is a kernel panic\n\n\t\tpanic!(\"Reserved bits clobbered {:#x}\", accessed_address);\n\n\t}\n\n\t\n\n\tlet mut pte = get_page_ent(accessed_address, false, LargeOk::Yes);\n\n\t\n\n\t// - Global rules\n\n\t// > Copy-on-write pages\n\n\tif error_code & (FAULT_WRITE|FAULT_LOCKED) == (FAULT_WRITE|FAULT_LOCKED) && pte.is_cow() {\n\n\t\t// Poke the main VMM layer\n\n\t\t//::memory::virt::cow_write(accessed_address);\n\n\n\n\t\t// 1. 
Lock (relevant) address space\n\n\t\t// SAFE: Changes to address space are transparent\n\n\t\t::memory::virt::with_lock(accessed_address, || unsafe {\n\n\t\t\tlet frame = pte.addr();\n", "file_path": "Kernel/Core/arch/amd64/memory/virt.rs", "rank": 14, "score": 363926.535005073 }, { "content": "/// Returns program entry point\n\npub fn elf_load_segments(file_base: &ElfFile, output_base: *mut u8) -> u32\n\n{\n\n\tlog!(\"elf_load_segments(file_base={:p}, output_base={:p})\", file_base, output_base);\n\n\tfor phent in file_base.phents()\n\n\t{\n\n\t\tif phent.p_type == 1\n\n\t\t{\n\n\t\t\tlog!(\"- {:#x}+{:#x} loads +{:#x}+{:#x}\",\n\n\t\t\t\tphent.p_paddr, phent.p_memsz,\n\n\t\t\t\tphent.p_offset, phent.p_filesz\n\n\t\t\t\t);\n\n\t\t\t\n\n\t\t\tlet (dst,src) = unsafe {\n\n\t\t\t\tlet dst = ::core::slice::from_raw_parts_mut( (output_base as usize + phent.p_paddr as usize) as *mut u8, phent.p_memsz as usize );\n\n\t\t\t\tlet src = ::core::slice::from_raw_parts( (file_base as *const _ as usize + phent.p_offset as usize) as *const u8, phent.p_filesz as usize );\n\n\t\t\t\t(dst, src)\n\n\t\t\t\t};\n\n\t\t\tfor (d, v) in Iterator::zip( dst.iter_mut(), src.iter().cloned().chain(::core::iter::repeat(0)) )\n\n\t\t\t{\n\n\t\t\t\t*d = v;\n", "file_path": "Bootloaders/_common/elf.rs", "rank": 15, "score": 355282.65083800803 }, { "content": "pub fn set_panic(file: &str, line: usize, message: &::core::fmt::Arguments)\n\n{\n\n\tuse core::sync::atomic::{AtomicBool, Ordering};\n\n\tstatic LOOP_PREVENT: AtomicBool = AtomicBool::new(false);\n\n\tif LOOP_PREVENT.swap(true, Ordering::Relaxed) {\n\n\t\treturn ;\n\n\t}\n\n\tconst PANIC_COLOUR: u32 = 0x01346B;\n\n\tconst PANIC_TEXT_COLOUR: u32 = 0xFFFFFF;\n\n\tstatic mut PANIC_IMG_ROW_BUF: [u32; PANIC_IMAGE_DIMS.0 as usize] = [0; PANIC_IMAGE_DIMS.0 as usize];\n\n\n\n\t// SAFE: `LOOP_PREVENT` prevents this code from running over itself\n\n\tlet row_buf = unsafe { &mut PANIC_IMG_ROW_BUF };\n\n\n\n\tfor surf in S_DISPLAY_SURFACES.lock().iter_mut()\n\n\t{\n\n\t\tuse core::fmt::Write;\n\n\t\tlet dims = surf.fb.get_size();\n\n\t\t// 1. 
Fill\n\n\t\tsurf.fb.fill(Rect::new_pd(Pos::new(0,0), dims), PANIC_COLOUR);\n", "file_path": "Kernel/Core/metadevs/video/mod.rs", "rank": 16, "score": 353840.9396287248 }, { "content": "pub fn new_process(binary_file: ::syscalls::vfs::File, binary: &[u8], args: &[&[u8]]) -> Result<ProtoProcess,Error> {\n\n\t// SAFE: Call is actually to rust\n\n\tunsafe {\n\n\t\tint::new_process(binary_file, binary, args).map( |v| ProtoProcess(v) )\n\n\t}\n\n}\n\n\n", "file_path": "Usermode/loader/lib/lib.rs", "rank": 17, "score": 348136.3558794658 }, { "content": "/// Initialise PID0's handles\n\npub fn init(loader_handle: ::kernel::vfs::handle::File, init_handle: ::kernel::vfs::handle::File) {\n\n\tvfs::init_handles(loader_handle, init_handle);\n\n}\n\n\n\n#[no_mangle]\n\n/// Method called from architectue-specific (assembly) code\n\npub unsafe extern \"C\" fn syscalls_handler(id: u32, first_arg: *const usize, count: u32) -> u64\n\n{\n\n\t//log_debug!(\"syscalls_handler({}, {:p}+{})\", id, first_arg, count);\n\n\tinvoke(id, ::core::slice::from_raw_parts(first_arg, count as usize))\n\n}\n\n\n", "file_path": "Kernel/Modules/syscalls/lib.rs", "rank": 18, "score": 341090.65601545834 }, { "content": "pub fn init_handles(loader_handle: ::kernel::vfs::handle::File, init_handle: ::kernel::vfs::handle::File) {\n\n\t// - Forget the loader (no need)\n\n\t::core::mem::forget(loader_handle);\n\n\n\n\t// #1: Initial file handle\n\n\t::objects::new_object( File(init_handle) );\n\n\t// #2: Read-only root\n\n\t::objects::new_object(Dir::new( {\n\n\t\tlet root = handle::Dir::open(Path::new(\"/\")).unwrap();\n\n\t\t//root.set_permissions( handle::Perms::readonly() );\n\n\t\troot\n\n\t\t}));\n\n\n\n\t// - Read-write handle to /\n\n\t//::objects::push_as_unclaimed( ::objects::new_object( Dir::new( handle::Dir::open(Path::new(\"/\")).unwrap() ) ) );\n\n\t::objects::push_as_unclaimed(\"RwRoot\", ::objects::new_object( Dir::new( handle::Dir::open(Path::new(\"/\")).unwrap() ) ) );\n\n}\n\n\n\n\n\n// --------------------------------------------------------------------\n\n//\n\n// --------------------------------------------------------------------\n\n\n", "file_path": "Kernel/Modules/syscalls/vfs.rs", "rank": 19, "score": 340043.82090861583 }, { "content": "/// Pack a result into a u32\n\n/// Both `O` and `E` must be < 2^31\n\nfn from_result<O: Into<u32>, E: Into<u32>>(r: Result<O,E>) -> u64 {\n\n\tmatch r\n\n\t{\n\n\tOk(v) => {\n\n\t\tlet v: u32 = v.into();\n\n\t\tassert!(v < 1<<31, \"Result value {:#x} from {} is above 2^31\", v, type_name!(O));\n\n\t\tv as u64\n\n\t\t}\n\n\tErr(e) => {\n\n\t\tlet v: u32 = e.into();\n\n\t\tassert!(v < 1<<31, \"Result value {:#x} from {} is above 2^31\", v, type_name!(E));\n\n\t\t(1 << 31) | (v as u64)\n\n\t\t},\n\n\t}\n\n}\n\n\n\nuse self::values::*;\n\n#[path=\"../../../syscalls.inc.rs\"]\n\nmod values;\n\n\n", "file_path": "Kernel/Modules/syscalls/lib.rs", "rank": 20, "score": 338855.90758770774 }, { "content": "fn invoke(call_id: u32, args: &[usize]) -> u64 {\n\n\tmatch invoke_int(call_id, &mut Args::new(args))\n\n\t{\n\n\tOk(v) => v,\n\n\tErr(e) => {\n\n\t\tlog_log!(\"Syscall formatting error in call {:#x} - {:?} {}\", call_id, e, e);\n\n\t\t::kernel::threads::exit_process(0x8000_0000);\n\n\t\t// !0\n\n\t\t},\n\n\t}\n\n}\n\n\n", "file_path": "Kernel/Modules/syscalls/lib.rs", "rank": 21, "score": 338677.72633412026 }, { "content": "#[lang=\"panic_fmt\"]\n\nfn panic_fmt(msg: &::core::fmt::Arguments, file: &'static str, line: u32) -> ! 
{\n\n\tputs(\"PANIC @ \"); puts(file); puts(\"\\n\");\n\n\tlog!(\"panic: {}:{}: {}\\n\", file, line, msg);\n\n\tloop {}\n\n}\n\n\n\n\n\nextern \"C\" {\n\n\t#[link_name=\"puts\"]\n\n\tfn puts_raw(_: *const u8, _: u32);\n\n}\n", "file_path": "Bootloaders/aarch64/main.rs", "rank": 22, "score": 330326.3517876939 }, { "content": "/// Wait on the provided list of Waiter trait objects\n\n///\n\npub fn wait_on_list(waiters: &mut [&mut dyn Waiter], timeout: Option<u64>) -> Option<usize>\n\n{\n\n\tlog_trace!(\"wait_on_list(waiters = {:?}, timeout = {:?})\", waiters, timeout);\n\n\tif waiters.len() == 0\n\n\t{\n\n\t\tpanic!(\"wait_on_list - Nothing to wait on\");\n\n\t}\n\n\t\n\n\tif timeout.is_some() {\n\n\t\ttodo!(\"Support timeouts in wait_on_list\");\n\n\t}\n\n\t\n\n\t// Wait on primitives from the waiters, returning the indexes of those that need a state advance\n\n\t\n\n\t// - If there are no incomplete waiters, return None\n\n\tif waiters.iter().filter(|x| !x.is_complete()).count() == 0 {\n\n\t\treturn None;\n\n\t}\n\n\t\n\n\t// - Create an object for them to signal\n", "file_path": "Kernel/Core/async/mod.rs", "rank": 23, "score": 329135.31017010706 }, { "content": "#[inline]\n\npub fn wait(items: &mut [WaitItem], wake_time_mono: u64) -> u32 {\n\n\t// SAFE: Syscall\n\n\tunsafe {\n\n\t\t#[cfg(target_pointer_width=\"64\")]\n\n\t\tlet rv = syscall!(CORE_WAIT, items.as_ptr() as usize, items.len(), wake_time_mono as usize) as u32;\n\n\t\t#[cfg(target_pointer_width=\"32\")]\n\n\t\tlet rv = syscall!(CORE_WAIT, items.as_ptr() as usize, items.len(), (wake_time_mono & 0xFFFFFFFF) as usize, (wake_time_mono >> 32) as usize) as u32;\n\n\n\n\t\trv\n\n\t}\n\n}\n\n\n", "file_path": "Usermode/libsyscalls/threads.rs", "rank": 24, "score": 328786.70630745636 }, { "content": "/// Validate a C string (legacy)\n\n//#[deprecated=\"Use ::memory::c_string_as_byte_slice instead\"]\n\npub fn c_string_valid(c_str: *const i8) -> bool\n\n{\n\n\t// SAFE: Pointer is valid for lifetime of input pointer (barring odd input behavior)\n\n\tunsafe { c_string_as_byte_slice(c_str).is_some() }\n\n}\n\n\n\n// UNSAFE: Lifetime is inferred, and memory must point to a valid T instance\n\npub unsafe fn buf_to_slice<'a, T>(ptr: *const T, size: usize) -> Option<&'a [T]> {\n\n\t\n\n\tif size > 0 && ptr as usize % ::core::mem::align_of::<T>() != 0 {\n\n\t\tNone\n\n\t}\n\n\telse if ! 
buf_valid(ptr as *const (), size) {\n\n\t\tNone\n\n\t}\n\n\telse {\n\n\t\tSome( ::core::slice::from_raw_parts(ptr, size) )\n\n\t}\n\n}\n\npub unsafe fn buf_to_slice_mut<'a, T>(ptr: *mut T, size: usize) -> Option<&'a mut [T]> {\n", "file_path": "Kernel/Core/memory/mod.rs", "rank": 25, "score": 327521.8302371853 }, { "content": "pub fn elf_get_size(file_base: &ElfFile) -> u32\n\n{\n\n\tlog!(\"elf_get_size(file_base={:p})\", file_base);\n\n\tfile_base.check_header();\n\n\n\n\tlet mut max_end = 0;\n\n\tfor phent in file_base.phents()\n\n\t{\n\n\t\tif phent.p_type == 1\n\n\t\t{\n\n\t\t\tlog!(\"- {:#x}+{:#x} loads +{:#x}+{:#x}\",\n\n\t\t\t\tphent.p_paddr, phent.p_memsz,\n\n\t\t\t\tphent.p_offset, phent.p_filesz\n\n\t\t\t\t);\n\n\t\t\t\n\n\t\t\tlet end = (phent.p_paddr + phent.p_memsz) as usize;\n\n\t\t\tif max_end < end {\n\n\t\t\t\tmax_end = end;\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n\t// Round the image size to 4KB\n\n\tlet max_end = (max_end + 0xFFF) & !0xFFF;\n\n\tlog!(\"return load_size={:#x}\", max_end);\n\n\tmax_end as u32\n\n}\n\n\n", "file_path": "Bootloaders/_common/elf.rs", "rank": 26, "score": 322804.6103679604 }, { "content": "struct File(::kernel::vfs::handle::File);\n\nimpl objects::Object for File\n\n{\n\n\tfn class(&self) -> u16 { values::CLASS_VFS_FILE }\n\n\tfn as_any(&self) -> &dyn Any { self }\n\n\tfn try_clone(&self) -> Option<u32> {\n\n\t\tSome( ::objects::new_object( File(self.0.clone()) ) )\n\n\t}\n\n\tfn handle_syscall_ref(&self, call: u16, args: &mut Args) -> Result<u64,Error> {\n\n\t\tmatch call\n\n\t\t{\n\n\t\tvalues::VFS_FILE_GETSIZE => {\n\n\t\t\tOk( self.0.size() )\n\n\t\t\t},\n\n\t\tvalues::VFS_FILE_READAT => {\n\n\t\t\tlet ofs: u64 = try!(args.get());\n\n\t\t\tlet mut dest: FreezeMut<[u8]> = try!(args.get());\n\n\t\t\tlog_debug!(\"File::readat({}, {:p}+{} bytes)\", ofs, dest.as_ptr(), dest.len());\n\n\t\t\tmatch self.0.read(ofs, &mut dest)\n\n\t\t\t{\n", "file_path": "Kernel/Modules/syscalls/vfs.rs", "rank": 27, "score": 321379.47811519576 }, { "content": "#[inline]\n\npub fn puth(v: u64) {\n\n\timp::puth(v)\n\n}\n\n\n", "file_path": "Kernel/Core/arch/mod.rs", "rank": 28, "score": 321100.28305870737 }, { "content": "#[inline(never)]\n\npub fn drop_object(handle: u32)\n\n{\n\n\tif handle == 0 {\n\n\t\t// Ignore, it's the \"this process\" object\n\n\t}\n\n\telse {\n\n\t\tmatch get_process_local::<ProcessObjects>().take_object(handle)\n\n\t\t{\n\n\t\tOk(v) => {\n\n\t\t\tlog_debug!(\"Object dropped #{}: {}\", handle, v.type_name());\n\n\t\t\t::core::mem::drop( v );\n\n\t\t\t},\n\n\t\tErr(_) => {}\n\n\t\t}\n\n\t}\n\n}\n\n\n\n\n\n\n\n\n", "file_path": "Kernel/Modules/syscalls/objects.rs", "rank": 29, "score": 317998.5887313828 }, { "content": "pub fn bind_gsi(idx: usize, handler: fn(*const ()), info: *const ()) -> Result<IRQHandle,BindError> {\n\n\tOk(IRQHandle)\n\n}\n\n\n", "file_path": "Kernel/Core/arch/armv8/interrupts.rs", "rank": 30, "score": 316328.006937976 }, { "content": "#[inline(never)]\n\n#[no_mangle]\n\npub fn puth(v: u64) {\n\n\tputb(b'0');\n\n\tputb(b'x');\n\n\tif v == 0 {\n\n\t\tputb(b'0');\n\n\t}\n\n\telse {\n\n\t\tfor i in (0 .. 
16).rev() {\n\n\t\t\tif v >> (i * 4) > 0 {\n\n\t\t\t\tlet n = ((v >> (i * 4)) & 0xF) as u8;\n\n\t\t\t\tif n < 10 {\n\n\t\t\t\t\tputb( b'0' + n );\n\n\t\t\t\t}\n\n\t\t\t\telse {\n\n\t\t\t\t\tputb( b'a' + n - 10 );\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n}\n\n\n", "file_path": "Kernel/Core/arch/armv8/mod.rs", "rank": 31, "score": 315868.5091928556 }, { "content": "#[inline(never)]\n\n#[no_mangle]\n\npub fn puth(v: u64) {\n\n\tputb(b'0');\n\n\tputb(b'x');\n\n\tif v == 0 {\n\n\t\tputb(b'0');\n\n\t}\n\n\telse {\n\n\t\tfor i in (0 .. 16).rev() {\n\n\t\t\tif v >> (i * 4) > 0 {\n\n\t\t\t\tlet n = ((v >> (i * 4)) & 0xF) as u8;\n\n\t\t\t\tif n < 10 {\n\n\t\t\t\t\tputb( b'0' + n );\n\n\t\t\t\t}\n\n\t\t\t\telse {\n\n\t\t\t\t\tputb( b'a' + n - 10 );\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n}\n\n\n", "file_path": "Kernel/Core/arch/armv7/mod.rs", "rank": 32, "score": 315868.5091928556 }, { "content": "pub fn bind_object(num: u32, obj: Box<dyn FnMut()->bool + Send + 'static>) -> ObjectHandle\n\n{\n\n\tObjectHandle( bind(num, obj) )\n\n}\n\n\n\nimpl IRQBinding\n\n{\n\n\tfn new_boxed(num: u32) -> Box<IRQBinding>\n\n\t{\n\n\t\tlet mut rv = Box::new( IRQBinding::default());\n\n\t\tassert!(num < 256, \"{} < 256 failed\", num);\n\n\t\t// TODO: Use a better function, needs to handle IRQ routing etc.\n\n\t\t// - In theory, the IRQ num shouldn't be a u32, instead be an opaque IRQ index\n\n\t\t// that the arch code understands (e.g. value for PciLineA that gets translated into an IOAPIC line)\n\n\t\tlet context = &*rv as *const IRQBinding as *const ();\n\n\t\trv.arch_handle = match interrupts::bind_gsi(num as usize, IRQBinding::handler_raw, context)\n\n\t\t\t{\n\n\t\t\tOk(v) => v,\n\n\t\t\tErr(e) => panic!(\"Unable to bind handler to GSI {}: {:?}\", num, e),\n\n\t\t\t};\n", "file_path": "Kernel/Core/irqs.rs", "rank": 33, "score": 313040.91463002627 }, { "content": "pub fn take_object<T: Object+'static>(handle: u32) -> Result<T,super::Error> {\n\n\tlet obj = try!(get_process_local::<ProcessObjects>().take_object(handle));\n\n\t// SAFE: ptr::read is called on a pointer to a value that is subsequently forgotten\n\n\tunsafe {\n\n\t\tlet rv = {\n\n\t\t\tlet r = obj.as_any().downcast_ref::<T>().expect(\"Object was not expected type (TODO: Proper error)\");\n\n\t\t\t::core::ptr::read(r)\n\n\t\t\t};\n\n\t\t::core::mem::forget(obj);\n\n\t\tOk(rv)\n\n\t}\n\n}\n\n\n", "file_path": "Kernel/Modules/syscalls/objects.rs", "rank": 34, "score": 312874.134779494 }, { "content": "/// Obtain a string from the kernel\n\n/// \n\n/// Accepts a buffer and returns a string slice from that buffer.\n\npub fn get_text_info(unit: u32, id: u32, buf: &mut [u8]) -> &str {\n\n\t// SAFE: Syscall\n\n\tlet len: usize = unsafe { syscall!(CORE_TEXTINFO, unit as usize, id as usize, buf.as_ptr() as usize, buf.len()) } as usize;\n\n\t::core::str::from_utf8(&buf[..len]).expect(\"TODO: get_text_info handle error\")\n\n}\n\n\n\n\n\n\n", "file_path": "Usermode/libsyscalls/lib.rs", "rank": 35, "score": 310891.3062277353 }, { "content": "pub fn delegate(num_pages: usize) -> Result<*mut (), Error>\n\n{\n\n\tloop\n\n\t{\n\n\t\tlet cur = CURPOS.load(Ordering::Acquire);\n\n\t\tlet new = cur + num_pages * ::PAGE_SIZE;\n\n\t\tassert!(new >= cur);\n\n\t\tif new > BUMP_END {\n\n\t\t\treturn Err(Error);\n\n\t\t}\n\n\t\t\n\n\t\tif cur == CURPOS.compare_and_swap(cur, new, Ordering::Acquire) {\n\n\t\t\treturn Ok(cur as *mut _);\n\n\t\t}\n\n\t}\n\n}\n\n\n", "file_path": "Kernel/Core/memory/bump_region.rs", "rank": 36, "score": 310864.487586251 }, { "content": "pub fn 
is_fixed_alloc(addr: *const (), count: usize) -> bool\n\n{\n\n\tfalse\n\n}\n\n\n\n\n\npub unsafe fn temp_map<T>(phys: u64) -> *mut T\n\n{\n\n\ttodo!(\"\");\n\n}\n\npub unsafe fn temp_unmap<T>(addr: *mut T)\n\n{\n\n\ttodo!(\"\");\n\n}\n\n\n\n\n\nimpl AddressSpace\n\n{\n\n\tpub fn pid0() -> AddressSpace\n\n\t{\n", "file_path": "Kernel/Core/arch/armv8/memory/virt.rs", "rank": 37, "score": 306303.5813689645 }, { "content": "/// Give the target process the object specified by `handle`\n\npub fn give_object(target: &::kernel::threads::ProcessHandle, tag: &str, handle: u32) -> Result<(),super::Error> {\n\n\tlog_debug!(\"give_object(target={:?}, handle={:?})\", target, handle);\n\n\tlet target_list = target.get_process_local_alloc::<ProcessObjects>();\n\n\tlet obj = try!(get_process_local::<ProcessObjects>().take_object(handle));\n\n\tlet class_id = obj.class();\n\n\tlet id = try!( target_list.find_and_fill_slot(|| UserObject { data: obj }) );\n\n\t\n\n\tlog_trace!(\"- Giving object {} ({} {}) as '{}' (handle {})\",\n\n\t\thandle, class_id, ::values::get_class_name(class_id),\n\n\t\ttag, id\n\n\t\t);\n\n\ttarget_list.push_given( id, tag );\n\n\n\n\tOk( () )\n\n}\n\n\n", "file_path": "Kernel/Modules/syscalls/objects.rs", "rank": 38, "score": 304906.282400648 }, { "content": "/// Allocate at a given address\n\npub fn allocate(address: *mut ()) -> bool {\n\n\tallocate_int(Some(address)).is_ok()\n\n}\n\n\n", "file_path": "Kernel/Core/memory/phys.rs", "rank": 39, "score": 303046.7091892845 }, { "content": "/// Returns true if the passed virtual address is within the fixed allocation region\n\npub fn is_fixed_alloc(addr: *const (), page_count: usize) -> bool\n\n{\n\n\tuse super::addresses::{IDENT_START,IDENT_END};\n\n\t\n\n\tlet vaddr = addr as usize;\n\n\tif IDENT_START <= vaddr && vaddr < IDENT_END {\n\n\t\tlet space = IDENT_END - vaddr;\n\n\t\tassert!(space >> 12 >= page_count);\n\n\t\ttrue\n\n\t}\n\n\telse {\n\n\t\tfalse\n\n\t}\n\n}\n\n\n", "file_path": "Kernel/Core/arch/amd64/memory/virt.rs", "rank": 40, "score": 301786.19028933 }, { "content": "#[inline(never)]\n\npub fn get_group() -> Result<ObjectHandle,u32>\n\n{\n\n\tlet wgh = ::kernel::threads::get_process_local::<PLWindowGroup>();\n\n\twgh.with(|h| objects::new_object(Group( h.clone() )))\n\n}\n\n\n", "file_path": "Kernel/Modules/syscalls/gui.rs", "rank": 41, "score": 300529.5708712345 }, { "content": "/// Update window dimensions and positions after the display organsisation changes\n\npub fn update_dims()\n\n{\n\n\t// Iterate all windows\n\n\tfor grp in S_WINDOW_GROUPS.lock().iter()\n\n\t{\n\n\t\tlet mut lh = grp.lock();\n\n\t\tfor &mut (ref mut pos, ref win) in lh.windows.iter_mut()\n\n\t\t{\n\n\t\t\t// if window is maximised, keep it that way\n\n\t\t\tif win.flags.lock().maximised\n\n\t\t\t{\n\n\t\t\t\t// Locate screen for the upper-left corner\n\n\t\t\t\tlet screen = match ::kernel::metadevs::video::get_display_for_pos(*pos)\n\n\t\t\t\t\t{\n\n\t\t\t\t\tOk(x) => x,\n\n\t\t\t\t\t// TODO: If now off-screen, warp to a visible position (with ~20px leeway)\n\n\t\t\t\t\tErr(r) => {\n\n\t\t\t\t\t\ttodo!(\"update_dims: Handle full-screen window moving off display area - {:?} - {:?}\", *pos, r)\n\n\t\t\t\t\t\t},\n\n\t\t\t\t\t};\n", "file_path": "Kernel/Modules/gui/windows/mod.rs", "rank": 42, "score": 297749.07314298226 }, { "content": "fn begin_panic_fmt(msg: &::core::fmt::Arguments, (file, line): (&str, u32)) -> !\n\n{\n\n\tstatic NESTED: ::core::sync::atomic::AtomicBool = 
::core::sync::atomic::AtomicBool::new(false);\n\n\t::arch::puts(\"\\nERROR: rust_begin_unwind: \");\n\n\t::arch::puts(file);\n\n\t::arch::puts(\":\");\n\n\t::arch::puth(line as u64);\n\n\t::arch::puts(\"\\n\");\n\n\tif NESTED.swap(true, ::core::sync::atomic::Ordering::SeqCst) {\n\n\t\t::arch::puts(\"NESTED!\\n\");\n\n\t\tloop {}\n\n\t}\n\n\t::arch::print_backtrace();\n\n\tlog_panic!(\"{}:{}: Panicked \\\"{:?}\\\"\", file, line, msg);\n\n\t::metadevs::video::set_panic(file, line as usize, msg);\n\n\tloop{}\n\n}\n", "file_path": "Kernel/Core/unwind.rs", "rank": 43, "score": 297294.0592411059 }, { "content": "/// Startup: Pushes the specified index as an unclaimed object\n\npub fn push_as_unclaimed(tag: &str, handle: u32) {\n\n\tlet objs = get_process_local::<ProcessObjects>();\n\n\tobjs.push_given(handle, tag);\n\n}\n\n\n", "file_path": "Kernel/Modules/syscalls/objects.rs", "rank": 44, "score": 295136.44909729785 }, { "content": "pub fn bind_gsi(gsi: usize, handler: fn(*const()), info: *const ()) -> Result<IRQHandle,()> {\n\n\n\n\tif gsi >= S_IRQS.len() {\n\n\t\tErr( () )\n\n\t}\n\n\telse {\n\n\t\tlet mut lh = S_IRQS[gsi].lock();\n\n\t\tif lh.is_some() {\n\n\t\t\tErr( () )\n\n\t\t}\n\n\t\telse {\n\n\t\t\t// TODO: Enable this interrupt on the GIC?\n\n\t\t\t*lh = Some(Binding {\n\n\t\t\t\thandler: handler,\n\n\t\t\t\tinfo: info,\n\n\t\t\t\t});\n\n\t\t\tOk( IRQHandle(gsi as u32) )\n\n\t\t}\n\n\t}\n\n}\n\n\n", "file_path": "Kernel/Core/arch/armv7/interrupts.rs", "rank": 45, "score": 293640.53241128294 }, { "content": "#[inline(never)]\n\nfn syscall_core_textinfo(group: u32, id: usize, buf: &mut [u8]) -> usize\n\n{\n\n\tmatch group\n\n\t{\n\n\t::values::TEXTINFO_KERNEL =>\n\n\t\tmatch id\n\n\t\t{\n\n\t\t0 => { buf.clone_from_slice( ::kernel::VERSION_STRING.as_bytes() ); ::kernel::VERSION_STRING.len() },\n\n\t\t1 => { buf.clone_from_slice( ::kernel::BUILD_STRING.as_bytes() ); ::kernel::BUILD_STRING.len() },\n\n\t\t_ => 0,\n\n\t\t},\n\n\t_ => 0,\n\n\t}\n\n}\n\n\n", "file_path": "Kernel/Modules/syscalls/lib.rs", "rank": 46, "score": 292260.07221661136 }, { "content": "pub fn object_has_no_such_method_val(name: &str, call: u16) -> Result<u64,::Error> {\n\n\tif call < 0x400 {\n\n\t\tpanic!(\"BUGCHECK: Call ID {:#x} < 0x400 invoked by-value call on {}\", call, name);\n\n\t}\n\n\telse {\n\n\t\tlog_notice!(\"User called non-existent mathod (by-value) {} on {}\", call-0x400, name);\n\n\t}\n\n\tErr( ::Error::UnknownCall )\n\n}\n", "file_path": "Kernel/Modules/syscalls/objects.rs", "rank": 47, "score": 290304.49173244165 }, { "content": "pub fn object_has_no_such_method_ref(name: &str, call: u16) -> Result<u64,::Error> {\n\n\tif call >= 0x400 {\n\n\t\tpanic!(\"BUGCHECK: Call ID {:#x} > 0x400 invoked by-ref call on {}\", call, name);\n\n\t}\n\n\telse {\n\n\t\tlog_notice!(\"User called non-existent mathod (by-ref) {} on {}\", call, name);\n\n\t}\n\n\tErr( ::Error::UnknownCall )\n\n}\n\n\n", "file_path": "Kernel/Modules/syscalls/objects.rs", "rank": 48, "score": 290304.49173244165 }, { "content": "/// Registers an interrupt\n\npub fn register_irq(global_num: usize, callback: IRQHandler, info: *const() ) -> Result<IRQHandle,IrqError>\n\n{\n\n\t// Locate the relevant apic\n\n\tlet (ioapic,ofs) = match get_ioapic(global_num) {\n\n\t\tSome(x) => x,\n\n\t\tNone => return Err( IrqError::BadIndex ),\n\n\t\t};\n\n\t\n\n\t// Bind ISR\n\n\t// TODO: Pick a suitable processor, and maybe have separate IDTs (and hence separate ISR lists)\n\n\tlet lapic_id = 0u32;\n\n\tlet isr_handle = match 
::arch::imp::interrupts::bind_free_isr(lapic_irq_handler, info, global_num)\n\n\t\t{\n\n\t\tOk(v) => v,\n\n\t\tErr(e) => return Err(IrqError::BindFail(e)),\n\n\t\t};\n\n\n\n\t// Enable the relevant IRQ on the LAPIC and IOAPIC\n\n\tioapic.set_irq(ofs, isr_handle.idx() as u8, lapic_id, raw::TriggerMode::EdgeHi, callback);\n\n\t//ioapic.set_irq(ofs, isr_handle.idx() as u8, lapic_id, raw::TriggerMode::LevelHi, callback);\n", "file_path": "Kernel/Core/arch/amd64/hw/apic/mod.rs", "rank": 49, "score": 288507.5292550046 }, { "content": "#[inline(always)]\n\npub fn checkmark_val<T>(v: *const T) {\n\n\t// SAFE: nop ASM\n\n\tunsafe { asm!(\"mov r1, r1; mov $0,$0\" : : \"r\"(v) : \"memory\" : \"volatile\"); }\n\n}\n\n\n\n#[allow(improper_ctypes)]\n\nextern \"C\" {\n\n\tpub fn drop_to_user(entry: usize, stack: usize, args_len: usize) -> !;\n\n}\n\n\n", "file_path": "Kernel/Core/arch/armv7/mod.rs", "rank": 50, "score": 286472.32906951953 }, { "content": "#[inline(always)]\n\npub fn checkmark_val<T>(v: *const T) {\n\n\t// SAFE: nop ASM (TODO: Ensure)\n\n\tunsafe { asm!(\"xchg %bx, %bx; mov $0,$0\" : : \"r\"(v) : \"memory\" : \"volatile\"); }\n\n}\n\n\n\n#[allow(improper_ctypes)]\n\nextern \"C\" {\n\n\tpub fn drop_to_user(entry: usize, stack: usize, cmdline_len: usize) -> !;\n\n}\n\n\n", "file_path": "Kernel/Core/arch/amd64/mod.rs", "rank": 51, "score": 286472.32906951953 }, { "content": "#[inline]\n\npub fn cur_timestamp() -> u64 {\n\n\timp::cur_timestamp()\n\n}\n", "file_path": "Kernel/Core/arch/mod.rs", "rank": 52, "score": 286435.15703712014 }, { "content": "#[start]\n\nfn main(_: isize, _: *const *const u8) -> isize {\n\n\t::syscalls::log_write(\"Hello World!\");\n\n\t0\n\n}\n\n\n\n#[no_mangle]\n\npub extern \"C\" fn register_arguments() {\n\n\t// Does nothing\n\n}\n\n\n", "file_path": "Usermode/hello_world/src/main.rs", "rank": 53, "score": 286330.3412051031 }, { "content": "pub fn is_fixed_alloc<T>(addr: *const T, size: usize) -> bool {\n\n\tconst BASE: usize = super::addresses::KERNEL_BASE;\n\n\tconst ONEMEG: usize = 1024*1024;\n\n\tconst LIMIT: usize = super::addresses::KERNEL_BASE + 8*ONEMEG;\n\n\tlet addr = addr as usize;\n\n\tif BASE <= addr && addr < LIMIT {\n\n\t\tif addr + size <= LIMIT {\n\n\t\t\ttrue\n\n\t\t}\n\n\t\telse {\n\n\t\t\tfalse\n\n\t\t}\n\n\t}\n\n\telse {\n\n\t\tfalse\n\n\t}\n\n}\n\n// UNSAFE: Can cause aliasing\n\npub unsafe fn fixed_alloc(_p: PAddr, _count: usize) -> Option<*mut ()> {\n\n\tNone\n\n}\n\n\n", "file_path": "Kernel/Core/arch/armv7/memory/virt.rs", "rank": 54, "score": 286259.08680026146 }, { "content": "fn view_file(p: &::std::fs::Path, nh: ::syscalls::vfs::Node) {\n\n\tkernel_log!(\"view_file(p={:?})\", p);\n\n\tlet byte_args: &[&[u8]] = &[ p.as_ref(), ];\n\n\tmatch ::loader::new_process(get_app_exe(b\"fileviewer\").unwrap(), b\"/sysroot/bin/fileviewer\", byte_args)\n\n\t{\n\n\tOk(app) => {\n\n\t\tkernel_log!(\"- Sending WGH\");\n\n\t\tapp.send_obj( \"guigrp\", ::syscalls::gui::clone_group_handle() );\n\n\t\tkernel_log!(\"- Transforming into file\");\n\n\t\tapp.send_obj( \"file\", nh.into_file(::syscalls::vfs::FileOpenMode::ReadOnly).unwrap() );\n\n\t\tapp.start();\n\n\t\t},\n\n\tErr(_e) => {},\n\n\t}\n\n}\n", "file_path": "Usermode/filebrowser/src/main.rs", "rank": 55, "score": 285845.829710233 }, { "content": "/// Returns true if the frame was marked as allocated\n\npub fn mark_free(frame_idx: u64) -> bool {\n\n\tlet mask = 1 << ((frame_idx % 32) as usize);\n\n\twith_bm( (frame_idx / 32) as usize, |c| {\n\n\t\tlet mut old = c.load(Ordering::Relaxed);\n\n\t\tif 
old & mask == 0\n\n\t\t{\n\n\t\t\t// Bit was clear, frame was already free?\n\n\t\t\tfalse\n\n\t\t}\n\n\t\telse {\n\n\t\t\t// Bit set, loop until a compare+swap succeeds\n\n\t\t\tloop\n\n\t\t\t{\n\n\t\t\t\tlet new_old = c.compare_and_swap(old, old & !mask, Ordering::Relaxed);\n\n\t\t\t\tif old == new_old {\n\n\t\t\t\t\tbreak ;\n\n\t\t\t\t}\n\n\t\t\t\told = new_old;\n\n\t\t\t}\n\n\t\t\ttrue\n\n\t\t}\n\n\t\t}).unwrap_or(false)\n\n}\n", "file_path": "Kernel/Core/arch/amd64/memory/phys.rs", "rank": 56, "score": 285760.80955351284 }, { "content": "pub fn mark_free(frame_idx: u64) -> bool {\n\n\tlog_warning!(\"TODO: mark_free - frame_idx={:#x} ({:#x})\", frame_idx, frame_idx*::PAGE_SIZE as u64);\n\n\t// HACK: Assume it was used\n\n\ttrue\n\n}\n", "file_path": "Kernel/Core/arch/armv8/memory/phys.rs", "rank": 57, "score": 285749.29865351773 }, { "content": "pub fn mark_free(frame_idx: u64) -> bool {\n\n\tlog_warning!(\"TODO: mark_free - frame_idx={:#x} ({:#x})\", frame_idx, frame_idx*::PAGE_SIZE as u64);\n\n\t// HACK: Assume it was used\n\n\ttrue\n\n}\n", "file_path": "Kernel/Core/arch/armv7/memory/phys.rs", "rank": 58, "score": 285749.29865351773 }, { "content": "pub fn can_map_without_alloc(a: *mut ()) -> bool {\n\n\tget_table_addr(a, false).is_some()\n\n}\n\n\n\npub unsafe fn map(a: *mut (), p: PAddr, mode: ProtectionMode) {\n\n\tlog_debug!(\"map({:p} = {:#x}, {:?})\", a, p, mode);\n\n\treturn map_int(a,p,mode);\n\n\t\n\n\t// \"Safe\" helper to constrain interior unsafety\n\n\tfn map_int(a: *mut (), p: PAddr, mode: ProtectionMode) {\n\n\t\t// 1. Map the relevant table in the temp area\n\n\t\tlet (mh, idx) = get_table_addr(a, true).unwrap();\n\n\t\tassert!(mode != ProtectionMode::Unmapped, \"Invalid pass of ProtectionMode::Unmapped to map\");\n\n\t\tassert!( idx % 2 == 0 );\t// two entries per 8KB page\n\n\n\n\t\t// 2. 
Insert\n\n\t\tlet mode_flags = prot_mode_to_flags(mode);\n\n\t\tlet old = mh[idx+0].compare_and_swap(0, p + mode_flags, Ordering::SeqCst);\n\n\t\tassert!(old == 0, \"map() called over existing allocation: a={:p}, old={:#x}\", a, old);\n\n\t\tmh[idx+1].swap(p + 0x1000 + mode_flags, Ordering::SeqCst);\n", "file_path": "Kernel/Core/arch/armv7/memory/virt.rs", "rank": 59, "score": 285726.3441685411 }, { "content": "pub fn deref_frame(frame_idx: u64) -> u32 {\n\n\twith_ref(frame_idx, |r|\n\n\t\tif r.load(Ordering::Relaxed) != 0 {\n\n\t\t\tr.fetch_sub(1, Ordering::Release)\n\n\t\t}\n\n\t\telse {\n\n\t\t\t0\n\n\t\t}\n\n\t\t).unwrap_or(0)\n\n}\n", "file_path": "Kernel/Core/arch/amd64/memory/phys.rs", "rank": 60, "score": 285394.3461484496 }, { "content": "pub fn deref_frame(frame_idx: u64) -> u32 {\n\n\twith_ref(frame_idx, |r|\n\n\t\tif r.load(Ordering::Relaxed) != 0 {\n\n\t\t\tr.fetch_sub(1, Ordering::Release)\n\n\t\t}\n\n\t\telse {\n\n\t\t\t0\n\n\t\t}\n\n\t\t).unwrap_or(0)\n\n}\n", "file_path": "Kernel/Core/arch/armv7/memory/phys.rs", "rank": 61, "score": 285394.3461484496 }, { "content": "pub fn deref_frame(frame_idx: u64) -> u32 {\n\n\twith_ref(frame_idx, |r|\n\n\t\tif r.load(Ordering::Relaxed) != 0 {\n\n\t\t\tr.fetch_sub(1, Ordering::Release)\n\n\t\t}\n\n\t\telse {\n\n\t\t\t0\n\n\t\t}\n\n\t\t).unwrap_or(0)\n\n}\n", "file_path": "Kernel/Core/arch/armv8/memory/phys.rs", "rank": 62, "score": 285394.3461484496 }, { "content": "#[inline(never)]\n\npub fn newwindow(name: &str) -> Result<ObjectHandle,u32> {\n\n\tlog_trace!(\"syscall_gui_newwindow(name={})\", name);\n\n\t// Get window group for this process\n\n\tlet wgh = ::kernel::threads::get_process_local::<PLWindowGroup>();\n\n\twgh.with( |wgh| objects::new_object( Window(Mutex::new(wgh.create_window(name))) ) )\n\n}\n\n\n", "file_path": "Kernel/Modules/syscalls/gui.rs", "rank": 63, "score": 285055.8798260796 }, { "content": "#[inline(never)]\n\npub fn newgroup(name: &str) -> Result<ObjectHandle,u32> {\n\n\t// Only init can create new sessions\n\n\t// TODO: Use a capability system instead of hardcoding to only PID0\n\n\tif ::kernel::threads::get_process_id() == 0 {\n\n\t\tOk(objects::new_object(Group(::gui::WindowGroupHandle::alloc(name))))\n\n\t}\n\n\telse {\n\n\t\ttodo!(\"syscall_gui_newgroup(name={}) - PID != 0\", name);\n\n\t}\n\n}\n\n\n", "file_path": "Kernel/Modules/syscalls/gui.rs", "rank": 64, "score": 285055.8798260796 }, { "content": "#[inline(never)]\n\nfn log_closure<F: FnOnce(&mut ::core::fmt::Write)>(f: F) {\n\n\tuse core::fmt::Write;\n\n\tlet mut lh = ::Logger;\n\n\tlet _ = write!(lh, \"[loader log] \");\n\n\tf(&mut lh);\n\n\tlet _ = write!(lh, \"\\n\");\n\n}\n\n\n\n/// Stub logging macro\n\nmacro_rules! 
log {\n\n\t($($v:tt)*) => {{\n\n\t\t::log_closure(|lh| {let _ = write!(lh, $($v)*);});\n\n\t\t}};\n\n}\n\n\n\n\n\npub struct ElfFile(elf_fmt::ElfHeader);\n\nimpl ElfFile\n\n{\n\n\tpub fn check_header(&self) {\n", "file_path": "Bootloaders/aarch64/main.rs", "rank": 65, "score": 284694.17872715584 }, { "content": "fn error_code(value: u32) -> usize {\n\n\tvalue as usize + (!0 / 2)\n\n}\n\n\n", "file_path": "Kernel/Modules/syscalls/lib.rs", "rank": 66, "score": 284623.02533507394 }, { "content": "pub fn new_pair() -> Result< (u32,u32), () >\n\n{\n\n\tlet (a_obj, b_obj) = SyncChannel::new_pair();\n\n\n\n\tlet a = ::objects::new_object(a_obj);\n\n\tif a == !0 {\n\n\t\treturn Err( () );\n\n\t}\n\n\n\n\tlet b = ::objects::new_object(b_obj);\n\n\tif b == !0 {\n\n\t\t::objects::drop_object(a);\n\n\t\treturn Err( () );\n\n\t}\n\n\n\n\tOk( (a,b) )\n\n}\n\n\n", "file_path": "Kernel/Modules/syscalls/ipc_calls.rs", "rank": 67, "score": 284395.91073910956 }, { "content": "type Entrypoint = extern \"cdecl\" fn(usize, *const kernel_proto::Info)->!;\n", "file_path": "Bootloaders/uefi/main.rs", "rank": 68, "score": 283600.53781830164 }, { "content": "struct Line\n\n{\n\n\tfile_offset: u64,\n\n\tfile_size: usize,\t// May be != data.len() if the line wasn't valid UTF-8\n\n\t// TODO: Use ByteString or other - and do box-chars for invalid codepoints?\n\n\tdata: String,\n\n}\n\n\n\n\n\nimpl Widget\n\n{\n\n\tpub fn new() -> Widget {\n\n\t\tWidget {\n\n\t\t\tvisible_line_count: Default::default(),\n\n\t\t\tfirst_line: 0,\n\n\t\t\tlines: Default::default(),\n\n\t\t\t}\n\n\t}\n\n\n\n\t/// Populate the buffer with the provided \"file\"\n", "file_path": "Usermode/fileviewer/src/textview.rs", "rank": 69, "score": 283108.1438800433 }, { "content": "pub fn get_info<T>(addr: *const T) -> Option<(u64, ProtectionMode)>\n\n{\n\n\tif let Some(paddr) = get_phys_raw(addr)\n\n\t{\n\n\t\tlet a = with_entry(Level::Middle, addr as usize >> (14+11), |e| {\n\n\t\t\tlet v = e.load(Ordering::Relaxed);\n\n\t\t\tif v & 3 == 3 { None } else { Some(v) }\n\n\t\t\t})\n\n\t\t\t.unwrap_or_else(|| with_entry(Level::Bottom, addr as usize >> 14, |e| e.load(Ordering::Relaxed)))\n\n\t\t\t;\n\n\t\tlet prot = attrs_to_prot_mode(a & 0xFF000000_000003FC);\n\n\t\tSome( (paddr, prot) )\n\n\t}\n\n\telse\n\n\t{\n\n\t\tNone\n\n\t}\n\n}\n", "file_path": "Kernel/Core/arch/armv8/memory/virt.rs", "rank": 70, "score": 282067.0567496323 }, { "content": "pub fn can_map_without_alloc(addr: *mut ()) -> bool {\n\n\t// The following only returns PTE::null() if an intermediate step was unallocated\n\n\t! 
get_page_ent(addr as usize, false, LargeOk::No).is_null()\n\n}\n\n\n\n/// Maps a physical frame to a page, with the provided protection mode\n\npub unsafe fn map(addr: *mut (), phys: PAddr, prot: ::memory::virt::ProtectionMode)\n\n{\n\n\tlet mut pte = get_page_ent(addr as usize, true, LargeOk::No);\n\n\tassert!( !pte.is_null(), \"Failed to obtain ent for {:p}\", addr );\n\n\tif pte.set_if_unset( phys, prot ).is_err() {\n\n\t\tpanic!(\"Attempting to map over existing allocation addr={:p}\", addr);\n\n\t}\n\n\tinvlpg(addr);\n\n}\n\n/// Removes a mapping\n\npub unsafe fn unmap(addr: *mut ()) -> Option<PAddr>\n\n{\n\n\tlet mut pte = get_page_ent(addr as usize, false, LargeOk::No);\n\n\tassert!( !pte.is_null(), \"Failed to obtain ent for {:p}\", addr );\n", "file_path": "Kernel/Core/arch/amd64/memory/virt.rs", "rank": 71, "score": 281926.97643225925 }, { "content": "pub fn can_map_without_alloc(addr: *mut ()) -> bool\n\n{\n\n\tfalse\n\n}\n\npub unsafe fn map(addr: *const (), phys: u64, prot: ProtectionMode)\n\n{\n\n\tlog_debug!(\"map({:p} = {:#x}, {:?})\", addr, phys, prot);\n\n\n\n\tlet page = addr as usize / PAGE_SIZE;\n\n\tif page >> (48-14) > 0\n\n\t{\n\n\t\t// Kernel AS doesn't need a deletion lock, as it's never pruned\n\n\t\t// Mutation lock also not needed (but is provided in VMM)\n\n\t\t\n\n\t\tlet mask = (1 << 33)-1;\n\n\t\tlet page = page & mask;\n\n\t\t//log_trace!(\"page = {:#x}\", page);\n\n\t\t// 1. Ensure that top-level region is valid.\n\n\t\twith_entry(Level::Root, page >> 22, |e| {\n\n\t\t\tif e.load(Ordering::Relaxed) == 0 {\n", "file_path": "Kernel/Core/arch/armv8/memory/virt.rs", "rank": 72, "score": 281926.97643225925 }, { "content": "pub fn get_multiref_count(frame_idx: u64) -> u32 {\n\n\twith_ref( frame_idx, |r| r.load(Ordering::Relaxed) ).unwrap_or(0)\n\n}\n\n\n", "file_path": "Kernel/Core/arch/armv7/memory/phys.rs", "rank": 73, "score": 281596.8959598977 }, { "content": "pub fn get_multiref_count(frame_idx: u64) -> u32 {\n\n\twith_ref( frame_idx, |r| r.load(Ordering::Relaxed) ).unwrap_or(0)\n\n}\n\n\n", "file_path": "Kernel/Core/arch/amd64/memory/phys.rs", "rank": 74, "score": 281596.8959598977 }, { "content": "pub fn get_multiref_count(frame_idx: u64) -> u32 {\n\n\twith_ref( frame_idx, |r| r.load(Ordering::Relaxed) ).unwrap_or(0)\n\n}\n\n\n", "file_path": "Kernel/Core/arch/armv8/memory/phys.rs", "rank": 75, "score": 281596.8959598977 }, { "content": "/// Return the system timestamp (miliseconds since an arbitary point)\n\npub fn cur_timestamp() -> u64\n\n{\n\n\thw::hpet::get_timestamp()\n\n}\n\n\n", "file_path": "Kernel/Core/arch/amd64/mod.rs", "rank": 76, "score": 281501.34610040585 }, { "content": "pub fn cur_timestamp() -> u64 {\n\n\t0\n\n}\n\n\n", "file_path": "Kernel/Core/arch/armv7/mod.rs", "rank": 77, "score": 281495.70817152347 }, { "content": "pub fn cur_timestamp() -> u64 {\n\n\t0\n\n}\n\n\n\nextern \"C\" {\n\n\tpub fn drop_to_user(entry: usize, stack: usize, args_len: usize) -> !;\n\n}\n\n\n\n\n\npub mod x86_io {\n\n\tpub unsafe fn inb(_p: u16) -> u8 { panic!(\"calling inb on ARM\") }\n\n\tpub unsafe fn inw(_p: u16) -> u16 { panic!(\"calling inw on ARM\") }\n\n\tpub unsafe fn inl(_p: u16) -> u32 { panic!(\"calling inl on ARM\") }\n\n\tpub unsafe fn outb(_p: u16, _v: u8) {}\n\n\tpub unsafe fn outw(_p: u16, _v: u16) {}\n\n\tpub unsafe fn outl(_p: u16, _v: u32) {}\n\n}\n\n\n\n\n", "file_path": "Kernel/Core/arch/armv8/mod.rs", "rank": 78, "score": 281495.70817152347 }, { "content": "#[inline(never)]\n\npub fn exit(status: u32) 
{\n\n\t::kernel::threads::exit_process(status);\n\n}\n", "file_path": "Kernel/Modules/syscalls/threads.rs", "rank": 80, "score": 279617.2934615496 }, { "content": "fn get_fixed_vec<F: ::std::io::Read>(f: &mut F, size: usize) -> Result<Vec<u8>, ::std::io::Error> {\n\n\tlet mut data: Vec<u8> = (0 .. size).map(|_| 0u8).collect();\n\n\tif f.read(&mut data)? != size {\n\n\t\ttodo!(\"Handle unexpected EOF in get_fixed_vec\");\n\n\t}\n\n\tOk( data )\n\n}\n\n\n\n/// Full-colour raster image\n\npub struct RasterRGB\n\n{\n\n\twidth: usize,\n\n\tdata: Vec<u8>,\n\n}\n\nimpl RasterRGB\n\n{\n\n\tpub fn new_img<P: AsRef<::std::fs::Path>>(path: P) -> Result<Image<Self>,LoadError> {\n\n\t\tSelf::new(path).map(|b| Image::new(b))\n\n\t}\n\n\tpub fn new<P: AsRef<::std::fs::Path>>(path: P) -> Result<RasterRGB,LoadError> {\n", "file_path": "Usermode/libwtk/image.rs", "rank": 81, "score": 277963.36405571736 }, { "content": "#[inline(never)]\n\nfn log_closure<F: FnOnce(&mut ::core::fmt::Write)>(f: F) {\n\n\tuse core::fmt::Write;\n\n\tlet mut lh = ::Logger;\n\n\tlet _ = write!(lh, \"[loader log] \");\n\n\tf(&mut lh);\n\n\tlet _ = write!(lh, \"\\n\");\n\n}\n\n\n\n/// Stub logging macro\n\nmacro_rules! log{\n\n\t($($v:tt)*) => {{\n\n\t\t::log_closure(|lh| {let _ = write!(lh, $($v)*);});\n\n\t\t}};\n\n}\n\n\n\n\n\npub struct ElfFile(elf_fmt::ElfHeader);\n\nimpl ElfFile\n\n{\n\n\tpub fn check_header(&self) {\n", "file_path": "Kernel/rundir/arm_bootloader/main.rs", "rank": 82, "score": 277218.26082929986 }, { "content": "/// Start a new thread using the provided TCB\n\n///\n\n/// Allocates a new stack within the current address space\n\npub fn start_thread<F: FnOnce()+Send>(thread: &mut ::threads::Thread, code: F)\n\n{\n\n\tlet stack = ::memory::virt::alloc_stack().into_array::<u8>();\n\n\t\n\n\tlet stack_rgn_top = &stack[stack.len()-1] as *const _ as usize + 1;\n\n\tlet mut stack_top = stack_rgn_top;\n\n\tlet stack_bottom = &stack[0] as *const _ as usize;\n\n\t\n\n\t// 1. Allocate TLS block at the top of the stack\n\n\tlog_trace!(\"prep_tls({:#x},{:#x},{:p})\", stack_top, stack_bottom, &*thread);\n\n\t// SAFE: Pointer is valid\n\n\tlet tls_base = unsafe { prep_tls(stack_top, stack_bottom, thread as *mut _) };\n\n\tstack_top = tls_base;\n\n\t\n\n\t// 2. 
Populate stack with `code`\n\n\tstack_top -= ::core::mem::size_of::<F>();\n\n\tstack_top -= stack_top % ::core::mem::align_of::<F>();\n\n\tlet code_ptr = stack_top;\n\n\t// SAFE: Pointer is valid\n\n\tunsafe {\n", "file_path": "Kernel/Core/arch/amd64/threads.rs", "rank": 83, "score": 276715.1886080293 }, { "content": "/// Switch the currently active window group\n\n//#[tag_safe(irq)]\n\npub fn switch_active(new: usize)\n\n{\n\n\t// TODO: I would like to check the validity of this value BEFORE attempting a re-render, but that\n\n\t// would require locking the S_WINDOW_GROUPS vector.\n\n\t// - Technically it shouldn't (reading the size is just racy, not unsafe), but representing that is nigh-on\n\n\t// impossible.\n\n\tlog_log!(\"Switching to group {}\", new);\n\n\tS_CURRENT_GROUP.store(new as usize, atomic::Ordering::Relaxed);\n\n\tS_RENDER_NEEDED.store(true, atomic::Ordering::Relaxed);\n\n\tS_FULL_REDRAW.store(true, atomic::Ordering::Relaxed);\n\n\tS_RENDER_REQUEST.post();\n\n}\n\n\n", "file_path": "Kernel/Modules/gui/windows/mod.rs", "rank": 85, "score": 275987.2586789129 }, { "content": "/// Run the provided closure with no changes possible to the address space\n\npub fn with_lock<F>(addr: usize, fcn: F)\n\nwhere\n\n\tF: FnOnce()\n\n{\n\n\t// TODO: Lock\n\n\tlog_notice!(\"TODO: with_lock(addr={:#x})\", addr);\n\n\tfcn();\n\n}\n\n\n", "file_path": "Kernel/Core/memory/virt.rs", "rank": 86, "score": 275081.73722007487 }, { "content": "fn bind(num: u32, obj: Box<dyn FnMut()->bool + Send>) -> BindingHandle\n\n{\t\n\n\tlog_trace!(\"bind(num={}, obj={:?})\", num, \"TODO\"/*obj*/);\n\n\t// 1. (if not already) bind a handler on the architecture's handlers\n\n\tlet mut map_lh = S_IRQ_BINDINGS.lock_init(|| Bindings { mapping: VecMap::new(), next_index: 0 });\n\n\tlet index = map_lh.next_index;\n\n\tmap_lh.next_index += 1;\n\n\tlet binding = match map_lh.mapping.entry(num)\n\n\t\t{\n\n\t\t::lib::vec_map::Entry::Occupied(e) => e.into_mut(),\n\n\t\t// - Vacant, create new binding (pokes arch IRQ clode)\n\n\t\t::lib::vec_map::Entry::Vacant(e) => e.insert( IRQBinding::new_boxed(num) ),\n\n\t\t};\n\n\t// 2. Add this handler to the meta-handler\n\n\tbinding.handlers.lock().push( obj );\n\n\t\n\n\tBindingHandle( num, index as u32 )\n\n}\n\nimpl Drop for BindingHandle\n\n{\n\n\tfn drop(&mut self)\n\n\t{\n\n\t\ttodo!(\"Drop IRQ binding handle: IRQ {} idx {}\", self.0, self.1);\n\n\t}\n\n}\n\n\n", "file_path": "Kernel/Core/irqs.rs", "rank": 87, "score": 274901.76470162754 }, { "content": "pub fn exit_process(status: u32) -> ! {\n\n\t// Requirements:\n\n\t// - Save exit status somewhere\n\n\tmatch with_cur_thread( |cur| cur.get_process_info().mark_exit(status) )\n\n\t{\n\n\tOk(_) => {},\n\n\tErr(_) => todo!(\"Two threads raced to exit\"),\n\n\t}\n\n\tlog_notice!(\"Terminating process with status={:#x}\", status);\n\n\n\n\t// - Request all other threads terminate\n\n\t// TODO: How would this be done cleanly? 
Need to wake all and terminate on syscall boundary?\n\n\t\n\n\t// - Terminate this thread\n\n\t// > Process reaping is handled by the PCB dropping when refcount reaches zero\n\n\tterminate_thread();\n\n}\n\n\n", "file_path": "Kernel/Core/threads/mod.rs", "rank": 88, "score": 274434.62499687047 }, { "content": "/// Set the boot video mode.\n\n///\n\n/// NOTE: Must be called before this module is initialised to have any effect\n\npub fn set_boot_mode(mode: bootvideo::VideoMode)\n\n{\n\n\tlet mut lh = S_BOOT_MODE.lock();\n\n\tassert!(lh.is_none(), \"Boot video mode set multiple times\");\n\n\t*lh = Some(mode);\n\n}\n\n\n", "file_path": "Kernel/Core/metadevs/video/mod.rs", "rank": 89, "score": 272401.11362511537 }, { "content": "/// Returns true if the passed address is \"valid\" (allocated, or delay allocated)\n\npub fn is_reserved<T>(addr: *const T) -> bool\n\n{\n\n\tlet pte = get_page_ent(addr as usize, false, LargeOk::Yes);\n\n\treturn !pte.is_null() && pte.is_reserved();\n\n}\n", "file_path": "Kernel/Core/arch/amd64/memory/virt.rs", "rank": 90, "score": 272091.0828646264 }, { "content": "pub fn is_reserved<T>(addr: *const T) -> bool\n\n{\n\n\tget_phys_raw(addr).is_some()\n\n}\n", "file_path": "Kernel/Core/arch/armv8/memory/virt.rs", "rank": 91, "score": 272079.93627154006 }, { "content": "pub fn is_reserved<T>(addr: *const T) -> bool {\n\n\tget_phys_opt(addr).is_some()\n\n\t//PageEntry::get(addr as *const ()).is_reserved()\n\n}\n", "file_path": "Kernel/Core/arch/armv7/memory/virt.rs", "rank": 92, "score": 272079.93627154006 }, { "content": "fn load_loader(loader: &::kernel::vfs::handle::File) -> Result<(&'static LoaderHeader, usize), &'static str>\n\n{\n\n\tuse core::mem::forget;\n\n\tuse kernel::vfs::handle;\n\n\tuse kernel::PAGE_SIZE;\n\n\n\n\tlet ondisk_size = loader.size();\n\n\tlet mh_firstpage = {\n\n\t\tif ondisk_size > MAX_SIZE as u64 {\n\n\t\t\tlog_error!(\"Loader is too large to fit in reserved region ({}, max {})\",\n\n\t\t\t\tondisk_size, MAX_SIZE);\n\n\t\t\treturn Err(\"Loader too large\");\n\n\t\t}\n\n\t\tloader.memory_map(LOAD_BASE, 0, PAGE_SIZE, handle::MemoryMapMode::Execute).expect(\"Loader first page\")\n\n\t\t};\n\n\t// - 2. Parse the header\n\n\t// SAFE: LoaderHeader is POD, and pointer is valid (not Sync, so passing to another thread/process is invalid)\n\n\tlet header_ptr = unsafe { &*(LOAD_BASE as *const LoaderHeader) };\n\n\tif header_ptr.magic != MAGIC || header_ptr.info != INFO {\n\n\t\tlog_error!(\"Loader header is invalid: magic {:#x} != {:#x} or info {:#x} != {:#x}\",\n", "file_path": "Kernel/main/main.rs", "rank": 93, "score": 271824.5951692676 }, { "content": "pub fn puth(v: u64) {\n\n\tprint!(\"{:08x}\", v);\n\n}\n", "file_path": "Kernel/Core/arch/imp-test.rs", "rank": 94, "score": 271665.58757387777 }, { "content": "/// Ensure that the provded pages are valid (i.e. 
backed by memory)\n\npub fn allocate(addr: *mut (), page_count: usize) -> Result<(), MapError> {\n\n\tallocate_int(addr, page_count, false)\n\n}\n", "file_path": "Kernel/Core/memory/virt.rs", "rank": 95, "score": 271269.90264657047 }, { "content": "/// Simple async task executor\n\npub fn runner(mut f: impl FnMut(&mut task::Context))\n\n{\n\n\tlet waiter = SimpleWaiter::new();\n\n\n\n\t// SAFE: The inner waker above won't move\n\n\tlet waker = unsafe { task::Waker::from_raw(waiter.raw_waker()) };\n\n\tlet mut context = task::Context::from_waker(&waker);\n\n\n\n\tloop\n\n\t{\n\n\t\tf(&mut context);\n\n\t\twaiter.sleep();\n\n\t}\n\n}\n\n\n", "file_path": "Kernel/Core/futures.rs", "rank": 96, "score": 270312.6560556802 }, { "content": "pub fn start_thread<F: FnOnce()+Send+'static>(thread: &mut ::threads::Thread, code: F) {\n\n\tlet mut stack = StackInit::new();\n\n\n\n\t// 2. Populate stack with `code`\n\n\tstack.push(code);\n\n\tlog_debug!(\"stack.pos() = {:#x}\", stack.pos());\n\n\tlet a = stack.pos();\n\n\tstack.align(8);\n\n\tstack.push(a);\n\n\t\n\n\t// 3. Populate with task_switch state\n\n\t// - Root function defined below\n\n\tstack.push( thread_root::<F> as usize );\n\n\t// - LR popped by task_switch - Trampoline that sets R0 to the address of 'code'\n\n\tstack.push( thread_trampoline as usize );\n\n\t// - R4-R12 saved by task_switch\n\n\tfor _ in 4 .. 12+1 {\n\n\t\tstack.push(0u32);\n\n\t}\n\n\tstack.push(0u32);\t// User SP\n", "file_path": "Kernel/Core/arch/armv7/threads.rs", "rank": 97, "score": 269475.55518749723 }, { "content": "pub fn start_thread<F: FnOnce()+Send+'static>(thread: &mut ::threads::Thread, code: F) {\n\n\tlet mut stack = StackInit::new();\n\n\n\n\t// 2. Populate stack with `code`\n\n\tstack.push(code);\n\n\tlet a = stack.pos();\n\n\tstack.align(16);\n\n\t// State for `thread_trampoline`\n\n\tstack.push(a);\n\n\tstack.push( thread_root::<F> as usize );\n\n\t\n\n\t// 3. Populate with task_switch state\n\n\t// - R19-R28 saved by task_switch\n\n\tfor _ in 19 .. 28+1 {\n\n\t\tstack.push(0_usize);\n\n\t}\n\n\t// - LR popped by task_switch - Trampoline that sets R0 to the address of 'code'\n\n\tstack.push( thread_trampoline as usize );\t// R30 - aka LR\n\n\tstack.push(0_usize);\t// R29\n\n\n", "file_path": "Kernel/Core/arch/armv8/threads.rs", "rank": 98, "score": 269475.55518749723 }, { "content": "pub fn handle_packet(_physical_interface: &dyn crate::nic::Interface, _source_mac: [u8; 6], mut r: crate::nic::PacketReader)\n\n{\n\n\t// TODO: Length test\n\n\tlet hw_ty = r.read_u16n().unwrap();\n\n\tlet sw_ty = r.read_u16n().unwrap();\n\n\tlet hwsize = r.read_u8().unwrap();\n\n\tlet swsize = r.read_u8().unwrap();\n\n\tlet code = r.read_u16n().unwrap();\n\n\tlog_debug!(\"ARP HW {:04x} {}B SW {:04x} {}B req={}\", hw_ty, hwsize, sw_ty, swsize, code);\n\n\tif hwsize == 6 {\n\n\t\tlet mac = {\n\n\t\t\tlet mut b = [0; 6];\n\n\t\t\tr.read(&mut b).unwrap();\n\n\t\t\tb\n\n\t\t\t};\n\n\t\tlog_debug!(\"ARP HW {:?}\", ::kernel::logging::HexDump(&mac));\n\n\t}\n\n\tif swsize == 4 {\n\n\t\tlet ip = {\n\n\t\t\tlet mut b = [0; 4];\n\n\t\t\tr.read(&mut b).unwrap();\n\n\t\t\tb\n\n\t\t\t};\n\n\t\tlog_debug!(\"ARP SW {:?}\", ip);\n\n\t}\n\n}\n\n\n", "file_path": "Kernel/Modules/network/arp.rs", "rank": 99, "score": 269005.0197034344 } ]
Rust
vchain/src/acc/mod.rs
hkbudb/vchain-demo
8e12ac2d1a3b38cb7009f400b8601b6bfc28f23d
pub mod digest_set;
pub mod serde_impl;
pub mod utils;

pub use ark_bls12_381::{
    Bls12_381 as Curve, Fq12, Fr, G1Affine, G1Projective, G2Affine, G2Projective,
};

pub type DigestSet = digest_set::DigestSet<Fr>;

use crate::digest::{Digest, Digestible};
use crate::set::{MultiSet, SetElement};
use anyhow::{self, bail, ensure, Context};
use ark_ec::{msm::VariableBaseMSM, AffineCurve, PairingEngine, ProjectiveCurve};
use ark_ff::{Field, One, PrimeField, ToBytes, Zero};
use ark_poly::{univariate::DensePolynomial, Polynomial};
use core::any::Any;
use core::str::FromStr;
use rayon::prelude::*;
use serde::{Deserialize, Serialize};
use utils::{xgcd, FixedBaseCurvePow, FixedBaseScalarPow};

#[cfg(test)]
const GS_VEC_LEN: usize = 0;
#[cfg(not(test))]
const GS_VEC_LEN: usize = 5000;

lazy_static! {
    static ref PUB_Q: Fr =
        Fr::from_str("480721077433357505777975950918924200361380912084288598463024400624539293706").unwrap();
    static ref PRI_S: Fr = Fr::from_str("259535143263514268207918833918737523409").unwrap();
    static ref G1_POWER: FixedBaseCurvePow<G1Projective> =
        FixedBaseCurvePow::build(&G1Projective::prime_subgroup_generator());
    static ref G2_POWER: FixedBaseCurvePow<G2Projective> =
        FixedBaseCurvePow::build(&G2Projective::prime_subgroup_generator());
    static ref PRI_S_POWER: FixedBaseScalarPow<Fr> = FixedBaseScalarPow::build(&PRI_S);
    static ref G1_S_VEC: Vec<G1Affine> = {
        info!("Initialize G1_S_VEC...");
        let timer = howlong::ProcessCPUTimer::new();
        let mut res: Vec<G1Affine> = Vec::with_capacity(GS_VEC_LEN);
        (0..GS_VEC_LEN)
            .into_par_iter()
            .map(|i| get_g1s(Fr::from(i as u64)))
            .collect_into_vec(&mut res);
        info!("Done in {}.", timer.elapsed());
        res
    };
    static ref G2_S_VEC: Vec<G2Affine> = {
        info!("Initialize G2_S_VEC...");
        let timer = howlong::ProcessCPUTimer::new();
        let mut res: Vec<G2Affine> = Vec::with_capacity(GS_VEC_LEN);
        (0..GS_VEC_LEN)
            .into_par_iter()
            .map(|i| get_g2s(Fr::from(i as u64)))
            .collect_into_vec(&mut res);
        info!("Done in {}.", timer.elapsed());
        res
    };
    static ref E_G_G: Fq12 = Curve::pairing(
        G1Affine::prime_subgroup_generator(),
        G2Affine::prime_subgroup_generator()
    );
}

fn get_g1s(coeff: Fr) -> G1Affine {
    let si = PRI_S_POWER.apply(&coeff);
    G1_POWER.apply(&si).into_affine()
}

fn get_g2s(coeff: Fr) -> G2Affine {
    let si = PRI_S_POWER.apply(&coeff);
    G2_POWER.apply(&si).into_affine()
}

#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)]
pub enum Type {
    ACC1,
    ACC2,
}

pub trait Accumulator {
    const TYPE: Type;
    type Proof;

    fn cal_acc_g1_sk<T: SetElement>(set: &MultiSet<T>) -> G1Affine {
        Self::cal_acc_g1_sk_d(&DigestSet::new(set))
    }
    fn cal_acc_g1<T: SetElement>(set: &MultiSet<T>) -> G1Affine {
        Self::cal_acc_g1_d(&DigestSet::new(set))
    }
    fn cal_acc_g2_sk<T: SetElement>(set: &MultiSet<T>) -> G2Affine {
        Self::cal_acc_g2_sk_d(&DigestSet::new(set))
    }
    fn cal_acc_g2<T: SetElement>(set: &MultiSet<T>) -> G2Affine {
        Self::cal_acc_g2_d(&DigestSet::new(set))
    }
    fn cal_acc_g1_sk_d(set: &DigestSet) -> G1Affine;
    fn cal_acc_g1_d(set: &DigestSet) -> G1Affine;
    fn cal_acc_g2_sk_d(set: &DigestSet) -> G2Affine;
    fn cal_acc_g2_d(set: &DigestSet) -> G2Affine;
    fn gen_proof(set1: &DigestSet, set2: &DigestSet) -> anyhow::Result<Self::Proof>;
}

pub trait AccumulatorProof: Eq + PartialEq {
    const TYPE: Type;
    fn gen_proof(set1: &DigestSet, set2: &DigestSet) -> anyhow::Result<Self>
    where
        Self: core::marker::Sized;
    fn combine_proof(&mut self, other: &Self) -> anyhow::Result<()>;
    fn as_any(&self) -> &dyn Any;
}

pub struct Acc1;

impl Acc1 {
    fn poly_to_g1(poly: DensePolynomial<Fr>) -> G1Affine {
        let mut idxes: Vec<usize> = Vec::with_capacity(poly.degree() + 1);
        for (i, coeff) in poly.coeffs.iter().enumerate() {
            if coeff.is_zero() {
                continue;
            }
            idxes.push(i);
        }
        let mut bases: Vec<G1Affine> = Vec::with_capacity(idxes.len());
        let mut scalars: Vec<<Fr as PrimeField>::BigInt> = Vec::with_capacity(idxes.len());
        (0..idxes.len())
            .into_par_iter()
            .map(|i| {
                G1_S_VEC.get(i).copied().unwrap_or_else(|| {
                    trace!("access g1 pub key at {}", i);
                    get_g1s(Fr::from(i as u64))
                })
            })
            .collect_into_vec(&mut bases);
        (0..idxes.len())
            .into_par_iter()
            .map(|i| poly.coeffs[i].into_repr())
            .collect_into_vec(&mut scalars);
        VariableBaseMSM::multi_scalar_mul(&bases[..], &scalars[..]).into_affine()
    }

    fn poly_to_g2(poly: DensePolynomial<Fr>) -> G2Affine {
        let mut idxes: Vec<usize> = Vec::with_capacity(poly.degree() + 1);
        for (i, coeff) in poly.coeffs.iter().enumerate() {
            if coeff.is_zero() {
                continue;
            }
            idxes.push(i);
        }
        let mut bases: Vec<G2Affine> = Vec::with_capacity(idxes.len());
        let mut scalars: Vec<<Fr as PrimeField>::BigInt> = Vec::with_capacity(idxes.len());
        (0..idxes.len())
            .into_par_iter()
            .map(|i| {
                G2_S_VEC.get(i).copied().unwrap_or_else(|| {
                    trace!("access g2 pub key at {}", i);
                    get_g2s(Fr::from(i as u64))
                })
            })
            .collect_into_vec(&mut bases);
        (0..idxes.len())
            .into_par_iter()
            .map(|i| poly.coeffs[i].into_repr())
            .collect_into_vec(&mut scalars);
        VariableBaseMSM::multi_scalar_mul(&bases[..], &scalars[..]).into_affine()
    }
}

#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]
pub struct Acc1Proof {
    #[serde(with = "serde_impl")]
    f1: G2Affine,
    #[serde(with = "serde_impl")]
    f2: G2Affine,
}

impl AccumulatorProof for Acc1Proof {
    const TYPE: Type = Type::ACC1;
    fn gen_proof(set1: &DigestSet, set2: &DigestSet) -> anyhow::Result<Self> {
        Acc1::gen_proof(set1, set2)
    }
    fn combine_proof(&mut self, _other: &Self) -> anyhow::Result<()> {
        bail!("invalid operation");
    }
    fn as_any(&self) -> &dyn Any {
        self
    }
}

impl Acc1Proof {
    pub fn verify(&self, acc1: &G1Affine, acc2: &G1Affine) -> bool {
        Curve::product_of_pairings(&[
            ((*acc1).into(), self.f1.into()),
            ((*acc2).into(), self.f2.into()),
        ]) == *E_G_G
    }
}

impl Accumulator for Acc1 {
    const TYPE: Type = Type::ACC1;
    type Proof = Acc1Proof;

    fn cal_acc_g1_sk_d(set: &DigestSet) -> G1Affine {
        let x = set
            .par_iter()
            .map(|(v, exp)| {
                let s = *PRI_S + v;
                let exp = [*exp as u64];
                s.pow(&exp)
            })
            .reduce(Fr::one, |a, b| a * &b);
        G1_POWER.apply(&x).into_affine()
    }
    fn cal_acc_g1_d(set: &DigestSet) -> G1Affine {
        let poly = set.expand_to_poly();
        Self::poly_to_g1(poly)
    }
    fn cal_acc_g2_sk_d(set: &DigestSet) -> G2Affine {
        let x = set
            .par_iter()
            .map(|(v, exp)| {
                let s = *PRI_S + v;
                let exp = [*exp as u64];
                s.pow(&exp)
            })
            .reduce(Fr::one, |a, b| a * &b);
        G2_POWER.apply(&x).into_affine()
    }
    fn cal_acc_g2_d(set: &DigestSet) -> G2Affine {
        let poly = set.expand_to_poly();
        Self::poly_to_g2(poly)
    }
    fn gen_proof(set1: &DigestSet, set2: &DigestSet) -> anyhow::Result<Self::Proof> {
        let poly1 = set1.expand_to_poly();
        let poly2 = set2.expand_to_poly();
        let (g, x, y) = xgcd(poly1, poly2).context("failed to compute xgcd")?;
        ensure!(g.degree() == 0, "cannot generate proof");
        Ok(Acc1Proof {
            f1: Self::poly_to_g2(&x / &g),
            f2: Self::poly_to_g2(&y / &g),
        })
    }
}

pub struct Acc2;

#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]
pub struct Acc2Proof {
    #[serde(with = "serde_impl")]
    f: G1Affine,
}

impl AccumulatorProof for Acc2Proof {
    const TYPE: Type = Type::ACC2;
    fn gen_proof(set1: &DigestSet, set2: &DigestSet) -> anyhow::Result<Self> {
        Acc2::gen_proof(set1, set2)
    }
    fn combine_proof(&mut self, other: &Self) -> anyhow::Result<()> {
        let mut f = self.f.into_projective();
        f.add_assign_mixed(&other.f);
        self.f = f.into_affine();
        Ok(())
    }
    fn as_any(&self) -> &dyn Any {
        self
    }
}

impl Acc2Proof {
    pub fn verify(&self, acc1: &G1Affine, acc2: &G2Affine) -> bool {
        let a = Curve::pairing(*acc1, *acc2);
        let b = Curve::pairing(self.f, G2Affine::prime_subgroup_generator());
        a == b
    }
}

impl Accumulator for Acc2 {
    const TYPE: Type = Type::ACC2;
    type Proof = Acc2Proof;

    fn cal_acc_g1_sk_d(set: &DigestSet) -> G1Affine {
        let x = set
            .par_iter()
            .map(|(a, b)| {
                let s = PRI_S_POWER.apply(a);
                s * &Fr::from(*b)
            })
            .reduce(Fr::zero, |a, b| a + &b);
        G1_POWER.apply(&x).into_affine()
    }
    fn cal_acc_g1_d(set: &DigestSet) -> G1Affine {
        let mut bases: Vec<G1Affine> = Vec::with_capacity(set.len());
        let mut scalars: Vec<<Fr as PrimeField>::BigInt> = Vec::with_capacity(set.len());
        (0..set.len())
            .into_par_iter()
            .map(|i| get_g1s(set[i].0))
            .collect_into_vec(&mut bases);
        (0..set.len())
            .into_par_iter()
            .map(|i| <Fr as PrimeField>::BigInt::from(set[i].1 as u64))
            .collect_into_vec(&mut scalars);
        VariableBaseMSM::multi_scalar_mul(&bases[..], &scalars[..]).into_affine()
    }
    fn cal_acc_g2_sk_d(set: &DigestSet) -> G2Affine {
        let x = set
            .par_iter()
            .map(|(a, b)| {
                let s = PRI_S_POWER.apply(&(*PUB_Q - a));
                s * &Fr::from(*b)
            })
            .reduce(Fr::zero, |a, b| a + &b);
        G2_POWER.apply(&x).into_affine()
    }
    fn cal_acc_g2_d(set: &DigestSet) -> G2Affine {
        let mut bases: Vec<G2Affine> = Vec::with_capacity(set.len());
        let mut scalars: Vec<<Fr as PrimeField>::BigInt> = Vec::with_capacity(set.len());
        (0..set.len())
            .into_par_iter()
            .map(|i| get_g2s(*PUB_Q - &set[i].0))
            .collect_into_vec(&mut bases);
        (0..set.len())
            .into_par_iter()
            .map(|i| <Fr as PrimeField>::BigInt::from(set[i].1 as u64))
            .collect_into_vec(&mut scalars);
        VariableBaseMSM::multi_scalar_mul(&bases[..], &scalars[..]).into_affine()
    }
    fn gen_proof(set1: &DigestSet, set2: &DigestSet) -> anyhow::Result<Self::Proof> {
        let produce_size = set1.len() * set2.len();
        let mut product: Vec<(Fr, u64)> = Vec::with_capacity(produce_size);
        (0..produce_size)
            .into_par_iter()
            .map(|i| {
                let set1idx = i / set2.len();
                let set2idx = i % set2.len();
                let (s1, q1) = set1[set1idx];
                let (s2, q2) = set2[set2idx];
                (*PUB_Q + &s1 - &s2, (q1 * q2) as u64)
            })
            .collect_into_vec(&mut product);
        if product.par_iter().any(|(x, _)| *x == *PUB_Q) {
            bail!("cannot generate proof");
        }
        let mut bases: Vec<G1Affine> = Vec::with_capacity(produce_size);
        let mut scalars: Vec<<Fr as PrimeField>::BigInt> = Vec::with_capacity(produce_size);
        (0..produce_size)
            .into_par_iter()
            .map(|i| get_g1s(product[i].0))
            .collect_into_vec(&mut bases);
        (0..produce_size)
            .into_par_iter()
            .map(|i| <Fr as PrimeField>::BigInt::from(product[i].1))
            .collect_into_vec(&mut scalars);
        let f = VariableBaseMSM::multi_scalar_mul(&bases[..], &scalars[..]).into_affine();
        Ok(Acc2Proof { f })
    }
}

#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]
pub enum Proof {
    ACC1(Box<Acc1Proof>),
    ACC2(Box<Acc2Proof>),
}

impl Digestible for G1Affine {
    fn to_digest(&self) -> Digest {
        let mut buf = Vec::<u8>::new();
        self.write(&mut buf)
            .unwrap_or_else(|_| panic!("failed to serialize {:?}", self));
        buf.to_digest()
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    fn init_logger() {
        let _ = env_logger::builder().is_test(true).try_init();
    }

    #[test]
    fn test_cal_acc() {
        init_logger();
        let set = MultiSet::from_vec(vec![1, 1, 2, 3, 4, 4, 5, 6, 6, 7, 8, 9]);
        assert_eq!(Acc1::cal_acc_g1(&set), Acc1::cal_acc_g1_sk(&set));
        assert_eq!(Acc1::cal_acc_g2(&set), Acc1::cal_acc_g2_sk(&set));
        assert_eq!(Acc2::cal_acc_g1(&set), Acc2::cal_acc_g1_sk(&set));
        assert_eq!(Acc2::cal_acc_g2(&set), Acc2::cal_acc_g2_sk(&set));
    }

    #[test]
    fn test_acc1_proof() {
        init_logger();
        let set1 = DigestSet::new(&MultiSet::from_vec(vec![1, 2, 3]));
        let set2 = DigestSet::new(&MultiSet::from_vec(vec![4, 5, 6]));
        let set3 = DigestSet::new(&MultiSet::from_vec(vec![1, 1]));
        let proof = Acc1::gen_proof(&set1, &set2).unwrap();
        let acc1 = Acc1::cal_acc_g1_sk_d(&set1);
        let acc2 = Acc1::cal_acc_g1_sk_d(&set2);
        assert!(proof.verify(&acc1, &acc2));
        assert!(Acc1::gen_proof(&set1, &set3).is_err());
    }

    #[test]
    fn test_acc2_proof() {
        init_logger();
        let set1 = DigestSet::new(&MultiSet::from_vec(vec![1, 2, 3]));
        let set2 = DigestSet::new(&MultiSet::from_vec(vec![4, 5, 6]));
        let set3 = DigestSet::new(&MultiSet::from_vec(vec![1, 1]));
        let proof = Acc2::gen_proof(&set1, &set2).unwrap();
        let acc1 = Acc2::cal_acc_g1_sk_d(&set1);
        let acc2 = Acc2::cal_acc_g2_sk_d(&set2);
        assert!(proof.verify(&acc1, &acc2));
        assert!(Acc2::gen_proof(&set1, &set3).is_err());
    }

    #[test]
    fn test_acc2_proof_sum() {
        init_logger();
        let set1 = DigestSet::new(&MultiSet::from_vec(vec![1, 2, 3]));
        let set2 = DigestSet::new(&MultiSet::from_vec(vec![4, 5, 6]));
        let set3 = DigestSet::new(&MultiSet::from_vec(vec![7, 8, 9]));
        let mut proof1 = Acc2::gen_proof(&set1, &set2).unwrap();
        let proof2 = Acc2::gen_proof(&set1, &set3).unwrap();
        proof1.combine_proof(&proof2).unwrap();
        let acc1 = Acc2::cal_acc_g1_sk_d(&set1);
        let acc2 = Acc2::cal_acc_g2_sk_d(&set2);
        let acc3 = Acc2::cal_acc_g2_sk_d(&set3);
        let acc4 = {
            let mut acc = acc2.into_projective();
            acc.add_assign_mixed(&acc3);
            acc.into_affine()
        };
        assert!(proof1.verify(&acc1, &acc4));
    }
}
pub mod digest_set; pub mod serde_impl; pub mod utils; pub use ark_bls12_381::{ Bls12_381 as Curve, Fq12, Fr, G1Affine, G1Projective, G2Affine, G2Projective, }; pub type DigestSet = digest_set::DigestSet<Fr>; use crate::digest::{Digest, Digestible}; use crate::set::{MultiSet, SetElement}; use anyhow::{self, bail, ensure, Context}; use ark_ec::{msm::VariableBaseMSM, AffineCurve, PairingEngine, ProjectiveCurve}; use ark_ff::{Field, One, PrimeField, ToBytes, Zero}; use ark_poly::{univariate::DensePolynomial, Polynomial}; use core::any::Any; use core::str::FromStr; use rayon::prelude::*; use serde::{Deserialize, Serialize}; use utils::{xgcd, FixedBaseCurvePow, FixedBaseScalarPow}; #[cfg(test)] const GS_VEC_LEN: usize = 0; #[cfg(not(test))] const GS_VEC_LEN: usize = 5000; lazy_static! { static ref PUB_Q: Fr = Fr::from_str("480721077433357505777975950918924200361380912084288598463024400624539293706").unwrap(); static ref PRI_S: Fr = Fr::from_str("259535143263514268207918833918737523409").unwrap(); static ref G1_POWER: FixedBaseCurvePow<G1Projective> = FixedBaseCurvePow::build(&G1Projective::prime_subgroup_generator()); static ref G2_POWER: FixedBaseCurvePow<G2Projective> = FixedBaseCurvePow::build(&G2Projective::prime_subgroup_generator()); static ref PRI_S_POWER: FixedBaseScalarPow<Fr> = FixedBaseScalarPow::build(&PRI_S); static ref G1_S_VEC: Vec<G1Affine> = { info!("Initialize G1_S_VEC..."); let timer = howlong::ProcessCPUTimer::new(); let mut res: Vec<G1Affine> = Vec::with_capacity(GS_VEC_LEN); (0..GS_VEC_LEN) .into_par_iter() .map(|i| get_g1s(Fr::from(i as u64))) .collect_into_vec(&mut res); info!("Done in {}.", timer.elapsed()); res }; static ref G2_S_VEC: Vec<G2Affine> = { info!("Initialize G2_S_VEC..."); let timer = howlong::ProcessCPUTimer::new(); let mut res: Vec<G2Affine> = Vec::with_capacity(GS_VEC_LEN); (0..GS_VEC_LEN) .into_par_iter() .map(|i| get_g2s(Fr::from(i as u64))) .collect_into_vec(&mut res); info!("Done in {}.", timer.elapsed()); res }; static ref E_G_G: Fq12 = Curve::pairing( G1Affine::prime_subgroup_generator(), G2Affine::prime_subgroup_generator() ); } fn get_g1s(coeff: Fr) -> G1Affine { let si = PRI_S_POWER.apply(&coeff); G1_POWER.apply(&si).into_affine() } fn get_g2s(coeff: Fr) -> G2Affine { let si = PRI_S_POWER.apply(&coeff); G2_POWER.apply(&si).into_affine() } #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)] pub enum Type { ACC1, ACC2, } pub trait Accumulator { const TYPE: Type; type Proof; fn cal_acc_g1_sk<T: SetElement>(set: &MultiSet<T>) -> G1Affine { Self::cal_acc_g1_sk_d(&DigestSet::new(set)) } fn cal_acc_g1<T: SetElement>(set: &MultiSet<T>) -> G1Affine { Self::cal_acc_g1_d(&DigestSet::new(set)) } fn cal_acc_g2_sk<T: SetElement>(set: &MultiSet<T>) -> G2Affine { Self::cal_acc_g2_sk_d(&DigestSet::new(set)) } fn cal_acc_g2<T: SetElement>(set: &MultiSet<T>) -> G2Affine { Self::cal_acc_g2_d(&DigestSet::new(set)) } fn cal_acc_g1_sk_d(set: &DigestSet) -> G1Affine; fn cal_acc_g1_d(set: &DigestSet) -> G1Affine; fn cal_acc_g2_sk_d(set: &DigestSet) -> G2Affine; fn cal_acc_g2_d(set: &DigestSet) -> G2Affine; fn gen_proof(set1: &DigestSet, set2: &DigestSet) -> anyhow::Result<Self::Proof>; } pub trait AccumulatorProof: Eq + PartialEq { const TYPE: Type; fn gen_proof(set1: &DigestSet, set2: &DigestSet) -> anyhow::Result<Self> where Self: core::marker::Sized; fn combine_proof(&mut self, other: &Self) -> anyhow::Result<()>; fn as_any(&self) -> &dyn Any; } pub struct Acc1; impl Acc1 { fn poly_to_g1(poly: DensePolynomial<Fr>) -> G1Affine { let mut idxes: 
Vec<usize> = Vec::with_capacity(poly.degree() + 1); for (i, coeff) in poly.coeffs.iter().enumerate() { if coeff.is_zero() { continue; } idxes.push(i); } let mut bases: Vec<G1Affine> = Vec::with_capacity(idxes.len()); let mut scalars: Vec<<Fr as PrimeField>::BigInt> = Vec::with_capacity(idxes.len()); (0..idxes.len()) .into_par_iter() .map(|i| { G1_S_VEC.get(i).copied().unwrap_or_else(|| { trace!("access g1 pub key at {}", i); get_g1s(Fr::from(i as u64)) }) }) .collect_into_vec(&mut bases); (0..idxes.len()) .into_par_iter() .map(|i| poly.coeffs[i].into_repr()) .collect_into_vec(&mut scalars); VariableBaseMSM::multi_scalar_mul(&bases[..], &scalars[..]).into_affine() } fn poly_to_g2(poly: DensePolynomial<Fr>) -> G2Affine { let mut idxes: Vec<usize> = Vec::with_capacity(poly.degree() + 1); for (i, coeff) in poly.coeffs.iter().enumerate() { if coeff.is_zero() { continue; } idxes.push(i); } let mut bases: Vec<G2Affine> = Vec::with_capacity(idxes.len()); let mut scalars: Vec<<Fr as PrimeField>::BigInt> = Vec::with_capacity(idxes.len()); (0..idxes.len()) .into_par_iter() .map(|i| { G2_S_VEC.get(i).copied().unwrap_or_else(|| { trace!("access g2 pub key at {}", i); get_g2s(Fr::from(i as u64)) }) }) .collect_into_vec(&mut bases); (0..idxes.len()) .into_par_iter() .map(|i| poly.coeffs[i].into_repr()) .collect_into_vec(&mut scalars); VariableBaseMSM::multi_scalar_mul(&bases[..], &scalars[..]).into_affine() } } #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] pub struct Acc1Proof { #[serde(with = "serde_impl")] f1: G2Affine, #[serde(with = "serde_impl")] f2: G2Affine, } impl AccumulatorProof for Acc1Proof { const TYPE: Type = Type::ACC1; fn gen_proof(set1: &DigestSet, set2: &DigestSet) -> anyhow::Result<Self> { Acc1::gen_proof(set1, set2) } fn combine_proof(&mut self, _other: &Self) -> anyhow::Result<()> { bail!("invalid operation"); } fn as_any(&self) -> &dyn Any { self } } impl Acc1Proof { pub fn verify(&self, acc1: &G1Affine, acc2: &G1Aff
.map(|i| <Fr as PrimeField>::BigInt::from(product[i].1)) .collect_into_vec(&mut scalars); let f = VariableBaseMSM::multi_scalar_mul(&bases[..], &scalars[..]).into_affine(); Ok(Acc2Proof { f }) } } #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] pub enum Proof { ACC1(Box<Acc1Proof>), ACC2(Box<Acc2Proof>), } impl Digestible for G1Affine { fn to_digest(&self) -> Digest { let mut buf = Vec::<u8>::new(); self.write(&mut buf) .unwrap_or_else(|_| panic!("failed to serialize {:?}", self)); buf.to_digest() } } #[cfg(test)] mod tests { use super::*; fn init_logger() { let _ = env_logger::builder().is_test(true).try_init(); } #[test] fn test_cal_acc() { init_logger(); let set = MultiSet::from_vec(vec![1, 1, 2, 3, 4, 4, 5, 6, 6, 7, 8, 9]); assert_eq!(Acc1::cal_acc_g1(&set), Acc1::cal_acc_g1_sk(&set)); assert_eq!(Acc1::cal_acc_g2(&set), Acc1::cal_acc_g2_sk(&set)); assert_eq!(Acc2::cal_acc_g1(&set), Acc2::cal_acc_g1_sk(&set)); assert_eq!(Acc2::cal_acc_g2(&set), Acc2::cal_acc_g2_sk(&set)); } #[test] fn test_acc1_proof() { init_logger(); let set1 = DigestSet::new(&MultiSet::from_vec(vec![1, 2, 3])); let set2 = DigestSet::new(&MultiSet::from_vec(vec![4, 5, 6])); let set3 = DigestSet::new(&MultiSet::from_vec(vec![1, 1])); let proof = Acc1::gen_proof(&set1, &set2).unwrap(); let acc1 = Acc1::cal_acc_g1_sk_d(&set1); let acc2 = Acc1::cal_acc_g1_sk_d(&set2); assert!(proof.verify(&acc1, &acc2)); assert!(Acc1::gen_proof(&set1, &set3).is_err()); } #[test] fn test_acc2_proof() { init_logger(); let set1 = DigestSet::new(&MultiSet::from_vec(vec![1, 2, 3])); let set2 = DigestSet::new(&MultiSet::from_vec(vec![4, 5, 6])); let set3 = DigestSet::new(&MultiSet::from_vec(vec![1, 1])); let proof = Acc2::gen_proof(&set1, &set2).unwrap(); let acc1 = Acc2::cal_acc_g1_sk_d(&set1); let acc2 = Acc2::cal_acc_g2_sk_d(&set2); assert!(proof.verify(&acc1, &acc2)); assert!(Acc2::gen_proof(&set1, &set3).is_err()); } #[test] fn test_acc2_proof_sum() { init_logger(); let set1 = DigestSet::new(&MultiSet::from_vec(vec![1, 2, 3])); let set2 = DigestSet::new(&MultiSet::from_vec(vec![4, 5, 6])); let set3 = DigestSet::new(&MultiSet::from_vec(vec![7, 8, 9])); let mut proof1 = Acc2::gen_proof(&set1, &set2).unwrap(); let proof2 = Acc2::gen_proof(&set1, &set3).unwrap(); proof1.combine_proof(&proof2).unwrap(); let acc1 = Acc2::cal_acc_g1_sk_d(&set1); let acc2 = Acc2::cal_acc_g2_sk_d(&set2); let acc3 = Acc2::cal_acc_g2_sk_d(&set3); let acc4 = { let mut acc = acc2.into_projective(); acc.add_assign_mixed(&acc3); acc.into_affine() }; assert!(proof1.verify(&acc1, &acc4)); } }
ine) -> bool { Curve::product_of_pairings(&[ ((*acc1).into(), self.f1.into()), ((*acc2).into(), self.f2.into()), ]) == *E_G_G } } impl Accumulator for Acc1 { const TYPE: Type = Type::ACC1; type Proof = Acc1Proof; fn cal_acc_g1_sk_d(set: &DigestSet) -> G1Affine { let x = set .par_iter() .map(|(v, exp)| { let s = *PRI_S + v; let exp = [*exp as u64]; s.pow(&exp) }) .reduce(Fr::one, |a, b| a * &b); G1_POWER.apply(&x).into_affine() } fn cal_acc_g1_d(set: &DigestSet) -> G1Affine { let poly = set.expand_to_poly(); Self::poly_to_g1(poly) } fn cal_acc_g2_sk_d(set: &DigestSet) -> G2Affine { let x = set .par_iter() .map(|(v, exp)| { let s = *PRI_S + v; let exp = [*exp as u64]; s.pow(&exp) }) .reduce(Fr::one, |a, b| a * &b); G2_POWER.apply(&x).into_affine() } fn cal_acc_g2_d(set: &DigestSet) -> G2Affine { let poly = set.expand_to_poly(); Self::poly_to_g2(poly) } fn gen_proof(set1: &DigestSet, set2: &DigestSet) -> anyhow::Result<Self::Proof> { let poly1 = set1.expand_to_poly(); let poly2 = set2.expand_to_poly(); let (g, x, y) = xgcd(poly1, poly2).context("failed to compute xgcd")?; ensure!(g.degree() == 0, "cannot generate proof"); Ok(Acc1Proof { f1: Self::poly_to_g2(&x / &g), f2: Self::poly_to_g2(&y / &g), }) } } pub struct Acc2; #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] pub struct Acc2Proof { #[serde(with = "serde_impl")] f: G1Affine, } impl AccumulatorProof for Acc2Proof { const TYPE: Type = Type::ACC2; fn gen_proof(set1: &DigestSet, set2: &DigestSet) -> anyhow::Result<Self> { Acc2::gen_proof(set1, set2) } fn combine_proof(&mut self, other: &Self) -> anyhow::Result<()> { let mut f = self.f.into_projective(); f.add_assign_mixed(&other.f); self.f = f.into_affine(); Ok(()) } fn as_any(&self) -> &dyn Any { self } } impl Acc2Proof { pub fn verify(&self, acc1: &G1Affine, acc2: &G2Affine) -> bool { let a = Curve::pairing(*acc1, *acc2); let b = Curve::pairing(self.f, G2Affine::prime_subgroup_generator()); a == b } } impl Accumulator for Acc2 { const TYPE: Type = Type::ACC2; type Proof = Acc2Proof; fn cal_acc_g1_sk_d(set: &DigestSet) -> G1Affine { let x = set .par_iter() .map(|(a, b)| { let s = PRI_S_POWER.apply(a); s * &Fr::from(*b) }) .reduce(Fr::zero, |a, b| a + &b); G1_POWER.apply(&x).into_affine() } fn cal_acc_g1_d(set: &DigestSet) -> G1Affine { let mut bases: Vec<G1Affine> = Vec::with_capacity(set.len()); let mut scalars: Vec<<Fr as PrimeField>::BigInt> = Vec::with_capacity(set.len()); (0..set.len()) .into_par_iter() .map(|i| get_g1s(set[i].0)) .collect_into_vec(&mut bases); (0..set.len()) .into_par_iter() .map(|i| <Fr as PrimeField>::BigInt::from(set[i].1 as u64)) .collect_into_vec(&mut scalars); VariableBaseMSM::multi_scalar_mul(&bases[..], &scalars[..]).into_affine() } fn cal_acc_g2_sk_d(set: &DigestSet) -> G2Affine { let x = set .par_iter() .map(|(a, b)| { let s = PRI_S_POWER.apply(&(*PUB_Q - a)); s * &Fr::from(*b) }) .reduce(Fr::zero, |a, b| a + &b); G2_POWER.apply(&x).into_affine() } fn cal_acc_g2_d(set: &DigestSet) -> G2Affine { let mut bases: Vec<G2Affine> = Vec::with_capacity(set.len()); let mut scalars: Vec<<Fr as PrimeField>::BigInt> = Vec::with_capacity(set.len()); (0..set.len()) .into_par_iter() .map(|i| get_g2s(*PUB_Q - &set[i].0)) .collect_into_vec(&mut bases); (0..set.len()) .into_par_iter() .map(|i| <Fr as PrimeField>::BigInt::from(set[i].1 as u64)) .collect_into_vec(&mut scalars); VariableBaseMSM::multi_scalar_mul(&bases[..], &scalars[..]).into_affine() } fn gen_proof(set1: &DigestSet, set2: &DigestSet) -> anyhow::Result<Self::Proof> { let produce_size = 
set1.len() * set2.len(); let mut product: Vec<(Fr, u64)> = Vec::with_capacity(produce_size); (0..produce_size) .into_par_iter() .map(|i| { let set1idx = i / set2.len(); let set2idx = i % set2.len(); let (s1, q1) = set1[set1idx]; let (s2, q2) = set2[set2idx]; (*PUB_Q + &s1 - &s2, (q1 * q2) as u64) }) .collect_into_vec(&mut product); if product.par_iter().any(|(x, _)| *x == *PUB_Q) { bail!("cannot generate proof"); } let mut bases: Vec<G1Affine> = Vec::with_capacity(produce_size); let mut scalars: Vec<<Fr as PrimeField>::BigInt> = Vec::with_capacity(produce_size); (0..produce_size) .into_par_iter() .map(|i| get_g1s(product[i].0)) .collect_into_vec(&mut bases); (0..produce_size) .into_par_iter()
random
[ { "content": "pub trait SetElement: Digestible + Clone + Send + Sync + Eq + PartialEq + core::hash::Hash {}\n\n\n\nimpl<T> SetElement for T where\n\n T: Digestible + Clone + Send + Sync + Eq + PartialEq + core::hash::Hash\n\n{\n\n}\n\n\n\n#[derive(Debug, Clone, Eq, PartialEq, Default)]\n\npub struct MultiSet<T: SetElement> {\n\n pub(crate) inner: HashMap<T, u32>,\n\n}\n\n\n\nimpl<T: SetElement> MultiSet<T> {\n\n pub fn new() -> Self {\n\n Self {\n\n inner: HashMap::new(),\n\n }\n\n }\n\n\n\n pub fn from_vec(input: Vec<T>) -> Self {\n", "file_path": "vchain/src/set.rs", "rank": 1, "score": 258072.01282438508 }, { "content": "pub fn concat_digest_ref<'a>(input: impl Iterator<Item = &'a Digest>) -> Digest {\n\n let mut state = blake2().to_state();\n\n for d in input {\n\n state.update(&d.0);\n\n }\n\n Digest::from(state.finalize())\n\n}\n\n\n", "file_path": "vchain/src/digest.rs", "rank": 2, "score": 207480.16113956898 }, { "content": "pub fn concat_digest(input: impl Iterator<Item = Digest>) -> Digest {\n\n let mut state = blake2().to_state();\n\n for d in input {\n\n state.update(&d.0);\n\n }\n\n Digest::from(state.finalize())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_to_digest() {\n\n let expect = Digest(*b\"\\x32\\x4d\\xcf\\x02\\x7d\\xd4\\xa3\\x0a\\x93\\x2c\\x44\\x1f\\x36\\x5a\\x25\\xe8\\x6b\\x17\\x3d\\xef\\xa4\\xb8\\xe5\\x89\\x48\\x25\\x34\\x71\\xb8\\x1b\\x72\\xcf\");\n\n assert_eq!(b\"hello\"[..].to_digest(), expect);\n\n assert_eq!(\"hello\".to_digest(), expect);\n\n assert_eq!(\"hello\".to_owned().to_digest(), expect);\n\n }\n\n\n", "file_path": "vchain/src/digest.rs", "rank": 5, "score": 187980.86354729885 }, { "content": "pub fn bench_pow_g2(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"pow_g2\");\n\n let num = Fr::from_str(\"1050806240378915932164293810269605748\").unwrap();\n\n let g2p = FixedBaseCurvePow::build(&G2::prime_subgroup_generator());\n\n group.bench_function(\"nomral\", |b| {\n\n b.iter(|| {\n\n let mut ans = G2::prime_subgroup_generator();\n\n ans.mul_assign(black_box(num));\n\n })\n\n });\n\n group.bench_function(\"optimized\", |b| b.iter(|| g2p.apply(black_box(&num))));\n\n group.finish();\n\n}\n\n\n", "file_path": "vchain/benches/fixed_base_pow.rs", "rank": 6, "score": 186535.02315305592 }, { "content": "pub fn bench_pow_fr(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"pow_fr\");\n\n let base = Fr::from_str(\"186375271183577333671420248211302045980\").unwrap();\n\n let num = Fr::from_str(\"1050806240378915932164293810269605748\").unwrap();\n\n let frp = FixedBaseScalarPow::build(&base);\n\n group.bench_function(\"nomral\", |b| {\n\n b.iter(|| base.pow(black_box(num.into_repr())))\n\n });\n\n group.bench_function(\"optimized\", |b| b.iter(|| frp.apply(black_box(&num))));\n\n group.finish();\n\n}\n\n\n\ncriterion_group!(benches, bench_pow_g1, bench_pow_g2, bench_pow_fr);\n\ncriterion_main!(benches);\n", "file_path": "vchain/benches/fixed_base_pow.rs", "rank": 7, "score": 186535.02315305592 }, { "content": "pub fn bench_pow_g1(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"pow_g1\");\n\n let num = Fr::from_str(\"1050806240378915932164293810269605748\").unwrap();\n\n let g1p = FixedBaseCurvePow::build(&G1::prime_subgroup_generator());\n\n group.bench_function(\"normal\", |b| {\n\n b.iter(|| {\n\n let mut ans = G1::prime_subgroup_generator();\n\n ans.mul_assign(black_box(num));\n\n })\n\n });\n\n group.bench_function(\"optimized\", |b| b.iter(|| g1p.apply(black_box(&num))));\n\n 
group.finish();\n\n}\n\n\n", "file_path": "vchain/benches/fixed_base_pow.rs", "rank": 8, "score": 186535.02315305592 }, { "content": "pub fn historical_query<AP: AccumulatorProof + Serialize>(\n\n q: &Query,\n\n chain: &impl ReadInterface,\n\n) -> Result<OverallResult<AP>> {\n\n info!(\"process query {:?}\", q);\n\n let param = chain.get_parameter()?;\n\n let cpu_timer = howlong::ProcessCPUTimer::new();\n\n let timer = howlong::HighResolutionTimer::new();\n\n\n\n let query_exp = q.to_bool_exp(&param.v_bit_len);\n\n let mut res = OverallResult {\n\n res_objs: ResultObjs::new(),\n\n res_vo: ResultVO::<AP>::new(),\n\n query: q.clone(),\n\n query_exp_set: query_exp.inner.clone(),\n\n query_time_in_ms: 0,\n\n v_bit_len: param.v_bit_len.clone(),\n\n vo_size: 0,\n\n vo_stats: VOStatistic::default(),\n\n };\n", "file_path": "vchain/src/chain/historical_query.rs", "rank": 9, "score": 183066.95371155962 }, { "content": "pub fn digest_to_prime_field<F: PrimeField>(input: &Digest) -> F {\n\n try_digest_to_prime_field(input).expect(\"failed to convert digest to prime field\")\n\n}\n\n\n", "file_path": "vchain/src/acc/utils.rs", "rank": 10, "score": 171776.41151968803 }, { "content": "pub fn try_digest_to_prime_field<F: PrimeField>(input: &Digest) -> Option<F> {\n\n let mut num = F::from_be_bytes_mod_order(&input.0).into_repr();\n\n // ensure the result is at most in 248 bits. so PUB_Q - Fr and Fr + PUB_Q - Fr never overflow.\n\n for v in num.as_mut().iter_mut().skip(3) {\n\n *v = 0;\n\n }\n\n num.as_mut().get_mut(3).map(|v| *v &= 0x00ff_ffff_ffff_ffff);\n\n F::from_repr(num)\n\n}\n\n\n", "file_path": "vchain/src/acc/utils.rs", "rank": 12, "score": 164373.0948326765 }, { "content": "#[inline]\n\npub fn multiset_to_g1(set: &MultiSet<SetElementType>, param: &Parameter) -> G1Affine {\n\n match (param.acc_type, param.use_sk) {\n\n (acc::Type::ACC1, true) => acc::Acc1::cal_acc_g1_sk(&set),\n\n (acc::Type::ACC1, false) => acc::Acc1::cal_acc_g1(&set),\n\n (acc::Type::ACC2, true) => acc::Acc2::cal_acc_g1_sk(&set),\n\n (acc::Type::ACC2, false) => acc::Acc2::cal_acc_g1(&set),\n\n }\n\n}\n\n\n", "file_path": "vchain/src/chain/utils.rs", "rank": 13, "score": 153780.9046584882 }, { "content": "#[inline]\n\npub fn multiset_to_g2(set: &MultiSet<SetElementType>, param: &Parameter) -> G2Affine {\n\n match (param.acc_type, param.use_sk) {\n\n (acc::Type::ACC1, true) => acc::Acc1::cal_acc_g2_sk(&set),\n\n (acc::Type::ACC1, false) => acc::Acc1::cal_acc_g2(&set),\n\n (acc::Type::ACC2, true) => acc::Acc2::cal_acc_g2_sk(&set),\n\n (acc::Type::ACC2, false) => acc::Acc2::cal_acc_g2(&set),\n\n }\n\n}\n\n\n", "file_path": "vchain/src/chain/utils.rs", "rank": 14, "score": 153780.90465848817 }, { "content": "pub trait Digestible {\n\n fn to_digest(&self) -> Digest;\n\n}\n\n\n\nimpl Digestible for [u8] {\n\n fn to_digest(&self) -> Digest {\n\n Digest::from(blake2().hash(self))\n\n }\n\n}\n\n\n\nimpl Digestible for str {\n\n fn to_digest(&self) -> Digest {\n\n self.as_bytes().to_digest()\n\n }\n\n}\n\n\n\nimpl Digestible for String {\n\n fn to_digest(&self) -> Digest {\n\n self.as_bytes().to_digest()\n\n }\n", "file_path": "vchain/src/digest.rs", "rank": 15, "score": 146499.0734638838 }, { "content": "pub fn deserialize<'de, D: Deserializer<'de>, C: AffineCurve>(d: D) -> Result<C, D::Error> {\n\n use core::fmt;\n\n use serde::de::Error as DeError;\n\n\n\n struct HexVisitor<C>(PhantomData<C>);\n\n\n\n impl<'de, C: AffineCurve> Visitor<'de> for HexVisitor<C> {\n\n type Value = C;\n\n\n\n fn expecting(&self, f: &mut fmt::Formatter) -> 
fmt::Result {\n\n f.write_str(\"AffineCurve\")\n\n }\n\n\n\n fn visit_str<E: DeError>(self, value: &str) -> Result<C, E> {\n\n let data = hex::decode(value).map_err(E::custom)?;\n\n C::deserialize(&data[..]).map_err(E::custom)\n\n }\n\n }\n\n\n\n struct BytesVisitor<C>(PhantomData<C>);\n", "file_path": "vchain/src/acc/serde_impl.rs", "rank": 16, "score": 143757.48258733738 }, { "content": "pub fn serialize<S: Serializer, C: AffineCurve>(c: &C, s: S) -> Result<S::Ok, S::Error> {\n\n let mut buf = Vec::<u8>::new();\n\n c.serialize(&mut buf)\n\n .map_err(<S::Error as serde::ser::Error>::custom)?;\n\n if s.is_human_readable() {\n\n s.serialize_str(&hex::encode(&buf))\n\n } else {\n\n s.serialize_bytes(&buf)\n\n }\n\n}\n\n\n", "file_path": "vchain/src/acc/serde_impl.rs", "rank": 17, "score": 143068.1256904167 }, { "content": "#[inline]\n\npub fn skipped_blocks_num(level: SkipLstLvlType) -> IdType {\n\n 1 << (level + 2)\n\n}\n\n\n", "file_path": "vchain/src/chain/utils.rs", "rank": 18, "score": 137729.6101012134 }, { "content": "/// Return (g, x, y) s.t. a*x + b*y = g = gcd(a, b)\n\npub fn xgcd<'a, F: PrimeField>(\n\n a: impl Into<DenseOrSparsePolynomial<'a, F>>,\n\n b: impl Into<DenseOrSparsePolynomial<'a, F>>,\n\n) -> Option<(DensePolynomial<F>, DensePolynomial<F>, DensePolynomial<F>)> {\n\n let mut a = a.into();\n\n let mut b = b.into();\n\n let mut x0 = DensePolynomial::<F>::zero();\n\n let mut x1 = DensePolynomial::<F>::from_coefficients_vec(vec![F::one()]);\n\n let mut y0 = DensePolynomial::<F>::from_coefficients_vec(vec![F::one()]);\n\n let mut y1 = DensePolynomial::<F>::zero();\n\n while !a.is_zero() {\n\n let (q, r) = b.divide_with_q_and_r(&a)?;\n\n b = a.into();\n\n a = r.into();\n\n let y1old = y1;\n\n y1 = &y0 - &(&q * &y1old);\n\n y0 = y1old;\n\n let x1old = x1;\n\n x1 = &x0 - &(&q * &x1old);\n\n x0 = x1old;\n", "file_path": "vchain/src/acc/utils.rs", "rank": 19, "score": 134536.80191618836 }, { "content": "pub trait ReadInterface {\n\n fn get_parameter(&self) -> Result<Parameter>;\n\n fn read_block_header(&self, id: IdType) -> Result<BlockHeader>;\n\n fn read_block_data(&self, id: IdType) -> Result<BlockData>;\n\n fn read_intra_index_node(&self, id: IdType) -> Result<IntraIndexNode>;\n\n fn read_skip_list_node(&self, id: IdType) -> Result<SkipListNode>;\n\n fn read_object(&self, id: IdType) -> Result<Object>;\n\n}\n\n\n", "file_path": "vchain/src/chain/mod.rs", "rank": 20, "score": 132535.58112277568 }, { "content": "pub trait WriteInterface {\n\n fn set_parameter(&mut self, param: Parameter) -> Result<()>;\n\n fn write_block_header(&mut self, header: BlockHeader) -> Result<()>;\n\n fn write_block_data(&mut self, data: BlockData) -> Result<()>;\n\n fn write_intra_index_node(&mut self, node: IntraIndexNode) -> Result<()>;\n\n fn write_skip_list_node(&mut self, node: SkipListNode) -> Result<()>;\n\n fn write_object(&mut self, obj: Object) -> Result<()>;\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests;\n", "file_path": "vchain/src/chain/mod.rs", "rank": 21, "score": 132535.58112277568 }, { "content": "#[async_trait::async_trait]\n\npub trait LightNodeInterface {\n\n async fn lightnode_get_parameter(&self) -> Result<Parameter>;\n\n async fn lightnode_read_block_header(&self, id: IdType) -> Result<BlockHeader>;\n\n}\n\n\n", "file_path": "vchain/src/chain/mod.rs", "rank": 22, "score": 129961.67174964052 }, { "content": "pub fn bench_points_mul_sum(c: &mut Criterion) {\n\n const SAMPLES: usize = 1 << 10;\n\n let mut rng = rand::rngs::StdRng::seed_from_u64(123_456_789u64);\n\n\n\n let v = 
(0..SAMPLES)\n\n .map(|_| Fr::rand(&mut rng).into_repr())\n\n .collect::<Vec<_>>();\n\n let g = (0..SAMPLES)\n\n .map(|_| G1::rand(&mut rng).into_affine())\n\n .collect::<Vec<_>>();\n\n let mut gp: Vec<FixedBaseCurvePow<G1>> = Vec::with_capacity(g.len());\n\n (0..g.len())\n\n .into_par_iter()\n\n .map(|i| FixedBaseCurvePow::build(&g[i].into_projective()))\n\n .collect_into_vec(&mut gp);\n\n\n\n let mut group = c.benchmark_group(\"points_mul_sum\");\n\n group.sample_size(10);\n\n group.bench_function(\"naive\", |b| {\n\n b.iter(|| black_box(naive(g.as_slice(), v.as_slice())))\n", "file_path": "vchain/benches/points_mul_sum.rs", "rank": 23, "score": 128245.01374713329 }, { "content": "pub fn blake2() -> blake2b_simd::Params {\n\n let mut params = blake2b_simd::Params::new();\n\n params.hash_length(DIGEST_LEN);\n\n params\n\n}\n\n\n", "file_path": "vchain/src/digest.rs", "rank": 24, "score": 119285.81359003542 }, { "content": "fn query_block_no_intra_index<AP: AccumulatorProof>(\n\n query_exp: &BoolExp<SetElementType>,\n\n query_exp_digest_set: &[DigestSet],\n\n block_header: &BlockHeader,\n\n block_data: &BlockData,\n\n chain: &impl ReadInterface,\n\n res: &mut OverallResult<AP>,\n\n) -> Result<()> {\n\n let mut vo_blk = vo::FlatBlkNode {\n\n block_id: block_header.block_id,\n\n skip_list_root: block_header.skip_list_root,\n\n sub_nodes: Vec::new(),\n\n };\n\n\n\n let objs = match &block_data.data {\n\n IntraData::Flat(ids) => ids\n\n .iter()\n\n .map(|&id| chain.read_object(id))\n\n .collect::<Result<Vec<_>>>()?,\n\n _ => bail!(\"invalid data\"),\n", "file_path": "vchain/src/chain/historical_query.rs", "rank": 25, "score": 118058.54436064392 }, { "content": "fn query_block_intra_index<AP: AccumulatorProof>(\n\n query_exp: &BoolExp<SetElementType>,\n\n query_exp_digest_set: &[DigestSet],\n\n block_header: &BlockHeader,\n\n block_data: &BlockData,\n\n chain: &impl ReadInterface,\n\n res: &mut OverallResult<AP>,\n\n) -> Result<()> {\n\n let mut vo_blk = vo::BlkNode {\n\n block_id: block_header.block_id,\n\n skip_list_root: block_header.skip_list_root,\n\n sub_node: vo::IntraNode::Empty,\n\n };\n\n\n\n let root = match &block_data.data {\n\n IntraData::Index(id) => match chain.read_intra_index_node(*id)? 
{\n\n IntraIndexNode::NonLeaf(n) => n,\n\n IntraIndexNode::Leaf(_) => bail!(\"invalid data\"),\n\n },\n\n _ => bail!(\"invalid data\"),\n", "file_path": "vchain/src/chain/historical_query.rs", "rank": 26, "score": 118058.54436064391 }, { "content": "// input format: block_id sep [ v_data ] sep { w_data }\n\n// sep = \\t or space\n\n// v_data = v_1 comma v_2 ...\n\n// w_data = w_1 comma w_2 ...\n\npub fn load_raw_obj_from_file(path: &Path) -> Result<BTreeMap<IdType, Vec<RawObject>>> {\n\n let mut reader = BufReader::new(File::open(path)?);\n\n let mut buf = String::new();\n\n reader.read_to_string(&mut buf)?;\n\n load_raw_obj_from_str(&buf)\n\n}\n", "file_path": "vchain/src/chain/utils.rs", "rank": 27, "score": 112941.98637595648 }, { "content": "pub fn load_raw_obj_from_str(input: &str) -> Result<BTreeMap<IdType, Vec<RawObject>>> {\n\n let mut res = BTreeMap::new();\n\n for line in input.lines() {\n\n let line = line.trim();\n\n if line.is_empty() {\n\n continue;\n\n }\n\n let mut split_str = line.splitn(3, |c| c == '[' || c == ']');\n\n let block_id: IdType = split_str\n\n .next()\n\n .context(format!(\"failed to parse line {}\", line))?\n\n .trim()\n\n .parse()?;\n\n let v_data: Vec<u32> = split_str\n\n .next()\n\n .context(format!(\"failed to parse line {}\", line))?\n\n .trim()\n\n .split(',')\n\n .map(|s| s.trim())\n\n .filter(|s| !s.is_empty())\n", "file_path": "vchain/src/chain/utils.rs", "rank": 28, "score": 112941.98637595648 }, { "content": "#[exonum_interface]\n\npub trait VChainInterface {\n\n fn add_objs(&self, ctx: CallContext<'_>, arg: TxAddObjs) -> Result<(), Error>;\n\n}\n\n\n\n#[derive(Debug, ServiceFactory, ServiceDispatcher)]\n\n#[service_dispatcher(implements(\"VChainInterface\"))]\n\n#[service_factory(proto_sources = \"crate::proto\")]\n\npub struct VChainService;\n\n\n\nimpl VChainInterface for VChainService {\n\n fn add_objs(&self, ctx: CallContext<'_>, arg: TxAddObjs) -> Result<(), Error> {\n\n let core = ctx.data().for_core();\n\n let block_id = core.height().0;\n\n warn!(\n\n \"receive tx at blk #{} with {} objects\",\n\n block_id,\n\n arg.objs.len()\n\n );\n\n let mut schema = VChainSchema::new(ctx.service_data());\n\n schema.objs_in_this_round.extend(arg.objs.iter().cloned());\n", "file_path": "vchain-exonum/src/service.rs", "rank": 29, "score": 100116.74038389415 }, { "content": "fn fixed_base_pow<G: ProjectiveCurve>(\n\n bases: &[FixedBaseCurvePow<G>],\n\n scalars: &[<G::ScalarField as PrimeField>::BigInt],\n\n) -> G {\n\n let mut acc = G::zero();\n\n\n\n for (base, scalar) in bases.iter().zip(scalars.iter()) {\n\n acc += &base.apply(\n\n &<G::ScalarField as PrimeField>::from_repr(*scalar)\n\n .expect(\"failed to convert to prime field\"),\n\n );\n\n }\n\n acc\n\n}\n\n\n", "file_path": "vchain/benches/points_mul_sum.rs", "rank": 30, "score": 98563.39689194108 }, { "content": "pub fn v_data_to_set(input: &[u32], bit_len: &[u8]) -> MultiSet<SetElementType> {\n\n input\n\n .iter()\n\n .enumerate()\n\n .flat_map(|(i, &v)| {\n\n let m: u32 = !(0xffff_ffff << bit_len[i]);\n\n (0..bit_len[i]).map(move |j| {\n\n let mask = (0xffff_ffff << j) & m;\n\n let val = v & mask;\n\n SetElementType::V {\n\n dim: i as u32,\n\n val,\n\n mask,\n\n }\n\n })\n\n })\n\n .collect()\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "vchain/src/chain/object.rs", "rank": 31, "score": 96021.67728960383 }, { "content": "pub fn build_block<'a>(\n\n block_id: IdType,\n\n prev_hash: Digest,\n\n raw_objs: impl Iterator<Item = &'a RawObject>,\n\n chain: &mut (impl ReadInterface + WriteInterface),\n\n) 
-> Result<BlockHeader> {\n\n debug!(\"build block #{}\", block_id);\n\n\n\n let param = chain.get_parameter()?;\n\n let objs: Vec<Object> = raw_objs.map(|o| Object::create(o, &param)).collect();\n\n for obj in &objs {\n\n chain.write_object(obj.clone())?;\n\n }\n\n\n\n let mut block_header = BlockHeader {\n\n block_id,\n\n prev_hash,\n\n ..Default::default()\n\n };\n\n\n", "file_path": "vchain/src/chain/build.rs", "rank": 32, "score": 92710.14320621126 }, { "content": "fn naive<G: AffineCurve>(\n\n bases: &[G],\n\n scalars: &[<G::ScalarField as PrimeField>::BigInt],\n\n) -> G::Projective {\n\n let mut acc = G::Projective::zero();\n\n\n\n for (base, scalar) in bases.iter().zip(scalars.iter()) {\n\n acc += &base.mul(*scalar);\n\n }\n\n acc\n\n}\n\n\n", "file_path": "vchain/benches/points_mul_sum.rs", "rank": 33, "score": 73490.9818063022 }, { "content": "struct VChainApi {\n\n pub inner: TestKitApi,\n\n}\n\n\n\nimpl VChainApi {\n\n fn add_objs(&self, input: TxAddObjs) -> (Verified<AnyTx>, SecretKey) {\n\n let (pubkey, key) = crypto::gen_keypair();\n\n let tx = input.sign(INSTANCE_ID, pubkey, &key);\n\n let tx_info: serde_json::Value = self\n\n .inner\n\n .public(ApiKind::Explorer)\n\n .query(&json!({ \"tx_body\": tx }))\n\n .post(\"v1/transactions\")\n\n .unwrap();\n\n assert_eq!(tx_info, json!({ \"tx_hash\": tx.object_hash() }));\n\n (tx, key)\n\n }\n\n\n\n fn get_param(&self) -> vchain::Parameter {\n\n self.inner\n", "file_path": "vchain-exonum/src/tests.rs", "rank": 34, "score": 56111.444797645316 }, { "content": "#[derive(Debug, Default)]\n\nstruct FakeInMemChain {\n\n param: Option<Parameter>,\n\n block_headers: HashMap<IdType, BlockHeader>,\n\n block_data: HashMap<IdType, BlockData>,\n\n intra_index_nodes: HashMap<IdType, IntraIndexNode>,\n\n skip_list_nodes: HashMap<IdType, SkipListNode>,\n\n objects: HashMap<IdType, Object>,\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl LightNodeInterface for FakeInMemChain {\n\n async fn lightnode_get_parameter(&self) -> Result<Parameter> {\n\n self.get_parameter()\n\n }\n\n async fn lightnode_read_block_header(&self, id: IdType) -> Result<BlockHeader> {\n\n self.read_block_header(id)\n\n }\n\n}\n\n\n\nimpl ReadInterface for FakeInMemChain {\n", "file_path": "vchain/src/chain/tests.rs", "rank": 35, "score": 56111.444797645316 }, { "content": "#[derive(Serialize, Deserialize)]\n\nstruct ElementTuple<T> {\n\n obj: T,\n\n cnt: u32,\n\n}\n\n\n\nimpl<T: SetElement + Serialize> Serialize for MultiSet<T> {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n if serializer.is_human_readable() {\n\n let mut seq = serializer.serialize_seq(Some(self.len()))?;\n\n for (k, v) in self.iter() {\n\n seq.serialize_element(&ElementTuple {\n\n obj: k.clone(),\n\n cnt: *v,\n\n })?;\n\n }\n\n seq.end()\n\n } else {\n", "file_path": "vchain/src/set.rs", "rank": 36, "score": 55428.83003533826 }, { "content": "fn main() {\n\n ProtobufGenerator::with_mod_name(\"protobuf_mod.rs\")\n\n .with_input_dir(\"src/proto\")\n\n .with_crypto()\n\n .generate();\n\n}\n", "file_path": "vchain-exonum/build.rs", "rank": 37, "score": 50478.39457891775 }, { "content": "#[test]\n\nfn test_initialize() {\n\n let (_, api) = create_testkit(InitParam {\n\n v_bit_len: vec![16],\n\n is_acc2: true,\n\n intra_index: true,\n\n skip_list_max_level: 2,\n\n });\n\n let param = api.get_param();\n\n assert_eq!(param.v_bit_len, vec![16]);\n\n assert_eq!(param.acc_type, acc::Type::ACC2);\n\n assert_eq!(param.use_sk, false);\n\n 
assert_eq!(param.intra_index, true);\n\n assert_eq!(param.skip_list_max_level, 2);\n\n}\n\n\n", "file_path": "vchain-exonum/src/tests.rs", "rank": 38, "score": 48504.19193474212 }, { "content": "fn init_logger() {\n\n let _ = env_logger::builder().is_test(true).try_init();\n\n}\n\n\n\n#[actix_rt::test]\n\nasync fn test_data1_acc1_flat() {\n\n init_logger();\n\n let mut chain = FakeInMemChain::new();\n\n let param = Parameter {\n\n v_bit_len: vec![3],\n\n acc_type: acc::Type::ACC1,\n\n use_sk: true,\n\n intra_index: false,\n\n skip_list_max_level: 0,\n\n };\n\n chain.build_chain(TEST_DATA_1, &param).unwrap();\n\n let query = serde_json::from_value::<Query>(json!({\n\n \"start_block\": 1,\n\n \"end_block\": 2,\n\n \"range\": [\n", "file_path": "vchain/src/chain/tests.rs", "rank": 39, "score": 48504.19193474212 }, { "content": "#[test]\n\nfn test_add_objs() {\n\n let (mut testkit, api) = create_testkit(InitParam {\n\n v_bit_len: vec![16],\n\n is_acc2: true,\n\n intra_index: true,\n\n skip_list_max_level: 2,\n\n });\n\n let tx_input = TxAddObjs {\n\n objs: vec![\n\n RawObject {\n\n v_data: vec![1],\n\n w_data: vec![\"a\".to_owned()],\n\n },\n\n RawObject {\n\n v_data: vec![2],\n\n w_data: vec![\"b\".to_owned()],\n\n },\n\n ],\n\n };\n\n\n\n let (tx1, _) = api.add_objs(tx_input.clone());\n\n let (tx2, _) = api.add_objs(tx_input);\n\n testkit.create_block();\n\n api.assert_tx_status(tx1.object_hash(), &json!({ \"type\": \"success\" }));\n\n api.assert_tx_status(tx2.object_hash(), &json!({ \"type\": \"success\" }));\n\n}\n", "file_path": "vchain-exonum/src/tests.rs", "rank": 40, "score": 47601.87137389119 }, { "content": "fn handle_err(e: anyhow::Error) -> api::Error {\n\n api::Error::InternalError(failure::format_err!(\"{:?}\", e))\n\n}\n\n\n\nimpl VChainApi {\n\n pub fn get_param(self, state: &ServiceApiState<'_>) -> api::Result<vchain::Parameter> {\n\n let schema = VChainSchema::new(state.service_data());\n\n schema.get_parameter().map_err(handle_err)\n\n }\n\n\n\n pub fn get_object(\n\n self,\n\n state: &ServiceApiState<'_>,\n\n query: QueryInput,\n\n ) -> api::Result<vchain::Object> {\n\n let schema = VChainSchema::new(state.service_data());\n\n schema.read_object(query.id).map_err(handle_err)\n\n }\n\n\n\n pub fn get_block_header(\n", "file_path": "vchain-exonum/src/api.rs", "rank": 41, "score": 40789.47264127025 }, { "content": "fn create_testkit(param: InitParam) -> (TestKit, VChainApi) {\n\n let mut testkit = TestKit::for_rust_service(VChainService, INSTANCE_NAME, INSTANCE_ID, param);\n\n let api = VChainApi {\n\n inner: testkit.api(),\n\n };\n\n (testkit, api)\n\n}\n\n\n", "file_path": "vchain-exonum/src/tests.rs", "rank": 42, "score": 38786.530609119895 }, { "content": "use core::fmt;\n\nuse serde::{\n\n de::{Deserializer, SeqAccess, Visitor},\n\n ser::{SerializeTupleStruct, Serializer},\n\n Deserialize, Serialize,\n\n};\n\n\n\npub const DIGEST_LEN: usize = 32;\n\n\n\n#[derive(Clone, Copy, Eq, PartialEq, Hash, Default)]\n\npub struct Digest(pub [u8; DIGEST_LEN]);\n\n\n\nimpl fmt::Display for Digest {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{}\", hex::encode(&self.0))\n\n }\n\n}\n\n\n\nimpl fmt::Debug for Digest {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n", "file_path": "vchain/src/digest.rs", "rank": 43, "score": 34996.98279707408 }, { "content": " V: SeqAccess<'de>,\n\n {\n\n let inner = seq\n\n .next_element()?\n\n .ok_or_else(|| DeError::invalid_length(0, &self))?;\n\n Ok(Digest(inner))\n\n }\n\n }\n\n\n\n if 
deserializer.is_human_readable() {\n\n deserializer.deserialize_str(HexVisitor)\n\n } else {\n\n deserializer.deserialize_tuple_struct(\"Digest\", 1, BytesVisitor)\n\n }\n\n }\n\n}\n\n\n\nimpl From<blake2b_simd::Hash> for Digest {\n\n fn from(input: blake2b_simd::Hash) -> Self {\n\n let data = input.as_bytes();\n\n debug_assert_eq!(data.len(), DIGEST_LEN);\n\n let mut out = Self::default();\n\n out.0.copy_from_slice(&data[..DIGEST_LEN]);\n\n out\n\n }\n\n}\n\n\n", "file_path": "vchain/src/digest.rs", "rank": 44, "score": 34983.53591948159 }, { "content": " write!(f, \"{}\", hex::encode(&self.0))\n\n }\n\n}\n\n\n\n// Ref: https://github.com/slowli/hex-buffer-serde\n\n\n\nimpl Serialize for Digest {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n if serializer.is_human_readable() {\n\n serializer.serialize_str(&hex::encode(&self.0))\n\n } else {\n\n let mut state = serializer.serialize_tuple_struct(\"Digest\", 1)?;\n\n state.serialize_field(&self.0)?;\n\n state.end()\n\n }\n\n }\n\n}\n", "file_path": "vchain/src/digest.rs", "rank": 45, "score": 34975.451151269066 }, { "content": "\n\nimpl<'de> Deserialize<'de> for Digest {\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n\n D: Deserializer<'de>,\n\n {\n\n use serde::de::Error as DeError;\n\n\n\n struct HexVisitor;\n\n\n\n impl<'de> Visitor<'de> for HexVisitor {\n\n type Value = Digest;\n\n\n\n fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n f.write_str(\"struct Digest\")\n\n }\n\n\n\n fn visit_str<E: DeError>(self, value: &str) -> Result<Digest, E> {\n\n let data = hex::decode(value).map_err(E::custom)?;\n\n if data.len() == DIGEST_LEN {\n", "file_path": "vchain/src/digest.rs", "rank": 46, "score": 34974.71109332474 }, { "content": " let mut out = Digest::default();\n\n out.0.copy_from_slice(&data[..DIGEST_LEN]);\n\n Ok(out)\n\n } else {\n\n Err(E::custom(format!(\"invalid length: {}\", data.len())))\n\n }\n\n }\n\n }\n\n\n\n struct BytesVisitor;\n\n\n\n impl<'de> Visitor<'de> for BytesVisitor {\n\n type Value = Digest;\n\n\n\n fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n f.write_str(\"struct Digest\")\n\n }\n\n\n\n fn visit_seq<V>(self, mut seq: V) -> Result<Digest, V::Error>\n\n where\n", "file_path": "vchain/src/digest.rs", "rank": 47, "score": 34969.6948922959 }, { "content": " #[test]\n\n fn test_digest_concat() {\n\n let input = vec![\"hello\".to_digest(), \"world!\".to_digest()];\n\n let expect = {\n\n let mut buf: Vec<u8> = Vec::new();\n\n buf.extend_from_slice(&input[0].0[..]);\n\n buf.extend_from_slice(&input[1].0[..]);\n\n buf.as_slice().to_digest()\n\n };\n\n assert_eq!(concat_digest_ref(input.iter()), expect);\n\n assert_eq!(concat_digest(input.into_iter()), expect);\n\n }\n\n\n\n #[test]\n\n fn test_serde() {\n\n let digest = \"hello\".to_digest();\n\n let json = serde_json::to_string_pretty(&digest).unwrap();\n\n assert_eq!(\n\n json,\n\n \"\\\"324dcf027dd4a30a932c441f365a25e86b173defa4b8e58948253471b81b72cf\\\"\"\n", "file_path": "vchain/src/digest.rs", "rank": 48, "score": 34969.35837574169 }, { "content": " );\n\n let bin = bincode::serialize(&digest).unwrap();\n\n assert_eq!(\n\n bin,\n\n b\"\\x32\\x4d\\xcf\\x02\\x7d\\xd4\\xa3\\x0a\\x93\\x2c\\x44\\x1f\\x36\\x5a\\x25\\xe8\\x6b\\x17\\x3d\\xef\\xa4\\xb8\\xe5\\x89\\x48\\x25\\x34\\x71\\xb8\\x1b\\x72\\xcf\",\n\n );\n\n\n\n assert_eq!(serde_json::from_str::<Digest>(&json).unwrap(), digest);\n\n assert_eq!(bincode::deserialize::<Digest>(&bin[..]).unwrap(), 
digest);\n\n }\n\n}\n", "file_path": "vchain/src/digest.rs", "rank": 49, "score": 34969.22098139687 }, { "content": "}\n\n\n\nmacro_rules! impl_digestable_for_numeric {\n\n ($x: ty) => {\n\n impl Digestible for $x {\n\n fn to_digest(&self) -> Digest {\n\n self.to_le_bytes().to_digest()\n\n }\n\n }\n\n };\n\n ($($x: ty),*) => {$(impl_digestable_for_numeric!($x);)*}\n\n}\n\n\n\nimpl_digestable_for_numeric!(i8, i16, i32, i64, i128);\n\nimpl_digestable_for_numeric!(u8, u16, u32, u64, u128);\n\nimpl_digestable_for_numeric!(f32, f64);\n\n\n", "file_path": "vchain/src/digest.rs", "rank": 50, "score": 34968.260788282074 }, { "content": " }\n\n Some((b.into(), x0, y0))\n\n}\n\n\n\n// Ref: https://github.com/blynn/pbc/blob/fbf4589036ce4f662e2d06905862c9e816cf9d08/arith/field.c#L251-L330\n\npub struct FixedBaseCurvePow<G: ProjectiveCurve> {\n\n table: Vec<Vec<G>>,\n\n}\n\n\n\nimpl<G: ProjectiveCurve> FixedBaseCurvePow<G> {\n\n const K: usize = 5;\n\n\n\n pub fn build(base: &G) -> Self {\n\n let bits =\n\n <<G as ProjectiveCurve>::ScalarField as PrimeField>::Params::MODULUS_BITS as usize;\n\n let num_lookups = bits / Self::K + 1;\n\n let lookup_size = (1 << Self::K) - 1;\n\n let last_lookup_size = (1 << (bits - (num_lookups - 1) * Self::K)) - 1;\n\n\n\n let mut table: Vec<Vec<G>> = Vec::with_capacity(num_lookups);\n", "file_path": "vchain/src/acc/utils.rs", "rank": 51, "score": 33804.69611059416 }, { "content": " fn test_pow_g1() {\n\n let g1p = FixedBaseCurvePow::build(&G1Projective::prime_subgroup_generator());\n\n let mut rng = rand::thread_rng();\n\n let num: Fr = rng.gen();\n\n let mut expect = G1Projective::prime_subgroup_generator();\n\n expect.mul_assign(num);\n\n assert_eq!(g1p.apply(&num), expect);\n\n }\n\n\n\n #[test]\n\n fn test_pow_g2() {\n\n let g2p = FixedBaseCurvePow::build(&G2Projective::prime_subgroup_generator());\n\n let mut rng = rand::thread_rng();\n\n let num: Fr = rng.gen();\n\n let mut expect = G2Projective::prime_subgroup_generator();\n\n expect.mul_assign(num);\n\n assert_eq!(g2p.apply(&num), expect);\n\n }\n\n\n\n #[test]\n", "file_path": "vchain/src/acc/utils.rs", "rank": 52, "score": 33801.372970082804 }, { "content": " }\n\n}\n\n\n\npub struct FixedBaseScalarPow<F: PrimeField> {\n\n table: Vec<Vec<F>>,\n\n}\n\n\n\nimpl<F: PrimeField> FixedBaseScalarPow<F> {\n\n const K: usize = 8;\n\n\n\n pub fn build(base: &F) -> Self {\n\n let bits = <F as PrimeField>::Params::MODULUS_BITS as usize;\n\n let num_lookups = bits / Self::K + 1;\n\n let lookup_size = (1 << Self::K) - 1;\n\n let last_lookup_size = (1 << (bits - (num_lookups - 1) * Self::K)) - 1;\n\n\n\n let mut table: Vec<Vec<F>> = Vec::with_capacity(num_lookups);\n\n\n\n let mut multiplier = *base;\n\n for i in 0..num_lookups {\n", "file_path": "vchain/src/acc/utils.rs", "rank": 53, "score": 33801.280291948715 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use ark_bls12_381::{Fr, G1Projective, G2Projective};\n\n use ark_ff::Field;\n\n use ark_poly::Polynomial;\n\n use core::ops::MulAssign;\n\n use rand::Rng;\n\n\n\n #[test]\n\n fn test_xgcd() {\n\n let poly1 = DensePolynomial::from_coefficients_vec(vec![Fr::from(1u32), Fr::from(1u32)]);\n\n let poly2 = DensePolynomial::from_coefficients_vec(vec![Fr::from(2u32), Fr::from(1u32)]);\n\n let (g, x, y) = xgcd(&poly1, &poly2).unwrap();\n\n assert_eq!(g.degree(), 0);\n\n let gcd = &(&poly1 * &x) + &(&poly2 * &y);\n\n assert_eq!(gcd, g);\n\n }\n\n\n\n #[test]\n", "file_path": "vchain/src/acc/utils.rs", "rank": 54, "score": 33801.03133788928 }, { "content": " 
}\n\n Self { table }\n\n }\n\n\n\n pub fn apply(&self, input: &<G as ProjectiveCurve>::ScalarField) -> G {\n\n let mut res = G::zero();\n\n let input_repr = input.into_repr();\n\n let num_lookups = input_repr.num_bits() as usize / Self::K + 1;\n\n for i in 0..num_lookups {\n\n let mut word: usize = 0;\n\n for j in 0..Self::K {\n\n if input_repr.get_bit(i * Self::K + j) {\n\n word |= 1 << j;\n\n }\n\n }\n\n if word > 0 {\n\n res.add_assign(&self.table[i][word - 1]);\n\n }\n\n }\n\n res\n", "file_path": "vchain/src/acc/utils.rs", "rank": 55, "score": 33799.93072727597 }, { "content": "use super::{IdType, Parameter, RawObject, SetElementType, SkipLstLvlType};\n\nuse crate::acc::{self, Accumulator, G1Affine, G2Affine};\n\nuse crate::set::MultiSet;\n\nuse anyhow::{Context, Error, Result};\n\nuse std::collections::{BTreeMap, HashSet};\n\nuse std::fs::File;\n\nuse std::io::prelude::*;\n\nuse std::io::BufReader;\n\nuse std::path::Path;\n\n\n\n#[inline]\n", "file_path": "vchain/src/chain/utils.rs", "rank": 56, "score": 33799.08093715514 }, { "content": "use crate::digest::Digest;\n\nuse ark_ec::ProjectiveCurve;\n\nuse ark_ff::{BigInteger, FpParameters, PrimeField, Zero};\n\nuse ark_poly::{\n\n univariate::{DenseOrSparsePolynomial, DensePolynomial},\n\n UVPolynomial,\n\n};\n\nuse itertools::unfold;\n\n\n", "file_path": "vchain/src/acc/utils.rs", "rank": 57, "score": 33796.59066347319 }, { "content": " fn test_pow_fr() {\n\n let mut rng = rand::thread_rng();\n\n let base: Fr = rng.gen();\n\n let num: Fr = rng.gen();\n\n let frp = FixedBaseScalarPow::build(&base);\n\n let expect = base.pow(num.into_repr());\n\n assert_eq!(frp.apply(&num), expect);\n\n }\n\n}\n", "file_path": "vchain/src/acc/utils.rs", "rank": 58, "score": 33795.49094783171 }, { "content": "\n\n pub fn apply(&self, input: &F) -> F {\n\n let mut res = F::one();\n\n let input_repr = input.into_repr();\n\n let num_lookups = input_repr.num_bits() as usize / Self::K + 1;\n\n for i in 0..num_lookups {\n\n let mut word: usize = 0;\n\n for j in 0..Self::K {\n\n if input_repr.get_bit(i * Self::K + j) {\n\n word |= 1 << j;\n\n }\n\n }\n\n if word > 0 {\n\n res.mul_assign(&self.table[i][word - 1]);\n\n }\n\n }\n\n res\n\n }\n\n}\n\n\n", "file_path": "vchain/src/acc/utils.rs", "rank": 59, "score": 33794.64814388811 }, { "content": " Ok(res)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_load_raw_obj() {\n\n let input = \"1\\t[1,2]\\t{a,b}\\n2 [ 3, 4 ] { c, d, }\\n2\\t[ 5, 6 ]\\t { e }\\n\";\n\n let expect = {\n\n let mut out: BTreeMap<IdType, Vec<RawObject>> = BTreeMap::new();\n\n out.insert(\n\n 1,\n\n vec![RawObject {\n\n block_id: 1,\n\n v_data: vec![1, 2],\n\n w_data: [\"a\".to_owned(), \"b\".to_owned()].iter().cloned().collect(),\n\n }],\n\n );\n", "file_path": "vchain/src/chain/utils.rs", "rank": 60, "score": 33791.59174752088 }, { "content": " out.insert(\n\n 2,\n\n vec![\n\n RawObject {\n\n block_id: 2,\n\n v_data: vec![3, 4],\n\n w_data: [\"c\".to_owned(), \"d\".to_owned()].iter().cloned().collect(),\n\n },\n\n RawObject {\n\n block_id: 2,\n\n v_data: vec![5, 6],\n\n w_data: [\"e\".to_owned()].iter().cloned().collect(),\n\n },\n\n ],\n\n );\n\n out\n\n };\n\n assert_eq!(load_raw_obj_from_str(&input).unwrap(), expect);\n\n }\n\n}\n", "file_path": "vchain/src/chain/utils.rs", "rank": 61, "score": 33784.645826491 }, { "content": " .map(|s| s.parse::<u32>().map_err(Error::from))\n\n .collect::<Result<_>>()?;\n\n let w_data: HashSet<String> = split_str\n\n .next()\n\n .context(format!(\"failed to parse 
line {}\", line))?\n\n .trim()\n\n .replace('{', \"\")\n\n .replace('}', \"\")\n\n .split(',')\n\n .map(|s| s.trim().to_owned())\n\n .filter(|s| !s.is_empty())\n\n .collect();\n\n\n\n let raw_obj = RawObject {\n\n block_id,\n\n v_data,\n\n w_data,\n\n };\n\n res.entry(block_id).or_insert_with(Vec::new).push(raw_obj);\n\n }\n", "file_path": "vchain/src/chain/utils.rs", "rank": 62, "score": 33783.45772016439 }, { "content": "\n\n let mut multiplier = *base;\n\n for i in 0..num_lookups {\n\n let table_size = if i == num_lookups - 1 {\n\n last_lookup_size\n\n } else {\n\n lookup_size\n\n };\n\n let sub_table: Vec<G> = unfold(multiplier, |last| {\n\n let ret = *last;\n\n last.add_assign(&multiplier);\n\n Some(ret)\n\n })\n\n .take(table_size)\n\n .collect();\n\n table.push(sub_table);\n\n if i != num_lookups - 1 {\n\n let last = *table.last().unwrap().last().unwrap();\n\n multiplier.add_assign(&last);\n\n }\n", "file_path": "vchain/src/acc/utils.rs", "rank": 63, "score": 33783.35458658919 }, { "content": " let table_size = if i == num_lookups - 1 {\n\n last_lookup_size\n\n } else {\n\n lookup_size\n\n };\n\n let sub_table: Vec<F> = unfold(multiplier, |last| {\n\n let ret = *last;\n\n last.mul_assign(&multiplier);\n\n Some(ret)\n\n })\n\n .take(table_size)\n\n .collect();\n\n table.push(sub_table);\n\n if i != num_lookups - 1 {\n\n let last = *table.last().unwrap().last().unwrap();\n\n multiplier.mul_assign(&last);\n\n }\n\n }\n\n Self { table }\n\n }\n", "file_path": "vchain/src/acc/utils.rs", "rank": 64, "score": 33780.81581180767 }, { "content": "pub use query_result::*;\n\n\n\npub mod historical_query;\n\npub use historical_query::*;\n\n\n\npub type IdType = u32;\n\npub type SkipLstLvlType = u8;\n\n\n\n#[derive(Debug, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)]\n\npub struct Parameter {\n\n pub v_bit_len: Vec<u8>,\n\n pub acc_type: acc::Type,\n\n pub use_sk: bool, // only for debug purpose\n\n pub intra_index: bool,\n\n pub skip_list_max_level: SkipLstLvlType,\n\n}\n\n\n\n#[async_trait::async_trait]\n", "file_path": "vchain/src/chain/mod.rs", "rank": 78, "score": 33609.3181929315 }, { "content": "use crate::acc;\n\nuse anyhow::Result;\n\nuse serde::{Deserialize, Serialize};\n\n\n\npub mod utils;\n\npub use utils::*;\n\n\n\npub mod object;\n\npub use object::*;\n\n\n\npub mod index;\n\npub use index::*;\n\n\n\npub mod build;\n\npub use build::*;\n\n\n\npub mod query;\n\npub use query::*;\n\n\n\npub mod query_result;\n", "file_path": "vchain/src/chain/mod.rs", "rank": 84, "score": 33601.75691997395 }, { "content": "use ark_ec::ProjectiveCurve;\n\nuse ark_ff::{Field, PrimeField};\n\nuse core::ops::MulAssign;\n\nuse core::str::FromStr;\n\nuse criterion::{black_box, criterion_group, criterion_main, Criterion};\n\nuse vchain::acc::utils::*;\n\nuse vchain::acc::{Fr, G1Projective as G1, G2Projective as G2};\n\n\n", "file_path": "vchain/benches/fixed_base_pow.rs", "rank": 86, "score": 33219.50976195468 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use ark_bls12_381::{G1Affine, G2Affine};\n\n use serde::{Deserialize, Serialize};\n\n\n\n #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]\n\n struct Foo {\n\n #[serde(with = \"super\")]\n\n f1: G1Affine,\n\n #[serde(with = \"super\")]\n\n f2: G2Affine,\n\n }\n\n\n\n #[test]\n\n fn test_serde() {\n\n #[allow(clippy::blacklisted_name)]\n\n let foo = Foo {\n\n f1: G1Affine::prime_subgroup_generator(),\n\n f2: G2Affine::prime_subgroup_generator(),\n", "file_path": "vchain/src/acc/serde_impl.rs", "rank": 87, "score": 
33155.773521258714 }, { "content": "use ark_ec::AffineCurve;\n\nuse core::marker::PhantomData;\n\nuse serde::{\n\n de::{Deserializer, Visitor},\n\n ser::Serializer,\n\n};\n\n\n", "file_path": "vchain/src/acc/serde_impl.rs", "rank": 88, "score": 33136.50204045275 }, { "content": "\n\n impl<'de, C: AffineCurve> Visitor<'de> for BytesVisitor<C> {\n\n type Value = C;\n\n\n\n fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n f.write_str(\"AffineCurve\")\n\n }\n\n\n\n fn visit_bytes<E: DeError>(self, v: &[u8]) -> Result<C, E> {\n\n C::deserialize(v).map_err(E::custom)\n\n }\n\n }\n\n\n\n if d.is_human_readable() {\n\n d.deserialize_str(HexVisitor(PhantomData))\n\n } else {\n\n d.deserialize_bytes(BytesVisitor(PhantomData))\n\n }\n\n}\n\n\n", "file_path": "vchain/src/acc/serde_impl.rs", "rank": 89, "score": 33134.42013327687 }, { "content": " };\n\n\n\n let json = serde_json::to_string_pretty(&foo).unwrap();\n\n let bin = bincode::serialize(&foo).unwrap();\n\n\n\n assert_eq!(serde_json::from_str::<Foo>(&json).unwrap(), foo);\n\n assert_eq!(bincode::deserialize::<Foo>(&bin[..]).unwrap(), foo);\n\n }\n\n}\n", "file_path": "vchain/src/acc/serde_impl.rs", "rank": 90, "score": 33131.81183741478 }, { "content": "use crate::acc::utils::digest_to_prime_field;\n\nuse crate::set::{MultiSet, SetElement};\n\nuse ark_ff::PrimeField;\n\nuse ark_poly::{univariate::DensePolynomial, UVPolynomial};\n\nuse core::ops::Deref;\n\nuse rayon::{self, prelude::*};\n\nuse std::borrow::Cow;\n\n\n\n#[derive(Debug, Clone, Default)]\n\npub struct DigestSet<F: PrimeField> {\n\n pub(crate) inner: Vec<(F, u32)>,\n\n}\n\n\n\nimpl<F: PrimeField> DigestSet<F> {\n\n pub fn new<T: SetElement>(input: &MultiSet<T>) -> Self {\n\n let mut inner: Vec<(F, u32)> = Vec::with_capacity(input.len());\n\n (0..input.len())\n\n .into_par_iter()\n\n .map(|i| {\n\n let (k, v) = input.iter().nth(i).unwrap();\n", "file_path": "vchain/src/acc/digest_set.rs", "rank": 91, "score": 32857.62041762791 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use ark_bls12_381::Fr;\n\n\n\n #[test]\n\n fn test_digest_to_poly() {\n\n let set = DigestSet {\n\n inner: vec![\n\n (Fr::from(1u32), 2),\n\n (Fr::from(2u32), 1),\n\n (Fr::from(3u32), 1),\n\n ],\n\n };\n\n let expect = DensePolynomial::from_coefficients_vec(vec![\n\n Fr::from(6u32),\n\n Fr::from(17u32),\n\n Fr::from(17u32),\n\n Fr::from(7u32),\n\n Fr::from(1u32),\n\n ]);\n\n assert_eq!(set.expand_to_poly(), expect);\n\n }\n\n}\n", "file_path": "vchain/src/acc/digest_set.rs", "rank": 92, "score": 32853.25054148811 }, { "content": " let d = k.to_digest();\n\n (digest_to_prime_field(&d), *v)\n\n })\n\n .collect_into_vec(&mut inner);\n\n Self { inner }\n\n }\n\n\n\n pub fn expand_to_poly(&self) -> DensePolynomial<F> {\n\n let mut inputs = Vec::new();\n\n for (k, v) in &self.inner {\n\n for _ in 0..*v {\n\n inputs.push(DensePolynomial::from_coefficients_vec(vec![*k, F::one()]));\n\n }\n\n }\n\n\n\n fn expand<'a, F: PrimeField>(\n\n polys: &'a [DensePolynomial<F>],\n\n ) -> Cow<'a, DensePolynomial<F>> {\n\n if polys.is_empty() {\n\n return Cow::Owned(DensePolynomial::from_coefficients_vec(vec![F::one()]));\n", "file_path": "vchain/src/acc/digest_set.rs", "rank": 93, "score": 32852.88632142659 }, { "content": " } else if polys.len() == 1 {\n\n return Cow::Borrowed(&polys[0]);\n\n }\n\n let mid = polys.len() / 2;\n\n let (left, right) = rayon::join(|| expand(&polys[..mid]), || expand(&polys[mid..]));\n\n Cow::Owned(left.as_ref() * right.as_ref())\n\n }\n\n\n\n 
expand(&inputs).into_owned()\n\n }\n\n}\n\n\n\nimpl<F: PrimeField> Deref for DigestSet<F> {\n\n type Target = Vec<(F, u32)>;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n &self.inner\n\n }\n\n}\n\n\n", "file_path": "vchain/src/acc/digest_set.rs", "rank": 94, "score": 32845.09845496251 }, { "content": "pub use self::service::{\n\n BlockData, BlockHeader, InitParam, IntraIndexNode, Object, Parameter, RawObject, SkipListNode,\n\n TxAddObjs,\n\n};\n\n\n\ninclude!(concat!(env!(\"OUT_DIR\"), \"/protobuf_mod.rs\"));\n", "file_path": "vchain-exonum/src/proto/mod.rs", "rank": 95, "score": 32568.13166173178 }, { "content": "# vChain Demo\n\n\n\n**WARNING**: This is an academic proof-of-concept prototype, and in particular has not received careful code review. This implementation is NOT ready for production use.\n\n\n\nIf you find the code here useful, please consider to cite the following papers:\n\n\n\n```bibtex\n\n@inproceedings{SIGMOD19:vchain,\n\n author = {Xu, Cheng and Zhang, Ce and Xu, Jianliang},\n\n title = {{vChain}: Enabling Verifiable Boolean Range Queries over Blockchain Databases},\n\n booktitle = {Proceedings of the 2019 ACM SIGMOD International Conference on Management of Data},\n\n year = {2019},\n\n month = jun,\n\n address = {Amsterdam, Netherlands},\n\n pages = {141--158},\n\n isbn = {978-1-4503-5643-5},\n\n doi = {10.1145/3299869.3300083}\n\n}\n\n\n\n@inproceedings{SIGMOD20:vchain-demo,\n\n author = {Wang, Haixin and Xu, Cheng and Zhang, Ce and Xu, Jianliang},\n\n title = {{vChain}: A Blockchain System Ensuring Query Integrity},\n\n booktitle = {Proceedings of the 2020 ACM SIGMOD International Conference on Management of Data},\n\n year = {2020},\n\n month = jun,\n\n address = {Portland, OR, USA},\n\n pages = {2693--2696},\n\n isbn = {978-1-4503-6735-6},\n\n doi = {10.1145/3318464.3384682}\n\n}\n\n```\n\n\n\n## Build\n\n\n\n* Install Rust from <https://rustup.rs>.\n\n* Run `cargo test` for unit test.\n\n* Run `cargo build --release` to build the binaries, which will be located at `target/release/` folder.\n\n\n\n## SimChain\n\n\n\n### Create Blockchain DB\n\n\n\n#### Input Format\n\n\n\nThe input is a text file with each line represent an object.\n\n\n\n```\n\nobj := block_id [ v_data ] { w_data }\n\nv_data := v_1, v_2, ...\n\nw_data := w_1, w_2, ...\n\n```\n\n\n\nFor example\n\n\n\n```\n\n1 [1,2] {a,b,c}\n\n1 [1,5] {a}\n\n2 [3,4] {a,e}\n\n```\n\n\n\n### Build DB\n\n\n\nRun `simchain-build` to build the database. You need to specify the bit length for each dimension of the v data. For example:\n\n\n\n```sh\n\n./target/release/simchain-build --bit-len 16,16 --skip-list-max-level 10 -i /path/to/data.txt -o /path/to/output_database\n\n```\n\n\n\nRun `simchain-build --help` for more info.\n\n\n", "file_path": "README.md", "rank": 96, "score": 26777.803782378414 }, { "content": "### Start the Server\n\n\n\nRun `simchain-server` after the database is built. For example:\n\n\n\n```sh\n\n./target/release/simchain-server -b 127.0.0.1:8000 --db /path/to/database\n\n```\n\n\n\nRun `simchain-server --help` for more info.\n\n\n\n### Server REST API\n\n\n\n#### Inspect\n\n\n\nUse following API endpoints to inspect the blockchain. Returned response is a JSON object. 
Refer to source code for their definitions.\n\n\n\n```\n\nGET /get/param\n\nGET /get/blk_header/{id}\n\nGET /get/blk_data/{id}\n\nGET /get/intraindex/{id}\n\nGET /get/skiplist/{id}\n\nGET /get/index/{id}\n\nGET /get/obj/{id}\n\n```\n\n\n\n#### Query\n\n\n\nAPI endpoint is:\n\n\n\n```\n\nPOST /query\n\n```\n\n\n\nEncode query parameter as a JSON object. The following example specifies range as [(1, *, 2), (3, *, 4)] for 3 dimension objects, and bool expression as \"A\" AND (\"B\" OR \"C\").\n\n\n\n```json\n\n{\n\n \"start_block\": 1,\n\n \"end_block\": 10,\n\n \"range\": [[1, null, 2], [3, null, 4]],\n\n \"bool\": [[\"a\"], [\"b\", \"c\"]]\n\n}\n\n```\n\n\n\nThe response is a JSON object like:\n\n\n\n```json\n\n{\n\n \"result\": ...,\n\n \"vo\": ...,\n\n \"query_time_in_ms\": ...,\n\n \"vo_size\": ... // in bytes\n\n \"stats\": ...,\n\n ...\n\n}\n\n```\n\n\n\nRefer to the source code for their definitions.\n\n\n\n#### Verify\n\n\n\nPass the query response directly to the following endpoint for verification.\n\n\n\n```\n\nPOST /verify\n\n```\n\n\n\nThe response is a JSON object like:\n\n\n\n```json\n\n{\n\n \"pass\": true,\n\n \"detail\": ... // detail reason for failure\n\n \"verify_time_in_ms\": ...\n\n}\n\n```\n\n\n\n## Real Chain\n\n\n\n### Start the Node\n\n\n\nRun `vchain-node` to start up a single node blockchain network. For example:\n\n\n\n```sh\n\n./vchain-node -- --bit-len 16,16 --skip-list-max-level 5 --db /path/to/database\n\n```\n\n\n\nRun `vchain-node --help` for more info.\n\n\n\n### Send TX\n\n\n\nRun `vchain-send-tx` to send TX to the node. The data input format is the same as that in the SimChain.\n\n\n\n```sh\n\n./vchain-send-tx -- -i /path/to/data.txt\n\n```\n\n\n\nRun `vchain-send-tx --help` for more info.\n\n\n\n### Start the Server\n\n\n\nRun `vchain-server` to start a server. 
The REST APIs are the same as those in the SimChain.\n\n\n\n```sh\n\n./vchain-server -b 127.0.0.1:8000\n\n```\n\n\n\nRun `vchain-server --help` for more info.\n", "file_path": "README.md", "rank": 97, "score": 26773.873616030014 }, { "content": "use super::*;\n\nuse crate::acc::{self, Accumulator, AccumulatorProof};\n\nuse crate::acc::{G1Affine, G1Projective};\n\nuse crate::digest::{blake2, concat_digest, concat_digest_ref, Digest, Digestible};\n\nuse crate::set::MultiSet;\n\nuse ark_ec::ProjectiveCurve;\n\nuse ark_ff::Zero;\n\nuse core::ops::Deref;\n\nuse futures::join;\n\nuse howlong::Duration;\n\nuse serde::{Deserialize, Serialize};\n\nuse smallvec::SmallVec;\n\nuse std::collections::HashMap;\n\n\n\n#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]\n\npub enum InvalidReason {\n\n InvalidSetIdx(usize),\n\n InvalidAccIdx(AccProofIdxType),\n\n InvalidAccProof(AccProofIdxType),\n\n InvalidMatchObj(IdType),\n", "file_path": "vchain/src/chain/query_result.rs", "rank": 98, "score": 47.70559513013184 }, { "content": "\n\n#[derive(Debug, Default, Clone, Eq, PartialEq, Serialize, Deserialize)]\n\npub struct ObjAcc(#[serde(with = \"crate::acc::serde_impl\")] pub G1Affine);\n\n\n\n// set_idx, [ acc_idx / proof_idx ]\n\n// query_set = query.to_bool_exp(...)[set_idx]\n\npub type AccProofIdxType = (usize, usize);\n\n\n\n#[derive(Debug, Default, Clone, Eq, PartialEq, Serialize, Deserialize)]\n\npub struct ResultVOAcc<AP: AccumulatorProof> {\n\n // <query_exp_set idx, [proof ...]>\n\n pub proofs: HashMap<usize, Vec<AP>>,\n\n // <query_exp_set idx, [obj_acc ...]>\n\n pub object_accs: HashMap<usize, Vec<ObjAcc>>,\n\n}\n\n\n\nimpl<AP: AccumulatorProof> ResultVOAcc<AP> {\n\n pub fn new() -> Self {\n\n Self {\n\n proofs: HashMap::new(),\n", "file_path": "vchain/src/chain/query_result.rs", "rank": 99, "score": 44.259104136442524 } ]
Rust
src/sink.rs
ollien/hline
f08d7277003e7428a577e357fe18cc7012391fc2
use crate::print; use crate::print::{Printer, StdoutPrinter}; use grep::searcher::{Searcher, Sink, SinkContext, SinkError, SinkMatch}; use std::fmt::Display; use std::io; use std::panic; use termion::color::{Fg, LightRed}; use thiserror::Error; const PASSTHRU_PANIC_MSG: &str = "passthru is not enabled on the given searcher"; pub(crate) struct ContextPrintingSink<P: Printer> { printer: P, } #[derive(Error, Debug)] pub enum Error { #[error("Print failure: {0}")] PrintFailed( io::Error, ), #[error("{0}")] SearchError( String, ), } impl From<print::Error> for Error { fn from(err: print::Error) -> Self { let io_err = match err { print::Error::BrokenPipe(wrapped) | print::Error::Other(wrapped) => wrapped, }; Error::PrintFailed(io_err) } } impl SinkError for Error { fn error_message<T: Display>(message: T) -> Self { Error::SearchError(message.to_string()) } } impl<P: Printer> ContextPrintingSink<P> { fn get_sink_result_for_print_result(res: print::Result) -> Result<bool, Error> { match res { Err(print::Error::Other(_)) => Err(Error::from(res.unwrap_err())), Err(print::Error::BrokenPipe(_)) => Ok(false), Ok(_) => Ok(true), } } } impl<P: Printer> ContextPrintingSink<P> { #[must_use] pub fn new(printer: P) -> Self { ContextPrintingSink { printer } } fn validate_searcher(searcher: &Searcher) { if !searcher.passthru() { panic!("{}", PASSTHRU_PANIC_MSG) } } } impl Default for ContextPrintingSink<StdoutPrinter> { fn default() -> Self { ContextPrintingSink { printer: StdoutPrinter {}, } } } impl<P: Printer> Sink for ContextPrintingSink<P> { type Error = Error; fn matched( &mut self, searcher: &Searcher, sink_match: &SinkMatch, ) -> Result<bool, Self::Error> { Self::validate_searcher(searcher); let print_res = self .printer .colored_print(Fg(LightRed), String::from_utf8_lossy(sink_match.bytes())); Self::get_sink_result_for_print_result(print_res) } fn context( &mut self, searcher: &Searcher, context: &SinkContext<'_>, ) -> Result<bool, Self::Error> { Self::validate_searcher(searcher); let data = String::from_utf8_lossy(context.bytes()); let print_res = self.printer.print(data); Self::get_sink_result_for_print_result(print_res) } } #[cfg(test)] mod tests { use super::*; use crate::testutil::mock_print::MockPrinter; use grep::regex::RegexMatcher; use grep::searcher::SearcherBuilder; use test_case::test_case; const SEARCH_TEXT: &str = "The quick \n\ brown fox \n\ jumped over \n\ the lazy \n\ dog."; enum RequiredSearcherSettings { Passthru, } #[test_case(&[RequiredSearcherSettings::Passthru], true; "passthru")] #[test_case(&[], false; "none")] fn test_requires_properly_configured_searcher( settings: &[RequiredSearcherSettings], valid: bool, ) { let perform_search = || { let matcher = RegexMatcher::new("fox").expect("regexp doesn't compile"); let mock_printer = MockPrinter::default(); let sink = ContextPrintingSink { printer: &mock_printer, }; let mut builder = SearcherBuilder::new(); for setting in settings { match setting { RequiredSearcherSettings::Passthru => builder.passthru(true), }; } let mut searcher = builder.build(); searcher.search_slice(matcher, SEARCH_TEXT.as_bytes(), sink) }; if valid { let search_res = perform_search(); assert!(search_res.is_ok()); } else { let search_res = panic::catch_unwind(perform_search); assert!(search_res.is_err()); match search_res.unwrap_err().downcast_ref::<String>() { Some(err) => assert_eq!(err, PASSTHRU_PANIC_MSG), None => panic!("Panicked error was not of expected type"), }; } } }
use crate::print; use crate::print::{Printer, StdoutPrinter}; use grep::searcher::{Searcher, Sink, SinkContext, SinkError, SinkMatch}; use std::fmt::Display; use std::io; use std::panic; use termion::color::{Fg, LightRed}; use thiserror::Error; const PASSTHRU_PANIC_MSG: &str = "passthru is not enabled on the given searcher"; pub(crate) struct ContextPrintingSink<P: Printer> { printer: P, } #[derive(Error, Debug)] pub enum Error { #[error("Print failure: {0}")] PrintFailed( io::Error, ), #[error("{0}")] SearchError( String, ), } impl From<print::Error> for Error { fn from(err: print::Error) -> Self { let io_err = match err { print::Error::BrokenPipe(wrapped) | print::Error::Other(wrapped) => wrapped, }; Error::PrintFailed(io_err) } } impl SinkError for Error { fn error_message<T: Display>(message: T) -> Self { Error::SearchError(message.to_string()) } } impl<P: Printer> ContextPrintingSink<P> { fn get_sink_result_for_print_result(res: print::Result) -> Result<bool, Error> { match res { Err(print::Error::Other(_)) => Err(Error::from(res.unwrap_err())), Err(print::Error::BrokenPipe(_)) => Ok(false), Ok(_) => Ok(true), } } } impl<P: Printer> ContextPrintingSink<P> { #[must_use] pub fn new(printer: P) -> Self { ContextPrintingSink { printer } } fn validate_searcher(searcher: &Searcher) { if !searcher.passthru() { panic!("{}", PASSTHRU_PANIC_MSG) } } } impl Default for ContextPrintingSink<StdoutPrinter> { fn default() -> Self { ContextPrintingSink { printer: StdoutPrinter {}, } } } impl<P: Printer> Sink for ContextPrintingSink<P> { type Error = Error; fn matched( &mut self, searcher: &Searcher, sink_match: &SinkMatch, ) -> Result<bool, Self::Error> { Self::validate_searcher(searcher); let print_res = self .printer .colored_print(Fg(LightRed), String::from_utf8_lossy(sink_match.bytes())); Self::get_sink_result_for_print_result(print_res) } fn context( &mut self, searcher: &Searcher, context: &SinkContext<'_>, ) -> Result<bool, Self::Error> { Self::validate_searcher(searcher); let data = String::from_utf8_lossy(context.bytes()); let print_res = self.printer.print(data); Self::get_sink_result_for_print_result(print_res) } } #[cfg(test)] mod tests { use super::*; use crate::testutil::mock_print::MockPrinter; use grep::regex::RegexMatcher; use grep::searcher::SearcherBuilder; use test_case::test_case; const SEARCH_TEXT: &str = "The quick \n\ brown fox \n\ jumped over \n\ the lazy \n\ dog."; enum RequiredSearcherSettings { Passthru, } #[test_case(&[RequiredSearcherSettings::Passthru], true; "passthru")] #[test_case(&[], false; "none")] fn test_requires_properly_configured_searcher( settings: &[RequiredSearcherSettings], valid: bool, ) { let perform_search = || { let matcher = RegexMatcher::new("fox").expect("regexp doesn't compile"); let mock_printer = MockPrinter::default(); let sink = ContextPrintingSink { printer: &mock_printer, }; let mut builder = SearcherBuilder::new(); for setting in settings { match setting { RequiredSearcherSettings::Passthru => builder.passthru(true), }; } let mut searcher = builder.build(); searcher.search_slice(matcher, SEARCH_TEXT.as_bytes(), sink) }; if valid {
}
let search_res = perform_search(); assert!(search_res.is_ok()); } else { let search_res = panic::catch_unwind(perform_search); assert!(search_res.is_err()); match search_res.unwrap_err().downcast_ref::<String>() { Some(err) => assert_eq!(err, PASSTHRU_PANIC_MSG), None => panic!("Panicked error was not of expected type"), }; } }
function_block-function_prefix_line
[ { "content": "/// `scan_pattern_to_printer` will print a `Read`'s contents to the given `Printer`, while also scanning its contents\n\n/// for a regular expression. Lines that match this pattern will be highlighted in the output.\n\n///\n\n/// Note that this pattern is not anchored at the start of the line by default, and therefore a match anywhere in the\n\n/// line will force the entire line to be considered a match. For instance, the pattern `[a-z]` will match `123abc456`.\n\n///\n\n/// # Errors\n\n///\n\n/// There are four general error cases\n\n/// - An invalid regular expression\n\n/// - An error produced by the underlying grep library during the search\n\n/// - I/O errors in scanning from the [`Read`] (these will be manifested as search errors, as the search process is what\n\n/// performs the reading).\n\n/// - A failure to print to the given printer\n\n///\n\n/// See [enum@Error] for more details.\n\npub fn scan_pattern_to_printer<R: Read, P: Printer>(\n\n reader: R,\n\n pattern: &str,\n\n printer: P,\n\n) -> Result<(), Error> {\n\n let matcher = RegexMatcher::new(pattern)?;\n\n let mut searcher = SearcherBuilder::new().passthru(true).build();\n\n let context_sink = sink::ContextPrintingSink::new(printer);\n\n\n\n searcher.search_reader(matcher, reader, context_sink)?;\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::testutil;\n\n use std::io;\n\n use std::io::Cursor;\n\n use test_case::test_case;\n", "file_path": "src/lib.rs", "rank": 0, "score": 119044.91424828264 }, { "content": "#[allow(clippy::module_name_repetitions)]\n\npub fn is_file_likely_binary<R: Read>(file: &mut R) -> Result<bool, Error> {\n\n let mut buf: [u8; BUFFER_CHECK_AMOUNT] = [0; BUFFER_CHECK_AMOUNT];\n\n let bytes_read = file.read(&mut buf)?;\n\n\n\n let num_binary_chars = String::from_utf8_lossy(&buf[..bytes_read])\n\n .chars()\n\n .filter(|&c| was_utf8_char_replaced(c) || is_binary_char(c))\n\n .count();\n\n\n\n Ok(num_binary_chars > BINARY_CHAR_THRESHOLD as usize)\n\n}\n\n\n", "file_path": "src/file/utf8.rs", "rank": 1, "score": 112895.98565218462 }, { "content": "// Check if a given file is a binary file (or not possible to be easily checked)\n\nfn should_treat_as_binary_file(opened_file: &mut OpenedFile) -> Result<bool, io::Error> {\n\n match opened_file {\n\n OpenedFile::Stdin(stdin) => {\n\n stdin.start_recording();\n\n let is_likely_binary = file::utf8::is_file_likely_binary(stdin)?;\n\n stdin.stop_recording();\n\n stdin.rewind_to_start_of_recording();\n\n Ok(is_likely_binary)\n\n }\n\n OpenedFile::File(file) => {\n\n let is_likely_binary = file::utf8::is_file_likely_binary(file)?;\n\n file.rewind()?;\n\n Ok(is_likely_binary)\n\n }\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 2, "score": 95421.86857788538 }, { "content": "/// `scan_pattern` will print a reader's contents, while also scanning its contents for a regular expression.\n\n/// Lines that match this pattern will be highlighted in the output.\n\n/// A convenience wrapper for [`scan_pattern_to_printer`] that will print to stdout.\n\n///\n\n/// # Errors\n\n///\n\n/// See [`scan_pattern_to_printer`]\n\npub fn scan_pattern<R: Read>(reader: R, pattern: &str) -> Result<(), Error> {\n\n scan_pattern_to_printer(reader, pattern, StdoutPrinter::new())\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 3, "score": 94072.68780799025 }, { "content": "fn make_pattern_case_insensitive(pattern: &str) -> String {\n\n format!(\"(?i){}\", pattern)\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 4, "score": 
89944.44611308334 }, { "content": "/// `Printer` represents an object that can perform some kind of printing, such as by the print! macro\n\npub trait Printer {\n\n /// Print the given message.\n\n ///\n\n /// # Errors\n\n /// In the event of any i/o error, an error is returned. The type [enum@Error] gives implementors the freedom to\n\n /// specify whether or not this error was due to some kind of broken pipe error, which callers may choose to execute\n\n /// specific behavior. The docs of [enum@Error] specify more information about this.\n\n fn print<S: fmt::Display>(&self, msg: S) -> Result;\n\n\n\n /// Print the given message with the given foreground color.\n\n ///\n\n /// # Errors\n\n /// In the event of any i/o error, an error is returned. The type [enum@Error] gives implementors the freedom to\n\n /// specify whether or not this error was due to some kind of broken pipe error, which callers may choose to\n\n /// execute specific behavior. The docs of [enum@Error] specify more information about this.\n\n fn colored_print<S: fmt::Display, C: Color>(&self, color: Fg<C>, msg: S) -> Result {\n\n let msg_string = msg.to_string();\n\n let colored_msg: String = lines::line_split(&msg_string)\n\n .map(|(component, joining_newline)| {\n\n if component.is_empty() {\n", "file_path": "src/print.rs", "rank": 5, "score": 77164.90654180899 }, { "content": "/// `is_binary_char` checks if the given character is a binary char in the UTF-8 charset\n\nfn is_binary_char(c: char) -> bool {\n\n // this table is stolen quite directly from Less\n\n // https://github.com/gwsw/less/blob/294976950f5dc2a6b3436b1d2df97034936552b9/ubin.uni\n\n // Because of the use of surrogate chars in this table, we cannot use rust's char type :(\n\n #[allow(clippy::unreadable_literal)]\n\n #[rustfmt::skip]\n\n let binary_codepoint_ranges = [\n\n Range::<u32>{ start: 0x0000, end: 0x0007 + 1}, /* Cc */\n\n Range::<u32>{ start: 0x000b, end: 0x000b + 1}, /* Cc */\n\n Range::<u32>{ start: 0x000e, end: 0x001f + 1}, /* Cc */\n\n Range::<u32>{ start: 0x007f, end: 0x009f + 1}, /* Cc */\n\n Range::<u32>{ start: 0x2028, end: 0x2028 + 1}, /* Zl */\n\n Range::<u32>{ start: 0x2029, end: 0x2029 + 1}, /* Zp */\n\n Range::<u32>{ start: 0xd800, end: 0xd800 + 1}, /* Cs */\n\n Range::<u32>{ start: 0xdb7f, end: 0xdb80 + 1}, /* Cs */\n\n Range::<u32>{ start: 0xdbff, end: 0xdc00 + 1}, /* Cs */\n\n Range::<u32>{ start: 0xdfff, end: 0xdfff + 1}, /* Cs */\n\n Range::<u32>{ start: 0xe000, end: 0xe000 + 1}, /* Co */\n\n Range::<u32>{ start: 0xf8ff, end: 0xf8ff + 1}, /* Co */\n\n Range::<u32>{ start: 0xf0000, end: 0xf0000 + 1}, /* Co */\n", "file_path": "src/file/utf8.rs", "rank": 6, "score": 60503.535884939294 }, { "content": "/// `was_utf8_char_replaced` checks if the given char was replaced by [`String::from_utf8_lossy`], which indicates that\n\n/// it was not utf-8 originally\n\nfn was_utf8_char_replaced(c: char) -> bool {\n\n c == std::char::REPLACEMENT_CHARACTER\n\n}\n\n\n", "file_path": "src/file/utf8.rs", "rank": 7, "score": 58798.49614676245 }, { "content": "/// Check if the given file is a binary file, and if it is, exit gracefully\n\nfn handle_potentially_binary_file(opened_file: &mut OpenedFile) {\n\n let is_binary_file = match should_treat_as_binary_file(opened_file) {\n\n Err(err) => {\n\n // This could probably be done nicer with a macro but I don't care about a small allocation like this\n\n // when we're immediately about to quit anyway\n\n print_error(&format!(\"failed to peek file: {}\", err));\n\n process::exit(4);\n\n }\n\n Ok(val) => 
val,\n\n };\n\n\n\n if is_binary_file {\n\n print_error(\"Input file may be a binary file. Pass -b to ignore this and scan anyway.\");\n\n process::exit(5);\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 8, "score": 54804.45714426243 }, { "content": "fn assert_is_not_directory(file: &File) -> Result<(), io::Error> {\n\n let metadata = file.metadata()?;\n\n if metadata.is_dir() {\n\n Err(io::Error::new(\n\n // io::ErrorKind::IsADirectory is unstable at the time of writing :(\n\n io::ErrorKind::Other,\n\n \"is a directory\",\n\n ))\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 9, "score": 53964.79496343892 }, { "content": "fn print_error<T: Display + ?Sized>(error_msg: &T) {\n\n eprintln!(\n\n \"{color}error:{reset} {err}\",\n\n color = Fg(LightRed),\n\n reset = Fg(Reset),\n\n err = error_msg\n\n );\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 10, "score": 49575.47772852079 }, { "content": "/// Open the file that was passed to the command line\n\nfn open_file(file: PassedFile) -> Result<OpenedFile, io::Error> {\n\n match file {\n\n PassedFile::Stdin => {\n\n let stdin = io::stdin();\n\n let recorded_stdin = ReadRecorder::new(stdin);\n\n Ok(OpenedFile::Stdin(recorded_stdin))\n\n }\n\n PassedFile::Path(path) => {\n\n let file = File::open(path)?;\n\n assert_is_not_directory(&file)?;\n\n Ok(OpenedFile::File(file))\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 11, "score": 48823.44625919794 }, { "content": "/// `Args` represents arguments passed to the program\n\nstruct Args {\n\n pattern: String,\n\n file: PassedFile,\n\n ok_if_binary_file: bool,\n\n}\n\n\n\nimpl Read for OpenedFile {\n\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n\n match self {\n\n // TODO: If more variants are ever added this could probably be a macro\n\n Self::Stdin(read) => read.read(buf),\n\n Self::File(read) => read.read(buf),\n\n }\n\n }\n\n}\n\n\n\nimpl From<ArgMatches<'_>> for Args {\n\n fn from(args: ArgMatches) -> Self {\n\n let case_insensitive = args.is_present(CASE_INSENSITIVE_ARG_NAME);\n\n let ok_if_binary_file = args.is_present(OK_IF_BINARY_ARG_NAME);\n", "file_path": "src/main.rs", "rank": 12, "score": 42403.259854392105 }, { "content": "/// `OpenedFile` represents some kind of file that was opened for further handling by `hl`\n\nenum OpenedFile {\n\n Stdin(ReadRecorder<Stdin>),\n\n File(File),\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 13, "score": 40661.06703870441 }, { "content": "/// `PassedFile` represents some kind of file that will be passed in an argument\n\nenum PassedFile {\n\n Stdin,\n\n Path(String),\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 14, "score": 40661.06703870441 }, { "content": "fn main() {\n\n let parsed_args = setup_arg_parser().get_matches();\n\n let args_parse_result = Args::try_from(parsed_args);\n\n\n\n let args = args_parse_result.unwrap();\n\n let open_file_result = open_file(args.file);\n\n if let Err(err) = open_file_result {\n\n print_error(&format!(\"Failed to open input file: {}\", err));\n\n process::exit(2);\n\n }\n\n\n\n let mut opened_file = open_file_result.unwrap();\n\n if !args.ok_if_binary_file {\n\n handle_potentially_binary_file(&mut opened_file);\n\n }\n\n\n\n let scan_result = hline::scan_pattern(opened_file, &args.pattern);\n\n if let Err(err) = scan_result {\n\n // the lib crate provides the context for the errors in their error messages\n\n print_error(&err);\n\n process::exit(3);\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 15, "score": 34798.720390814626 }, { 
"content": "/// Setup the argument parser for the program with all possible flags\n\nfn setup_arg_parser() -> App<'static, 'static> {\n\n App::new(crate_name!())\n\n .version(crate_version!())\n\n .about(\"Highlights lines that match the given regular expression\")\n\n .setting(AppSettings::DisableVersion)\n\n .arg(\n\n Arg::with_name(\"pattern\")\n\n .takes_value(true)\n\n .required(true)\n\n .allow_hyphen_values(true)\n\n .help(concat!(\n\n \"The regular expression to search for. Note that this is not anchored, and if \",\n\n \"anchoring is desired, should be done manually with ^ or $.\"\n\n )),\n\n )\n\n .arg(\n\n Arg::with_name(FILENAME_ARG_NAME)\n\n .takes_value(true)\n\n .help(\"The file to scan. If not specified, reads from stdin\"),\n\n )\n", "file_path": "src/main.rs", "rank": 16, "score": 27032.054211408915 }, { "content": "# hline\n\n\n\n[![crates.io](https://img.shields.io/crates/v/hline.svg)](https://crates.io/crates/hline)\n\n\n\n`hline` is a very small command line utility designed to highlight lines in log files. In practice, I've found that\n\ntuning the context that `grep` gives me when `tail -f`ing a log can be quite cumbersome. Oftentimes, all I really\n\ncare about is seeing that a certain message happened and some surrounding context. `hline` fills that niche!\n\n\n\n## Usage\n\n\n\n```\n\nhline 0.2.1\n\nHighlights lines that match the given regular expression\n\n\n\nUSAGE:\n\n hline [FLAGS] <pattern> [filename]\n\n\n\nFLAGS:\n\n -i, --ignore-case Ignore case when performing matching. If not specified, the matching is case-sensitive.\n\n -h, --help Prints help information\n\n -b Treat the given input file as text, even if it may be a binary file\n\n\n\nARGS:\n\n <pattern> The regular expression to search for. Note that this is not anchored, and if anchoring is desired,\n\n should be done manually with ^ or $.\n\n <filename> The file to scan. If not specified, reads from stdin\n\n```\n\n\n\n## Installation\n\n\n\n```\n\ncargo install hline\n\n```\n\n\n\n\n\n### [Changelog](CHANGELOG.md)\n", "file_path": "README.md", "rank": 26, "score": 16244.233348244099 }, { "content": "# Changelog\n\nAll notable changes to this project will be documented in this file.\n\n\n\nThe format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),\n\nand this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).\n\n\n\n## [0.2.1] - 2021-12-12\n\n### Changed\n\n - Made `Error` enum non-exhaustive to promote future expansion.\n\n\n\n## [0.2.0] - 2021-11-17\n\n### Added\n\n - Added binary file detection. When a binary file is detected, `hline` will refuse to highlight it, unless passed the `-b` flag.\n\n\n\n### Fixed\n\n - Fixed inconsistent error output\n\n - Fixed a panic when non-utf-8 data was encountered\n\n\n\n## [0.1.1] - 2021-11-13\n\n### Fixed\n\n - Fix bug where broken pipes would color the shell. For instance, if the last line in some output patched, running\n\n `hline <pat> myfile.txt |head` would color your terminal red. 
Oops!\n\n - Made error message output a bit more human-friendly.\n\n\n\n### Changed\n\n - Change highlight color to light red\n\n\n\n## [0.1.0] - 2021-11-07\n\n - Initial public release 🎉\n\n\n\n[0.1.0]: https://github.com/ollien/hline/releases/tag/v0.1.0\n\n[0.1.1]: https://github.com/ollien/hline/releases/tag/v0.1.1\n\n[0.2.0]: https://github.com/ollien/hline/releases/tag/v0.2.0\n\n[0.2.1]: https://github.com/ollien/hline/releases/tag/v0.2.1\n", "file_path": "CHANGELOG.md", "rank": 27, "score": 16243.58165537201 }, { "content": " Apache License\n\n Version 2.0, January 2004\n\n http://www.apache.org/licenses/\n\n\n\n TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n\n\n 1. Definitions.\n\n\n\n \"License\" shall mean the terms and conditions for use, reproduction,\n\n and distribution as defined by Sections 1 through 9 of this document.\n\n\n\n \"Licensor\" shall mean the copyright owner or entity authorized by\n\n the copyright owner that is granting the License.\n\n\n\n \"Legal Entity\" shall mean the union of the acting entity and all\n\n other entities that control, are controlled by, or are under common\n\n control with that entity. For the purposes of this definition,\n\n \"control\" means (i) the power, direct or indirect, to cause the\n\n direction or management of such entity, whether by contract or\n\n otherwise, or (ii) ownership of fifty percent (50%) or more of the\n\n outstanding shares, or (iii) beneficial ownership of such entity.\n\n\n\n \"You\" (or \"Your\") shall mean an individual or Legal Entity\n\n exercising permissions granted by this License.\n\n\n\n \"Source\" form shall mean the preferred form for making modifications,\n\n including but not limited to software source code, documentation\n\n source, and configuration files.\n\n\n\n \"Object\" form shall mean any form resulting from mechanical\n\n transformation or translation of a Source form, including but\n\n not limited to compiled object code, generated documentation,\n\n and conversions to other media types.\n\n\n\n \"Work\" shall mean the work of authorship, whether in Source or\n\n Object form, made available under the License, as indicated by a\n\n copyright notice that is included in or attached to the work\n", "file_path": "LICENSE.md", "rank": 28, "score": 16241.871452750118 }, { "content": " risks associated with Your exercise of permissions under this License.\n\n\n\n 8. Limitation of Liability. In no event and under no legal theory,\n\n whether in tort (including negligence), contract, or otherwise,\n\n unless required by applicable law (such as deliberate and grossly\n\n negligent acts) or agreed to in writing, shall any Contributor be\n\n liable to You for damages, including any direct, indirect, special,\n\n incidental, or consequential damages of any character arising as a\n\n result of this License or out of the use or inability to use the\n\n Work (including but not limited to damages for loss of goodwill,\n\n work stoppage, computer failure or malfunction, or any and all\n\n other commercial damages or losses), even if such Contributor\n\n has been advised of the possibility of such damages.\n\n\n\n 9. Accepting Warranty or Additional Liability. While redistributing\n\n the Work or Derivative Works thereof, You may choose to offer,\n\n and charge a fee for, acceptance of support, warranty, indemnity,\n\n or other liability obligations and/or rights consistent with this\n\n License. 
However, in accepting such obligations, You may act only\n\n on Your own behalf and on Your sole responsibility, not on behalf\n\n of any other Contributor, and only if You agree to indemnify,\n\n defend, and hold each Contributor harmless for any liability\n\n incurred by, or claims asserted against, such Contributor by reason\n\n of your accepting any such warranty or additional liability.\n\n\n\n END OF TERMS AND CONDITIONS\n\n\n", "file_path": "LICENSE.md", "rank": 29, "score": 16241.11176915098 }, { "content": " the conditions stated in this License.\n\n\n\n 5. Submission of Contributions. Unless You explicitly state otherwise,\n\n any Contribution intentionally submitted for inclusion in the Work\n\n by You to the Licensor shall be under the terms and conditions of\n\n this License, without any additional terms or conditions.\n\n Notwithstanding the above, nothing herein shall supersede or modify\n\n the terms of any separate license agreement you may have executed\n\n with Licensor regarding such Contributions.\n\n\n\n 6. Trademarks. This License does not grant permission to use the trade\n\n names, trademarks, service marks, or product names of the Licensor,\n\n except as required for reasonable and customary use in describing the\n\n origin of the Work and reproducing the content of the NOTICE file.\n\n\n\n 7. Disclaimer of Warranty. Unless required by applicable law or\n\n agreed to in writing, Licensor provides the Work (and each\n\n Contributor provides its Contributions) on an \"AS IS\" BASIS,\n\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n\n implied, including, without limitation, any warranties or conditions\n\n of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n\n PARTICULAR PURPOSE. You are solely responsible for determining the\n\n appropriateness of using or redistributing the Work and assume any\n", "file_path": "LICENSE.md", "rank": 30, "score": 16240.346959898648 }, { "content": " stating that You changed the files; and\n\n\n\n (c) You must retain, in the Source form of any Derivative Works\n\n that You distribute, all copyright, patent, trademark, and\n\n attribution notices from the Source form of the Work,\n\n excluding those notices that do not pertain to any part of\n\n the Derivative Works; and\n\n\n\n (d) If the Work includes a \"NOTICE\" text file as part of its\n\n distribution, then any Derivative Works that You distribute must\n\n include a readable copy of the attribution notices contained\n\n within such NOTICE file, excluding those notices that do not\n\n pertain to any part of the Derivative Works, in at least one\n\n of the following places: within a NOTICE text file distributed\n\n as part of the Derivative Works; within the Source form or\n\n documentation, if provided along with the Derivative Works; or,\n\n within a display generated by the Derivative Works, if and\n\n wherever such third-party notices normally appear. The contents\n\n of the NOTICE file are for informational purposes only and\n\n do not modify the License. 
You may add Your own attribution\n\n notices within Derivative Works that You distribute, alongside\n\n or as an addendum to the NOTICE text from the Work, provided\n\n that such additional attribution notices cannot be construed\n\n as modifying the License.\n\n\n\n You may add Your own copyright statement to Your modifications and\n\n may provide additional or different license terms and conditions\n\n for use, reproduction, or distribution of Your modifications, or\n\n for any such Derivative Works as a whole, provided Your use,\n\n reproduction, and distribution of the Work otherwise complies with\n", "file_path": "LICENSE.md", "rank": 31, "score": 16239.75195533991 }, { "content": " APPENDIX: How to apply the Apache License to your work.\n\n\n\n To apply the Apache License to your work, attach the following\n\n boilerplate notice, with the fields enclosed by brackets \"[]\"\n\n replaced with your own identifying information. (Don't include\n\n the brackets!) The text should be enclosed in the appropriate\n\n comment syntax for the file format. We also recommend that a\n\n file or class name and description of purpose be included on the\n\n same \"printed page\" as the copyright notice for easier\n\n identification within third-party archives.\n\n\n\n Copyright [yyyy] [name of copyright owner]\n\n\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n\n you may not use this file except in compliance with the License.\n\n You may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\n Unless required by applicable law or agreed to in writing, software\n\n distributed under the License is distributed on an \"AS IS\" BASIS,\n\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n See the License for the specific language governing permissions and\n\n limitations under the License.\n", "file_path": "LICENSE.md", "rank": 32, "score": 16239.40158213393 }, { "content": " subsequently incorporated within the Work.\n\n\n\n 2. Grant of Copyright License. Subject to the terms and conditions of\n\n this License, each Contributor hereby grants to You a perpetual,\n\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n\n copyright license to reproduce, prepare Derivative Works of,\n\n publicly display, publicly perform, sublicense, and distribute the\n\n Work and such Derivative Works in Source or Object form.\n\n\n\n 3. Grant of Patent License. Subject to the terms and conditions of\n\n this License, each Contributor hereby grants to You a perpetual,\n\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n\n (except as stated in this section) patent license to make, have made,\n\n use, offer to sell, sell, import, and otherwise transfer the Work,\n\n where such license applies only to those patent claims licensable\n\n by such Contributor that are necessarily infringed by their\n\n Contribution(s) alone or by combination of their Contribution(s)\n\n with the Work to which such Contribution(s) was submitted. If You\n\n institute patent litigation against any entity (including a\n\n cross-claim or counterclaim in a lawsuit) alleging that the Work\n\n or a Contribution incorporated within the Work constitutes direct\n\n or contributory patent infringement, then any patent licenses\n\n granted to You under this License for that Work shall terminate\n\n as of the date such litigation is filed.\n\n\n\n 4. Redistribution. 
You may reproduce and distribute copies of the\n\n Work or Derivative Works thereof in any medium, with or without\n\n modifications, and in Source or Object form, provided that You\n\n meet the following conditions:\n\n\n\n (a) You must give any other recipients of the Work or\n\n Derivative Works a copy of this License; and\n\n\n\n (b) You must cause any modified files to carry prominent notices\n", "file_path": "LICENSE.md", "rank": 33, "score": 16239.06864672635 }, { "content": " (an example is provided in the Appendix below).\n\n\n\n \"Derivative Works\" shall mean any work, whether in Source or Object\n\n form, that is based on (or derived from) the Work and for which the\n\n editorial revisions, annotations, elaborations, or other modifications\n\n represent, as a whole, an original work of authorship. For the purposes\n\n of this License, Derivative Works shall not include works that remain\n\n separable from, or merely link (or bind by name) to the interfaces of,\n\n the Work and Derivative Works thereof.\n\n\n\n \"Contribution\" shall mean any work of authorship, including\n\n the original version of the Work and any modifications or additions\n\n to that Work or Derivative Works thereof, that is intentionally\n\n submitted to Licensor for inclusion in the Work by the copyright owner\n\n or by an individual or Legal Entity authorized to submit on behalf of\n\n the copyright owner. For the purposes of this definition, \"submitted\"\n\n means any form of electronic, verbal, or written communication sent\n\n to the Licensor or its representatives, including but not limited to\n\n communication on electronic mailing lists, source code control systems,\n\n and issue tracking systems that are managed by, or on behalf of, the\n\n Licensor for the purpose of discussing and improving the Work, but\n\n excluding communication that is conspicuously marked or otherwise\n\n designated in writing by the copyright owner as \"Not a Contribution.\"\n\n\n\n \"Contributor\" shall mean Licensor and any individual or Legal Entity\n\n on behalf of whom a Contribution has been received by Licensor and\n", "file_path": "LICENSE.md", "rank": 34, "score": 16238.125694798953 }, { "content": "#![warn(clippy::all, clippy::pedantic)]\n\nuse grep::regex;\n\nuse grep::regex::RegexMatcher;\n\nuse grep::searcher::SearcherBuilder;\n\nuse print::{Printer, StdoutPrinter};\n\nuse std::io;\n\nuse std::io::Read;\n\nuse thiserror::Error;\n\n\n\npub mod file;\n\nmod lines;\n\npub mod print;\n\nmod sink;\n\n\n\n#[cfg(test)]\n\nmod testutil;\n\n\n\n/// `Error` represents the possible errors that can occur during the search process.\n\n#[derive(Error, Debug)]\n\npub enum Error {\n", "file_path": "src/lib.rs", "rank": 35, "score": 26.27252614541381 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::testutil;\n\n use crate::testutil::mock_print::BarebonesMockPrinter;\n\n use termion::color::Magenta;\n\n use test_case::test_case;\n\n\n\n #[test_case(\n\n io::Error::new(io::ErrorKind::BrokenPipe, \"broken pipe\"),\n\n &|err| matches!(err, Error::BrokenPipe(_));\n\n \"BrokenPipe results in BrokenPipe variant\"\n\n )]\n\n #[test_case(\n\n io::Error::new(io::ErrorKind::Interrupted, \"can't print, we're busy\"),\n\n &|err| matches!(err, Error::Other(_));\n\n \"non-BrokenPipe produces Other variant\"\n\n )]\n\n fn test_error_from_io_err(from: io::Error, matches: &dyn Fn(&Error) -> bool) {\n\n let produced_err = Error::from(from);\n", "file_path": "src/print.rs", "rank": 36, "score": 23.67403222237626 }, { 
"content": "impl From<sink::Error> for Error {\n\n fn from(err: sink::Error) -> Self {\n\n match err {\n\n sink::Error::SearchError(msg) => Error::SearchError(msg),\n\n sink::Error::PrintFailed(io_err) => Error::PrintFailure(io_err),\n\n }\n\n }\n\n}\n\n\n\nimpl From<regex::Error> for Error {\n\n fn from(err: regex::Error) -> Self {\n\n Self::RegexError(err)\n\n }\n\n}\n\n\n\n/// `scan_pattern` will print a reader's contents, while also scanning its contents for a regular expression.\n\n/// Lines that match this pattern will be highlighted in the output.\n\n/// A convenience wrapper for [`scan_pattern_to_printer`] that will print to stdout.\n\n///\n\n/// # Errors\n\n///\n\n/// See [`scan_pattern_to_printer`]\n", "file_path": "src/lib.rs", "rank": 37, "score": 20.820837055519497 }, { "content": "#![cfg(test)]\n\nuse crate::print;\n\nuse crate::print::Printer;\n\nuse std::cell::RefCell;\n\nuse std::fmt;\n\nuse termion::color;\n\n\n\n#[derive(Default)]\n\npub(crate) struct MockPrinter {\n\n pub(crate) uncolored_messages: RefCell<Vec<String>>,\n\n pub(crate) colored_messages: RefCell<Vec<String>>,\n\n next_error: RefCell<Option<print::Error>>,\n\n}\n\n\n\nimpl MockPrinter {\n\n pub(crate) fn fail_next(&mut self, error: print::Error) {\n\n self.next_error.replace(Some(error));\n\n }\n\n}\n\n\n", "file_path": "src/testutil/mock_print.rs", "rank": 38, "score": 20.69177534190637 }, { "content": " if self.next_error.borrow().is_some() {\n\n Err(self.next_error.replace(None).unwrap())\n\n } else {\n\n Ok(())\n\n }\n\n }\n\n}\n\n\n\n/// Similar to [`MockPrinter`], except that it only implements required methods\n\n#[derive(Default)]\n\npub(crate) struct BarebonesMockPrinter {\n\n pub(crate) messages: RefCell<Vec<String>>,\n\n}\n\n\n\nimpl Printer for BarebonesMockPrinter {\n\n fn print<S: fmt::Display>(&self, msg: S) -> print::Result {\n\n self.messages.borrow_mut().push(msg.to_string());\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/testutil/mock_print.rs", "rank": 39, "score": 19.61779449362418 }, { "content": "impl StdoutPrinter {\n\n #[must_use]\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\n\n\nimpl Default for StdoutPrinter {\n\n fn default() -> Self {\n\n Self {}\n\n }\n\n}\n\n\n\nimpl Printer for StdoutPrinter {\n\n fn print<S: fmt::Display>(&self, msg: S) -> Result {\n\n let mut stdout = io::stdout();\n\n Ok(write!(stdout, \"{}\", msg)?)\n\n }\n\n}\n\n\n", "file_path": "src/print.rs", "rank": 40, "score": 17.94893053231791 }, { "content": "}\n\n\n\nimpl<R: Read> ReadRecorder<R> {\n\n /// Make a new `ReadRecorder` wrapping the given `Reader`.\n\n pub fn new(reader: R) -> Self {\n\n Self {\n\n read: reader,\n\n recorded_data: Vec::new(),\n\n cursor_pos: None,\n\n recording: false,\n\n }\n\n }\n\n\n\n /// `start_recording` begins the recording process. Once this is started, any reads that call to the underlying\n\n /// [`Read`] (i.e. 
not through the recorded portion) will be copied to an internal buffer.\n\n pub fn start_recording(&mut self) {\n\n self.recording = true;\n\n }\n\n\n\n /// `stop_recording` stops additional reads from being recorded.\n", "file_path": "src/file/recorder.rs", "rank": 41, "score": 17.103463885295035 }, { "content": "\n\n Ok(bytes_copied_from_recording + bytes_read_from_file)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::io::{Cursor, Error};\n\n\n\n // A small wrapper for Cursor to provide a read \"mock\"\n\n struct ReadCountingCursor<R> {\n\n wrapped_cursor: Cursor<R>,\n\n // We will define a read as the number of non-trivial reads: i.e, more than 0 bytes are read\n\n num_reads: i32,\n\n }\n\n\n\n impl<R> ReadCountingCursor<R> {\n\n fn new(cursor: Cursor<R>) -> Self {\n\n Self {\n", "file_path": "src/file/recorder.rs", "rank": 42, "score": 16.10515639234544 }, { "content": " #[error(\"{0}\")]\n\n BrokenPipe(io::Error),\n\n #[error(\"{0}\")]\n\n Other(io::Error),\n\n}\n\n\n\nimpl From<io::Error> for Error {\n\n fn from(err: io::Error) -> Self {\n\n match err.kind() {\n\n io::ErrorKind::BrokenPipe => Self::BrokenPipe(err),\n\n _ => Self::Other(err),\n\n }\n\n }\n\n}\n\n\n\n/// `Printer` represents an object that can perform some kind of printing, such as by the print! macro\n", "file_path": "src/print.rs", "rank": 43, "score": 16.075752261629322 }, { "content": "impl Printer for &MockPrinter {\n\n fn print<S: fmt::Display>(&self, msg: S) -> print::Result {\n\n self.uncolored_messages.borrow_mut().push(msg.to_string());\n\n\n\n if self.next_error.borrow().is_some() {\n\n Err(self.next_error.replace(None).unwrap())\n\n } else {\n\n Ok(())\n\n }\n\n }\n\n\n\n fn colored_print<S: fmt::Display, C: color::Color>(\n\n &self,\n\n _color: color::Fg<C>,\n\n msg: S,\n\n ) -> print::Result {\n\n // Unfortunately, termion colors don't implement PartialEq, so checking for the exact color is not\n\n // feasible unless we wanted to write a wrapper, which I don't care enough to just for unit testing\n\n self.colored_messages.borrow_mut().push(msg.to_string());\n\n\n", "file_path": "src/testutil/mock_print.rs", "rank": 44, "score": 14.95370513782812 }, { "content": " \"predicates, and so forth. The fundamental problems involved are, \\n\".to_string(),\n\n \"however, the same in each case, and I have chosen the computable numbers \\n\".to_string(),\n\n \"for explicit treatment as involving the least cumbrous technique. I hope \\n\".to_string(),\n\n \"shortly to give an account of the relations of the computable numbers, \\n\".to_string(),\n\n \"functions, and so forth to one another. This will include a development \\n\".to_string(),\n\n \"of the theory of functions of a real variable expressed in terms of \\n\".to_string(),\n\n \"computable numbers. 
According to my definition, a number is computable \\n\".to_string(),\n\n \"if its decimal can be written down by a machine.\\n\".to_string()\n\n ];\n\n testutil::assert_slices_eq!(&uncolored_messages, &expected_uncolored_messages);\n\n }\n\n\n\n #[test_case(\".\", 0, 1; \"failure on first match will only attempt to print that match\")]\n\n #[test_case(\"hello I am alan turing\", 1, 0; \"never matching will only attempt to print the first line\")]\n\n fn test_does_not_attempt_to_print_after_broken_pipe_error(\n\n pattern: &str,\n\n num_uncolored_messages: usize,\n\n num_colored_messages: usize,\n\n ) {\n\n let mut mock_printer = MockPrinter::default();\n", "file_path": "src/lib.rs", "rank": 45, "score": 14.436239332333477 }, { "content": " \"empty strings don't need colorization\"\n\n )]\n\n fn test_resets_colors_properly(message: String, expected: String) {\n\n // We're using a mock here specifically so we can test the default implementation of colored_print\n\n let printer = BarebonesMockPrinter::default();\n\n let res = printer.colored_print(Fg(Magenta), message);\n\n assert!(res.is_ok(), \"{}\", res.unwrap_err());\n\n\n\n testutil::assert_slices_eq!(&[expected], &printer.messages.borrow());\n\n }\n\n}\n", "file_path": "src/print.rs", "rank": 46, "score": 14.056741016055001 }, { "content": " /// Parsing the given regular expression failed.\n\n #[error(\"Regular expression engine failed: {0}\")]\n\n RegexError(\n\n /// The original i/o error that caused the print failure.\n\n regex::Error,\n\n ),\n\n /// The search process encountered a fatal error. This is likely an i/o error, but it is not necessarily.\n\n #[error(\"Search process failed: {0}\")]\n\n SearchError(\n\n /// An error message provided by the underlying grep library.\n\n String,\n\n ),\n\n /// Printing to the given printer failed due to an i/o error. 
The original error is wrapped in the variant\n\n #[error(\"Printing results failed: {0}\")]\n\n PrintFailure(\n\n /// The original i/o error that caused the print failure.\n\n io::Error,\n\n ),\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 47, "score": 13.777425763697869 }, { "content": " wrapped_cursor: cursor,\n\n num_reads: 0,\n\n }\n\n }\n\n }\n\n\n\n impl<R: AsRef<[u8]>> Read for ReadCountingCursor<R> {\n\n fn read(&mut self, buf: &mut [u8]) -> Result<usize, Error> {\n\n // If we are called with a non-trivial buffer, we can count the read\n\n if !buf.is_empty() {\n\n self.num_reads += 1;\n\n }\n\n\n\n self.wrapped_cursor.read(buf)\n\n }\n\n }\n\n\n\n #[test]\n\n fn test_reads_transparently_by_default() {\n\n let s_reader = Cursor::new(\"hello world\");\n", "file_path": "src/file/recorder.rs", "rank": 48, "score": 13.300656911637148 }, { "content": " Range::<u32>{ start: 0xffffd, end: 0xffffd + 1}, /* Co */\n\n Range::<u32>{ start: 0x100000, end: 0x100000 + 1}, /* Co */\n\n Range::<u32>{ start: 0x10fffd, end: 0x10fffd + 1}, /* Co */\n\n ];\n\n\n\n binary_codepoint_ranges\n\n .into_iter()\n\n .any(|range| range.contains(&(c as u32)))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::io::Cursor;\n\n use test_case::test_case;\n\n\n\n #[test_case(b\"hello\", false; \"simple string is utf-8\")]\n\n #[test_case(b\"hello\\xff\\xffworld\", false; \"single non-utf-8 is ok\")]\n\n #[test_case(b\"hello\\x00world\", false; \"single binary char is ok\")]\n\n #[test_case(b\"hello\\x00\\xffworld\", false; \"single non-utf-8 and binary is ok\")]\n\n #[test_case(b\"hello\\xff\\xffworld\\xfa\\xfb\\xfc\\xfd\\xfe\", true; \"too many non-utf-8 is not ok\")]\n\n #[test_case(b\"hello\\0\\0\\0\\0\\0\\0world\", true; \"null terms are binary chars\")]\n\n #[test_case(b\"\\x7f\\x45\\x4c\\x46\\x02\\x01\\x01\\x00\\x00 \", true; \"elf header is binary\")]\n\n fn test_is_file_likely_utf8(s: &[u8], is_utf8: bool) {\n\n let mut byte_reader = Cursor::new(s);\n\n assert_eq!(is_utf8, is_file_likely_binary(&mut byte_reader).unwrap());\n\n }\n\n}\n", "file_path": "src/file/utf8.rs", "rank": 49, "score": 13.250419840144835 }, { "content": " pub fn stop_recording(&mut self) {\n\n self.recording = false;\n\n }\n\n\n\n /// `rewind_to_start_of_recording` is conceptually similar to `[Seek::rewind]`, except it will rewind only to the\n\n /// start of the recorded data.\n\n pub fn rewind_to_start_of_recording(&mut self) {\n\n self.cursor_pos = Some(0);\n\n }\n\n\n\n /// `copy_from_recording` will copy as much data as possible from the current recorded data to the given buffer\n\n fn copy_from_recording(&mut self, buf: &mut [u8]) -> usize {\n\n if self.cursor_pos.is_none() {\n\n return 0;\n\n }\n\n\n\n let cursor_pos = self.cursor_pos.unwrap();\n\n if cursor_pos >= self.recorded_data.len() {\n\n return 0;\n\n }\n", "file_path": "src/file/recorder.rs", "rank": 50, "score": 12.00071771574044 }, { "content": "use std::cmp;\n\nuse std::io::{Error, Read};\n\n\n\n// Having main() here helps with readability with the types I have to declare. Sorry clippy\n\n#[allow(clippy::needless_doctest_main)]\n\n/// `ReadRecorder` is a wrapper for [`Read`] that can \"record\" past reads for replay. 
This is especially useful if the\n\n/// underlying [`Read`] does not implement [`Seek`](`std::io::Seek`).\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use hline::file::ReadRecorder;\n\n/// use std::io::{Cursor, Read, Result, Seek, SeekFrom};\n\n///\n\n/// // Wraps Cursor and disables seeking to demonstrate no seeking takes place\n\n/// struct NoSeekCursor<T>(Cursor<T>);\n\n///\n\n/// impl <T> NoSeekCursor<T> {\n\n/// fn new(data: T) -> Self {\n\n/// Self(Cursor::new(data))\n", "file_path": "src/file/recorder.rs", "rank": 51, "score": 11.812630041947749 }, { "content": " return (component, None);\n\n } else if component.is_empty() {\n\n // If there's an empty component that _isn't_ the last component, it's going to be followed by a newline\n\n // (an \\r\\n terminated line will be non-empty).\n\n return (component, Some(\"\\n\"));\n\n }\n\n\n\n let len = component.len();\n\n if component.as_bytes()[len - 1] == b'\\r' {\n\n (&component[0..len - 1], Some(\"\\r\\n\"))\n\n } else {\n\n (component, Some(\"\\n\"))\n\n }\n\n })\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::testutil;\n", "file_path": "src/lines.rs", "rank": 52, "score": 11.801800613760781 }, { "content": " use test_case::test_case;\n\n\n\n #[test_case(\n\n \"hello\",\n\n &[(\"hello\", None) as (&str, Option<&str>)];\n\n \"no newlines\"\n\n )]\n\n #[test_case(\n\n \"hello\\nworld\",\n\n &[(\"hello\", Some(\"\\n\")), (\"world\", None)];\n\n \"splitting newline\"\n\n )]\n\n #[test_case(\n\n \"hello\\nworld\\n\",\n\n &[(\"hello\", Some(\"\\n\")), (\"world\", Some(\"\\n\")), (\"\", None)];\n\n \"terminating newlines\"\n\n )]\n\n #[test_case(\n\n \"hello\\nworld\\r\\n\",\n\n &[(\"hello\", Some(\"\\n\")), (\"world\", Some(\"\\r\\n\")), (\"\", None)];\n", "file_path": "src/lines.rs", "rank": 53, "score": 11.52249152344637 }, { "content": "\n\n fn should_clear_recorded_data(&self, num_bytes_read_from_file: usize) -> bool {\n\n // This takes a bit of mental energy to reason about, but the core idea of why we _also_ check the number\n\n // of bytes read from the file is that it should be perfectly allowable to go to the end of the recording, and\n\n // then re-read.\n\n //\n\n // If we don't have this stipulation, we can simply read up to the end of the buffer, at which point our cursor\n\n // is now out of bounds. We need to \"wait\" until we've actually performed some kind of read to the file.\n\n num_bytes_read_from_file > 0 && self.cursor_out_of_recording_bounds()\n\n }\n\n\n\n fn drop_recorded_data(&mut self) {\n\n self.recorded_data.clear();\n\n self.recorded_data.shrink_to_fit();\n\n self.cursor_pos = None;\n\n }\n\n}\n\n\n\nimpl<R: Read> Read for ReadRecorder<R> {\n\n /// `read` serves two purposes. 
In the general case, it will forward reads to the wrapped [`Read`], and, if\n", "file_path": "src/file/recorder.rs", "rank": 54, "score": 11.499903815983807 }, { "content": "/// // Read the first six chars (\"hello \")\n\n/// recorder.read(&mut [0u8; 6])\n\n/// .expect(\"this read should have succeeded!\");\n\n/// recorder.stop_recording();\n\n/// recorder.rewind_to_start_of_recording();\n\n///\n\n/// // We can now read the full string, including the data we already read, without seeing!\n\n/// let mut read_data = String::new();\n\n/// recorder.read_to_string(&mut read_data)\n\n/// .expect(\"this read should have succeeded!\");\n\n///\n\n/// assert_eq!(read_data, \"hello world!\");\n\n/// }\n\n/// ```\n\n#[allow(clippy::module_name_repetitions)]\n\npub struct ReadRecorder<R: Read> {\n\n read: R,\n\n recorded_data: Vec<u8>,\n\n cursor_pos: Option<usize>,\n\n recording: bool,\n", "file_path": "src/file/recorder.rs", "rank": 55, "score": 11.49697762509515 }, { "content": "//! `print` provides utilities to facilitate printing out search results.\n\nuse crate::lines;\n\nuse std::fmt;\n\nuse std::io;\n\nuse std::io::Write;\n\nuse std::result;\n\nuse termion::color::{Color, Fg, Reset};\n\nuse thiserror::Error;\n\n\n\npub(crate) type Result = result::Result<(), Error>;\n\n\n\n/// Error is a simple wrapper for [`io::Error`] that differentiates between certain error kinds as part of the type.\n\n///\n\n/// In general, this type exists because of <https://github.com/rust-lang/rust/issues/46016>; println! panics on\n\n/// broken pipe errors. Though we could just ignore the errors, we need some way to differentiate between it and other\n\n/// errors. This could be done with `io::Error::kind`, but this wrapper makes it explicit it should be handled with an\n\n/// action such as terminating gracefully. It's silly and annoying, but it's how it is.\n\n#[derive(Error, Debug)]\n\n#[non_exhaustive]\n\npub enum Error {\n", "file_path": "src/print.rs", "rank": 56, "score": 11.472306037180838 }, { "content": " let res = scan_pattern_to_printer(\n\n &mut lipsum_reader,\n\n r#\"\"?computable\"?\\snumbers\"#,\n\n &mock_printer,\n\n );\n\n if let Err(err) = res {\n\n panic!(\"failed to search: {}\", err)\n\n }\n\n\n\n let colored_messages = mock_printer.colored_messages.borrow();\n\n #[rustfmt::skip]\n\n let expected_colored_messages = [\n\n \"The \\\"computable\\\" numbers may be described briefly as the real \\n\".to_string(),\n\n \"Although the subject of this paper is ostensibly the computable numbers. \\n\".to_string(),\n\n \"however, the same in each case, and I have chosen the computable numbers \\n\".to_string(),\n\n \"shortly to give an account of the relations of the computable numbers, \\n\".to_string(),\n\n \"computable numbers. 
According to my definition, a number is computable \\n\".to_string(),\n\n ];\n\n testutil::assert_slices_eq!(&colored_messages, &expected_colored_messages);\n\n\n", "file_path": "src/lib.rs", "rank": 57, "score": 11.2774166855351 }, { "content": " let broken_pipe_err =\n\n print::Error::from(io::Error::new(io::ErrorKind::BrokenPipe, \"broken pipe\"));\n\n mock_printer.fail_next(broken_pipe_err);\n\n let mut lipsum_reader = Cursor::new(SEARCH_TEXT);\n\n let res = scan_pattern_to_printer(&mut lipsum_reader, pattern, &mock_printer);\n\n\n\n assert!(!res.is_err(), \"failed to search: {:?}\", res.unwrap_err());\n\n assert_eq!(\n\n num_colored_messages,\n\n mock_printer.colored_messages.borrow().len()\n\n );\n\n assert_eq!(\n\n num_uncolored_messages,\n\n mock_printer.uncolored_messages.borrow().len()\n\n );\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 58, "score": 10.755831925536892 }, { "content": " assert!(\n\n matches(&produced_err),\n\n \"enum did not match: got {:?}\",\n\n &produced_err\n\n );\n\n }\n\n\n\n #[test_case(\n\n \"hello world\".to_string(),\n\n format!(\"{0}hello world{1}\", Fg(Magenta), Fg(Reset));\n\n \"no-newline case ends with reset\"\n\n )]\n\n #[test_case(\n\n \"foo\\nbar\\n\".to_string(),\n\n format!(\"{0}foo{1}\\n{0}bar{1}\\n\", Fg(Magenta), Fg(Reset));\n\n \"puts reset char before newlines\"\n\n )]\n\n #[test_case(\n\n \"hello\\n\\n\\nworld\".to_string(),\n\n format!(\"{0}hello{1}\\n\\n\\n{0}world{1}\", Fg(Magenta), Fg(Reset));\n", "file_path": "src/print.rs", "rank": 59, "score": 10.74255927410198 }, { "content": " // but it does make sure the functionality works as expected\n\n let res = scan_pattern_to_printer(&mut lipsum_reader, \"(?i)INTEGRAL\", &mock_printer);\n\n if let Err(err) = res {\n\n panic!(\"failed to search: {}\", err)\n\n }\n\n\n\n let colored_messages = mock_printer.colored_messages.borrow();\n\n let expected_colored_messages = [\n\n \"of an integral variable or a real or computable variable, computable \\n\".to_string(),\n\n ];\n\n testutil::assert_slices_eq!(&colored_messages, &expected_colored_messages);\n\n\n\n let uncolored_messages = mock_printer.uncolored_messages.borrow();\n\n // Again, the only missing message is the previous\n\n #[rustfmt::skip]\n\n let expected_uncolored_messages = [\n\n \"The \\\"computable\\\" numbers may be described briefly as the real \\n\".to_string(),\n\n \"numbers whose expressions as a decimal are calculable by finite means. \\n\".to_string(),\n\n \"Although the subject of this paper is ostensibly the computable numbers. \\n\".to_string(),\n\n \"it is almost equally easy to define and investigate computable functions \\n\".to_string(),\n", "file_path": "src/lib.rs", "rank": 60, "score": 10.607288476926062 }, { "content": "//! `lines` provides utilities for processing lines of strings\n\n\n\n/// `line_split` is an extremely similar iterator to [`str::lines`], but with one key difference: it provides the line\n\n/// character type it split on (the second element in the returned tuple). This way, one can reconstruct the original\n\n/// string when joining. If the line was not terminated by a newline (i.e. 
when it's at the end of a file), the second\n\n/// tuple element will be None.\n\npub(crate) fn line_split<'a>(s: &'a str) -> impl Iterator<Item = (&str, Option<&str>)> + 'a {\n\n // We could probably make this more efficient, but it would involve mostly re-implementing `split`.\n\n // I did some poking around, and this method is generally called for split_components.len() <= 2, so I'm not\n\n // too worried\n\n let split_components: Vec<&str> = s.split('\\n').collect();\n\n let num_split_components = split_components.len();\n\n\n\n split_components\n\n .into_iter()\n\n .enumerate()\n\n .map(move |(idx, component)| {\n\n if idx == num_split_components - 1 {\n\n // The last split component will never have a newline, as otherwise it would have a \"\"\n\n // element following it\n", "file_path": "src/lines.rs", "rank": 61, "score": 10.120039598317286 }, { "content": " let uncolored_messages = mock_printer.uncolored_messages.borrow();\n\n #[rustfmt::skip]\n\n let expected_uncolored_messages = [\n\n \"numbers whose expressions as a decimal are calculable by finite means. \\n\".to_string(),\n\n \"it is almost equally easy to define and investigate computable functions \\n\".to_string(),\n\n \"of an integral variable or a real or computable variable, computable \\n\".to_string(),\n\n \"predicates, and so forth. The fundamental problems involved are, \\n\".to_string(),\n\n \"for explicit treatment as involving the least cumbrous technique. I hope \\n\".to_string(),\n\n \"functions, and so forth to one another. This will include a development \\n\".to_string(),\n\n \"of the theory of functions of a real variable expressed in terms of \\n\".to_string(),\n\n \"if its decimal can be written down by a machine.\\n\".to_string(),\n\n ];\n\n testutil::assert_slices_eq!(&uncolored_messages, &expected_uncolored_messages);\n\n }\n\n\n\n #[test]\n\n fn case_insensitive_pattern_matches() {\n\n let mock_printer = MockPrinter::default();\n\n let mut lipsum_reader = Cursor::new(SEARCH_TEXT);\n\n // This test is a little bit of a cheat, because it doesn't test what's actually inputted by the CLI,\n", "file_path": "src/lib.rs", "rank": 62, "score": 10.087953929990189 }, { "content": " return joining_newline.unwrap_or_default().to_string();\n\n }\n\n\n\n format!(\n\n \"{color}{component}{reset}{joining_newline}\",\n\n color = color,\n\n reset = Fg(Reset),\n\n component = component,\n\n joining_newline = joining_newline.unwrap_or_default()\n\n )\n\n })\n\n .collect();\n\n\n\n self.print(colored_msg)\n\n }\n\n}\n\n\n\n/// `StdoutPrinter` is, quite simply, a printer that will print to stdout.\n\npub struct StdoutPrinter;\n\n\n", "file_path": "src/print.rs", "rank": 63, "score": 9.789798652853358 }, { "content": " \"mixing newline types\"\n\n )]\n\n #[test_case(\n\n \"hello\\n\\n\\nworld\",\n\n &[(\"hello\", Some(\"\\n\")), (\"\", Some(\"\\n\")), (\"\", Some(\"\\n\")), (\"world\", None)];\n\n \"chained newlines\"\n\n )]\n\n #[test_case(\n\n \"hello\\n\\r\\n\\nworld\",\n\n &[(\"hello\", Some(\"\\n\")), (\"\", Some(\"\\r\\n\")), (\"\", Some(\"\\n\")), (\"world\", None)];\n\n \"chained, mixed newlines\"\n\n )]\n\n #[test_case(\n\n \"hello\\rworld\\r\\nthere it is!\\n\",\n\n &[(\"hello\\rworld\", Some(\"\\r\\n\")), (\"there it is!\", Some(\"\\n\")), (\"\", None)];\n\n \"carriage return alone isn't significant\"\n\n )]\n\n fn test_splits_on_newlines(s: &str, expected: &[(&str, Option<&str>)]) {\n\n let collected: Vec<(&str, Option<&str>)> = line_split(s).collect();\n\n testutil::assert_slices_eq!(&expected, &collected);\n\n 
}\n\n}\n", "file_path": "src/lines.rs", "rank": 64, "score": 9.282511251815269 }, { "content": " /// recording currently taking place (see [`start_recording`](`ReadRecorder::start_recording`)),\n\n /// then the read data will be copied to an internal data buffer. However, if the following two conditions are met,\n\n /// then it will not read from the wrapped [`Read`], but rather from the recording buffer.\n\n ///\n\n /// 1. there is recorded data to read\n\n /// 2. the internal \"rewind cursor\" is within the bounds of the read data.\n\n ///\n\n /// This \"rewind cursor\" is initialized by calling\n\n /// [`rewind_to_start_of_recording`](`ReadRecorder::rewind_to_start_of_recording`), which sets it to zero. Every\n\n /// byte read will advance this cursor, until it is outside the bounds of the recorded data, at which point the\n\n /// recorded data is dropped.\n\n fn read(&mut self, buf: &mut [u8]) -> Result<usize, Error> {\n\n let bytes_copied_from_recording = self.copy_from_recording(buf);\n\n let bytes_read_from_file = self.read.read(&mut buf[bytes_copied_from_recording..])?;\n\n if self.recording {\n\n self.recorded_data\n\n .extend(buf.iter().take(bytes_read_from_file));\n\n } else if self.should_clear_recorded_data(bytes_read_from_file) {\n\n self.drop_recorded_data();\n\n }\n", "file_path": "src/file/recorder.rs", "rank": 65, "score": 9.178603303026572 }, { "content": " use testutil::mock_print::MockPrinter;\n\n\n\n const SEARCH_TEXT: &str = \"The \\\"computable\\\" numbers may be described briefly as the real \\n\\\n\n numbers whose expressions as a decimal are calculable by finite means. \\n\\\n\n Although the subject of this paper is ostensibly the computable numbers. \\n\\\n\n it is almost equally easy to define and investigate computable functions \\n\\\n\n of an integral variable or a real or computable variable, computable \\n\\\n\n predicates, and so forth. The fundamental problems involved are, \\n\\\n\n however, the same in each case, and I have chosen the computable numbers \\n\\\n\n for explicit treatment as involving the least cumbrous technique. I hope \\n\\\n\n shortly to give an account of the relations of the computable numbers, \\n\\\n\n functions, and so forth to one another. This will include a development \\n\\\n\n of the theory of functions of a real variable expressed in terms of \\n\\\n\n computable numbers. According to my definition, a number is computable \\n\\\n\n if its decimal can be written down by a machine.\\n\";\n\n\n\n #[test]\n\n fn test_highlights_matches() {\n\n let mock_printer = MockPrinter::default();\n\n let mut lipsum_reader = Cursor::new(SEARCH_TEXT);\n", "file_path": "src/lib.rs", "rank": 66, "score": 9.055258352898534 }, { "content": "#![warn(clippy::all, clippy::pedantic)]\n\nuse clap::{crate_name, crate_version, App, AppSettings, Arg, ArgMatches};\n\nuse hline::file;\n\nuse hline::file::ReadRecorder;\n\nuse std::env;\n\nuse std::fmt::Display;\n\nuse std::fs::File;\n\nuse std::io;\n\nuse std::io::{Read, Seek, Stdin};\n\nuse std::process;\n\nuse termion::color::{Fg, LightRed, Reset};\n\n\n\nconst FILENAME_ARG_NAME: &str = \"filename\";\n\nconst PATTERN_ARG_NAME: &str = \"pattern\";\n\nconst CASE_INSENSITIVE_ARG_NAME: &str = \"case-insensitive\";\n\nconst OK_IF_BINARY_ARG_NAME: &str = \"ok-if-binary\";\n\n\n\n/// `OpenedFile` represents some kind of file that was opened for further handling by `hl`\n", "file_path": "src/main.rs", "rank": 67, "score": 9.041898844966743 }, { "content": "//! 
File provides utilities for reading files for scanning.\n\n//!\n\n//! These types are not generally require for using the methods defined in the crate root, but can be useful to\n\n//! ensure their output will be usable.\n\nmod recorder;\n\npub mod utf8;\n\n\n\npub use recorder::ReadRecorder;\n", "file_path": "src/file.rs", "rank": 68, "score": 9.030339025861526 }, { "content": "#![cfg(test)]\n\n#![allow(dead_code)]\n\npub(crate) mod mock_print;\n\n\n\npub(crate) fn are_slices_eq<T: PartialEq>(s1: &[T], s2: &[T]) -> bool {\n\n if s1.len() != s2.len() {\n\n return false;\n\n }\n\n\n\n // https://stackoverflow.com/a/29504547\n\n let len = s1.len();\n\n s1.iter().zip(s2).filter(|&(a, b)| a == b).count() == len\n\n}\n\n\n\nmacro_rules! assert_slices_eq {\n\n ($s1: expr, $s2: expr) => {{\n\n let s1 = $s1;\n\n let s2 = $s2;\n\n assert!(\n\n testutil::are_slices_eq(s1, s2),\n\n \"(expected) {:?} != (actual) {:?}\",\n\n s1,\n\n s2,\n\n );\n\n }};\n\n}\n\n\n\npub(crate) use assert_slices_eq;\n", "file_path": "src/testutil.rs", "rank": 69, "score": 9.024262689885878 }, { "content": " let mut recorder = ReadRecorder::new(s_reader);\n\n\n\n let mut read_out = String::new();\n\n recorder\n\n .read_to_string(&mut read_out)\n\n .expect(\"reading failed unexpectedly\");\n\n\n\n assert_eq!(read_out, \"hello world\");\n\n }\n\n\n\n #[test]\n\n fn test_can_read_recorded_portion_repeatedly_without_calling_underlying_reader() {\n\n let s_reader = ReadCountingCursor::new(Cursor::new(\"hello world\"));\n\n\n\n let mut recorder = ReadRecorder::new(s_reader);\n\n recorder.start_recording();\n\n\n\n // Read through the full string once to initialize the recording\n\n recorder\n\n .read_to_string(&mut String::new())\n", "file_path": "src/file/recorder.rs", "rank": 70, "score": 8.420008196210325 }, { "content": "\n\n let bytes_remaining_in_recording = self.recorded_data.len() - cursor_pos;\n\n let bytes_to_read = cmp::min(buf.len(), bytes_remaining_in_recording);\n\n self.recorded_data\n\n .iter()\n\n .skip(cursor_pos)\n\n .take(bytes_to_read)\n\n .enumerate()\n\n .for_each(|(idx, &chr)| buf[idx] = chr);\n\n\n\n self.cursor_pos = Some(cursor_pos + bytes_to_read);\n\n bytes_to_read\n\n }\n\n\n\n fn cursor_out_of_recording_bounds(&self) -> bool {\n\n match self.cursor_pos {\n\n None => false,\n\n Some(cursor_pos) => cursor_pos >= self.recorded_data.len(),\n\n }\n\n }\n", "file_path": "src/file/recorder.rs", "rank": 71, "score": 8.1998915027505 }, { "content": " let mut recorder = ReadRecorder::new(s_reader);\n\n recorder.rewind_to_start_of_recording();\n\n\n\n let mut read_contents = String::new();\n\n recorder\n\n .read_to_string(&mut read_contents)\n\n .expect(\"reading failed unexpectedly\");\n\n\n\n assert_eq!(read_contents, \"hello world\");\n\n }\n\n\n\n #[test]\n\n fn test_reading_past_recorded_portion_drops_recording() {\n\n const BYTES_TO_RECORD: usize = 3;\n\n let s_reader = Cursor::new(\"hello world\");\n\n let mut recorder = ReadRecorder::new(s_reader);\n\n\n\n recorder.start_recording();\n\n recorder\n\n .read_exact(&mut [0_u8; BYTES_TO_RECORD])\n", "file_path": "src/file/recorder.rs", "rank": 72, "score": 7.6448541480928265 }, { "content": " \"underlying Read was called more times than it should\"\n\n );\n\n }\n\n }\n\n\n\n #[test]\n\n fn can_read_slowly_through_recording() {\n\n let s_reader = Cursor::new(\"hello world\");\n\n let mut recorder = ReadRecorder::new(s_reader);\n\n recorder.start_recording();\n\n recorder\n\n .read_to_string(&mut String::new())\n\n .expect(\"reading failed 
unexpectedly\");\n\n recorder.stop_recording();\n\n recorder.rewind_to_start_of_recording();\n\n\n\n let mut full_res = String::new();\n\n let mut buf = [0_u8; 2];\n\n loop {\n\n let bytes_read = recorder\n", "file_path": "src/file/recorder.rs", "rank": 73, "score": 7.568661900145593 }, { "content": "/// }\n\n/// }\n\n///\n\n/// impl <T: AsRef<[u8]>> Read for NoSeekCursor<T> {\n\n/// fn read(&mut self, buf: &mut [u8]) -> Result<usize> {\n\n/// self.0.read(buf)\n\n/// }\n\n/// }\n\n///\n\n/// impl <T> Seek for NoSeekCursor<T> {\n\n/// fn seek(&mut self, _: SeekFrom) -> Result<u64> {\n\n/// panic!(\"This cursor doesn't support seeking!\");\n\n/// }\n\n/// }\n\n///\n\n/// fn main() {\n\n/// let cursor = NoSeekCursor::new(\"hello world!\");\n\n/// let mut recorder = ReadRecorder::new(cursor);\n\n///\n\n/// recorder.start_recording();\n", "file_path": "src/file/recorder.rs", "rank": 74, "score": 7.4833537028076105 }, { "content": " .expect(\"reading failed unexpectedly\");\n\n\n\n recorder.stop_recording();\n\n recorder.rewind_to_start_of_recording();\n\n\n\n let num_reads_before_retrying = recorder.read.num_reads;\n\n for _ in 0..3 {\n\n let mut read_out = [0_u8; \"hello world\".len()];\n\n recorder\n\n .read_exact(&mut read_out)\n\n .expect(\"reading failed unexpectedly\");\n\n\n\n assert_eq!(\n\n std::str::from_utf8(&read_out).expect(\"did not read utf-8\"),\n\n \"hello world\",\n\n \"read data did not match expected\"\n\n );\n\n recorder.rewind_to_start_of_recording();\n\n assert_eq!(\n\n num_reads_before_retrying, recorder.read.num_reads,\n", "file_path": "src/file/recorder.rs", "rank": 75, "score": 7.366504776764267 }, { "content": "// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n\n// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR\n\n// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE\n\n// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\n\n// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT\n\n// OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR\n\n// BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,\n\n// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE\n\n// OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN\n\n// IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n\nuse std::io::{Error, Read};\n\nuse std::ops::Range;\n\n\n\nconst BINARY_CHAR_THRESHOLD: i8 = 5;\n\nconst BUFFER_CHECK_AMOUNT: usize = 255;\n\n\n\n/// `is_file_likely_binary` check if a file is likely a binary file. 
This is useful to check if a file is likely\n\n/// human-readable or not.\n\n///\n\n/// # Errors\n\n///\n\n/// An [`std::io::Error`] will be returned if there is an underlying problem reading from the given [`Read`]\n\n//\n\n// This mechanism is inspired heavily by `less`' implementation, which follows the same semantics (in utf-8 mode,\n\n// at least).\n\n// https://github.com/gwsw/less/blob/294976950f5dc2a6b3436b1d2df97034936552b9/filename.c#L480-L484\n\n#[allow(clippy::module_name_repetitions)]\n", "file_path": "src/file/utf8.rs", "rank": 76, "score": 7.063194871684672 }, { "content": " .expect(\"reading failed unexpectedly\");\n\n recorder.rewind_to_start_of_recording();\n\n recorder.stop_recording();\n\n\n\n // Read one byte past the recording\n\n recorder\n\n .read_exact(&mut [0_u8; BYTES_TO_RECORD + 1])\n\n .expect(\"reading failed unexpectedly\");\n\n\n\n recorder.rewind_to_start_of_recording();\n\n\n\n // We should not be able to simply read the recorded data now. The recording buffer will be empty\n\n let mut read_contents = [0_u8; BYTES_TO_RECORD];\n\n recorder\n\n .read_exact(&mut read_contents)\n\n .expect(\"reading failed unexpectedly\");\n\n\n\n assert_ne!(\n\n std::str::from_utf8(&read_contents).expect(\"did not read utf-8\"),\n\n \"hel\",\n\n \"Read data that the read cursor should have already passed\"\n\n );\n\n }\n\n}\n", "file_path": "src/file/recorder.rs", "rank": 77, "score": 6.344539765888589 }, { "content": " .read(&mut buf)\n\n .expect(\"reading failed unexpectedly\");\n\n if bytes_read == 0 {\n\n break;\n\n }\n\n\n\n for byte in &buf[0..bytes_read] {\n\n full_res.push(*byte as char);\n\n }\n\n }\n\n\n\n assert_eq!(\"hello world\", full_res);\n\n }\n\n\n\n #[test]\n\n fn test_does_not_call_underlying_read_when_reading_within_recorded_portion() {\n\n let s_reader = ReadCountingCursor::new(Cursor::new(\"hello world\"));\n\n let mut recorder = ReadRecorder::new(s_reader);\n\n\n\n recorder.start_recording();\n", "file_path": "src/file/recorder.rs", "rank": 78, "score": 4.84190620029832 }, { "content": " .arg(\n\n Arg::with_name(CASE_INSENSITIVE_ARG_NAME)\n\n .short(\"-i\")\n\n .long(\"--ignore-case\")\n\n .help(\"Ignore case when performing matching. 
If not specified, the matching is case-sensitive.\"),\n\n )\n\n .arg(\n\n Arg::with_name(OK_IF_BINARY_ARG_NAME)\n\n .short(\"-b\")\n\n .help(\"Treat the given input file as text, even if it may be a binary file\"),\n\n )\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 79, "score": 4.787703527030024 }, { "content": " recorder\n\n .read_exact(&mut [0_u8; 3])\n\n .expect(\"reading failed unexpectedly\");\n\n recorder.rewind_to_start_of_recording();\n\n recorder.stop_recording();\n\n\n\n let num_reads_before = recorder.read.num_reads;\n\n recorder\n\n .read_exact(&mut [0_u8; 3])\n\n .expect(\"reading failed unexpectedly\");\n\n\n\n assert_eq!(\n\n num_reads_before, recorder.read.num_reads,\n\n \"underlying Read was called more times than it should\"\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_can_rewind_through_zero_length_recording() {\n\n let s_reader = ReadCountingCursor::new(Cursor::new(\"hello world\"));\n", "file_path": "src/file/recorder.rs", "rank": 80, "score": 4.651827863658497 }, { "content": " let pattern = args\n\n .value_of(PATTERN_ARG_NAME)\n\n .map(|pat| {\n\n if case_insensitive {\n\n make_pattern_case_insensitive(pat)\n\n } else {\n\n pat.to_string()\n\n }\n\n })\n\n .expect(\"pattern arg not found, despite parser reporting it was present\");\n\n\n\n let file = args\n\n .value_of(FILENAME_ARG_NAME)\n\n .map_or(PassedFile::Stdin, |filename| {\n\n PassedFile::Path(filename.to_string())\n\n });\n\n\n\n Args {\n\n pattern,\n\n file,\n\n ok_if_binary_file,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 81, "score": 2.548561148085059 }, { "content": "//! Provides utilities to determine the properties of a UTF-8 encoded file.\n\n\n\n// Many things in this file take heavy inspiration from less. As such, I'm including their license here.\n\n//\n\n// Less License\n\n// ------------\n\n\n\n// Less\n\n// Copyright (C) 1984-2018 Mark Nudelman\n\n\n\n// Redistribution and use in source and binary forms, with or without\n\n// modification, are permitted provided that the following conditions\n\n// are met:\n\n// 1. Redistributions of source code must retain the above copyright\n\n// notice, this list of conditions and the following disclaimer.\n\n// 2. Redistributions in binary form must reproduce the above copyright\n\n// notice in the documentation and/or other materials provided with\n\n// the distribution.\n\n\n\n// THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY\n", "file_path": "src/file/utf8.rs", "rank": 82, "score": 1.6290741405083793 } ]
Rust
consensus/safety-rules/src/persistent_safety_storage.rs
pepenemo/libra
86a42bc1f1113868ee8c4815f0d3a137a923ed52
use crate::{ counters, logging::{self, LogEntry, LogEvent, LogField}, }; use anyhow::Result; use consensus_types::{common::Author, safety_data::SafetyData}; use libra_crypto::ed25519::{Ed25519PrivateKey, Ed25519PublicKey}; use libra_global_constants::{CONSENSUS_KEY, EXECUTION_KEY, OWNER_ACCOUNT, SAFETY_DATA, WAYPOINT}; use libra_logger::prelude::*; use libra_secure_storage::{ CachedStorage, CryptoStorage, InMemoryStorage, KVStorage, Storage, Value, }; use libra_types::waypoint::Waypoint; use std::str::FromStr; pub struct PersistentSafetyStorage { internal_store: Storage, } impl PersistentSafetyStorage { pub fn in_memory( consensus_private_key: Ed25519PrivateKey, execution_private_key: Ed25519PrivateKey, ) -> Self { let storage = Storage::from(InMemoryStorage::new()); Self::initialize( storage, Author::random(), consensus_private_key, execution_private_key, Waypoint::default(), ) } pub fn initialize( mut internal_store: Storage, author: Author, consensus_private_key: Ed25519PrivateKey, execution_private_key: Ed25519PrivateKey, waypoint: Waypoint, ) -> Self { Self::initialize_( &mut internal_store, author, consensus_private_key, execution_private_key, waypoint, ) .expect("Unable to initialize backend storage"); Self { internal_store } } fn initialize_( internal_store: &mut Storage, author: Author, consensus_private_key: Ed25519PrivateKey, execution_private_key: Ed25519PrivateKey, waypoint: Waypoint, ) -> Result<()> { let result = internal_store.import_private_key(CONSENSUS_KEY, consensus_private_key); if let Err(libra_secure_storage::Error::KeyAlreadyExists(_)) = result { warn!("Attempted to re-initialize existing storage"); return Ok(()); } internal_store.import_private_key(EXECUTION_KEY, execution_private_key)?; internal_store.set( SAFETY_DATA, Value::SafetyData(SafetyData::new(1, 0, 0, None)), )?; internal_store.set(OWNER_ACCOUNT, Value::String(author.to_string()))?; internal_store.set(WAYPOINT, Value::String(waypoint.to_string()))?; Ok(()) } pub fn into_cached(self) -> PersistentSafetyStorage { if let Storage::CachedStorage(cached_storage) = self.internal_store { PersistentSafetyStorage { internal_store: Storage::CachedStorage(cached_storage), } } else { PersistentSafetyStorage { internal_store: Storage::CachedStorage(CachedStorage::new(self.internal_store)), } } } pub fn new(internal_store: Storage) -> Self { Self { internal_store } } pub fn author(&self) -> Result<Author> { let res = self.internal_store.get(OWNER_ACCOUNT)?; let res = res.value.string()?; std::str::FromStr::from_str(&res) } pub fn consensus_key_for_version( &self, version: Ed25519PublicKey, ) -> Result<Ed25519PrivateKey> { self.internal_store .export_private_key_for_version(CONSENSUS_KEY, version) .map_err(|e| e.into()) } pub fn execution_public_key(&self) -> Result<Ed25519PublicKey> { Ok(self .internal_store .get_public_key(EXECUTION_KEY) .map(|r| r.public_key)?) } pub fn safety_data(&self) -> Result<SafetyData> { Ok(self .internal_store .get(SAFETY_DATA) .and_then(|r| r.value.safety_data())?) } pub fn set_safety_data(&mut self, data: SafetyData) -> Result<()> { counters::set_state("epoch", data.epoch as i64); counters::set_state("last_voted_round", data.last_voted_round as i64); counters::set_state("preferred_round", data.preferred_round as i64); self.internal_store .set(SAFETY_DATA, Value::SafetyData(data))?; Ok(()) } pub fn waypoint(&self) -> Result<Waypoint> { let waypoint = self .internal_store .get(WAYPOINT) .and_then(|r| r.value.string())?; Waypoint::from_str(&waypoint) .map_err(|e| anyhow::anyhow!("Unable to parse waypoint: {}", e)) } pub fn set_waypoint(&mut self, waypoint: &Waypoint) -> Result<()> { self.internal_store .set(WAYPOINT, Value::String(waypoint.to_string()))?; send_struct_log!(logging::safety_log(LogEntry::Waypoint, LogEvent::Update) .data(LogField::Message.as_str(), waypoint)); Ok(()) } #[cfg(any(test, feature = "testing"))] pub fn internal_store(&mut self) -> &mut Storage { &mut self.internal_store } } #[cfg(test)] mod tests { use super::*; use libra_crypto::Uniform; use libra_types::validator_signer::ValidatorSigner; #[test] fn test() { let private_key = ValidatorSigner::from_int(0).private_key().clone(); let mut storage = PersistentSafetyStorage::in_memory( private_key, Ed25519PrivateKey::generate_for_testing(), ); let safety_data = storage.safety_data().unwrap(); assert_eq!(safety_data.epoch, 1); assert_eq!(safety_data.last_voted_round, 0); assert_eq!(safety_data.preferred_round, 0); storage .set_safety_data(SafetyData::new(9, 8, 1, None)) .unwrap(); let safety_data = storage.safety_data().unwrap(); assert_eq!(safety_data.epoch, 9); assert_eq!(safety_data.last_voted_round, 8); assert_eq!(safety_data.preferred_round, 1); } }
use crate::{ counters, logging::{self, LogEntry, LogEvent, LogField}, }; use anyhow::Result; use consensus_types::{common::Author, safety_data::SafetyData}; use libra_crypto::ed25519::{Ed25519PrivateKey, Ed25519PublicKey}; use libra_global_constants::{CONSENSUS_KEY, EXECUTION_KEY, OWNER_ACCOUNT, SAFETY_DATA, WAYPOINT}; use libra_logger::prelude::*; use libra_secure_storage::{ CachedStorage, CryptoStorage, InMemoryStorage, KVStorage, Storage, Value, }; use libra_types::waypoint::Waypoint; use std::str::FromStr; pub struct PersistentSafetyStorage { internal_store: Storage, } impl PersistentSafetyStorage { pub fn in_memory( consensus_private_key: Ed25519PrivateKey, execution_private_key: Ed25519PrivateKey, ) -> Self { let storage = Storage::from(InMemoryStorage::new()); Self::initialize( storage, Author::random(), consensus_private_key, execution_private_key, Waypoint::default(), ) } pub fn initialize( mut internal_store: Storage, author: Author, consensus_private_key: Ed25519PrivateKey, execution_private_key: Ed25519PrivateKey, waypoint: Waypoint, ) -> Self { Self::initialize_( &mut internal_store, author, consensus_private_key, execution_private_key, waypoint, ) .expect("Unable to initialize backend storage"); Self { internal_store } } fn initialize_( internal_store: &mut Storage, author: Author, consensus_private_key: Ed25519PrivateKey, execution_private_key: Ed25519PrivateKey, waypoint: Waypoint, ) -> Result<()> { let result = internal_store.import_private_key(CONSENSUS_KEY, consensus_private_key); if let Err(libra_secure_storage::Error::KeyAlreadyExists(_)) = result { warn!("Attempted to re-initialize existing storage"); return Ok(()); } internal_store.import_private_key(EXECUTION_KEY, execution_private_key)?; internal_store.set( SAFETY_DATA, Value::SafetyData(SafetyData::new(1, 0, 0, None)), )?; internal_store.set(OWNER_ACCOUNT, Value::String(author.to_string()))?; internal_store.set(WAYPOINT, Value::String(waypoint.to_string()))?; Ok(()) } pub fn into_cached(self) -> PersistentSafetyStorage {
} pub fn new(internal_store: Storage) -> Self { Self { internal_store } } pub fn author(&self) -> Result<Author> { let res = self.internal_store.get(OWNER_ACCOUNT)?; let res = res.value.string()?; std::str::FromStr::from_str(&res) } pub fn consensus_key_for_version( &self, version: Ed25519PublicKey, ) -> Result<Ed25519PrivateKey> { self.internal_store .export_private_key_for_version(CONSENSUS_KEY, version) .map_err(|e| e.into()) } pub fn execution_public_key(&self) -> Result<Ed25519PublicKey> { Ok(self .internal_store .get_public_key(EXECUTION_KEY) .map(|r| r.public_key)?) } pub fn safety_data(&self) -> Result<SafetyData> { Ok(self .internal_store .get(SAFETY_DATA) .and_then(|r| r.value.safety_data())?) } pub fn set_safety_data(&mut self, data: SafetyData) -> Result<()> { counters::set_state("epoch", data.epoch as i64); counters::set_state("last_voted_round", data.last_voted_round as i64); counters::set_state("preferred_round", data.preferred_round as i64); self.internal_store .set(SAFETY_DATA, Value::SafetyData(data))?; Ok(()) } pub fn waypoint(&self) -> Result<Waypoint> { let waypoint = self .internal_store .get(WAYPOINT) .and_then(|r| r.value.string())?; Waypoint::from_str(&waypoint) .map_err(|e| anyhow::anyhow!("Unable to parse waypoint: {}", e)) } pub fn set_waypoint(&mut self, waypoint: &Waypoint) -> Result<()> { self.internal_store .set(WAYPOINT, Value::String(waypoint.to_string()))?; send_struct_log!(logging::safety_log(LogEntry::Waypoint, LogEvent::Update) .data(LogField::Message.as_str(), waypoint)); Ok(()) } #[cfg(any(test, feature = "testing"))] pub fn internal_store(&mut self) -> &mut Storage { &mut self.internal_store } } #[cfg(test)] mod tests { use super::*; use libra_crypto::Uniform; use libra_types::validator_signer::ValidatorSigner; #[test] fn test() { let private_key = ValidatorSigner::from_int(0).private_key().clone(); let mut storage = PersistentSafetyStorage::in_memory( private_key, Ed25519PrivateKey::generate_for_testing(), ); let safety_data = storage.safety_data().unwrap(); assert_eq!(safety_data.epoch, 1); assert_eq!(safety_data.last_voted_round, 0); assert_eq!(safety_data.preferred_round, 0); storage .set_safety_data(SafetyData::new(9, 8, 1, None)) .unwrap(); let safety_data = storage.safety_data().unwrap(); assert_eq!(safety_data.epoch, 9); assert_eq!(safety_data.last_voted_round, 8); assert_eq!(safety_data.preferred_round, 1); } }
if let Storage::CachedStorage(cached_storage) = self.internal_store { PersistentSafetyStorage { internal_store: Storage::CachedStorage(cached_storage), } } else { PersistentSafetyStorage { internal_store: Storage::CachedStorage(CachedStorage::new(self.internal_store)), } }
if_condition
[ { "content": "/// Same as `to_bytes` but write directly into an `std::io::Write` object.\n\npub fn serialize_into<W, T>(write: &mut W, value: &T) -> Result<()>\n\nwhere\n\n W: std::io::Write,\n\n T: ?Sized + Serialize,\n\n{\n\n let serializer = Serializer::new(write, crate::MAX_CONTAINER_DEPTH);\n\n value.serialize(serializer)\n\n}\n\n\n", "file_path": "common/lcs/src/ser.rs", "rank": 0, "score": 390741.1805614043 }, { "content": "#[cfg(any(test, feature = \"fuzzing\"))]\n\npub fn keypair_strategy() -> impl Strategy<Value = KeyPair<Ed25519PrivateKey, Ed25519PublicKey>> {\n\n test_utils::uniform_keypair_strategy::<Ed25519PrivateKey, Ed25519PublicKey>()\n\n}\n\n\n\n#[cfg(any(test, feature = \"fuzzing\"))]\n\nuse proptest::prelude::*;\n\n\n\n#[cfg(any(test, feature = \"fuzzing\"))]\n\nimpl proptest::arbitrary::Arbitrary for Ed25519PublicKey {\n\n type Parameters = ();\n\n type Strategy = BoxedStrategy<Self>;\n\n\n\n fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy {\n\n crate::test_utils::uniform_keypair_strategy::<Ed25519PrivateKey, Ed25519PublicKey>()\n\n .prop_map(|v| v.public_key)\n\n .boxed()\n\n }\n\n}\n", "file_path": "crypto/crypto/src/ed25519.rs", "rank": 1, "score": 375294.903659512 }, { "content": "#[cfg(any(test, feature = \"fuzzing\"))]\n\npub fn random_serializable_struct() -> impl Strategy<Value = TestLibraCrypto> {\n\n (String::arbitrary()).prop_map(TestLibraCrypto).no_shrink()\n\n}\n", "file_path": "crypto/crypto/src/test_utils.rs", "rank": 2, "score": 370945.6834967121 }, { "content": "pub fn struct_ref_instantiation(state: &mut AbstractState) -> Result<Vec<SignatureToken>, VMError> {\n\n let token = state.register_move().unwrap().token;\n\n if let Some(type_actuals) = get_type_actuals_from_reference(&token) {\n\n Ok(type_actuals)\n\n } else {\n\n Err(VMError::new(\"Invalid field borrow\".to_string()))\n\n }\n\n}\n\n\n", "file_path": "language/testing-infra/test-generation/src/transitions.rs", "rank": 3, "score": 366530.36021461943 }, { "content": "fn next_number(initial: char, mut it: impl Iterator<Item = char>) -> Result<(Token, usize)> {\n\n let mut num = String::new();\n\n num.push(initial);\n\n loop {\n\n match it.next() {\n\n Some(c) if c.is_ascii_digit() => num.push(c),\n\n Some(c) if c.is_alphanumeric() => {\n\n let mut suffix = String::new();\n\n suffix.push(c);\n\n loop {\n\n match it.next() {\n\n Some(c) if c.is_ascii_alphanumeric() => suffix.push(c),\n\n _ => {\n\n let len = num.len() + suffix.len();\n\n let tok = match suffix.as_str() {\n\n \"u8\" => Token::U8(num),\n\n \"u64\" => Token::U64(num),\n\n \"u128\" => Token::U128(num),\n\n _ => bail!(\"invalid suffix\"),\n\n };\n", "file_path": "language/move-core/types/src/parser.rs", "rank": 4, "score": 364084.87752882077 }, { "content": "/// Executes all storage tests on a given storage backend.\n\npub fn execute_all_storage_tests(storage: &mut Storage) {\n\n for test in STORAGE_TESTS.iter() {\n\n test(storage);\n\n storage.reset_and_clear().unwrap();\n\n }\n\n}\n\n\n", "file_path": "secure/storage/src/tests/suite.rs", "rank": 5, "score": 362757.17180037114 }, { "content": "pub fn run(mut args: Args, xctx: XContext) -> Result<()> {\n\n args.args.extend(args.testname.clone());\n\n let config = xctx.config();\n\n\n\n let env_vars: &[(&str, &str)] = if args.html_cov_dir.is_some() {\n\n info!(\"Running \\\"cargo clean\\\" before collecting coverage\");\n\n let mut clean_cmd = Command::new(\"cargo\");\n\n clean_cmd.arg(\"clean\");\n\n clean_cmd.output()?;\n\n &[\n\n // A way to use -Z (unstable) flags 
with the stable compiler. See below.\n\n (\"RUSTC_BOOTSTRAP\", \"1\"),\n\n // Recommend setting for grcov, avoids using the cargo cache.\n\n (\"CARGO_INCREMENTAL\", \"0\"),\n\n // Recommend flags for use with grcov, with these flags removed: -Copt-level=0, -Clink-dead-code.\n\n // for more info see: https://github.com/mozilla/grcov#example-how-to-generate-gcda-fiels-for-a-rust-project\n\n (\n\n \"RUSTFLAGS\",\n\n \"-Zprofile -Ccodegen-units=1 -Coverflow-checks=off\",\n\n ),\n", "file_path": "devtools/x/src/test.rs", "rank": 6, "score": 358410.6620219805 }, { "content": "pub fn run(mut args: Args, xctx: XContext) -> Result<()> {\n\n args.args.extend(args.benchname.clone());\n\n let config = xctx.config();\n\n\n\n let mut direct_args = Vec::new();\n\n if args.no_run {\n\n direct_args.push(OsString::from(\"--no-run\"));\n\n };\n\n\n\n let cmd = CargoCommand::Bench(config.cargo_config(), direct_args.as_slice(), &args.args);\n\n let base_args = CargoArgs::default();\n\n\n\n if !args.package.is_empty() {\n\n cmd.run_on_packages(args.package.iter(), &base_args)?;\n\n } else if utils::project_is_root(&xctx)? {\n\n cmd.run_on_all_packages(&base_args)?;\n\n } else {\n\n cmd.run_on_local_package(&base_args)?;\n\n };\n\n Ok(())\n\n}\n", "file_path": "devtools/x/src/bench.rs", "rank": 7, "score": 358410.6620219805 }, { "content": "pub fn arb_mock_genesis() -> impl Strategy<Value = (TransactionToCommit, LedgerInfoWithSignatures)>\n\n{\n\n arb_blocks_to_commit_impl(\n\n 1, /* num_accounts */\n\n 1, /* max_txn_per_block */\n\n 1, /* max_blocks */\n\n )\n\n .prop_map(|blocks| {\n\n let (block, ledger_info_with_sigs) = &blocks[0];\n\n\n\n (block[0].clone(), ledger_info_with_sigs.clone())\n\n })\n\n}\n", "file_path": "storage/libradb/src/test_helper.rs", "rank": 8, "score": 356179.74759806355 }, { "content": "/// This produces a round that is often higher than the parent, but not\n\n/// too high\n\npub fn some_round(initial_round: Round) -> impl Strategy<Value = Round> {\n\n prop_oneof![\n\n 9 => Just(1 + initial_round),\n\n 1 => bigger_round(initial_round),\n\n ]\n\n}\n\n\n\nprop_compose! {\n\n /// This creates a child with a parent on its left, and a QC on the left\n\n /// of the parent. 
This, depending on branching, does not require the\n\n /// QC to always be an ancestor or the parent to always be the highest QC\n\n fn child(\n\n signer_strategy: impl Strategy<Value = ValidatorSigner>,\n\n block_forest_strategy: impl Strategy<Value = LinearizedBlockForest>,\n\n )(\n\n signer in signer_strategy,\n\n (forest_vec, parent_idx, qc_idx) in block_forest_strategy\n\n .prop_flat_map(|forest_vec| {\n\n let len = forest_vec.len();\n\n (Just(forest_vec), 0..len)\n", "file_path": "consensus/consensus-types/src/block_test_utils.rs", "rank": 9, "score": 356176.4160621243 }, { "content": "pub fn run(args: Args, xctx: XContext) -> crate::Result<()> {\n\n let config = xctx.config();\n\n let summaries_config = config.summaries_config();\n\n let workspace_config = config.workspace_config();\n\n let pkg_graph = xctx.core().package_graph()?;\n\n let feature_graph = pkg_graph.feature_graph();\n\n let default_members = xctx.core().default_members()?;\n\n\n\n let default_opts = summaries_config.default.to_cargo_options(pkg_graph)?;\n\n let full_opts = summaries_config.full.to_cargo_options(pkg_graph)?;\n\n\n\n let out_dir = args\n\n .out_dir\n\n .unwrap_or_else(|| xctx.core().project_root().join(Args::DEFAULT_OUT_DIR));\n\n\n\n fs::create_dir_all(&out_dir)?;\n\n\n\n // TODO: figure out a way to unify this with WorkspaceSubset.\n\n\n\n // Create summaries for:\n", "file_path": "devtools/x/src/generate_summaries.rs", "rank": 10, "score": 355089.04399809346 }, { "content": "pub fn run(args: Args, xctx: XContext) -> crate::Result<()> {\n\n let workspace_config = xctx.config().workspace_config();\n\n\n\n let project_linters: &[&dyn ProjectLinter] = &[\n\n &guppy::BannedDeps::new(&workspace_config.banned_deps),\n\n &guppy::DirectDepDups,\n\n ];\n\n\n\n let package_linters: &[&dyn PackageLinter] = &[\n\n &guppy::EnforcedAttributes::new(&workspace_config.enforced_attributes),\n\n &guppy::CrateNamesPaths,\n\n &guppy::IrrelevantBuildDeps,\n\n &guppy::OverlayFeatures::new(&workspace_config.overlay),\n\n &guppy::WorkspaceHack,\n\n &workspace_classify::DefaultOrTestOnly::new(&workspace_config.test_only),\n\n ];\n\n\n\n let content_linters: &[&dyn ContentLinter] = &[\n\n &license::LicenseHeader,\n\n &toml::RootToml,\n", "file_path": "devtools/x/src/lint/mod.rs", "rank": 11, "score": 355089.04399809346 }, { "content": "pub fn run(args: Args, _xctx: XContext) -> crate::Result<()> {\n\n let base_summary_text = fs::read_to_string(&args.base_summary)?;\n\n let base_summary = Summary::parse(&base_summary_text)?;\n\n let compare_summary_text = fs::read_to_string(&args.compare_summary)?;\n\n let compare_summary = Summary::parse(&compare_summary_text)?;\n\n\n\n let summary_diff = SummaryDiff::new(&base_summary, &compare_summary);\n\n println!(\"{}\", summary_diff.report());\n\n Ok(())\n\n}\n", "file_path": "devtools/x/src/diff_summary.rs", "rank": 12, "score": 355089.04399809346 }, { "content": "// This function generates an arbitrary serde_json::Value.\n\npub fn arb_json_value() -> impl Strategy<Value = Value> {\n\n let leaf = prop_oneof![\n\n Just(Value::Null),\n\n any::<bool>().prop_map(Value::Bool),\n\n any::<f64>().prop_map(|n| serde_json::json!(n)),\n\n any::<String>().prop_map(Value::String),\n\n ];\n\n\n\n leaf.prop_recursive(\n\n 10, // 10 levels deep\n\n 256, // Maximum size of 256 nodes\n\n 10, // Up to 10 items per collection\n\n |inner| {\n\n prop_oneof![\n\n prop::collection::vec(inner.clone(), 0..10).prop_map(Value::Array),\n\n prop::collection::hash_map(any::<String>(), inner, 0..10)\n\n 
.prop_map(|map| serde_json::json!(map)),\n\n ]\n\n },\n\n )\n\n}\n", "file_path": "types/src/proptest_types.rs", "rank": 13, "score": 353989.9088274516 }, { "content": "/// Initializes struct logger from STRUCT_LOG_FILE env var.\n\n/// If STRUCT_LOG_FILE is set, STRUCT_LOG_TCP_ADDR will be ignored.\n\n/// Can only be called once\n\npub fn init_struct_log_from_env() -> Result<(), InitLoggerError> {\n\n if let Ok(file) = env::var(\"STRUCT_LOG_FILE\") {\n\n init_file_struct_log(file)\n\n } else if let Ok(address) = env::var(\"STRUCT_LOG_TCP_ADDR\") {\n\n init_tcp_struct_log(address)\n\n } else if let Ok(address) = env::var(\"STRUCT_LOG_UDP_ADDR\") {\n\n // Remove once all usages of STRUCT_LOG_UDP_ADDR are transferred over\n\n init_tcp_struct_log(address)\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "common/logger/src/struct_log.rs", "rank": 14, "score": 351294.64175913174 }, { "content": "/// Initialize struct logger sink that prints all structured logs to stdout\n\n/// Can only be called once\n\npub fn init_println_struct_log() -> Result<(), InitLoggerError> {\n\n let logger = PrintStructLog {};\n\n let logger = Box::leak(Box::new(logger));\n\n set_struct_logger(logger)\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum InitLoggerError {\n\n IoError(io::Error),\n\n StructLoggerAlreadySet,\n\n}\n\n\n", "file_path": "common/logger/src/struct_log.rs", "rank": 15, "score": 351293.46659706096 }, { "content": "pub fn arb_metadata_files() -> impl Strategy<Value = Vec<(ShellSafeName, TextLine)>> {\n\n vec(any::<(ShellSafeName, TextLine)>(), 0..10)\n\n}\n\n\n\nasync fn wait_for_dentries(path: &PathBuf, num_of_files: usize) {\n\n // sync\n\n tokio::process::Command::new(\"sync\")\n\n .stdin(Stdio::null())\n\n .stdout(Stdio::null())\n\n .stderr(Stdio::null())\n\n .spawn()\n\n .unwrap()\n\n .await\n\n .unwrap();\n\n\n\n // try every 10ms, for 10 seconds at most\n\n for n in 1..=1000usize {\n\n let output = tokio::process::Command::new(\"sh\")\n\n .arg(\"-c\")\n\n .arg(&format!(\n", "file_path": "storage/backup/backup-cli/src/storage/test_util.rs", "rank": 16, "score": 351230.6303589442 }, { "content": "fn display_list_of_items<T, I>(items: I, f: &mut fmt::Formatter) -> fmt::Result\n\nwhere\n\n T: Display,\n\n I: IntoIterator<Item = T>,\n\n{\n\n write!(f, \"[\")?;\n\n let mut items = items.into_iter();\n\n if let Some(x) = items.next() {\n\n write!(f, \"{}\", x)?;\n\n for x in items {\n\n write!(f, \", {}\", x)?;\n\n }\n\n }\n\n write!(f, \"]\")\n\n}\n\n\n\nimpl Display for ContainerRef {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n Self::Local(c) => write!(f, \"({}, {})\", c.rc_count(), c),\n", "file_path": "language/move-vm/types/src/values/values_impl.rs", "rank": 17, "score": 351176.19297100697 }, { "content": "// Parse a use declaration:\n\n// UseDecl =\n\n// \"use\" <ModuleIdent> <UseAlias> \";\" |\n\n// \"use\" <ModuleIdent> :: <UseMember> \";\" |\n\n// \"use\" <ModuleIdent> :: \"{\" Comma<UseMember> \"}\" \";\"\n\nfn parse_use_decl<'input>(tokens: &mut Lexer<'input>) -> Result<Use, Error> {\n\n consume_token(tokens, Tok::Use)?;\n\n let ident = parse_module_ident(tokens)?;\n\n let alias_opt = parse_use_alias(tokens)?;\n\n let use_ = match (&alias_opt, tokens.peek()) {\n\n (None, Tok::ColonColon) => {\n\n consume_token(tokens, Tok::ColonColon)?;\n\n let sub_uses = match tokens.peek() {\n\n Tok::LBrace => parse_comma_list(\n\n tokens,\n\n Tok::LBrace,\n\n Tok::RBrace,\n\n parse_use_member,\n\n \"a module member alias\",\n\n )?,\n\n _ => 
vec![parse_use_member(tokens)?],\n\n };\n\n Use::Members(ident, sub_uses)\n\n }\n\n _ => Use::Module(ident, alias_opt.map(ModuleName)),\n\n };\n\n consume_token(tokens, Tok::Semicolon)?;\n\n Ok(use_)\n\n}\n\n\n", "file_path": "language/move-lang/src/parser/syntax.rs", "rank": 18, "score": 350685.0091518748 }, { "content": "pub fn assert_single_value(mut values: Values) -> Value {\n\n assert!(values.len() == 1);\n\n values.pop().unwrap()\n\n}\n\n\n\nimpl Value {\n\n pub fn is_ref(&self) -> bool {\n\n match self {\n\n Value::Ref(_) => true,\n\n Value::NonRef => false,\n\n }\n\n }\n\n\n\n pub fn as_vref(&self) -> Option<RefID> {\n\n match self {\n\n Value::Ref(id) => Some(*id),\n\n Value::NonRef => None,\n\n }\n\n }\n\n\n", "file_path": "language/move-lang/src/cfgir/borrows/state.rs", "rank": 19, "score": 347002.091735217 }, { "content": "pub fn to_x25519(edkey: Ed25519PublicKey) -> Result<x25519::PublicKey, Error> {\n\n x25519::PublicKey::from_ed25519_public_bytes(&edkey.to_bytes())\n\n .map_err(|e| Error::UnexpectedError(e.to_string()))\n\n}\n", "file_path": "config/management/src/storage.rs", "rank": 20, "score": 346817.6364769803 }, { "content": "pub fn read_u8(cursor: &mut Cursor<&[u8]>) -> Result<u8> {\n\n let mut buf = [0; 1];\n\n cursor.read_exact(&mut buf)?;\n\n Ok(buf[0])\n\n}\n\n\n", "file_path": "language/vm/src/file_format_common.rs", "rank": 21, "score": 346662.2103623365 }, { "content": "pub fn read_u32(cursor: &mut Cursor<&[u8]>) -> Result<u32> {\n\n let mut buf = [0; 4];\n\n cursor.read_exact(&mut buf)?;\n\n Ok(u32::from_le_bytes(buf))\n\n}\n\n\n", "file_path": "language/vm/src/file_format_common.rs", "rank": 22, "score": 346662.2103623365 }, { "content": "pub fn read_uleb128_as_u64(cursor: &mut Cursor<&[u8]>) -> Result<u64> {\n\n let mut value: u64 = 0;\n\n let mut shift = 0;\n\n while let Ok(byte) = read_u8(cursor) {\n\n let cur = (byte & 0x7f) as u64;\n\n if (cur << shift) >> shift != cur {\n\n bail!(\"invalid ULEB128 repr for usize\");\n\n }\n\n value |= cur << shift;\n\n\n\n if (byte & 0x80) == 0 {\n\n if shift > 0 && cur == 0 {\n\n bail!(\"invalid ULEB128 repr for usize\");\n\n }\n\n return Ok(value);\n\n }\n\n\n\n shift += 7;\n\n if shift > size_of::<u64>() * 8 {\n\n break;\n\n }\n\n }\n\n bail!(\"invalid ULEB128 repr for usize\");\n\n}\n\n\n", "file_path": "language/vm/src/file_format_common.rs", "rank": 23, "score": 343026.49043422495 }, { "content": "/// Record sample values for crypto types used by transactions.\n\nfn trace_crypto_values(tracer: &mut Tracer, samples: &mut Samples) -> Result<()> {\n\n let mut hasher = TestOnlyHasher::default();\n\n hasher.update(b\"Test message\");\n\n let hashed_message = hasher.finish();\n\n\n\n let message = TestLibraCrypto(\"Hello, World\".to_string());\n\n\n\n let mut rng: StdRng = SeedableRng::from_seed([0; 32]);\n\n let private_key = Ed25519PrivateKey::generate(&mut rng);\n\n let public_key: Ed25519PublicKey = (&private_key).into();\n\n let signature = private_key.sign(&message);\n\n\n\n tracer.trace_value(samples, &hashed_message)?;\n\n tracer.trace_value(samples, &public_key)?;\n\n tracer.trace_value::<MultiEd25519PublicKey>(samples, &public_key.into())?;\n\n tracer.trace_value(samples, &signature)?;\n\n tracer.trace_value::<MultiEd25519Signature>(samples, &signature.into())?;\n\n Ok(())\n\n}\n\n\n", "file_path": "testsuite/generate-format/src/libra.rs", "rank": 24, "score": 339307.4028622044 }, { "content": "/// Record sample values for crypto types used by consensus.\n\nfn trace_crypto_values(tracer: &mut Tracer, samples: 
&mut Samples) -> Result<()> {\n\n let message = TestLibraCrypto(\"Hello, World\".to_string());\n\n\n\n let mut rng: StdRng = SeedableRng::from_seed([0; 32]);\n\n let private_key = Ed25519PrivateKey::generate(&mut rng);\n\n let public_key: Ed25519PublicKey = (&private_key).into();\n\n let signature = private_key.sign(&message);\n\n\n\n tracer.trace_value(samples, &public_key)?;\n\n tracer.trace_value(samples, &signature)?;\n\n tracer.trace_value::<MultiEd25519PublicKey>(samples, &public_key.into())?;\n\n tracer.trace_value::<MultiEd25519Signature>(samples, &signature.into())?;\n\n Ok(())\n\n}\n\n\n", "file_path": "testsuite/generate-format/src/consensus.rs", "rank": 25, "score": 339307.4028622044 }, { "content": "/// Record sample values for crypto types used by network.\n\nfn trace_crypto_values(tracer: &mut Tracer, samples: &mut Samples) -> Result<()> {\n\n let mut rng: StdRng = SeedableRng::from_seed([0; 32]);\n\n let private_key = PrivateKey::generate(&mut rng);\n\n let public_key: PublicKey = (&private_key).into();\n\n\n\n tracer.trace_value(samples, &public_key)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "testsuite/generate-format/src/network.rs", "rank": 26, "score": 339307.4028622044 }, { "content": "fn lvalues<'a>(context: &mut Context, al: impl IntoIterator<Item = &'a N::LValue>) {\n\n al.into_iter().for_each(|a| lvalue(context, a))\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 27, "score": 336287.2442434189 }, { "content": "#[cfg(any(test, feature = \"fuzzing\"))]\n\npub fn uniform_keypair_strategy<Priv, Pub>() -> impl Strategy<Value = KeyPair<Priv, Pub>>\n\nwhere\n\n Pub: Serialize + for<'a> From<&'a Priv>,\n\n Priv: Serialize + Uniform,\n\n{\n\n // The no_shrink is because keypairs should be fixed -- shrinking would cause a different\n\n // keypair to be generated, which appears to not be very useful.\n\n any::<[u8; 32]>()\n\n .prop_map(|seed| {\n\n let mut rng = StdRng::from_seed(seed);\n\n KeyPair::<Priv, Pub>::generate(&mut rng)\n\n })\n\n .no_shrink()\n\n}\n\n\n\n/// This struct provides a means of testing signing and verification through\n\n/// LCS serialization and domain separation\n\n#[cfg(any(test, feature = \"fuzzing\"))]\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct TestLibraCrypto(pub String);\n", "file_path": "crypto/crypto/src/test_utils.rs", "rank": 28, "score": 335433.97630485124 }, { "content": "/// Serializes a `StructHandle`.\n\n///\n\n/// A `StructHandle` gets serialized as follows:\n\n/// - `StructHandle.module` as a ULEB128 (index into the `ModuleHandle` table)\n\n/// - `StructHandle.name` as a ULEB128 (index into the `IdentifierPool`)\n\n/// - `StructHandle.is_nominal_resource` as a 1 byte boolean (0 for false, 1 for true)\n\nfn serialize_struct_handle(binary: &mut BinaryData, struct_handle: &StructHandle) -> Result<()> {\n\n serialize_module_handle_index(binary, &struct_handle.module)?;\n\n serialize_identifier_index(binary, &struct_handle.name)?;\n\n serialize_nominal_resource_flag(binary, struct_handle.is_nominal_resource)?;\n\n serialize_kinds(binary, &struct_handle.type_parameters)\n\n}\n\n\n", "file_path": "language/vm/src/serializer.rs", "rank": 29, "score": 334552.10956440103 }, { "content": "/// Initializes struct logger sink that stream logs through TCP protocol.\n\n/// Can only be called once\n\npub fn init_tcp_struct_log(address: String) -> Result<(), InitLoggerError> {\n\n let logger = TCPStructLog::start_new(address).map_err(InitLoggerError::IoError)?;\n\n let logger = Box::leak(Box::new(logger));\n\n 
set_struct_logger(logger)\n\n}\n\n\n", "file_path": "common/logger/src/struct_log.rs", "rank": 30, "score": 331562.47621767 }, { "content": "pub fn increment_query(method: &str, result: &str) {\n\n QUERY_COUNTER.with_label_values(&[method, result]).inc();\n\n}\n\n\n", "file_path": "consensus/safety-rules/src/counters.rs", "rank": 31, "score": 331317.9229673058 }, { "content": "/// Initializes struct logger sink that writes to specified file.\n\n/// Can only be called once\n\npub fn init_file_struct_log(file_path: String) -> Result<(), InitLoggerError> {\n\n let logger = FileStructLog::start_new(file_path).map_err(InitLoggerError::IoError)?;\n\n let logger = Box::leak(Box::new(logger));\n\n set_struct_logger(logger)\n\n}\n\n\n", "file_path": "common/logger/src/struct_log.rs", "rank": 32, "score": 328170.0766104352 }, { "content": "/// Private helper method to generate a new ed25519 key pair using entropy from the OS.\n\nfn new_ed25519_key_pair() -> Result<(Ed25519PrivateKey, Ed25519PublicKey), Error> {\n\n let mut seed_rng = OsRng;\n\n let mut rng = rand::rngs::StdRng::from_seed(seed_rng.gen());\n\n let private_key = Ed25519PrivateKey::generate(&mut rng);\n\n let public_key = private_key.public_key();\n\n Ok((private_key, public_key))\n\n}\n\n\n", "file_path": "secure/storage/src/crypto_kv_storage.rs", "rank": 33, "score": 327379.349772568 }, { "content": "/// Processes a generic response returned by a vault request. This function simply just checks\n\n/// that the response was not an error and calls response.into_string() to clear the ureq stream.\n\npub fn process_generic_response(resp: Response) -> Result<(), Error> {\n\n if resp.ok() {\n\n // Explicitly clear buffer so the stream can be re-used.\n\n resp.into_string()?;\n\n Ok(())\n\n } else {\n\n Err(resp.into())\n\n }\n\n}\n\n\n", "file_path": "secure/storage/vault/src/lib.rs", "rank": 34, "score": 326687.9672185061 }, { "content": "/// Same as `to_bytes` but only return the size of the serialized bytes.\n\npub fn serialized_size<T>(value: &T) -> Result<usize>\n\nwhere\n\n T: ?Sized + Serialize,\n\n{\n\n let mut counter = WriteCounter(0);\n\n serialize_into(&mut counter, value)?;\n\n Ok(counter.0)\n\n}\n\n\n", "file_path": "common/lcs/src/ser.rs", "rank": 35, "score": 325808.3019172678 }, { "content": "/// Verify HashValues work correctly\n\nfn test_hash_value(storage: &mut Storage) {\n\n let hash_value_key = \"HashValue\";\n\n let hash_value_value = HashValue::random();\n\n\n\n storage\n\n .set(hash_value_key, Value::HashValue(hash_value_value))\n\n .unwrap();\n\n let out_value = storage\n\n .get(hash_value_key)\n\n .unwrap()\n\n .value\n\n .hash_value()\n\n .unwrap();\n\n assert_eq!(hash_value_value, out_value);\n\n}\n\n\n", "file_path": "secure/storage/src/tests/suite.rs", "rank": 36, "score": 324207.0098165259 }, { "content": "/// This produces the genesis block\n\npub fn genesis_strategy() -> impl Strategy<Value = Block> {\n\n Just(Block::make_genesis_block())\n\n}\n\n\n\nprop_compose! 
{\n\n /// This produces an unmoored block, with arbitrary parent & QC ancestor\n\n pub fn unmoored_block(ancestor_id_strategy: impl Strategy<Value = HashValue>)(\n\n ancestor_id in ancestor_id_strategy,\n\n )(\n\n block in new_proposal(\n\n ancestor_id,\n\n Round::arbitrary(),\n\n proptests::arb_signer(),\n\n certificate_for_genesis(),\n\n )\n\n ) -> Block {\n\n block\n\n }\n\n}\n\n\n", "file_path": "consensus/consensus-types/src/block_test_utils.rs", "rank": 37, "score": 323678.3330142484 }, { "content": "/// Offers the genesis block.\n\npub fn leaf_strategy() -> impl Strategy<Value = Block> {\n\n genesis_strategy().boxed()\n\n}\n\n\n\nprop_compose! {\n\n /// This produces a block with an invalid id (and therefore signature)\n\n /// given a valid block\n\n pub fn fake_id(block_strategy: impl Strategy<Value = Block>)\n\n (fake_id in HashValue::arbitrary(),\n\n block in block_strategy) -> Block {\n\n Block {\n\n id: fake_id,\n\n block_data: BlockData::new_proposal(\n\n block.payload().unwrap().clone(),\n\n block.author().unwrap(),\n\n block.round(),\n\n get_current_timestamp().as_micros() as u64,\n\n block.quorum_cert().clone(),\n\n ),\n\n signature: Some(block.signature().unwrap().clone()),\n", "file_path": "consensus/consensus-types/src/block_test_utils.rs", "rank": 38, "score": 323678.3330142484 }, { "content": "#[cfg(any(test, feature = \"fuzzing\"))]\n\npub fn arb_libranet_addr() -> impl Strategy<Value = NetworkAddress> {\n\n let arb_transport_protos = prop_oneof![\n\n any::<u16>().prop_map(|port| vec![Protocol::Memory(port)]),\n\n any::<(Ipv4Addr, u16)>()\n\n .prop_map(|(addr, port)| vec![Protocol::Ip4(addr), Protocol::Tcp(port)]),\n\n any::<(Ipv6Addr, u16)>()\n\n .prop_map(|(addr, port)| vec![Protocol::Ip6(addr), Protocol::Tcp(port)]),\n\n any::<(DnsName, u16)>()\n\n .prop_map(|(name, port)| vec![Protocol::Dns(name), Protocol::Tcp(port)]),\n\n any::<(DnsName, u16)>()\n\n .prop_map(|(name, port)| vec![Protocol::Dns4(name), Protocol::Tcp(port)]),\n\n any::<(DnsName, u16)>()\n\n .prop_map(|(name, port)| vec![Protocol::Dns6(name), Protocol::Tcp(port)]),\n\n ];\n\n let arb_libranet_protos = any::<(x25519::PublicKey, u8)>()\n\n .prop_map(|(pubkey, hs)| vec![Protocol::NoiseIK(pubkey), Protocol::Handshake(hs)]);\n\n\n\n (arb_transport_protos, arb_libranet_protos).prop_map(\n\n |(mut transport_protos, mut libranet_protos)| {\n\n transport_protos.append(&mut libranet_protos);\n", "file_path": "network/network-address/src/lib.rs", "rank": 39, "score": 323678.3330142484 }, { "content": "fn serialize_struct_def_index(binary: &mut BinaryData, idx: &StructDefinitionIndex) -> Result<()> {\n\n write_as_uleb128(binary, idx.0, STRUCT_DEF_INDEX_MAX)\n\n}\n\n\n", "file_path": "language/vm/src/serializer.rs", "rank": 40, "score": 323428.5883573538 }, { "content": "fn serialize_struct_handle_index(binary: &mut BinaryData, idx: &StructHandleIndex) -> Result<()> {\n\n write_as_uleb128(binary, idx.0, STRUCT_HANDLE_INDEX_MAX)\n\n}\n\n\n", "file_path": "language/vm/src/serializer.rs", "rank": 41, "score": 323428.5883573538 }, { "content": "/// Processes the response returned by a transit key restore vault request.\n\npub fn process_transit_restore_response(resp: Response) -> Result<(), Error> {\n\n match resp.status() {\n\n 204 => {\n\n // Explicitly clear buffer so the stream can be re-used.\n\n resp.into_string()?;\n\n Ok(())\n\n }\n\n _ => Err(resp.into()),\n\n }\n\n}\n\n\n", "file_path": "secure/storage/vault/src/lib.rs", "rank": 42, "score": 322782.95526304765 }, { "content": "/// generate_corpus produces 
an arbitrary transaction to submit to JSON RPC service\n\npub fn generate_corpus(gen: &mut ValueGenerator) -> Vec<u8> {\n\n // use proptest to generate a SignedTransaction\n\n let txn = gen.generate(proptest::arbitrary::any::<SignedTransaction>());\n\n let payload = hex::encode(lcs::to_bytes(&txn).unwrap());\n\n let request =\n\n serde_json::json!({\"jsonrpc\": \"2.0\", \"method\": \"submit\", \"params\": [payload], \"id\": 1});\n\n serde_json::to_vec(&request).expect(\"failed to convert JSON to byte array\")\n\n}\n\n\n", "file_path": "json-rpc/src/fuzzing.rs", "rank": 43, "score": 321538.84970834915 }, { "content": "/// Serialize the given data structure as a `Vec<u8>` of LCS.\n\n///\n\n/// Serialization can fail if `T`'s implementation of `Serialize` decides to\n\n/// fail, if `T` contains sequences which are longer than `MAX_SEQUENCE_LENGTH`,\n\n/// or if `T` attempts to serialize an unsupported datatype such as a f32,\n\n/// f64, or char.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use libra_canonical_serialization::to_bytes;\n\n/// use serde::Serialize;\n\n///\n\n/// #[derive(Serialize)]\n\n/// struct Ip([u8; 4]);\n\n///\n\n/// #[derive(Serialize)]\n\n/// struct Port(u16);\n\n///\n\n/// #[derive(Serialize)]\n\n/// struct Service {\n\n/// ip: Ip,\n\n/// port: Vec<Port>,\n\n/// connection_max: Option<u32>,\n\n/// enabled: bool,\n\n/// }\n\n///\n\n/// let service = Service {\n\n/// ip: Ip([192, 168, 1, 1]),\n\n/// port: vec![Port(8001), Port(8002), Port(8003)],\n\n/// connection_max: Some(5000),\n\n/// enabled: false,\n\n/// };\n\n///\n\n/// let bytes = to_bytes(&service).unwrap();\n\n/// let expected = vec![\n\n/// 0xc0, 0xa8, 0x01, 0x01, 0x03, 0x41, 0x1f, 0x42,\n\n/// 0x1f, 0x43, 0x1f, 0x01, 0x88, 0x13, 0x00, 0x00,\n\n/// 0x00,\n\n/// ];\n\n/// assert_eq!(bytes, expected);\n\n/// ```\n\npub fn to_bytes<T>(value: &T) -> Result<Vec<u8>>\n\nwhere\n\n T: ?Sized + Serialize,\n\n{\n\n let mut output = Vec::new();\n\n serialize_into(&mut output, value)?;\n\n Ok(output)\n\n}\n\n\n", "file_path": "common/lcs/src/ser.rs", "rank": 44, "score": 320883.02686023194 }, { "content": "/// This test tries to get and set non-existent keys in storage and asserts that the correct\n\n/// errors are returned on these operations.\n\nfn test_get_non_existent(storage: &mut Storage) {\n\n assert_eq!(\n\n storage.get(CRYPTO_KEY).unwrap_err(),\n\n Error::KeyNotSet(CRYPTO_KEY.to_string())\n\n );\n\n assert_eq!(\n\n storage.get(U64_KEY).unwrap_err(),\n\n Error::KeyNotSet(U64_KEY.to_string())\n\n );\n\n}\n\n\n", "file_path": "secure/storage/src/tests/suite.rs", "rank": 45, "score": 320481.35134340124 }, { "content": "// This is exact copy of similar function in log crate\n\n/// Sets structured logger\n\npub fn set_struct_logger(logger: &'static dyn StructLogSink) -> Result<(), InitLoggerError> {\n\n unsafe {\n\n match STRUCT_LOGGER_STATE.compare_and_swap(UNINITIALIZED, INITIALIZING, Ordering::SeqCst) {\n\n UNINITIALIZED => {\n\n STRUCT_LOGGER = logger;\n\n STRUCT_LOGGER_STATE.store(INITIALIZED, Ordering::SeqCst);\n\n Ok(())\n\n }\n\n INITIALIZING => {\n\n while STRUCT_LOGGER_STATE.load(Ordering::SeqCst) == INITIALIZING {}\n\n Err(InitLoggerError::StructLoggerAlreadySet)\n\n }\n\n _ => Err(InitLoggerError::StructLoggerAlreadySet),\n\n }\n\n }\n\n}\n\n\n\nstatic STRUCT_LOG_LEVEL: Lazy<log::Level> = Lazy::new(|| {\n\n let level = env::var(\"STRUCT_LOG_LEVEL\").unwrap_or_else(|_| \"debug\".to_string());\n\n log::Level::from_str(&level).expect(\"Failed to parse log level\")\n\n});\n\n\n", "file_path": 
"common/logger/src/struct_log.rs", "rank": 46, "score": 319424.4359985348 }, { "content": "/// Output transaction builders in Rust for the given ABIs.\n\n/// If `local_types` is true, we generate a file suitable for the Libra codebase itself\n\n/// rather than using serde-generated, standalone definitions.\n\npub fn output(out: &mut dyn Write, abis: &[ScriptABI], local_types: bool) -> Result<()> {\n\n let mut emitter = RustEmitter {\n\n out: IndentedWriter::new(out, IndentConfig::Space(4)),\n\n local_types,\n\n };\n\n\n\n emitter.output_preamble()?;\n\n emitter.output_script_call_enum_with_imports(abis)?;\n\n\n\n writeln!(emitter.out, \"\\nimpl ScriptCall {{\")?;\n\n emitter.out.indent();\n\n emitter.output_encode_method(abis)?;\n\n emitter.output_decode_method()?;\n\n emitter.out.unindent();\n\n writeln!(emitter.out, \"\\n}}\")?;\n\n\n\n for abi in abis {\n\n emitter.output_script_encoder_function(abi)?;\n\n }\n\n\n", "file_path": "language/transaction-builder/generator/src/rust.rs", "rank": 47, "score": 319226.4159222157 }, { "content": "// Generate some random, well-formed, unsigned-varint length-prefixed byte arrays\n\n// for our fuzzer corpus to act as serialized inbound rpc calls.\n\npub fn generate_corpus(gen: &mut ValueGenerator) -> Vec<u8> {\n\n let small_data_strat = vec(any::<u8>(), 0..MAX_SMALL_MSG_BYTES);\n\n let medium_data_strat = vec(any::<u8>(), 0..MAX_MEDIUM_MSG_BYTES);\n\n\n\n // bias corpus generation to prefer small message sizes\n\n let data_strat = prop_oneof![small_data_strat, medium_data_strat];\n\n\n\n let length_prefixed_data_strat = data_strat.prop_map(|data| {\n\n let max_len = data.len() + MAX_UVI_PREFIX_BYTES;\n\n let mut buf = bytes::BytesMut::with_capacity(max_len);\n\n let mut codec = LengthDelimitedCodec::new();\n\n codec\n\n .encode(bytes::Bytes::from(data), &mut buf)\n\n .expect(\"Failed to create uvi-prefixed data for corpus\");\n\n buf.freeze().to_vec()\n\n });\n\n\n\n gen.generate(length_prefixed_data_strat)\n\n}\n\n\n", "file_path": "network/src/protocols/rpc/fuzzing.rs", "rank": 48, "score": 317625.03046099516 }, { "content": "// Parse a value:\n\n// Value =\n\n// <Address>\n\n// | \"true\"\n\n// | \"false\"\n\n// | <U8Value>\n\n// | <U64Value>\n\n// | <U128Value>\n\n// | <ByteString>\n\nfn parse_value<'input>(tokens: &mut Lexer<'input>) -> Result<Value, Error> {\n\n let start_loc = tokens.start_loc();\n\n let val = match tokens.peek() {\n\n Tok::AddressValue => {\n\n let addr = parse_address(tokens)?;\n\n Value_::Address(addr)\n\n }\n\n Tok::True => {\n\n tokens.advance()?;\n\n Value_::Bool(true)\n\n }\n\n Tok::False => {\n\n tokens.advance()?;\n\n Value_::Bool(false)\n\n }\n\n Tok::U8Value => {\n\n let mut s = tokens.content();\n\n if s.ends_with(\"u8\") {\n\n s = &s[..s.len() - 2]\n\n }\n", "file_path": "language/move-lang/src/parser/syntax.rs", "rank": 49, "score": 317402.3363453499 }, { "content": "/// Processes the response returned by a seal-status() vault request.\n\npub fn process_unsealed_response(resp: Response) -> Result<bool, Error> {\n\n if resp.ok() {\n\n let resp: SealStatusResponse = serde_json::from_str(&resp.into_string()?)?;\n\n Ok(!resp.sealed)\n\n } else {\n\n Err(resp.into())\n\n }\n\n}\n\n\n\n/// Key backup / restore format\n\n/// Example:\n\n/// {\n\n/// \"policy\":{\n\n/// \"name\":\"local_owner_key__consensus\",\n\n/// \"keys\":{\n\n/// \"1\":{\n\n/// \"key\":\"C3R5O8uAfrgv7sJmCMSLEp1R2HmkZtwdfGT/xVvZVvgCGo6TkWga/ojplJFMM+i2805X3CV7IRyNLCSJcr4AqQ==\",\n\n/// \"hmac_key\":null,\n\n/// 
\"time\":\"2020-05-29T06:27:38.1233515Z\",\n\n/// \"ec_x\":null,\n", "file_path": "secure/storage/vault/src/lib.rs", "rank": 50, "score": 317364.210464768 }, { "content": "/// This test stores different types of values into storage, retrieves them, and asserts\n\n/// that the value unwrap functions return an unexpected type error on an incorrect unwrap.\n\nfn test_verify_incorrect_value_types(storage: &mut Storage) {\n\n let crypto_value = Value::Ed25519PrivateKey(Ed25519PrivateKey::generate_for_testing());\n\n let u64_value = Value::U64(10);\n\n\n\n storage.set(U64_KEY, u64_value).unwrap();\n\n storage.set(CRYPTO_KEY, crypto_value).unwrap();\n\n\n\n assert_eq!(\n\n storage\n\n .get(U64_KEY)\n\n .unwrap()\n\n .value\n\n .ed25519_private_key()\n\n .unwrap_err(),\n\n Error::UnexpectedValueType\n\n );\n\n assert_eq!(\n\n storage.get(CRYPTO_KEY).unwrap().value.u64().unwrap_err(),\n\n Error::UnexpectedValueType\n\n );\n\n}\n\n\n", "file_path": "secure/storage/src/tests/suite.rs", "rank": 51, "score": 316634.17559790844 }, { "content": "pub fn state_sync_msg_strategy() -> impl Strategy<Value = StateSynchronizerMsg> {\n\n prop_oneof![\n\n (any::<GetChunkRequest>()).prop_map(|get_chunk_request| {\n\n StateSynchronizerMsg::GetChunkRequest(Box::new(get_chunk_request))\n\n }),\n\n (any::<GetChunkResponse>()).prop_map(|get_chunk_response| {\n\n StateSynchronizerMsg::GetChunkResponse(Box::new(get_chunk_response))\n\n })\n\n ]\n\n}\n\n\n\nimpl Arbitrary for GetChunkRequest {\n\n type Parameters = ();\n\n fn arbitrary_with(_args: ()) -> Self::Strategy {\n\n (\n\n any::<u64>(),\n\n any::<u64>(),\n\n any::<u64>(),\n\n any::<TargetType>(),\n\n )\n", "file_path": "state-synchronizer/src/tests/fuzzing.rs", "rank": 52, "score": 316268.33608525875 }, { "content": "/// Serializes a `Vec<StructDefinitionIndex>`.\n\nfn serialize_acquires(binary: &mut BinaryData, indices: &[StructDefinitionIndex]) -> Result<()> {\n\n serialize_acquires_count(binary, indices.len())?;\n\n for def_idx in indices {\n\n serialize_struct_def_index(binary, def_idx)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "language/vm/src/serializer.rs", "rank": 53, "score": 314697.53532942507 }, { "content": "/// Output a header-only library providing C++ transaction builders for the given ABIs.\n\npub fn output(out: &mut dyn Write, abis: &[ScriptABI], namespace: Option<&str>) -> Result<()> {\n\n let mut emitter = CppEmitter {\n\n out: IndentedWriter::new(out, IndentConfig::Space(4)),\n\n namespace,\n\n inlined_definitions: true,\n\n };\n\n emitter.output_preamble()?;\n\n emitter.output_open_namespace()?;\n\n emitter.output_using_namespaces()?;\n\n for abi in abis {\n\n emitter.output_builder_definition(abi)?;\n\n }\n\n emitter.output_close_namespace()\n\n}\n\n\n", "file_path": "language/transaction-builder/generator/src/cpp.rs", "rank": 54, "score": 314434.9136242317 }, { "content": "/// Processes the response returned by a token renew vault request.\n\npub fn process_token_renew_response(resp: Response) -> Result<u32, Error> {\n\n if resp.ok() {\n\n let resp: RenewTokenResponse = serde_json::from_str(&resp.into_string()?)?;\n\n Ok(resp.auth.lease_duration)\n\n } else {\n\n Err(resp.into())\n\n }\n\n}\n\n\n", "file_path": "secure/storage/vault/src/lib.rs", "rank": 55, "score": 313601.09148692794 }, { "content": "/// Processes the response returned by a policy read vault request.\n\npub fn process_policy_read_response(resp: Response) -> Result<Policy, Error> {\n\n match resp.status() {\n\n 200 => Ok(Policy::try_from(resp.into_json()?)?),\n\n _ => 
Err(resp.into()),\n\n }\n\n}\n\n\n", "file_path": "secure/storage/vault/src/lib.rs", "rank": 56, "score": 313601.091486928 }, { "content": "/// Processes the response returned by a token create vault request.\n\npub fn process_token_create_response(resp: Response) -> Result<String, Error> {\n\n if resp.ok() {\n\n let resp: CreateTokenResponse = serde_json::from_str(&resp.into_string()?)?;\n\n Ok(resp.auth.client_token)\n\n } else {\n\n Err(resp.into())\n\n }\n\n}\n\n\n", "file_path": "secure/storage/vault/src/lib.rs", "rank": 57, "score": 313601.09148692794 }, { "content": "/// This test creates a new named key pair and attempts to get a non-existent version of the public\n\n/// and private keys. As such, these calls should fail.\n\nfn test_create_and_get_non_existent_version(storage: &mut Storage) {\n\n // Create new named key pair\n\n let _ = storage.create_key(CRYPTO_NAME).unwrap();\n\n\n\n // Get a non-existent version of the new key pair and verify failure\n\n let non_existent_public_key = Ed25519PrivateKey::generate_for_testing().public_key();\n\n assert!(\n\n storage.export_private_key_for_version(CRYPTO_NAME, non_existent_public_key).is_err(),\n\n \"We have tried to retrieve a non-existent private key version -- the call should have failed!\",\n\n );\n\n}\n\n\n", "file_path": "secure/storage/src/tests/suite.rs", "rank": 58, "score": 313134.846049562 }, { "content": "/// Given an operation retries it successfully sleeping everytime it fails\n\n/// If the operation succeeds before the iterator runs out, it returns success\n\npub fn retry<I, O, T, E>(iterable: I, mut operation: O) -> Result<T, E>\n\nwhere\n\n I: IntoIterator<Item = Duration>,\n\n O: FnMut() -> Result<T, E>,\n\n{\n\n let mut iterator = iterable.into_iter();\n\n loop {\n\n match operation() {\n\n Ok(value) => return Ok(value),\n\n Err(err) => {\n\n if let Some(delay) = iterator.next() {\n\n thread::sleep(delay);\n\n } else {\n\n return Err(err);\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "common/retrier/src/lib.rs", "rank": 59, "score": 312773.74883771956 }, { "content": "/// Create boogie type value for a struct with given type actuals.\n\npub fn boogie_struct_type_value(\n\n env: &GlobalEnv,\n\n module_id: ModuleId,\n\n struct_id: StructId,\n\n args: &[Type],\n\n) -> String {\n\n let struct_env = env.get_module(module_id).into_struct(struct_id);\n\n format!(\n\n \"{}_type_value({})\",\n\n boogie_struct_name(&struct_env),\n\n boogie_type_values(env, args)\n\n )\n\n}\n\n\n", "file_path": "language/move-prover/src/boogie_helpers.rs", "rank": 60, "score": 312163.35874288605 }, { "content": "fn parse_one<'a, T>(args: &mut impl Iterator<Item = &'a str>) -> Result<T, ParseError>\n\nwhere\n\n T: FromStr,\n\n T::Err: Into<ParseError>,\n\n{\n\n let next_arg = args.next().ok_or(ParseError::UnexpectedEnd)?;\n\n next_arg.parse().map_err(Into::into)\n\n}\n\n\n\nimpl Protocol {\n\n fn parse<'a>(\n\n protocol_type: &str,\n\n args: &mut impl Iterator<Item = &'a str>,\n\n ) -> Result<Protocol, ParseError> {\n\n let protocol = match protocol_type {\n\n \"ip4\" => Protocol::Ip4(parse_one(args)?),\n\n \"ip6\" => Protocol::Ip6(parse_one(args)?),\n\n \"dns\" => Protocol::Dns(parse_one(args)?),\n\n \"dns4\" => Protocol::Dns4(parse_one(args)?),\n\n \"dns6\" => Protocol::Dns6(parse_one(args)?),\n", "file_path": "network/network-address/src/lib.rs", "rank": 61, "score": 311056.2346743413 }, { "content": "/// Helper function to deserialize versions from above encoding.\n\nfn deserialize_u64_varint<T>(reader: &mut T) -> 
Result<u64>\n\nwhere\n\n T: Read,\n\n{\n\n let mut num = 0u64;\n\n for i in 0..8 {\n\n let byte = reader.read_u8()?;\n\n let more = (byte & 0x80) != 0;\n\n num |= u64::from(byte & 0x7f) << (i * 7);\n\n if !more {\n\n return Ok(num);\n\n }\n\n }\n\n // Last byte is encoded as is.\n\n let byte = reader.read_u8()?;\n\n num |= u64::from(byte) << 56;\n\n Ok(num)\n\n}\n", "file_path": "storage/jellyfish-merkle/src/node_type/mod.rs", "rank": 62, "score": 310658.90032899077 }, { "content": "// Parse a struct definition:\n\n// StructDefinition =\n\n// <DocComments> \"resource\"? \"struct\" <StructDefName> \"{\" Comma<FieldAnnot> \"}\"\n\n// | <DocComments> \"native\" \"resource\"? \"struct\" <StructDefName> \";\"\n\n// StructDefName =\n\n// <Identifier> <OptionalTypeParameters>\n\nfn parse_struct_definition<'input>(tokens: &mut Lexer<'input>) -> Result<StructDefinition, Error> {\n\n tokens.match_doc_comments();\n\n let start_loc = tokens.start_loc();\n\n\n\n // Record the source location of the \"native\" keyword (if there is one).\n\n let native_opt = consume_optional_token_with_loc(tokens, Tok::Native)?;\n\n\n\n // Record the source location of the \"resource\" keyword (if there is one).\n\n let resource_opt = consume_optional_token_with_loc(tokens, Tok::Resource)?;\n\n\n\n consume_token(tokens, Tok::Struct)?;\n\n\n\n // <StructDefName>\n\n let name = StructName(parse_identifier(tokens)?);\n\n let type_parameters = parse_optional_type_parameters(tokens)?;\n\n\n\n let fields = match native_opt {\n\n Some(loc) => {\n\n consume_token(tokens, Tok::Semicolon)?;\n\n StructFields::Native(loc)\n", "file_path": "language/move-lang/src/parser/syntax.rs", "rank": 63, "score": 310650.45605896047 }, { "content": "/// Processes the response returned by a transit key sign vault request.\n\npub fn process_transit_sign_response(resp: Response) -> Result<Ed25519Signature, Error> {\n\n if resp.ok() {\n\n let signature: SignatureResponse = serde_json::from_str(&resp.into_string()?)?;\n\n let signature = &signature.data.signature;\n\n let signature_pieces: Vec<_> = signature.split(':').collect();\n\n let signature = signature_pieces\n\n .get(2)\n\n .ok_or_else(|| Error::SerializationError(signature.into()))?;\n\n Ok(Ed25519Signature::try_from(\n\n base64::decode(&signature)?.as_slice(),\n\n )?)\n\n } else {\n\n Err(resp.into())\n\n }\n\n}\n\n\n", "file_path": "secure/storage/vault/src/lib.rs", "rank": 64, "score": 309972.6554236547 }, { "content": "pub fn verify(errors: &mut Errors, modules: &mut UniqueMap<ModuleIdent, N::ModuleDefinition>) {\n\n let imm_modules = &modules;\n\n let context = &mut Context::new(imm_modules);\n\n module_defs(context, modules);\n\n let graph = &context.dependency_graph();\n\n match petgraph_toposort(graph, None) {\n\n Err(cycle_node) => {\n\n let cycle_ident = cycle_node.node_id().clone();\n\n let error = cycle_error(context, cycle_ident);\n\n errors.push(error)\n\n }\n\n Ok(ordered_ids) => {\n\n let ordered_ids = ordered_ids.into_iter().cloned().collect::<Vec<_>>();\n\n for (order, mident) in ordered_ids.into_iter().rev().enumerate() {\n\n modules.get_mut(&mident).unwrap().dependency_order = order\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 65, "score": 309741.4990836579 }, { "content": "fn compile_constant(_context: &mut Context, ty: Type, value: MoveValue) -> Result<Constant> {\n\n fn type_layout(ty: Type) -> Result<MoveTypeLayout> {\n\n Ok(match ty {\n\n Type::Address => MoveTypeLayout::Address,\n\n Type::Signer => 
MoveTypeLayout::Signer,\n\n Type::U8 => MoveTypeLayout::U8,\n\n Type::U64 => MoveTypeLayout::U64,\n\n Type::U128 => MoveTypeLayout::U128,\n\n Type::Bool => MoveTypeLayout::Bool,\n\n Type::Vector(inner_type) => MoveTypeLayout::Vector(Box::new(type_layout(*inner_type)?)),\n\n Type::Reference(_, _) => bail!(\"References are not supported in constant type layouts\"),\n\n Type::TypeParameter(_) => {\n\n bail!(\"Type parameters are not supported in constant type layouts\")\n\n }\n\n Type::Struct(_ident, _tys) => {\n\n bail!(\"TODO Structs are not *yet* supported in constant type layouts\")\n\n }\n\n })\n\n }\n\n\n\n Constant::serialize_constant(&type_layout(ty)?, &value)\n\n .ok_or_else(|| format_err!(\"Could not serialize constant\"))\n\n}\n\n\n\n//**************************************************************************************************\n\n// Bytecode\n\n//**************************************************************************************************\n\n\n", "file_path": "language/compiler/ir-to-bytecode/src/compiler.rs", "rank": 66, "score": 306543.30712512194 }, { "content": "#[cfg(any(test, feature = \"fuzzing\"))]\n\npub fn keypair_strategy() -> impl Strategy<Value = KeyPair<PrivateKey, PublicKey>> {\n\n test_utils::uniform_keypair_strategy::<PrivateKey, PublicKey>()\n\n}\n", "file_path": "crypto/crypto/src/x25519.rs", "rank": 67, "score": 305878.7529359901 }, { "content": "pub fn generate_corpus(gen: &mut libra_proptest_helpers::ValueGenerator) -> Vec<u8> {\n\n let (init_msg, resp_msg) = generate_first_two_messages();\n\n // choose a random one\n\n let strategy = proptest::arbitrary::any::<bool>();\n\n if gen.generate(strategy) {\n\n init_msg\n\n } else {\n\n resp_msg\n\n }\n\n}\n\n\n\n//\n\n// Fuzzing\n\n// =======\n\n//\n\n// - fuzz_initiator: fuzzes the second message of the handshake, received by the initiator.\n\n// - fuzz_responder: fuzzes the first message of the handshake, received by the responder.\n\n//\n\n\n", "file_path": "network/src/noise/fuzzing.rs", "rank": 68, "score": 305288.9825269835 }, { "content": "/// Processes the response returned by a secret list vault request.\n\npub fn process_secret_list_response(resp: Response) -> Result<Vec<String>, Error> {\n\n match resp.status() {\n\n 200 => {\n\n let resp: ReadSecretListResponse = serde_json::from_str(&resp.into_string()?)?;\n\n Ok(resp.data.keys)\n\n }\n\n // There are no secrets.\n\n 404 => {\n\n // Explicitly clear buffer so the stream can be re-used.\n\n resp.into_string()?;\n\n Ok(vec![])\n\n }\n\n _ => Err(resp.into()),\n\n }\n\n}\n\n\n", "file_path": "secure/storage/vault/src/lib.rs", "rank": 69, "score": 305036.25155209674 }, { "content": "/// Processes the response returned by a policy list vault request.\n\npub fn process_policy_list_response(resp: Response) -> Result<Vec<String>, Error> {\n\n match resp.status() {\n\n 200 => {\n\n let policies: ListPoliciesResponse = serde_json::from_str(&resp.into_string()?)?;\n\n Ok(policies.policies)\n\n }\n\n // There are no policies.\n\n 404 => {\n\n // Explicitly clear buffer so the stream can be re-used.\n\n resp.into_string()?;\n\n Ok(vec![])\n\n }\n\n _ => Err(resp.into()),\n\n }\n\n}\n\n\n", "file_path": "secure/storage/vault/src/lib.rs", "rank": 70, "score": 305036.25155209674 }, { "content": "/// Processes the response returned by a transit key list vault request.\n\npub fn process_transit_list_response(resp: Response) -> Result<Vec<String>, Error> {\n\n match resp.status() {\n\n 200 => {\n\n let list_keys: ListKeysResponse = 
serde_json::from_str(&resp.into_string()?)?;\n\n Ok(list_keys.data.keys)\n\n }\n\n 404 => {\n\n // Explicitly clear buffer so the stream can be re-used.\n\n resp.into_string()?;\n\n Err(Error::NotFound(\"transit/\".into(), \"keys\".into()))\n\n }\n\n _ => Err(resp.into()),\n\n }\n\n}\n\n\n", "file_path": "secure/storage/vault/src/lib.rs", "rank": 71, "score": 305036.1983536165 }, { "content": "/// Processes the response returned by a transit key create vault request.\n\npub fn process_transit_create_response(name: &str, resp: Response) -> Result<(), Error> {\n\n match resp.status() {\n\n 200 | 204 => {\n\n // Explicitly clear buffer so the stream can be re-used.\n\n resp.into_string()?;\n\n Ok(())\n\n }\n\n 404 => {\n\n // Explicitly clear buffer so the stream can be re-used.\n\n resp.into_string()?;\n\n Err(Error::NotFound(\"transit/\".into(), name.into()))\n\n }\n\n _ => Err(resp.into()),\n\n }\n\n}\n\n\n", "file_path": "secure/storage/vault/src/lib.rs", "rank": 72, "score": 305036.1983536165 }, { "content": "/// Choose a proposer that is going to be the single leader (relevant for a mock fixed proposer\n\n/// election only).\n\npub fn choose_leader(peers: Vec<Author>) -> Author {\n\n // As it is just a tmp hack function, pick the min PeerId to be a proposer.\n\n peers.into_iter().min().expect(\"No trusted peers found!\")\n\n}\n\n\n\nimpl RotatingProposer {\n\n /// With only one proposer in the vector, it behaves the same as a fixed proposer strategy.\n\n pub fn new(proposers: Vec<Author>, contiguous_rounds: u32) -> Self {\n\n Self {\n\n proposers,\n\n contiguous_rounds,\n\n }\n\n }\n\n}\n\n\n\nimpl ProposerElection for RotatingProposer {\n\n fn get_valid_proposer(&self, round: Round) -> Author {\n\n self.proposers\n\n [((round / u64::from(self.contiguous_rounds)) % self.proposers.len() as u64) as usize]\n\n }\n\n}\n", "file_path": "consensus/src/liveness/rotating_proposer_election.rs", "rank": 73, "score": 303846.40661384375 }, { "content": "fn add_node_batch(batch: &mut SchemaBatch, node_batch: &NodeBatch) -> Result<()> {\n\n node_batch\n\n .iter()\n\n .map(|(node_key, node)| batch.put::<JellyfishMerkleNodeSchema>(node_key, node))\n\n .collect::<Result<Vec<_>>>()?;\n\n Ok(())\n\n}\n", "file_path": "storage/libradb/src/state_store/mod.rs", "rank": 74, "score": 303774.0350697516 }, { "content": "// TODO rework parsing modifiers\n\nfn is_struct_definition<'input>(tokens: &mut Lexer<'input>) -> Result<bool, Error> {\n\n let mut t = tokens.peek();\n\n if t == Tok::Native {\n\n t = tokens.lookahead()?;\n\n }\n\n Ok(t == Tok::Struct || t == Tok::Resource)\n\n}\n\n\n", "file_path": "language/move-lang/src/parser/syntax.rs", "rank": 75, "score": 301552.1754914393 }, { "content": "/// Computes the key immediately after `key`.\n\npub fn plus_one(key: HashValue) -> HashValue {\n\n assert_ne!(key, HashValue::new([0xff; HashValue::LENGTH]));\n\n\n\n let mut buf = key.to_vec();\n\n for i in (0..HashValue::LENGTH).rev() {\n\n if buf[i] == 255 {\n\n buf[i] = 0;\n\n } else {\n\n buf[i] += 1;\n\n break;\n\n }\n\n }\n\n HashValue::from_slice(&buf).unwrap()\n\n}\n\n\n", "file_path": "storage/jellyfish-merkle/src/test_helper.rs", "rank": 76, "score": 300710.624535045 }, { "content": "/// Advance both iterators if their next nibbles are the same until either reaches the end or\n\n/// the find a mismatch. 
Return the number of matched nibbles.\n\npub fn skip_common_prefix<'a, 'b, I1: 'a, I2: 'b>(x: &'a mut I1, y: &mut I2) -> usize\n\nwhere\n\n I1: Iterator + Peekable,\n\n I2: Iterator + Peekable,\n\n <I1 as Iterator>::Item: std::cmp::PartialEq<<I2 as Iterator>::Item>,\n\n{\n\n let mut count = 0;\n\n loop {\n\n let x_peek = x.peek();\n\n let y_peek = y.peek();\n\n if x_peek.is_none()\n\n || y_peek.is_none()\n\n || x_peek.expect(\"cannot be none\") != y_peek.expect(\"cannot be none\")\n\n {\n\n break;\n\n }\n\n count += 1;\n\n x.next();\n\n y.next();\n\n }\n\n count\n\n}\n", "file_path": "storage/jellyfish-merkle/src/nibble_path/mod.rs", "rank": 77, "score": 299829.3595811399 }, { "content": "fn check_restricted_self_name(context: &mut Context, case: &str, n: &Name) -> Result<(), ()> {\n\n check_restricted_name(context, case, n, ModuleName::SELF_NAME)\n\n}\n\n\n", "file_path": "language/move-lang/src/expansion/translate.rs", "rank": 78, "score": 298212.42291686434 }, { "content": "// Parse a byte string:\n\n// ByteString = <ByteStringValue>\n\nfn parse_byte_string<'input>(tokens: &mut Lexer<'input>) -> Result<Value_, Error> {\n\n if tokens.peek() != Tok::ByteStringValue {\n\n return Err(unexpected_token_error(tokens, \"a byte string value\"));\n\n }\n\n let s = tokens.content();\n\n let text = s[2..s.len() - 1].to_owned();\n\n let value_ = if s.starts_with(\"x\\\"\") {\n\n Value_::HexString(text)\n\n } else {\n\n assert!(s.starts_with(\"b\\\"\"));\n\n Value_::ByteString(text)\n\n };\n\n tokens.advance()?;\n\n Ok(value_)\n\n}\n\n\n", "file_path": "language/move-lang/src/parser/syntax.rs", "rank": 79, "score": 298102.06728219404 }, { "content": "pub fn test_execution_with_storage_impl() -> Arc<LibraDB> {\n\n let (mut config, genesis_key) = config_builder::test_config();\n\n let (libradb, db, mut executor) = create_db_and_executor(&config);\n\n let parent_block_id = executor.committed_block_id();\n\n let signer = extract_signer(&mut config);\n\n\n\n let seed = [1u8; 32];\n\n // TEST_SEED is also used to generate a random validator set in get_test_config. Each account\n\n // in this random validator set gets created in genesis. 
If one of {account1, account2,\n\n // account3} already exists in genesis, the code below will fail.\n\n assert!(seed != TEST_SEED);\n\n let mut rng = ::rand::rngs::StdRng::from_seed(seed);\n\n\n\n let privkey1 = Ed25519PrivateKey::generate(&mut rng);\n\n let pubkey1 = privkey1.public_key();\n\n let account1_auth_key = AuthenticationKey::ed25519(&pubkey1);\n\n let account1 = account1_auth_key.derived_address();\n\n\n\n let privkey2 = Ed25519PrivateKey::generate(&mut rng);\n\n let pubkey2 = privkey2.public_key();\n", "file_path": "execution/executor-test-helpers/src/integration_test_impl.rs", "rank": 80, "score": 297892.74050409696 }, { "content": "/// Build a Libra `Script` from a structured object `ScriptCall`.\n\npub fn encode(self) -> Script {{\"#\n\n )?;\n\n self.out.indent();\n\n writeln!(self.out, \"use ScriptCall::*;\\nmatch self {{\")?;\n\n self.out.indent();\n\n for abi in abis {\n\n self.output_variant_encoder(abi)?;\n\n }\n\n self.out.unindent();\n\n writeln!(self.out, \"}}\")?;\n\n self.out.unindent();\n\n writeln!(self.out, \"}}\\n\")\n\n }\n\n\n\n fn output_variant_encoder(&mut self, abi: &ScriptABI) -> Result<()> {\n\n let params = std::iter::empty()\n\n .chain(abi.ty_args().iter().map(TypeArgumentABI::name))\n\n .chain(abi.args().iter().map(ArgumentABI::name))\n\n .collect::<Vec<_>>()\n\n .join(\", \");\n", "file_path": "language/transaction-builder/generator/src/rust.rs", "rank": 81, "score": 296871.86162722495 }, { "content": "/// Create a registry of network data formats.\n\npub fn get_registry() -> Result<Registry> {\n\n let mut tracer =\n\n Tracer::new(TracerConfig::default().is_human_readable(lcs::is_human_readable()));\n\n let mut samples = Samples::new();\n\n // 1. Record samples for types with custom deserializers.\n\n trace_crypto_values(&mut tracer, &mut samples)?;\n\n tracer.trace_value(\n\n &mut samples,\n\n &address::DnsName::from_str(\"example.com\").unwrap(),\n\n )?;\n\n tracer.trace_value(&mut samples, &address::NetworkAddress::mock())?;\n\n\n\n // 2. Trace the main entry point(s) + every enum separately.\n\n tracer.trace_type::<messaging::v1::NetworkMessage>(&samples)?;\n\n tracer.trace_type::<handshake::v1::HandshakeMsg>(&samples)?;\n\n tracer.trace_type::<address::NetworkAddress>(&samples)?;\n\n tracer.trace_type::<address::RawNetworkAddress>(&samples)?;\n\n tracer.trace_type::<address::encrypted::EncNetworkAddress>(&samples)?;\n\n tracer.trace_type::<address::encrypted::RawEncNetworkAddress>(&samples)?;\n\n\n\n tracer.trace_type::<messaging::v1::ErrorCode>(&samples)?;\n\n tracer.trace_type::<messaging::v1::ParsingErrorType>(&samples)?;\n\n tracer.trace_type::<messaging::v1::NotSupportedType>(&samples)?;\n\n tracer.trace_type::<handshake::v1::ProtocolId>(&samples)?;\n\n tracer.trace_type::<address::Protocol>(&samples)?;\n\n tracer.trace_type::<libra_config::network_id::NetworkId>(&samples)?;\n\n\n\n tracer.registry()\n\n}\n", "file_path": "testsuite/generate-format/src/network.rs", "rank": 82, "score": 296219.0197935254 }, { "content": "pub fn get_registry() -> Result<Registry> {\n\n let mut tracer =\n\n Tracer::new(TracerConfig::default().is_human_readable(lcs::is_human_readable()));\n\n let mut samples = Samples::new();\n\n // 1. Record samples for types with custom deserializers.\n\n trace_crypto_values(&mut tracer, &mut samples)?;\n\n tracer.trace_value(&mut samples, &event::EventKey::random())?;\n\n\n\n // 2. 
Trace the main entry point(s) + every enum separately.\n\n tracer.trace_type::<contract_event::ContractEvent>(&samples)?;\n\n tracer.trace_type::<language_storage::TypeTag>(&samples)?;\n\n tracer.trace_type::<transaction::metadata::Metadata>(&samples)?;\n\n tracer.trace_type::<transaction::metadata::GeneralMetadata>(&samples)?;\n\n tracer.trace_type::<transaction::metadata::TravelRuleMetadata>(&samples)?;\n\n tracer.trace_type::<transaction::Transaction>(&samples)?;\n\n tracer.trace_type::<transaction::TransactionArgument>(&samples)?;\n\n tracer.trace_type::<transaction::TransactionPayload>(&samples)?;\n\n tracer.trace_type::<transaction::WriteSetPayload>(&samples)?;\n\n tracer.trace_type::<transaction::authenticator::TransactionAuthenticator>(&samples)?;\n\n tracer.trace_type::<write_set::WriteOp>(&samples)?;\n\n tracer.registry()\n\n}\n", "file_path": "testsuite/generate-format/src/libra.rs", "rank": 83, "score": 296219.0197935254 }, { "content": "/// Create a registry for consensus types.\n\npub fn get_registry() -> Result<Registry> {\n\n let mut tracer =\n\n Tracer::new(TracerConfig::default().is_human_readable(lcs::is_human_readable()));\n\n let mut samples = Samples::new();\n\n // 1. Record samples for types with custom deserializers.\n\n trace_crypto_values(&mut tracer, &mut samples)?;\n\n tracer.trace_value(\n\n &mut samples,\n\n &consensus_types::block::Block::make_genesis_block(),\n\n )?;\n\n tracer.trace_value(&mut samples, &event::EventKey::random())?;\n\n\n\n // 2. Trace the main entry point(s) + every enum separately.\n\n tracer.trace_type::<contract_event::ContractEvent>(&samples)?;\n\n tracer.trace_type::<language_storage::TypeTag>(&samples)?;\n\n tracer.trace_type::<transaction::Transaction>(&samples)?;\n\n tracer.trace_type::<transaction::TransactionArgument>(&samples)?;\n\n tracer.trace_type::<transaction::TransactionPayload>(&samples)?;\n\n tracer.trace_type::<transaction::WriteSetPayload>(&samples)?;\n\n tracer.trace_type::<transaction::authenticator::TransactionAuthenticator>(&samples)?;\n\n tracer.trace_type::<write_set::WriteOp>(&samples)?;\n\n\n\n tracer.trace_type::<consensus::network_interface::ConsensusMsg>(&samples)?;\n\n tracer.trace_type::<consensus_types::block_data::BlockType>(&samples)?;\n\n tracer.trace_type::<consensus_types::block_retrieval::BlockRetrievalStatus>(&samples)?;\n\n\n\n tracer.registry()\n\n}\n", "file_path": "testsuite/generate-format/src/consensus.rs", "rank": 84, "score": 296219.0197935254 }, { "content": "/// Returns a [`Strategy`] that provides a variety of balances (or transfer amounts) over a roughly\n\n/// logarithmic distribution.\n\npub fn log_balance_strategy(max_balance: u64) -> impl Strategy<Value = u64> {\n\n // The logarithmic distribution is modeled by uniformly picking from ranges of powers of 2.\n\n let minimum = gas_costs::TXN_RESERVED.next_power_of_two();\n\n assert!(max_balance >= minimum, \"minimum to make sense\");\n\n let mut strategies = vec![];\n\n // Balances below and around the minimum are interesting but don't cover *every* power of 2,\n\n // just those starting from the minimum.\n\n let mut lower_bound: u64 = 0;\n\n let mut upper_bound: u64 = minimum;\n\n loop {\n\n strategies.push(lower_bound..upper_bound);\n\n if upper_bound >= max_balance {\n\n break;\n\n }\n\n lower_bound = upper_bound;\n\n upper_bound = (upper_bound * 2).min(max_balance);\n\n }\n\n Union::new(strategies)\n\n}\n\n\n", "file_path": "language/testing-infra/e2e-tests/src/account_universe.rs", "rank": 85, "score": 295714.6934029751 }, { 
"content": "// Check for the identifier token with specified value and return an error if it does not match.\n\nfn consume_identifier<'input>(tokens: &mut Lexer<'input>, value: &str) -> Result<(), Error> {\n\n if tokens.peek() == Tok::IdentifierValue && tokens.content() == value {\n\n tokens.advance()\n\n } else {\n\n let expected = format!(\"'{}'\", value);\n\n Err(unexpected_token_error(tokens, &expected))\n\n }\n\n}\n\n\n", "file_path": "language/move-lang/src/parser/syntax.rs", "rank": 86, "score": 293941.0434047199 }, { "content": "/// Retrieve a waypoint given the URL.\n\nfn retrieve_waypoint(url_str: &str) -> anyhow::Result<Waypoint> {\n\n let client = reqwest::blocking::ClientBuilder::new().build()?;\n\n let response = client.get(url_str).send()?;\n\n\n\n Ok(response\n\n .error_for_status()\n\n .map_err(|_| anyhow::format_err!(\"Failed to retrieve waypoint from URL {}\", url_str))?\n\n .text()\n\n .map(|r| Waypoint::from_str(r.trim()))??)\n\n}\n", "file_path": "testsuite/cli/src/main.rs", "rank": 87, "score": 293913.19338840345 }, { "content": "fn use_(context: &mut Context, acc: &mut AliasMap, u: P::Use) {\n\n let unbound_module = |mident: &ModuleIdent| -> Error {\n\n vec![(\n\n mident.loc(),\n\n format!(\"Invalid 'use'. Unbound module: '{}'\", mident),\n\n )]\n\n };\n\n macro_rules! add_module_alias {\n\n ($ident:expr, $alias_opt:expr) => {{\n\n let alias: Name = $alias_opt.unwrap_or_else(|| $ident.0.value.name.0.clone());\n\n if let Err(()) = check_restricted_self_name(context, \"module alias\", &alias) {\n\n return;\n\n }\n\n\n\n if let Err(old_loc) = acc.add_module_alias(alias.clone(), $ident) {\n\n duplicate_module_alias(context, old_loc, alias)\n\n }\n\n }};\n\n };\n\n match u {\n", "file_path": "language/move-lang/src/expansion/translate.rs", "rank": 88, "score": 293337.8766896507 }, { "content": "pub fn validator_signers_to_waypoint(signers: &[&ValidatorSigner]) -> Waypoint {\n\n let li = validator_signers_to_ledger_info(signers);\n\n Waypoint::new_epoch_boundary(&li).unwrap()\n\n}\n\n\n", "file_path": "consensus/safety-rules/src/test_utils.rs", "rank": 89, "score": 293329.45424480684 }, { "content": "/// Perform a stateful deserialization from a `&[u8]` using the provided `seed`.\n\npub fn from_bytes_seed<'a, T>(seed: T, bytes: &'a [u8]) -> Result<T::Value>\n\nwhere\n\n T: DeserializeSeed<'a>,\n\n{\n\n let mut deserializer = Deserializer::new(bytes, crate::MAX_CONTAINER_DEPTH);\n\n let t = seed.deserialize(&mut deserializer)?;\n\n deserializer.end().map(move |_| t)\n\n}\n\n\n", "file_path": "common/lcs/src/de.rs", "rank": 90, "score": 293104.6932000532 }, { "content": "pub fn get_registry() -> Result<Registry> {\n\n let mut tracer =\n\n Tracer::new(TracerConfig::default().is_human_readable(lcs::is_human_readable()));\n\n let samples = Samples::new();\n\n // 1. Record samples for types with custom deserializers.\n\n\n\n // 2. 
Trace the main entry point(s) + every enum separately.\n\n tracer.trace_type::<transaction::ScriptABI>(&samples)?;\n\n tracer.trace_type::<language_storage::TypeTag>(&samples)?;\n\n tracer.registry()\n\n}\n", "file_path": "testsuite/generate-format/src/move_abi.rs", "rank": 91, "score": 292929.81092413823 }, { "content": "// Parse a specification let.\n\n// SpecLet = \"let\" <Identifier> \"=\" <Exp> \";\"\n\nfn parse_spec_let<'input>(tokens: &mut Lexer<'input>) -> Result<SpecBlockMember, Error> {\n\n let start_loc = tokens.start_loc();\n\n tokens.advance()?;\n\n let name = parse_identifier(tokens)?;\n\n consume_token(tokens, Tok::Equal)?;\n\n let def = parse_exp(tokens)?;\n\n consume_token(tokens, Tok::Semicolon)?;\n\n Ok(spanned(\n\n tokens.file_name(),\n\n start_loc,\n\n tokens.previous_end_loc(),\n\n SpecBlockMember_::Let { name, def },\n\n ))\n\n}\n\n\n", "file_path": "language/move-lang/src/parser/syntax.rs", "rank": 92, "score": 291554.6979720639 }, { "content": "fn struct_def(context: &mut Context, sdef: &N::StructDefinition) {\n\n if let N::StructFields::Defined(fields) = &sdef.fields {\n\n fields.iter().for_each(|(_, (_, bt))| type_(context, bt));\n\n }\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 93, "score": 290945.5375429047 }, { "content": "// Parse an 'as' use alias:\n\n// UseAlias = (\"as\" <Identifier>)?\n\nfn parse_use_alias<'input>(tokens: &mut Lexer<'input>) -> Result<Option<Name>, Error> {\n\n Ok(if tokens.peek() == Tok::As {\n\n tokens.advance()?;\n\n Some(parse_identifier(tokens)?)\n\n } else {\n\n None\n\n })\n\n}\n\n\n", "file_path": "language/move-lang/src/parser/syntax.rs", "rank": 94, "score": 290649.51344234217 }, { "content": "fn types<'a>(context: &mut Context, tys: impl IntoIterator<Item = &'a N::Type>) {\n\n tys.into_iter().for_each(|ty| type_(context, ty))\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 95, "score": 290503.2380334905 }, { "content": "/// returns account's sequence number from storage\n\npub fn get_account_sequence_number(storage: &dyn DbReader, address: AccountAddress) -> Result<u64> {\n\n match storage.get_latest_account_state(address)? {\n\n Some(blob) => Ok(AccountResource::try_from(&blob)?.sequence_number()),\n\n None => Ok(0),\n\n }\n\n}\n", "file_path": "vm-validator/src/vm_validator.rs", "rank": 96, "score": 290382.17010755534 }, { "content": "pub fn type_(context: &mut Context, ty: &mut Type) {\n\n use Type_::*;\n\n match &mut ty.value {\n\n Anything | UnresolvedError | Param(_) | Unit => (),\n\n Ref(_, b) => type_(context, b),\n\n Var(tvar) => {\n\n let ty_tvar = sp(ty.loc, Var(*tvar));\n\n let replacement = core::unfold_type(&context.subst, ty_tvar);\n\n let replacement = match replacement {\n\n sp!(_, Var(_)) => panic!(\"ICE unfold_type_base failed to expand\"),\n\n sp!(loc, Anything) => {\n\n context.error(vec![(\n\n ty.loc,\n\n \"Could not infer this type. 
Try adding an annotation\",\n\n )]);\n\n sp(loc, UnresolvedError)\n\n }\n\n t => t,\n\n };\n\n *ty = replacement;\n", "file_path": "language/move-lang/src/typing/expand.rs", "rank": 97, "score": 290018.5263293092 }, { "content": "pub fn set_state(field: &str, value: i64) {\n\n STATE_GAUGE.with_label_values(&[field]).set(value);\n\n}\n", "file_path": "consensus/safety-rules/src/counters.rs", "rank": 98, "score": 289892.645106753 }, { "content": "pub fn lcs_benchmark(c: &mut Criterion) {\n\n let mut btree_map = BTreeMap::new();\n\n let mut hash_map = HashMap::new();\n\n for i in 0u32..2000u32 {\n\n btree_map.insert(i, i);\n\n hash_map.insert(i, i);\n\n }\n\n c.bench_function(\"serialize btree map\", |b| {\n\n b.iter(|| {\n\n to_bytes(&btree_map).unwrap();\n\n })\n\n });\n\n c.bench_function(\"serialize hash map\", |b| {\n\n b.iter(|| {\n\n to_bytes(&hash_map).unwrap();\n\n })\n\n });\n\n}\n\n\n\ncriterion_group!(benches, lcs_benchmark);\n\ncriterion_main!(benches);\n", "file_path": "common/lcs/benches/lcs_bench.rs", "rank": 99, "score": 289380.07612194 } ]
Rust
src/pdu/hex_access/write_multi_reg.rs
hubertmis/modbus
430c6204070d7fd27fc639c08475b3b3079edeaa
use crate::Error;
use crate::pdu::{Function, FunctionCode, Request as ReqT, Response as RspT, Setter};
use std::convert::TryInto;

const MIN_QUANTITY: usize = 1;
const MAX_QUANTITY: usize = 123;

#[derive(Debug, PartialEq)]
pub struct Request {
    address: u16,
    values: Vec<u16>,
}

impl Request {
    pub fn new(address: u16, values: &[u16]) -> Self {
        assert!(values.len() >= MIN_QUANTITY);
        assert!(values.len() <= MAX_QUANTITY);
        Request{address, values: Vec::from(values)}
    }

    pub fn get_address(&self) -> u16 {
        self.address
    }

    pub fn get_values(&self) -> &[u16] {
        &self.values
    }
}

impl Function for Request {
    fn encode(&self) -> Result<Vec<u8>, Error> {
        match self.values.len() {
            MIN_QUANTITY..=MAX_QUANTITY => {
                let mut result = Vec::new();
                result.push(FunctionCode::WriteMultiReg as u8);
                result.append(&mut self.address.to_be_bytes().to_vec());
                result.append(&mut (self.values.len() as u16).to_be_bytes().to_vec());
                result.push((self.values.len() as u8) * 2);
                for val in &self.values {
                    result.append(&mut val.to_be_bytes().to_vec());
                }
                Ok(result)
            }
            _ => Err(Error::InvalidValue)
        }
    }

    fn decode(data: &[u8]) -> Result<Self, Error> {
        if data.len() < 6 {
            return Err(Error::InvalidDataLength);
        }
        if data[0] != FunctionCode::WriteMultiReg as u8 {
            return Err(Error::InvalidData);
        }

        let address = u16::from_be_bytes(data[1..=2].try_into().unwrap());
        let quantity = u16::from_be_bytes(data[3..=4].try_into().unwrap());
        let data_cnt = data[5];
        if data_cnt as u16 != quantity * 2 {
            return Err(Error::InvalidDataLength);
        }
        if (quantity as usize) < MIN_QUANTITY || (quantity as usize) > MAX_QUANTITY {
            return Err(Error::InvalidData);
        }

        let mut values = Vec::with_capacity(quantity as usize);
        for i in 0..quantity {
            let val_idx = (6 + i * 2) as usize;
            values.push(u16::from_be_bytes(data[val_idx..=val_idx+1].try_into().unwrap()))
        }

        Ok(Self{address, values})
    }
}

impl ReqT for Request {
    type Rsp = Response;
}

impl Setter for Request {
    fn create_expected_response(&self) -> Self::Rsp {
        Response::new(self.address, self.values.len() as u16)
    }
}

#[derive(Debug, PartialEq)]
pub struct Response {
    address: u16,
    quantity: u16,
}

impl Response {
    pub fn new(address: u16, quantity: u16) -> Self {
        assert!(quantity as usize >= MIN_QUANTITY);
        assert!(quantity as usize <= MAX_QUANTITY);
        Self{address, quantity}
    }

    pub fn get_address(&self) -> u16 {
        self.address
    }

    pub fn get_quantity(&self) -> u16 {
        self.quantity
    }
}

impl Function for Response {
    fn encode(&self) -> Result<Vec<u8>, Error> {
        match self.quantity as usize {
            MIN_QUANTITY..=MAX_QUANTITY => {
                let mut result = Vec::new();
                result.push(FunctionCode::WriteMultiReg as u8);
                result.append(&mut self.address.to_be_bytes().to_vec());
                result.append(&mut self.quantity.to_be_bytes().to_vec());
                Ok(result)
            }
            _ => Err(Error::InvalidValue)
        }
    }

    fn decode(data: &[u8]) -> Result<Self, Error> {
        if data.len() != 5 {
            return Err(Error::InvalidDataLength);
        }
        if data[0] != FunctionCode::WriteMultiReg as u8 {
            return Err(Error::InvalidData);
        }

        let address = u16::from_be_bytes(data[1..=2].try_into().unwrap());
        let quantity = u16::from_be_bytes(data[3..=4].try_into().unwrap());

        Ok(Self{address, quantity})
    }
}

impl RspT for Response {
    fn get_exc_function_code() -> u8 {
        FunctionCode::ExcWriteMultiReg.try_into().unwrap()
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_encode_request() {
        let req = Request::new(0xdead, &vec![0xfade, 0xface, 0x0000, 0x0001]);
        let pdu = req.encode().unwrap();
        let expected_pdu = vec![0x10, 0xde, 0xad, 0x00, 0x04, 0x08, 0xfa, 0xde, 0xfa, 0xce, 0x00, 0x00, 0x00, 0x01];
        assert_eq!(pdu, expected_pdu);
    }

    #[test]
    fn test_encode_response() {
        let rsp = Response::new(0xffff, 0x0072);
        let pdu = rsp.encode().unwrap();
        let expected_pdu = vec![0x10, 0xff, 0xff, 0x00, 0x72];
        assert_eq!(pdu, expected_pdu);
    }

    #[test]
    fn test_decode_request() {
        let pdu = vec![0x10, 0x00, 0x00, 0x00, 0x02, 0x04, 0x01, 0x02, 0xfe, 0xfd];
        let req = Request::decode(&pdu).unwrap();
        let expected_req = Request::new(0x0000, &vec![0x0102, 0xfefd]);
        assert_eq!(req, expected_req);
    }

    #[test]
    fn test_decode_invalid_request() {
        let pdu = vec![0x11, 0x01, 0x23, 0x00, 0x01, 0x02, 0x11, 0x12];
        let err = Request::decode(&pdu).err().unwrap();
        match err {
            Error::InvalidData => {}
            _ => panic!(format!("Expected InvalidData, but got {:?}", err)),
        }
    }

    #[test]
    fn test_decode_response() {
        let pdu = vec![0x10, 0x01, 0x23, 0x00, 0x65];
        let rsp = Response::decode(&pdu).unwrap();
        let expected_rsp = Response::new(0x0123, 0x0065);
        assert_eq!(rsp, expected_rsp);
    }
}
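A minimal standalone sketch of the Write Multiple Registers (function code 0x10) request layout that Request::encode above produces. It deliberately does not call into the crate and only mirrors the byte order checked in test_encode_request, so treat it as an illustration of the PDU layout rather than the library's API:

fn encode_write_multi_reg(address: u16, values: &[u16]) -> Vec<u8> {
    // PDU layout: function code, start address (big-endian), register count
    // (big-endian), byte count, then each register value in big-endian order.
    let mut pdu = vec![0x10u8];
    pdu.extend_from_slice(&address.to_be_bytes());
    pdu.extend_from_slice(&(values.len() as u16).to_be_bytes());
    pdu.push((values.len() * 2) as u8);
    for v in values {
        pdu.extend_from_slice(&v.to_be_bytes());
    }
    pdu
}

fn main() {
    // Mirrors the expected_pdu from test_encode_request above.
    assert_eq!(
        encode_write_multi_reg(0xdead, &[0xfade, 0xface, 0x0000, 0x0001]),
        vec![0x10, 0xde, 0xad, 0x00, 0x04, 0x08, 0xfa, 0xde, 0xfa, 0xce, 0x00, 0x00, 0x00, 0x01]
    );
}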
use crate::Error; use crate::pdu::{Function, FunctionCode, Request as ReqT, Response as RspT, Setter}; use std::convert::TryInto; const MIN_QUANTITY: usize = 1; const MAX_QUANTITY: usize = 123; #[derive(Debug, PartialEq)] pub struct Request { address: u16, values: Vec<u16>, } impl Request { pub fn new(address: u16, values: &[u16]) -> Self { assert!(values.len() >= MIN_QUANTITY); assert!(values.len() <= MAX_QUANTITY); Request{address, values: Vec::from(values)} } pub fn get_address(&self) -> u16 { self.address } pub fn get_values(&self) -> &[u16] { &self.values } } impl Function for Request { fn encode(&self) -> Result<Vec<u8>, Error> { match self.values.len() { MIN_QUANTITY..=MAX_QUANTITY => { let mut result = Vec::new(); result.push(FunctionCode::WriteMultiReg as u8); result.append(&mut self.address.to_be_bytes().to_vec()); result.append(&mut (self.values.len() as u16).to_be_bytes().to_vec()); result.push((self.values.len() as u8) * 2); for val in &self.values { result.append(&mut val.to_be_bytes().to_vec()); } Ok(result) } _ => Err(Error::InvalidValue) } } fn decode(data: &[u8]) -> Result<Self, Error> { if data.len() < 6 { return Err(Error::InvalidDataLength); } if data[0] != FunctionCode::WriteMultiReg as u8 { return Err(Error::InvalidData); } let address = u16::from_be_bytes(data[1..=2].try_into().unwrap()); let quantity = u16::from_be_bytes(data[3..=4].try_into().unwrap()); let data_cnt = data[5]; if data_cnt as u16 != quantity * 2 { return Err(Error::InvalidDataLength); } if (quantity as usize) < MIN_QUANTITY || (quantity as usize) > MAX_QUANTITY { return Err(Error::InvalidData); } let mut values = Vec::with_capacity(quantity as usize); for i in 0..quantity { let val_idx = (6 + i * 2) as usize; values.push(u16::from_be_bytes(data[val_idx..=val_idx+1].try_into().unwrap())) } Ok(Self{address, values}) } } impl ReqT for Request { type Rsp = Response; } impl Setter for Request { fn create_expected_response(&self) -> Self::Rsp { Response::new(self.address, self.values.len() as u16) } } #[derive(Debug, PartialEq)] pub struct Response { address: u16, quantity: u16, } impl Response { pub fn new(address: u16, quantity: u16) -> Self { assert!(quantity as usize >= MIN_QUANTITY); assert!(quantity as usize <= MAX_QUANTITY); Self{address, quantity} } pub fn get_address(&self) -> u16 { self.address } pub fn get_quantity(&self) -> u16 { self.quantity } } impl Function for Response { fn encode(&self) -> Result<Vec<u8>, Error> { match self.quantity as usize { MIN_QUANTITY..=MAX_QUANTITY => { let mut result = Vec::new(); result.push(FunctionCode::WriteMultiReg as u8); result.append(&mut self.address.to_be_bytes().to_vec()); result.app
fn decode(data: &[u8]) -> Result<Self, Error> { if data.len() != 5 { return Err(Error::InvalidDataLength); } if data[0] != FunctionCode::WriteMultiReg as u8 { return Err(Error::InvalidData); } let address = u16::from_be_bytes(data[1..=2].try_into().unwrap()); let quantity = u16::from_be_bytes(data[3..=4].try_into().unwrap()); Ok(Self{address, quantity}) } } impl RspT for Response { fn get_exc_function_code() -> u8 { FunctionCode::ExcWriteMultiReg.try_into().unwrap() } } #[cfg(test)] mod tests { use super::*; #[test] fn test_encode_request() { let req = Request::new(0xdead, &vec![0xfade, 0xface, 0x0000, 0x0001]); let pdu = req.encode().unwrap(); let expected_pdu = vec![0x10, 0xde, 0xad, 0x00, 0x04, 0x08, 0xfa, 0xde, 0xfa, 0xce, 0x00, 0x00, 0x00, 0x01]; assert_eq!(pdu, expected_pdu); } #[test] fn test_encode_response() { let rsp = Response::new(0xffff, 0x0072); let pdu = rsp.encode().unwrap(); let expected_pdu = vec![0x10, 0xff, 0xff, 0x00, 0x72]; assert_eq!(pdu, expected_pdu); } #[test] fn test_decode_request() { let pdu = vec![0x10, 0x00, 0x00, 0x00, 0x02, 0x04, 0x01, 0x02, 0xfe, 0xfd]; let req = Request::decode(&pdu).unwrap(); let expected_req = Request::new(0x0000, &vec![0x0102, 0xfefd]); assert_eq!(req, expected_req); } #[test] fn test_decode_invalid_request() { let pdu = vec![0x11, 0x01, 0x23, 0x00, 0x01, 0x02, 0x11, 0x12]; let err = Request::decode(&pdu).err().unwrap(); match err { Error::InvalidData => {} _ => panic!(format!("Expected InvalidData, but got {:?}", err)), } } #[test] fn test_decode_response() { let pdu = vec![0x10, 0x01, 0x23, 0x00, 0x65]; let rsp = Response::decode(&pdu).unwrap(); let expected_rsp = Response::new(0x0123, 0x0065); assert_eq!(rsp, expected_rsp); } }
end(&mut self.quantity.to_be_bytes().to_vec()); Ok(result) } _ => Err(Error::InvalidValue) } }
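For readability: the prefix field stops mid-identifier ("result.app") and the middle field above supplies the remainder ("end(&mut ..."). Reassembled verbatim from the file_code field, with no new logic, the completed Response::encode reads:

impl Function for Response {
    fn encode(&self) -> Result<Vec<u8>, Error> {
        match self.quantity as usize {
            MIN_QUANTITY..=MAX_QUANTITY => {
                let mut result = Vec::new();
                result.push(FunctionCode::WriteMultiReg as u8);
                result.append(&mut self.address.to_be_bytes().to_vec());
                result.append(&mut self.quantity.to_be_bytes().to_vec());
                Ok(result)
            }
            _ => Err(Error::InvalidValue)
        }
    }
    // Response::decode continues in the suffix field above.
}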
function_block-function_prefixed
[ { "content": "/// Setter is a trait for Modbus requests that expect known response.\n\npub trait Setter where Self: Request, Self::Rsp: PartialEq {\n\n fn create_expected_response(&self) -> Self::Rsp;\n\n}\n\n\n\n#[derive(Clone, Copy, FromPrimitive, IntoPrimitive, PartialEq)]\n\n#[repr(u8)]\n\npub enum FunctionCode {\n\n ReadCoils = 0x01,\n\n ReadDscrIn = 0x02,\n\n ReadHldReg = 0x03,\n\n ReadInReg = 0x04,\n\n WriteSingleCoil = 0x05,\n\n WriteSingleReg = 0x06,\n\n WriteMultiReg = 0x10,\n\n\n\n ExcReadCoils = 0x81,\n\n ExcReadDscrIn = 0x82,\n\n ExcReadHldReg = 0x83,\n\n ExcReadInReg = 0x84,\n\n ExcWriteSingleCoil = 0x85,\n", "file_path": "src/pdu/mod.rs", "rank": 0, "score": 153163.7285572129 }, { "content": "pub fn decode_req(pdu: &[u8]) -> Result<RequestData, Error> {\n\n if pdu.len() < 2 {\n\n return Err(Error::InvalidDataLength);\n\n }\n\n\n\n match num::FromPrimitive::from_u8(pdu[0]) {\n\n Some(FunctionCode::ReadCoils) => Ok(RequestData::ReadCoils(bit_access::read_coils::Request::decode(pdu)?)),\n\n Some(FunctionCode::ReadDscrIn) => Ok(RequestData::ReadDscrIn(bit_access::read_dscr_in::Request::decode(pdu)?)),\n\n Some(FunctionCode::ReadHldReg) => Ok(RequestData::ReadHldReg(hex_access::read_hld_reg::Request::decode(pdu)?)),\n\n Some(FunctionCode::ReadInReg) => Ok(RequestData::ReadInReg(hex_access::read_in_reg::Request::decode(pdu)?)),\n\n Some(FunctionCode::WriteSingleCoil) => Ok(RequestData::WriteSingleCoil(bit_access::write_single_coil::Message::decode(pdu)?)),\n\n Some(FunctionCode::WriteSingleReg) => Ok(RequestData::WriteSingleReg(hex_access::write_single_reg::Message::decode(pdu)?)),\n\n Some(FunctionCode::WriteMultiReg) => Ok(RequestData::WriteMultiReg(hex_access::write_multi_reg::Request::decode(pdu)?)),\n\n _ => Err(Error::InvalidData),\n\n }\n\n}\n\n\n\n/*\n", "file_path": "src/pdu/mod.rs", "rank": 1, "score": 151742.4133594132 }, { "content": "fn encode_exc_rsp(function_code: &FunctionCode, exception_code: &ExceptionCode) -> Result<Vec<u8>, Error> {\n\n let mut result = Vec::new();\n\n result.push(*function_code as u8);\n\n result.push(*exception_code as u8);\n\n\n\n Ok(result)\n\n}\n\n*/\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n}", "file_path": "src/pdu/mod.rs", "rank": 2, "score": 129418.91312946606 }, { "content": "pub trait Request: Function {\n\n type Rsp: Response;\n\n}\n\n\n", "file_path": "src/pdu/mod.rs", "rank": 3, "score": 104454.15908393545 }, { "content": "pub trait Function {\n\n fn encode(&self) -> Result<Vec<u8>, Error>;\n\n fn decode(data: &[u8]) -> Result<Self, Error> where Self: Sized;\n\n}\n\n\n", "file_path": "src/pdu/mod.rs", "rank": 4, "score": 74286.0988156631 }, { "content": "fn get_transaction_id() -> u16 {\n\n TRANSACTION_ID.fetch_add(1, Ordering::Relaxed)\n\n}\n\n\n\npub struct Frame<'a> {\n\n transaction_id: u16,\n\n unit_id: u8,\n\n pdu: &'a [u8],\n\n}\n\n\n\nimpl<'a> Frame<'a> {\n\n pub fn new(unit_id: u8, pdu: &'a [u8]) -> Self {\n\n Self{transaction_id: get_transaction_id(), unit_id, pdu}\n\n }\n\n\n\n pub fn get_unit_id(&self) -> u8 {\n\n self.unit_id\n\n }\n\n\n\n pub fn get_pdu(&self) -> Vec<u8> {\n", "file_path": "src/transport/tcp/frame.rs", "rank": 5, "score": 69942.5103235788 }, { "content": "pub trait Response: Function + Sized {\n\n fn get_exc_function_code() -> u8;\n\n\n\n fn decode_response(data: &[u8]) -> Result<Self, Error> {\n\n if let Ok(exc_code) = Self::decode_exc_rsp(data, Some(Self::get_exc_function_code())) {\n\n return Err(Error::ExceptionResponse(exc_code));\n\n }\n\n\n\n Self::decode(data)\n\n }\n\n\n\n fn 
decode_exc_rsp(data: &[u8], exp_fnc_code: Option<u8>) -> Result<ExceptionCode, Error> {\n\n if data.len() != 2 {\n\n return Err(Error::InvalidDataLength);\n\n }\n\n\n\n if let Some(exp_fnc_code) = exp_fnc_code {\n\n if data[0] != exp_fnc_code {\n\n return Err(Error::InvalidData);\n\n }\n\n }\n\n\n\n ExceptionCode::try_from(data[1])\n\n }\n\n}\n\n\n", "file_path": "src/pdu/mod.rs", "rank": 6, "score": 64858.35145407589 }, { "content": "/// The trait implemented by Modbus protocol link layers \n\npub trait Transport {\n\n /// Type describing message destination\n\n type Dst;\n\n /// Stream used to read or write messages in during data exchange\n\n type Stream;\n\n\n\n /// Enable Modbus master mode for given transport.\n\n fn start_master(&mut self) -> Result<(), Error>;\n\n /// Enable Modbus slave mode for given transport.\n\n fn start_slave(&mut self, unit_id: u8) -> Result<(), Error>;\n\n\n\n /// Verify if given destination is broadcast.\n\n fn is_broadcast(dst: &Self::Dst) -> bool;\n\n\n\n /// Write PDU of a request frame through given transport.\n\n /// \n\n /// This method shall be used only in master mode.\n\n /// This method returns Stream that shall be used to read response.\n\n fn write_req_pdu(&mut self, dst: &Self::Dst, pdu: &[u8]) -> Result<Self::Stream, Error>;\n\n\n", "file_path": "src/transport/mod.rs", "rank": 7, "score": 36640.65201204279 }, { "content": "\n\n InvalidRequest,\n\n MissingReqHandler,\n\n\n\n IoError(IoError),\n\n SerialError(SerialError),\n\n}\n\n\n\nimpl fmt::Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n Error::InvalidValue => f.write_str(\"Invalid value\"),\n\n Error::TooShortData => f.write_str(\"Too short data in the buffer\"),\n\n Error::InvalidData => f.write_str(\"Invalid data\"),\n\n Error::InvalidDataLength => f.write_str(\"Invalid data length\"),\n\n Error::InvalidFunction => f.write_str(\"Invalid function code\"),\n\n Error::InvalidResponse => f.write_str(\"Invalid response\"),\n\n Error::NoResponse => f.write_str(\"No response\"),\n\n Error::InvalidRequest => f.write_str(\"Invalid request\"),\n\n Error::MissingReqHandler => f.write_str(\"Missing request handler for given request\"),\n", "file_path": "src/error.rs", "rank": 8, "score": 29446.725881920043 }, { "content": "use crate::pdu::ExceptionCode;\n\nuse serialport::Error as SerialError;\n\nuse std::convert::From;\n\nuse std::error::Error as StdError;\n\nuse std::fmt;\n\nuse std::io::Error as IoError;\n\n\n\n/// The error types used by the modbus library\n\n#[derive(Debug)]\n\npub enum Error {\n\n InvalidValue,\n\n\n\n TooShortData,\n\n InvalidData,\n\n InvalidDataLength,\n\n InvalidFunction,\n\n\n\n InvalidResponse,\n\n NoResponse,\n\n ExceptionResponse(ExceptionCode),\n", "file_path": "src/error.rs", "rank": 9, "score": 29444.410748066777 }, { "content": " Error::ExceptionResponse(code) => f.write_str(&format!(\"Exception response: {}\", code)),\n\n Error::IoError(error) => f.write_str(&format!(\"IO error: {}\", error)),\n\n Error::SerialError(error) => f.write_str(&format!(\"Serial error: {}\", error)),\n\n }\n\n }\n\n}\n\n\n\nimpl StdError for Error {}\n\n\n\nimpl From<SerialError> for Error {\n\n fn from(error: SerialError) -> Self {\n\n Self::SerialError(error)\n\n }\n\n}\n\n\n\nimpl From<IoError> for Error {\n\n fn from(error: IoError) -> Self {\n\n Self::IoError(error)\n\n }\n\n}\n", "file_path": "src/error.rs", "rank": 10, "score": 29441.782345254265 }, { "content": "#[derive(Clone, Copy, Debug, FromPrimitive, PartialEq)]\n\nenum 
Value {\n\n Off = 0x0000,\n\n On = 0xFF00,\n\n}\n\n\n\nimpl TryFrom<[u8; 2]> for Value {\n\n type Error = Error;\n\n\n\n fn try_from(value: [u8; 2]) -> Result<Self, Self::Error> {\n\n match u16::from_be_bytes(value) {\n\n x if x == Value::Off as u16 => Ok(Value::Off),\n\n x if x == Value::On as u16 => Ok(Value::On),\n\n _ => Err(Error::InvalidData),\n\n }\n\n }\n\n}\n\n\n\nimpl TryFrom<&[u8]> for Value {\n\n type Error = Error;\n\n\n", "file_path": "src/pdu/bit_access/write_single_coil.rs", "rank": 11, "score": 22596.889523856556 }, { "content": " /// let req = modbus::WriteSingleRegRequest::new(0xfedc, value);\n\n /// assert_eq!(req.get_value(), value);\n\n /// ```\n\n pub fn get_value(&self) -> u16 {\n\n self.value\n\n }\n\n}\n\n\n\nimpl Function for Message {\n\n fn encode(&self) -> Result<Vec<u8>, Error> {\n\n let mut result = Vec::new();\n\n result.push(FunctionCode::WriteSingleReg as u8);\n\n result.append(&mut self.address.to_be_bytes().to_vec());\n\n result.append(&mut self.value.to_be_bytes().to_vec());\n\n\n\n Ok(result)\n\n }\n\n\n\n fn decode(data: &[u8]) -> Result<Self, Error> {\n\n if data.len() != 5 {\n", "file_path": "src/pdu/hex_access/write_single_reg.rs", "rank": 14, "score": 25.838280611236712 }, { "content": " /// Get quantity of the coils in the request\n\n /// \n\n /// # Examples\n\n /// \n\n /// ```\n\n /// let quantity = 35;\n\n /// let request = modbus::ReadCoilsRequest::new(0, quantity);\n\n /// \n\n /// assert_eq!(request.get_quantity(), quantity);\n\n /// ```\n\n pub fn get_quantity(&self) -> u16 {\n\n self.quantity\n\n }\n\n}\n\n\n\nimpl Function for Request {\n\n fn encode(&self) -> Result<Vec<u8>, Error> {\n\n match self.quantity {\n\n 1..=2000 => {\n\n let mut result = Vec::new();\n", "file_path": "src/pdu/bit_access/read_coils.rs", "rank": 15, "score": 25.611333194854655 }, { "content": " result.append(&mut self.address.to_be_bytes().to_vec());\n\n result.append(&mut (self.value as u16).to_be_bytes().to_vec());\n\n\n\n Ok(result)\n\n }\n\n\n\n fn decode(data: &[u8]) -> Result<Self, Error> {\n\n if data.len() != 5 {\n\n return Err(Error::InvalidDataLength);\n\n }\n\n if data[0] != FunctionCode::WriteSingleCoil as u8 {\n\n return Err(Error::InvalidData);\n\n }\n\n \n\n Ok(Self{address: u16::from_be_bytes(data[1..=2].try_into().unwrap()),\n\n value: data[3..=4].try_into()?})\n\n }\n\n}\n\n\n\nimpl Request for Message {\n", "file_path": "src/pdu/bit_access/write_single_coil.rs", "rank": 16, "score": 25.55064513364615 }, { "content": " /// Get quantity of the discrete inputs in the request\n\n /// \n\n /// # Examples\n\n /// \n\n /// ```\n\n /// let quantity = 35;\n\n /// let request = modbus::ReadDscrInRequest::new(0, quantity);\n\n /// \n\n /// assert_eq!(request.get_quantity(), quantity);\n\n /// ```\n\n pub fn get_quantity(&self) -> u16 {\n\n self.quantity\n\n }\n\n}\n\n\n\nimpl Function for Request {\n\n fn encode(&self) -> Result<Vec<u8>, Error> {\n\n match self.quantity {\n\n 1..=2000 => {\n\n let mut result = Vec::new();\n", "file_path": "src/pdu/bit_access/read_dscr_in.rs", "rank": 17, "score": 25.401433631617426 }, { "content": " /// Get quantity of the registers in the request\n\n /// \n\n /// # Examples\n\n /// \n\n /// ```\n\n /// let quantity = 125;\n\n /// let request = modbus::ReadInRegRequest::new(0, quantity);\n\n /// \n\n /// assert_eq!(request.get_quantity(), quantity);\n\n /// ```\n\n pub fn get_quantity(&self) -> u16 {\n\n self.quantity\n\n }\n\n}\n\n\n\nimpl Function for Request {\n\n fn encode(&self) -> Result<Vec<u8>, Error> {\n\n match 
self.quantity {\n\n MIN_QUANTITY..=MAX_QUANTITY => {\n\n let mut result = Vec::new();\n", "file_path": "src/pdu/hex_access/read_in_reg.rs", "rank": 18, "score": 24.907919593897905 }, { "content": "use crate::Error;\n\nuse crate::pdu::{Function, FunctionCode, Request, Response, Setter};\n\nuse std::convert::TryInto;\n\n\n\n/// Write Single Register request or response function\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct Message {\n\n address: u16,\n\n value: u16,\n\n}\n\n\n\nimpl Message {\n\n /// Create a new Write Single Register function\n\n /// \n\n /// # Examples\n\n /// ```\n\n /// let req = modbus::WriteSingleRegRequest::new(0xabcd, 0xcafe);\n\n /// let rsp = modbus::WriteSingleRegResponse::new(0x0123, 0xface);\n\n /// ```\n\n pub fn new(address: u16, value: u16) -> Self {\n", "file_path": "src/pdu/hex_access/write_single_reg.rs", "rank": 20, "score": 24.71974865331939 }, { "content": " /// Get quantity of the registers in the request\n\n /// \n\n /// # Examples\n\n /// \n\n /// ```\n\n /// let quantity = 125;\n\n /// let request = modbus::ReadHldRegRequest::new(0, quantity);\n\n /// \n\n /// assert_eq!(request.get_quantity(), quantity);\n\n /// ```\n\n pub fn get_quantity(&self) -> u16 {\n\n self.quantity\n\n }\n\n}\n\n\n\nimpl Function for Request {\n\n fn encode(&self) -> Result<Vec<u8>, Error> {\n\n match self.quantity {\n\n MIN_QUANTITY..=MAX_QUANTITY => {\n\n let mut result = Vec::new();\n", "file_path": "src/pdu/hex_access/read_hld_reg.rs", "rank": 21, "score": 24.714775465112954 }, { "content": " result.push(FunctionCode::ReadCoils as u8);\n\n result.append(&mut self.address.to_be_bytes().to_vec());\n\n result.append(&mut self.quantity.to_be_bytes().to_vec());\n\n\n\n Ok(result)\n\n }\n\n _ => Err(Error::InvalidValue),\n\n }\n\n }\n\n\n\n fn decode(data: &[u8]) -> Result<Self, Error> where Self: Sized {\n\n if data.len() != 5 {\n\n return Err(Error::InvalidDataLength);\n\n }\n\n if data[0] != FunctionCode::ReadCoils as u8 {\n\n return Err(Error::InvalidData);\n\n }\n\n\n\n Ok(Self {address: u16::from_be_bytes(data[1..=2].try_into().unwrap()), \n\n quantity: u16::from_be_bytes(data[3..=4].try_into().unwrap())})\n", "file_path": "src/pdu/bit_access/read_coils.rs", "rank": 23, "score": 24.29421385618372 }, { "content": " result.push(FunctionCode::ReadInReg as u8);\n\n result.append(&mut self.address.to_be_bytes().to_vec());\n\n result.append(&mut self.quantity.to_be_bytes().to_vec());\n\n\n\n Ok(result)\n\n }\n\n _ => Err(Error::InvalidValue),\n\n }\n\n }\n\n\n\n fn decode(data: &[u8]) -> Result<Self, Error> where Self: Sized {\n\n if data.len() != 5 {\n\n return Err(Error::InvalidDataLength);\n\n }\n\n if data[0] != FunctionCode::ReadInReg as u8 {\n\n return Err(Error::InvalidData);\n\n }\n\n\n\n Ok(Self {address: u16::from_be_bytes(data[1..=2].try_into().unwrap()), \n\n quantity: u16::from_be_bytes(data[3..=4].try_into().unwrap())})\n", "file_path": "src/pdu/hex_access/read_in_reg.rs", "rank": 24, "score": 24.294213856183724 }, { "content": " result.push(FunctionCode::ReadDscrIn as u8);\n\n result.append(&mut self.address.to_be_bytes().to_vec());\n\n result.append(&mut self.quantity.to_be_bytes().to_vec());\n\n Ok(result)\n\n }\n\n _ => Err(Error::InvalidValue)\n\n }\n\n }\n\n\n\n fn decode(data: &[u8]) -> Result<Self, Error> {\n\n if data.len() != 5 {\n\n return Err(Error::InvalidDataLength);\n\n }\n\n if data[0] != FunctionCode::ReadDscrIn as u8 {\n\n return Err(Error::InvalidData);\n\n }\n\n\n\n Ok(Self {address: 
u16::from_be_bytes(data[1..=2].try_into().unwrap()),\n\n quantity: u16::from_be_bytes(data[3..=4].try_into().unwrap())})\n\n }\n", "file_path": "src/pdu/bit_access/read_dscr_in.rs", "rank": 25, "score": 24.15513682511171 }, { "content": " result.push(FunctionCode::ReadHldReg as u8);\n\n result.append(&mut self.address.to_be_bytes().to_vec());\n\n result.append(&mut self.quantity.to_be_bytes().to_vec());\n\n\n\n Ok(result)\n\n }\n\n _ => Err(Error::InvalidValue),\n\n }\n\n }\n\n\n\n fn decode(data: &[u8]) -> Result<Self, Error> where Self: Sized {\n\n if data.len() != 5 {\n\n return Err(Error::InvalidDataLength);\n\n }\n\n if data[0] != FunctionCode::ReadHldReg as u8 {\n\n return Err(Error::InvalidData);\n\n }\n\n\n\n Ok(Self {address: u16::from_be_bytes(data[1..=2].try_into().unwrap()), \n\n quantity: u16::from_be_bytes(data[3..=4].try_into().unwrap())})\n", "file_path": "src/pdu/hex_access/read_hld_reg.rs", "rank": 26, "score": 24.02566672907308 }, { "content": "use crate::error::Error;\n\nuse crate::pdu::{Function, FunctionCode, Request as ReqT, Response as RspT};\n\nuse std::convert::TryInto;\n\nuse std::vec::Vec;\n\n\n\nconst MIN_QUANTITY: u16 = 1;\n\nconst MAX_QUANTITY: u16 = 125;\n\n\n\n/// Read Holding Registers function request\n\n#[derive(Debug, PartialEq)]\n\npub struct Request {\n\n address: u16,\n\n quantity: u16,\n\n}\n\n\n\nimpl Request {\n\n /// Create a new Read Holding registers request\n\n /// \n\n /// # Examples\n\n /// ```\n", "file_path": "src/pdu/hex_access/read_hld_reg.rs", "rank": 27, "score": 23.831703007149002 }, { "content": "use crate::error::Error;\n\nuse crate::pdu::{Function, FunctionCode, Request as ReqT, Response as RspT};\n\nuse std::convert::TryInto;\n\nuse std::vec::Vec;\n\n\n\nconst MIN_QUANTITY: u16 = 1;\n\nconst MAX_QUANTITY: u16 = 0x7D;\n\n\n\n/// Read Input Registers function request\n\n#[derive(Debug, PartialEq)]\n\npub struct Request {\n\n address: u16,\n\n quantity: u16,\n\n}\n\n\n\nimpl Request {\n\n /// Create a new Read Input registers request\n\n /// \n\n /// # Examples\n\n /// ```\n", "file_path": "src/pdu/hex_access/read_in_reg.rs", "rank": 29, "score": 23.542008829951072 }, { "content": "\n\nimpl TryFrom<Value> for bool {\n\n type Error = Infallible;\n\n\n\n fn try_from(value: Value) -> Result<Self, Self::Error> {\n\n match value {\n\n Value::On => Ok(true),\n\n Value::Off => Ok(false),\n\n }\n\n }\n\n}\n\n\n\n/// Write Single Coil request or response function\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct Message {\n\n address: u16,\n\n value: Value,\n\n}\n\n\n\nimpl Message {\n", "file_path": "src/pdu/bit_access/write_single_coil.rs", "rank": 30, "score": 23.28556809758573 }, { "content": " pub fn new(registers: &[u16]) -> Self {\n\n Self{ registers: registers.to_vec() }\n\n }\n\n\n\n /// Get registers' values from the response.\n\n /// \n\n /// # Examples\n\n /// ```\n\n /// let registers = vec![0x2047, 0x0000, 0x0123];\n\n /// let rsp = modbus::ReadInRegResponse::new(&registers);\n\n /// assert_eq!(rsp.get_registers(), &registers);\n\n /// ```\n\n pub fn get_registers(&self) -> &Vec<u16> {\n\n &self.registers\n\n }\n\n}\n\n\n\nimpl Function for Response {\n\n fn encode(&self) -> Result<Vec<u8>, Error> {\n\n let mut result = Vec::new();\n", "file_path": "src/pdu/hex_access/read_in_reg.rs", "rank": 31, "score": 22.526711463488837 }, { "content": " pub fn new(registers: &[u16]) -> Self {\n\n Self{ registers: registers.to_vec() }\n\n }\n\n\n\n /// Get registers' values from the response.\n\n /// \n\n /// # Examples\n\n /// ```\n\n /// 
let registers = vec![0x2047, 0x0000, 0x0123];\n\n /// let rsp = modbus::ReadHldRegResponse::new(&registers);\n\n /// assert_eq!(rsp.get_registers(), &registers);\n\n /// ```\n\n pub fn get_registers(&self) -> &Vec<u16> {\n\n &self.registers\n\n }\n\n}\n\n\n\nimpl Function for Response {\n\n fn encode(&self) -> Result<Vec<u8>, Error> {\n\n let mut result = Vec::new();\n", "file_path": "src/pdu/hex_access/read_hld_reg.rs", "rank": 33, "score": 22.345925842846448 }, { "content": " fn try_from(value: &[u8]) -> Result<Self, Self::Error> {\n\n if value.len() != 2 {\n\n return Err(Error::InvalidDataLength);\n\n }\n\n let val_array: [u8; 2] = value.try_into().unwrap();\n\n\n\n Value::try_from(val_array)\n\n }\n\n}\n\n\n\nimpl TryFrom<bool> for Value {\n\n type Error = Infallible;\n\n\n\n fn try_from(value: bool) -> Result<Self, Self::Error> {\n\n match value {\n\n true => Ok(Value::On),\n\n false => Ok(Value::Off),\n\n }\n\n }\n\n}\n", "file_path": "src/pdu/bit_access/write_single_coil.rs", "rank": 34, "score": 21.997932564784193 }, { "content": " return Err(Error::InvalidDataLength);\n\n }\n\n if data[0] != FunctionCode::WriteSingleReg as u8 {\n\n return Err(Error::InvalidData);\n\n }\n\n \n\n Ok(Self{address: u16::from_be_bytes(data[1..=2].try_into().unwrap()),\n\n value: u16::from_be_bytes(data[3..=4].try_into().unwrap())})\n\n }\n\n}\n\n\n\nimpl Request for Message {\n\n type Rsp = Message;\n\n}\n\n\n\nimpl Response for Message {\n\n fn get_exc_function_code() -> u8 {\n\n FunctionCode::ExcWriteSingleReg.try_into().unwrap()\n\n }\n\n}\n", "file_path": "src/pdu/hex_access/write_single_reg.rs", "rank": 35, "score": 21.536671578375355 }, { "content": " self.address\n\n }\n\n\n\n /// Get value from the Write Single Coil function\n\n /// \n\n /// # Examples\n\n /// ```\n\n /// let value = true;\n\n /// let req = modbus::WriteSingleCoilRequest::new(0xfedc, value);\n\n /// assert_eq!(req.get_value(), value);\n\n /// ```\n\n pub fn get_value(&self) -> bool {\n\n self.value.try_into().unwrap()\n\n }\n\n}\n\n\n\nimpl Function for Message {\n\n fn encode(&self) -> Result<Vec<u8>, Error> {\n\n let mut result = Vec::new();\n\n result.push(FunctionCode::WriteSingleCoil as u8);\n", "file_path": "src/pdu/bit_access/write_single_coil.rs", "rank": 36, "score": 21.493162198818094 }, { "content": "use std::convert::TryInto;\n\nuse std::vec::Vec;\n\n\n\nuse crate::Error;\n\nuse crate::pdu::{MAX_SIZE, Function, Request as ReqT, Response as RspT, FunctionCode};\n\nuse super::DSCR_PER_BYTE;\n\n\n\n/// Read Discrete Inputs function request\n\n#[derive(Debug, PartialEq)]\n\npub struct Request {\n\n address: u16,\n\n quantity: u16,\n\n}\n\n\n\nimpl Request {\n\n /// Create a new Read Discrete Inputs request\n\n /// \n\n /// # Examples\n\n /// \n\n /// ```\n", "file_path": "src/pdu/bit_access/read_dscr_in.rs", "rank": 37, "score": 21.470351616293854 }, { "content": "use crate::error::Error;\n\nuse crate::pdu::{Function, FunctionCode, MAX_SIZE, Request as ReqT, Response as RspT};\n\nuse super::DSCR_PER_BYTE;\n\nuse std::convert::TryInto;\n\nuse std::vec::Vec;\n\n\n\n/// Read Coils function request\n\n#[derive(Debug, PartialEq)]\n\npub struct Request {\n\n address: u16,\n\n quantity: u16,\n\n}\n\n\n\nimpl Request {\n\n /// Create a new Read Coils request\n\n /// \n\n /// # Examples\n\n /// \n\n /// ```\n\n /// let request = modbus::ReadCoilsRequest::new(0x000a, 0x0004);\n", "file_path": "src/pdu/bit_access/read_coils.rs", "rank": 38, "score": 21.381452359833936 }, { "content": " /// Read PDU of a response frame through 
given transport.\n\n /// \n\n /// This method shall be used only in master mode.\n\n fn read_rsp_pdu(&mut self, stream: &mut Self::Stream, src: &Self::Dst) -> Result<Vec<u8>, Error>;\n\n\n\n /// Read PDU of a request frame through given transport.\n\n /// \n\n /// This method shall be used only is the slave mode.\n\n fn read_req_pdu(&mut self) -> Result<(Vec<u8>, Self::Stream), Error>;\n\n\n\n /// Write PDU of a response frame through given transport.\n\n /// \n\n /// This method shall be used only in the slave mode.\n\n fn write_rsp_pdu(&mut self, stream: &mut Self::Stream, pdu: &[u8]) -> Result<(), Error>;\n\n\n\n /// Write a request frame and read a response frame.\n\n /// \n\n /// # Examples\n\n /// ```no_run\n\n /// # use modbus::Transport;\n", "file_path": "src/transport/mod.rs", "rank": 40, "score": 19.66213549446649 }, { "content": "use std::convert::TryInto;\n\nuse std::vec::Vec;\n\n\n\nuse crc16;\n\n\n\nuse crate::error::Error;\n\n\n\npub struct Frame<'a> {\n\n address: u8,\n\n pdu: &'a [u8],\n\n}\n\n\n\nimpl<'a> Frame<'a> {\n\n pub fn new(address: u8, pdu: &'a [u8]) -> Self {\n\n Frame{address, pdu}\n\n }\n\n\n\n pub fn get_pdu(&self) -> Vec<u8> {\n\n Vec::from(self.pdu)\n\n }\n", "file_path": "src/transport/rtu/frame.rs", "rank": 42, "score": 19.175179532975474 }, { "content": "\n\n pub fn is_address(&self, other: u8) -> bool {\n\n self.address == other\n\n }\n\n\n\n pub fn encode(&self) -> Result<Vec<u8>, Error> {\n\n let mut result = Vec::new();\n\n result.push(self.address);\n\n result.append(&mut self.pdu.to_vec());\n\n\n\n let crc = crc16::State::<crc16::MODBUS>::calculate(&result);\n\n result.append(&mut crc.to_le_bytes().to_vec());\n\n\n\n Ok(result)\n\n }\n\n\n\n pub fn decode(data: &'a [u8]) -> Result<Self, Error> {\n\n let len = data.len();\n\n if len < 4 {\n\n return Err(Error::InvalidDataLength);\n", "file_path": "src/transport/rtu/frame.rs", "rank": 45, "score": 18.83174955887096 }, { "content": " /// ```\n\n pub fn new(address: u16, quantity: u16) -> Self {\n\n // TODO: debug_assert quantity > 0\n\n Request{address, quantity}\n\n }\n\n\n\n /// Get address of the first coil from the request\n\n /// \n\n /// # Examples\n\n /// \n\n /// ```\n\n /// let address = 0x01234;\n\n /// let request = modbus::ReadCoilsRequest::new(address, 0x0001);\n\n /// \n\n /// assert_eq!(request.get_address(), address);\n\n /// ```\n\n pub fn get_address(&self) -> u16 {\n\n self.address\n\n }\n\n\n", "file_path": "src/pdu/bit_access/read_coils.rs", "rank": 46, "score": 18.000495046681845 }, { "content": " /// let req = modbus::ReadInRegRequest::new(0x0102, 0x0001);\n\n /// ```\n\n pub fn new(address: u16, quantity: u16) -> Self {\n\n Self {address, quantity}\n\n }\n\n\n\n /// Get address of the first register from the request\n\n /// \n\n /// # Examples\n\n /// \n\n /// ```\n\n /// let address = 0x4321;\n\n /// let request = modbus::ReadInRegRequest::new(address, 0x0001);\n\n /// \n\n /// assert_eq!(request.get_address(), address);\n\n /// ```\n\n pub fn get_address(&self) -> u16 {\n\n self.address\n\n }\n\n\n", "file_path": "src/pdu/hex_access/read_in_reg.rs", "rank": 47, "score": 17.928516715064582 }, { "content": " /// let req = modbus::ReadHldRegRequest::new(0x0102, 0x0001);\n\n /// ```\n\n pub fn new(address: u16, quantity: u16) -> Self {\n\n Self {address, quantity}\n\n }\n\n\n\n /// Get address of the first register from the request\n\n /// \n\n /// # Examples\n\n /// \n\n /// ```\n\n /// let address = 0x4321;\n\n /// let request = modbus::ReadHldRegRequest::new(address, 
0x0001);\n\n /// \n\n /// assert_eq!(request.get_address(), address);\n\n /// ```\n\n pub fn get_address(&self) -> u16 {\n\n self.address\n\n }\n\n\n", "file_path": "src/pdu/hex_access/read_hld_reg.rs", "rank": 48, "score": 17.745252693969135 }, { "content": " return Ok((result, ()));\n\n }\n\n }\n\n } else {\n\n Err(Error::InvalidValue)\n\n }\n\n }\n\n\n\n fn write_rsp_pdu(&mut self, _: &mut Self::Stream, pdu: &[u8]) -> Result<(), Error> {\n\n if let Role::Slave(unit_id) = self.role {\n\n self.write_pdu(unit_id, pdu)\n\n } else {\n\n Err(Error::InvalidValue)\n\n }\n\n }\n\n}", "file_path": "src/transport/rtu/conn.rs", "rank": 49, "score": 17.645892508691176 }, { "content": " }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_encode_read_coils_request() {\n\n let pdu = Request{address: 0x1234, quantity: 0x00cd}.encode().unwrap();\n\n let expected_pdu = vec![0x01, 0x12, 0x34, 0x00, 0xcd];\n\n assert_eq!(pdu, expected_pdu);\n\n }\n\n\n\n #[test]\n\n fn test_encode_read_zero_coils_request() {\n\n let result = Request{address: 0x1234, quantity: 0}.encode().err().unwrap();\n\n match result {\n\n Error::InvalidValue => {}\n\n _ => panic!(format!(\"Expected InvalidValue, but got {:?}\", result)),\n", "file_path": "src/pdu/bit_access/read_coils.rs", "rank": 50, "score": 17.588257049629814 }, { "content": " /// let request = modbus::ReadDscrInRequest::new(0x000a, 0x0004);\n\n /// ```\n\n pub fn new(address: u16, quantity: u16) -> Self {\n\n Request{address, quantity}\n\n }\n\n\n\n /// Get address of the first discrete input from the request\n\n /// \n\n /// # Examples\n\n /// \n\n /// ```\n\n /// let address = 0x01234;\n\n /// let request = modbus::ReadDscrInRequest::new(address, 0x0001);\n\n /// \n\n /// assert_eq!(request.get_address(), address);\n\n /// ```\n\n pub fn get_address(&self) -> u16 {\n\n self.address\n\n }\n\n\n", "file_path": "src/pdu/bit_access/read_dscr_in.rs", "rank": 51, "score": 17.33600226665881 }, { "content": " self.pdu.to_vec()\n\n }\n\n\n\n pub fn encode(&self) -> Result<Vec<u8>, Error> {\n\n let mut result = Vec::new();\n\n result.append(&mut self.transaction_id.to_be_bytes().to_vec());\n\n result.append(&mut MODBUS_ID.to_be_bytes().to_vec());\n\n result.append(&mut ((self.pdu.len() + 1) as u16).to_be_bytes().to_vec());\n\n result.push(self.unit_id);\n\n result.append(&mut self.pdu.to_vec());\n\n\n\n Ok(result)\n\n }\n\n\n\n pub fn decode(data: &'a [u8]) -> Result<Self, Error> {\n\n let len = data.len();\n\n if len < 8 {\n\n return Err(Error::TooShortData);\n\n }\n\n if u16::from_be_bytes(data[2..=3].try_into().unwrap()) != MODBUS_ID {\n", "file_path": "src/transport/tcp/frame.rs", "rank": 52, "score": 17.102300147088517 }, { "content": "}\n\n\n\nimpl Transport for Rtu {\n\n type Dst = u8;\n\n type Stream = ();\n\n\n\n fn start_master(&mut self) -> Result<(), Error> {\n\n self.role = Role::Master;\n\n Ok(())\n\n }\n\n\n\n fn start_slave(&mut self, unit_id: u8) -> Result<(), Error> {\n\n match unit_id {\n\n 1..=247 => {\n\n self.role = Role::Slave(unit_id);\n\n Ok(())\n\n }\n\n _ => Err(Error::InvalidValue)\n\n }\n\n }\n", "file_path": "src/transport/rtu/conn.rs", "rank": 53, "score": 17.050101936622287 }, { "content": " type Rsp = Message;\n\n}\n\n\n\nimpl Response for Message {\n\n fn get_exc_function_code() -> u8 {\n\n FunctionCode::ExcWriteSingleCoil.try_into().unwrap()\n\n }\n\n}\n\n\n\nimpl Setter for Message {\n\n fn create_expected_response(&self) -> Self::Rsp {\n\n self.clone()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n 
use super::*;\n\n\n\n #[test]\n", "file_path": "src/pdu/bit_access/write_single_coil.rs", "rank": 54, "score": 16.92363947338746 }, { "content": " {\n\n // TODO: Timeout\n\n Self::read_pdu(stream, src.unit_id)\n\n }\n\n\n\n fn read_req_pdu(&mut self) -> Result<(Vec<u8>, Self::Stream), Error> {\n\n if let Some(listener) = &self.listener {\n\n let (mut socket, _addr) = listener.accept()?;\n\n\n\n Ok((Self::read_pdu(&mut socket, self.unit_id)?, socket))\n\n }\n\n else {\n\n Err(Error::InvalidValue)\n\n }\n\n }\n\n\n\n fn write_rsp_pdu(&mut self, stream: &mut Self::Stream, pdu: &[u8]) -> Result<(), Error> {\n\n Self::write_pdu(stream, pdu, self.unit_id)\n\n }\n\n}\n", "file_path": "src/transport/tcp/conn.rs", "rank": 55, "score": 16.68036119037265 }, { "content": " /// Create a new Write Single Coil function\n\n /// \n\n /// # Examples\n\n /// ```\n\n /// let req = modbus::WriteSingleCoilRequest::new(0xabcd, true);\n\n /// let rsp = modbus::WriteSingleCoilResponse::new(0x0123, false);\n\n /// ```\n\n pub fn new(address: u16, value: bool) -> Self {\n\n Message{address, value: value.try_into().unwrap()}\n\n }\n\n\n\n /// Get address of the coil from the Write Single Coil function\n\n /// \n\n /// # Examples\n\n /// ```\n\n /// let address = 0x0abc;\n\n /// let rsp = modbus::WriteSingleCoilResponse::new(address, false);\n\n /// assert_eq!(rsp.get_address(), address);\n\n /// ```\n\n pub fn get_address(&self) -> u16 {\n", "file_path": "src/pdu/bit_access/write_single_coil.rs", "rank": 56, "score": 16.41860803659536 }, { "content": " }\n\n if num_bytes as usize != data.len() - 2 {\n\n return Err(Error::InvalidDataLength);\n\n }\n\n\n\n let num_registers = (num_bytes / 2) as usize;\n\n let mut registers = Vec::with_capacity(num_registers);\n\n for i in 0..num_registers {\n\n let reg_idx = 2 + 2 * i;\n\n let reg = u16::from_be_bytes(data[reg_idx..=(reg_idx+1)].try_into().unwrap());\n\n registers.push(reg);\n\n }\n\n Ok(Self {registers})\n\n }\n\n}\n\n\n\nimpl RspT for Response {\n\n fn get_exc_function_code() -> u8 {\n\n FunctionCode::ExcReadInReg.try_into().unwrap()\n\n }\n", "file_path": "src/pdu/hex_access/read_in_reg.rs", "rank": 57, "score": 16.35154793878881 }, { "content": "\n\n result.push(byte);\n\n }\n\n\n\n Ok(result)\n\n }\n\n _ => Err(Error::InvalidValue)\n\n }\n\n }\n\n\n\n fn decode(data: &[u8]) -> Result<Self, Error> {\n\n if data.len() < 2 {\n\n return Err(Error::InvalidDataLength);\n\n }\n\n if data[0] != FunctionCode::ReadDscrIn as u8 {\n\n return Err(Error::InvalidData);\n\n }\n\n\n\n let byte_count = data[1] as usize;\n\n if data.len() != byte_count + 2 {\n", "file_path": "src/pdu/bit_access/read_dscr_in.rs", "rank": 58, "score": 16.337749671586444 }, { "content": " }\n\n if num_bytes as usize != data.len() - 2 {\n\n return Err(Error::InvalidDataLength);\n\n }\n\n\n\n let num_registers = (num_bytes / 2) as usize;\n\n let mut registers = Vec::with_capacity(num_registers);\n\n for i in 0..num_registers {\n\n let reg_idx = 2 + 2 * i;\n\n let reg = u16::from_be_bytes(data[reg_idx..=(reg_idx+1)].try_into().unwrap());\n\n registers.push(reg);\n\n }\n\n Ok(Self {registers})\n\n }\n\n}\n\n\n\nimpl RspT for Response {\n\n fn get_exc_function_code() -> u8 {\n\n FunctionCode::ExcReadHldReg.try_into().unwrap()\n\n }\n", "file_path": "src/pdu/hex_access/read_hld_reg.rs", "rank": 59, "score": 16.21580796909682 }, { "content": "\n\n let byte_count = data[1] as usize;\n\n if data.len() != byte_count + 2 {\n\n return Err(Error::InvalidDataLength);\n\n }\n\n\n\n let mut result = 
Vec::with_capacity(byte_count * DSCR_PER_BYTE);\n\n for byte_num in 0..byte_count {\n\n for bit_num in 0..DSCR_PER_BYTE {\n\n result.push(if data[2 + byte_num] & (1 << bit_num) != 0 { true } else { false });\n\n }\n\n }\n\n\n\n Ok(Self {coils: result})\n\n }\n\n}\n\n\n\nimpl RspT for Response {\n\n fn get_exc_function_code() -> u8 {\n\n FunctionCode::ExcReadCoils.try_into().unwrap()\n", "file_path": "src/pdu/bit_access/read_coils.rs", "rank": 60, "score": 16.123913634653906 }, { "content": " /// let mut mb = modbus::tcp::Tcp::new();\n\n /// mb.start_slave(10).unwrap();\n\n /// let (req, stream) = mb.read_req().unwrap();\n\n /// \n\n /// if let modbus::RequestData::ReadCoils(request) = req {\n\n /// let result = mb.write_rsp(stream, modbus::ReadCoilsResponse::new(&[true, false]));\n\n /// }\n\n /// ```\n\n fn write_rsp<Rsp: Response>(&mut self, mut stream: Self::Stream, response: Rsp) -> Result<(), Error> {\n\n self.write_rsp_pdu(&mut stream, &response.encode()?)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::ReadCoilsResponse;\n\n\n\n /*\n\n use crate::ReadCoilsRequest;\n", "file_path": "src/transport/mod.rs", "rank": 61, "score": 16.056648017846054 }, { "content": " /// # use std::net::{IpAddr, Ipv4Addr};\n\n /// #\n\n /// let mut mb = modbus::tcp::Tcp::new();\n\n /// let dst = modbus::tcp::Dst::new(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), 10);\n\n /// let req = modbus::ReadCoilsRequest::new(0x0123, 0x0002);\n\n /// let rsp = mb.write_req_read_rsp(&dst, &req);\n\n /// ```\n\n fn write_req_read_rsp<Req: Request>(&mut self, dst: &Self::Dst, req: &Req) -> Result<Option<Req::Rsp>, Error> {\n\n let req_pdu: Vec<u8> = req.encode()?;\n\n let mut stream = self.write_req_pdu(dst, &req_pdu)?;\n\n\n\n if Self::is_broadcast(dst) {\n\n Ok(None)\n\n } else {\n\n let rsp_pdu = self.read_rsp_pdu(&mut stream, dst)?;\n\n Ok(Some(Req::Rsp::decode_response(&rsp_pdu)?))\n\n }\n\n }\n\n\n\n /// Write a setter request and read a response frame.\n", "file_path": "src/transport/mod.rs", "rank": 62, "score": 15.928155886431142 }, { "content": " /// \n\n /// This function handles unexpected responses\n\n /// \n\n /// # Examples\n\n /// ```no_run\n\n /// # use modbus::Transport;\n\n /// # use std::net::{IpAddr, Ipv4Addr};\n\n /// #\n\n /// let mut mb = modbus::tcp::Tcp::new();\n\n /// let dst = modbus::tcp::Dst::new(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), 10);\n\n /// let req = modbus::WriteSingleCoilRequest::new(0x0123, true);\n\n /// mb.write_req_read_rsp(&dst, &req).unwrap();\n\n /// ```\n\n fn write_setter_req<Req: Setter>(&mut self, dst: &Self::Dst, req: &Req) -> Result<(), Error> \n\n where Req::Rsp: PartialEq \n\n {\n\n let req_pdu: Vec<u8> = req.encode()?;\n\n let mut stream = self.write_req_pdu(dst, &req_pdu)?;\n\n\n\n if Self::is_broadcast(dst) {\n", "file_path": "src/transport/mod.rs", "rank": 63, "score": 15.689699135432477 }, { "content": "\n\n fn is_broadcast(dst: &Self::Dst) -> bool {\n\n *dst == BROADCAST_DST\n\n }\n\n\n\n fn write_req_pdu(&mut self, dst: &Self::Dst, pdu: &[u8]) -> Result<Self::Stream, Error> {\n\n self.write_pdu(*dst, pdu)?;\n\n Ok(())\n\n }\n\n\n\n fn read_rsp_pdu(&mut self, _: &mut Self::Stream, src: &Self::Dst) -> Result<Vec<u8>, Error> {\n\n self.read_pdu(*src, false)\n\n }\n\n\n\n fn read_req_pdu(&mut self) -> Result<(Vec<u8>, Self::Stream), Error> {\n\n if let Role::Slave(unit_id) = self.role {\n\n loop {\n\n let result = self.read_pdu(unit_id, true);\n\n\n\n if let Ok(result) = result {\n", "file_path": "src/transport/rtu/conn.rs", "rank": 64, 
"score": 15.62158785817148 }, { "content": " pub fn new(coils: &[bool]) -> Self {\n\n Self {coils: coils.to_vec()}\n\n }\n\n\n\n /// Get vector of coils from the given response.\n\n /// \n\n /// # Examples\n\n /// ```\n\n /// let coil_values = [true, false, true, true, false, false, true, false];\n\n /// let response = modbus::ReadCoilsResponse::new(&coil_values);\n\n /// let new_coil_values = response.get_coils();\n\n /// assert_eq!(&coil_values.to_vec(), new_coil_values)\n\n /// ```\n\n pub fn get_coils(&self) -> &Vec<bool> {\n\n &self.coils\n\n }\n\n}\n\n\n\nimpl Function for Response {\n\n fn encode(&self) -> Result<Vec<u8>, Error> {\n", "file_path": "src/pdu/bit_access/read_coils.rs", "rank": 65, "score": 15.448851643068377 }, { "content": " byte |= 1 << bit_num;\n\n }\n\n }\n\n\n\n result.push(byte);\n\n }\n\n\n\n Ok(result)\n\n }\n\n _ => Err(Error::InvalidValue),\n\n }\n\n }\n\n\n\n fn decode(data: &[u8]) -> Result<Self, Error> where Self: Sized {\n\n if data.len() < 3 {\n\n return Err(Error::InvalidDataLength);\n\n }\n\n if data[0] != FunctionCode::ReadCoils as u8 {\n\n return Err(Error::InvalidData);\n\n }\n", "file_path": "src/pdu/bit_access/read_coils.rs", "rank": 66, "score": 15.310116606133292 }, { "content": " result.push(FunctionCode::ReadInReg as u8);\n\n result.push((self.registers.len() * 2) as u8);\n\n for reg in &self.registers {\n\n result.append(&mut reg.to_be_bytes().to_vec());\n\n }\n\n\n\n Ok(result)\n\n }\n\n\n\n fn decode(data: &[u8]) -> Result<Self, Error> where Self: Sized {\n\n if data.len() < 2 {\n\n return Err(Error::InvalidDataLength);\n\n }\n\n if data[0] != FunctionCode::ReadInReg as u8 {\n\n return Err(Error::InvalidData);\n\n }\n\n\n\n let num_bytes = data[1];\n\n if num_bytes % 2 != 0 {\n\n return Err(Error::InvalidData);\n", "file_path": "src/pdu/hex_access/read_in_reg.rs", "rank": 67, "score": 15.239533633488488 }, { "content": " return Err(Error::InvalidDataLength);\n\n }\n\n\n\n let mut result = Self{inputs: Vec::with_capacity(byte_count * DSCR_PER_BYTE)};\n\n for byte_num in 2..2+byte_count {\n\n for bit_num in 0..DSCR_PER_BYTE {\n\n result.inputs.push(if data[byte_num] & (1 << bit_num) != 0 { true } else { false });\n\n }\n\n }\n\n\n\n Ok(result)\n\n }\n\n}\n\n\n\nimpl RspT for Response {\n\n fn get_exc_function_code() -> u8 {\n\n FunctionCode::ExcReadDscrIn.try_into().unwrap()\n\n }\n\n}\n\n\n", "file_path": "src/pdu/bit_access/read_dscr_in.rs", "rank": 68, "score": 15.212792998316107 }, { "content": " result.push(FunctionCode::ReadHldReg as u8);\n\n result.push((self.registers.len() * 2) as u8);\n\n for reg in &self.registers {\n\n result.append(&mut reg.to_be_bytes().to_vec());\n\n }\n\n\n\n Ok(result)\n\n }\n\n\n\n fn decode(data: &[u8]) -> Result<Self, Error> where Self: Sized {\n\n if data.len() < 2 {\n\n return Err(Error::InvalidDataLength);\n\n }\n\n if data[0] != FunctionCode::ReadHldReg as u8 {\n\n return Err(Error::InvalidData);\n\n }\n\n\n\n let num_bytes = data[1];\n\n if num_bytes % 2 != 0 {\n\n return Err(Error::InvalidData);\n", "file_path": "src/pdu/hex_access/read_hld_reg.rs", "rank": 69, "score": 15.0783111812615 }, { "content": " ExceptionCode::IllegalFunction => write!(f, \"[exc] Illegal function\"),\n\n ExceptionCode::IllegalDataAddress => write!(f, \"[exc] Illegal data address\"),\n\n ExceptionCode::IllegalDataValue => write!(f, \"[exc] Illegal data value\"),\n\n ExceptionCode::ServerDeviceFailure => write!(f, \"[exc] Server device failure\"),\n\n ExceptionCode::Acknowledge => write!(f, \"[exc] Acknowledge\"),\n\n 
ExceptionCode::ServerDeviceBusy => write!(f, \"[exc] Server device busy\"),\n\n ExceptionCode::MemoryParityError => write!(f, \"[exc] Memory parity error\"),\n\n ExceptionCode::GatewayPathUnavailable => write!(f, \"[exc] Gateway path unavailable\"),\n\n ExceptionCode::GatewayTargetDeviceFailedToRespond => write!(f, \"[exc] Gateway target device failed to respond\"),\n\n }\n\n }\n\n}\n\n\n\nimpl TryFrom<u8> for ExceptionCode {\n\n type Error = Error;\n\n\n\n fn try_from(v: u8) -> Result<Self, Error> {\n\n match v {\n\n x if x == ExceptionCode::IllegalFunction as u8 => Ok(ExceptionCode::IllegalFunction),\n\n x if x == ExceptionCode::IllegalDataAddress as u8 => Ok(ExceptionCode::IllegalDataAddress),\n", "file_path": "src/pdu/mod.rs", "rank": 70, "score": 15.043028806901116 }, { "content": " self.last_baud_timestamp = Instant::now();\n\n\n\n Ok(())\n\n }\n\n\n\n fn read_pdu(&mut self, expected_unit_id: u8, infinitely: bool) -> Result<Vec<u8>, Error> {\n\n let mut rsp_frame = Vec::new();\n\n let mut rsp_byte: [u8; 1] = [0];\n\n\n\n loop {\n\n match self.serial.read(&mut rsp_byte) {\n\n Ok(num_bytes) => {\n\n assert_eq!(num_bytes, 1);\n\n rsp_frame.push(rsp_byte[0]);\n\n\n\n self.last_baud_timestamp = Instant::now();\n\n }\n\n Err(err) => {\n\n match err.kind() {\n\n std::io::ErrorKind::TimedOut => {\n", "file_path": "src/transport/rtu/conn.rs", "rank": 71, "score": 14.802205278104106 }, { "content": " const MAX_BYTE_COUNT: usize = MAX_SIZE - 2;\n\n let byte_count = self.coils.len() / DSCR_PER_BYTE + if self.coils.len() % DSCR_PER_BYTE > 0 { 1 } else { 0 };\n\n\n\n match byte_count {\n\n 0 => Err(Error::InvalidValue),\n\n 1..=MAX_BYTE_COUNT => {\n\n let mut result = Vec::new();\n\n result.push(FunctionCode::ReadCoils as u8);\n\n result.push(byte_count as u8);\n\n\n\n for byte_num in 0..byte_count {\n\n let mut byte: u8 = 0;\n\n\n\n for bit_num in 0..DSCR_PER_BYTE {\n\n let coil_id = byte_num * DSCR_PER_BYTE + bit_num;\n\n if coil_id >= self.coils.len() {\n\n break;\n\n }\n\n\n\n if self.coils[coil_id] {\n", "file_path": "src/pdu/bit_access/read_coils.rs", "rank": 72, "score": 14.375934779773154 }, { "content": "\n\n fn start_slave(&mut self, unit_id: u8) -> Result<(), Error> {\n\n self.unit_id = unit_id;\n\n self.listener = Some(TcpListener::bind(SocketAddr::from(([127, 0, 0, 1], TCP_PORT)))?);\n\n Ok(())\n\n }\n\n\n\n fn is_broadcast(dst: &Self::Dst) -> bool {\n\n dst.unit_id == BROADCAST_UNIT_ID\n\n }\n\n\n\n fn write_req_pdu(&mut self, dst: &Self::Dst, pdu: &[u8]) -> Result<Self::Stream, Error> {\n\n let peer_addr = SocketAddr::from((dst.ip_addr, TCP_PORT));\n\n let mut stream = Self::connect(&peer_addr)?;\n\n\n\n Self::write_pdu(&mut stream, pdu, dst.unit_id)?;\n\n Ok(stream)\n\n }\n\n\n\n fn read_rsp_pdu(&mut self, stream: &mut Self::Stream, src: &self::Dst) -> Result<Vec<u8>, Error>\n", "file_path": "src/transport/tcp/conn.rs", "rank": 73, "score": 14.346355015944846 }, { "content": "pub mod rtu;\n\npub mod tcp;\n\n\n\nuse crate::error::Error;\n\nuse crate::pdu::{Request, Response, Setter, RequestData, decode_req};\n\n\n\n/// The trait implemented by Modbus protocol link layers \n", "file_path": "src/transport/mod.rs", "rank": 74, "score": 14.002231317894864 }, { "content": "\n\nimpl Tcp {\n\n /// Create a new instance of the Modbus transport\n\n /// \n\n /// # Examples\n\n /// ```\n\n /// let modbus = modbus::tcp::Tcp::new();\n\n /// ```\n\n pub fn new() -> Self {\n\n Self {listener: None, unit_id: 255}\n\n }\n\n\n\n fn connect(addr: &SocketAddr) -> Result<TcpStream, Error> {\n\n let 
stream = TcpStream::connect_timeout(addr, Duration::from_secs(1))?;\n\n stream.set_read_timeout(Some(Duration::from_secs(1)))?;\n\n Ok(stream)\n\n }\n\n\n\n fn read_pdu(stream: &mut TcpStream, expected_unit_id: u8) -> Result<Vec<u8>, Error> {\n\n let mut frame_pdu = Vec::new();\n", "file_path": "src/transport/tcp/conn.rs", "rank": 75, "score": 13.922457074313957 }, { "content": " ExcWriteSingleReg = 0x86,\n\n ExcWriteMultiReg = 0x90,\n\n}\n\n\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum ExceptionCode {\n\n IllegalFunction = 0x01,\n\n IllegalDataAddress = 0x02,\n\n IllegalDataValue = 0x03,\n\n ServerDeviceFailure = 0x04,\n\n Acknowledge = 0x05,\n\n ServerDeviceBusy = 0x06,\n\n MemoryParityError = 0x08,\n\n GatewayPathUnavailable = 0x0A,\n\n GatewayTargetDeviceFailedToRespond = 0x0B,\n\n}\n\n\n\nimpl fmt::Display for ExceptionCode {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n", "file_path": "src/pdu/mod.rs", "rank": 76, "score": 13.409868549439828 }, { "content": "extern crate num;\n\n#[macro_use]\n\nextern crate num_derive;\n\n\n\nmod error;\n\nmod pdu;\n\nmod transport;\n\n\n\npub use error::Error;\n\npub use pdu::{Request, Setter};\n\npub use pdu::RequestData;\n\n\n\npub use pdu::bit_access::read_coils::Request as ReadCoilsRequest;\n\npub use pdu::bit_access::read_dscr_in::Request as ReadDscrInRequest;\n\npub use pdu::hex_access::read_hld_reg::Request as ReadHldRegRequest;\n\npub use pdu::hex_access::read_in_reg::Request as ReadInRegRequest;\n\npub use pdu::bit_access::write_single_coil::Message as WriteSingleCoilRequest;\n\npub use pdu::hex_access::write_single_reg::Message as WriteSingleRegRequest;\n\npub use pdu::hex_access::write_multi_reg::Request as WriteMultiRegRequest;\n\n\n", "file_path": "src/lib.rs", "rank": 77, "score": 13.406050094125135 }, { "content": " Message{address, value}\n\n }\n\n\n\n /// Get address of the register from the Write Single Reigster function\n\n /// \n\n /// # Examples\n\n /// ```\n\n /// let address = 0x0abc;\n\n /// let rsp = modbus::WriteSingleRegResponse::new(address, 0x0000);\n\n /// assert_eq!(rsp.get_address(), address);\n\n /// ```\n\n pub fn get_address(&self) -> u16 {\n\n self.address\n\n }\n\n\n\n /// Get value from the Write Single Register function\n\n /// \n\n /// # Examples\n\n /// ```\n\n /// let value = 0x0123;\n", "file_path": "src/pdu/hex_access/write_single_reg.rs", "rank": 78, "score": 13.300684907106191 }, { "content": " const MAX_BYTE_COUNT: usize = MAX_SIZE - 3;\n\n \n\n match byte_count {\n\n 1..=MAX_BYTE_COUNT => {\n\n let mut result = Vec::new();\n\n result.push(FunctionCode::ReadDscrIn as u8);\n\n result.push(byte_count as u8);\n\n\n\n for byte_num in 0..byte_count {\n\n let mut byte: u8 = 0;\n\n for bit_num in 0..DSCR_PER_BYTE {\n\n let i = byte_num * DSCR_PER_BYTE + bit_num;\n\n if i >= self.inputs.len() {\n\n break;\n\n }\n\n\n\n if self.inputs[i] {\n\n byte |= 1 << bit_num;\n\n }\n\n }\n", "file_path": "src/pdu/bit_access/read_dscr_in.rs", "rank": 79, "score": 13.227585888596597 }, { "content": "use crate::Error;\n\nuse crate::pdu::{Function, FunctionCode, Request, Response, Setter};\n\nuse std::convert::{Infallible, TryFrom, TryInto};\n\n\n\n#[derive(Clone, Copy, Debug, FromPrimitive, PartialEq)]\n", "file_path": "src/pdu/bit_access/write_single_coil.rs", "rank": 80, "score": 13.220828218136301 }, { "content": " }\n\n}\n\n\n\nimpl ReqT for Request {\n\n type Rsp = Response;\n\n}\n\n\n\n/// Read Holding Registers function response\n\npub struct Response {\n\n registers: 
Vec<u16>,\n\n}\n\n\n\nimpl Response {\n\n /// Create a new Read Holding Registers response\n\n /// \n\n /// # Examples\n\n /// ```\n\n /// let registers: [u16; 1] = [0x1023];\n\n /// let rsp = modbus::ReadInRegResponse::new(&registers);\n\n /// ```\n", "file_path": "src/pdu/hex_access/read_in_reg.rs", "rank": 81, "score": 12.366120286575724 }, { "content": "//! Modbus over TCP/IP\n\n \n\nuse crate::error::Error;\n\nuse std::convert::TryInto;\n\nuse std::io::prelude::*;\n\nuse std::net::{IpAddr, SocketAddr, TcpListener, TcpStream};\n\nuse std::time::Duration;\n\nuse super::frame::Frame;\n\nuse super::super::Transport;\n\n\n\nconst TCP_PORT: u16 = 502;\n\nconst BROADCAST_UNIT_ID: u8 = 0;\n\n\n\n/// Structure describing destination node for TCP/IP Modbus functions\n\npub struct Dst {\n\n ip_addr: IpAddr,\n\n unit_id: u8,\n\n}\n\n\n\nimpl Dst {\n", "file_path": "src/transport/tcp/conn.rs", "rank": 82, "score": 12.33595743636704 }, { "content": " }\n\n}\n\n\n\nimpl ReqT for Request {\n\n type Rsp = Response;\n\n}\n\n\n\n/// Read Holding Registers function response\n\npub struct Response {\n\n registers: Vec<u16>,\n\n}\n\n\n\nimpl Response {\n\n /// Create a new Read Holding Registers response\n\n /// \n\n /// # Examples\n\n /// ```\n\n /// let registers: [u16; 1] = [0x1023];\n\n /// let rsp = modbus::ReadHldRegResponse::new(&registers);\n\n /// ```\n", "file_path": "src/pdu/hex_access/read_hld_reg.rs", "rank": 83, "score": 12.258394096175847 }, { "content": " }\n\n Err(err) => panic!(\"Unexpected parsing error: {:?}\", err),\n\n }\n\n }\n\n }\n\n\n\n fn write_pdu(stream: &mut TcpStream, pdu: &[u8], unit_id: u8) -> Result<(), Error> {\n\n let frame = Frame::new(unit_id, pdu);\n\n stream.write_all(&frame.encode()?)?;\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl Transport for Tcp {\n\n type Dst = Dst;\n\n type Stream = TcpStream;\n\n\n\n fn start_master(&mut self) -> Result<(), Error> {\n\n Ok(())\n\n }\n", "file_path": "src/transport/tcp/conn.rs", "rank": 84, "score": 12.015184722615393 }, { "content": "\n\nimpl Setter for Message {\n\n fn create_expected_response(&self) -> Self::Rsp {\n\n self.clone()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_encode_request() {\n\n let req = Message::new(0xdead, 0xfade);\n\n let pdu = req.encode().unwrap();\n\n let expected_pdu = vec![0x06, 0xde, 0xad, 0xfa, 0xde];\n\n\n\n assert_eq!(pdu, expected_pdu);\n\n }\n\n\n", "file_path": "src/pdu/hex_access/write_single_reg.rs", "rank": 85, "score": 11.593177018969666 }, { "content": "pub mod bit_access;\n\npub mod hex_access;\n\n\n\nuse crate::Error;\n\nuse num_enum::IntoPrimitive;\n\nuse std::convert::TryFrom;\n\nuse std::fmt;\n\n\n\nconst MAX_SIZE: usize = 253;\n\n\n", "file_path": "src/pdu/mod.rs", "rank": 86, "score": 11.108987626601014 }, { "content": " Self {inputs: inputs.to_vec()}\n\n }\n\n\n\n /// Get list of inputs from the Read Discrete Inputs response\n\n /// \n\n /// # Examples\n\n /// ```\n\n /// let inputs = vec![true, true, false, false];\n\n /// let response = modbus::ReadDscrInResponse::new(&inputs);\n\n /// assert_eq!(response.get_inputs(), &inputs);\n\n /// ```\n\n pub fn get_inputs(&self) -> &Vec<bool> {\n\n &self.inputs\n\n }\n\n}\n\n\n\nimpl Function for Response {\n\n fn encode(&self) -> Result<Vec<u8>, Error> {\n\n let in_cnt = self.inputs.len();\n\n let byte_count = in_cnt / DSCR_PER_BYTE + if in_cnt % DSCR_PER_BYTE != 0 { 1 } else { 0 };\n", "file_path": "src/pdu/bit_access/read_dscr_in.rs", "rank": 87, "score": 10.986207043129644 }, { "content": " 
return Err(Error::InvalidData);\n\n }\n\n\n\n let expected_len = (u16::from_be_bytes(data[4..=5].try_into().unwrap()) + 6) as usize;\n\n if len < expected_len {\n\n return Err(Error::TooShortData);\n\n }\n\n if len > expected_len {\n\n return Err(Error::InvalidDataLength);\n\n }\n\n\n\n Ok(Self{transaction_id: u16::from_be_bytes(data[0..=1].try_into().unwrap()), \n\n unit_id: data[6],\n\n pdu: &data[7..]})\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n", "file_path": "src/transport/tcp/frame.rs", "rank": 88, "score": 10.824648342898994 }, { "content": "}\n\n\n\nimpl ReqT for Request {\n\n type Rsp = Response;\n\n}\n\n\n\n/// Read Discrete Inputs function response\n\n#[derive(Debug, PartialEq)]\n\npub struct Response {\n\n inputs: Vec<bool>,\n\n}\n\n\n\nimpl Response {\n\n /// Create a new Read Discrete Inputs response\n\n /// \n\n /// # Examples\n\n /// ```\n\n /// let response = modbus::ReadDscrInResponse::new(&[false, true, false]);\n\n /// ```\n\n pub fn new(inputs: &[bool]) -> Self {\n", "file_path": "src/pdu/bit_access/read_dscr_in.rs", "rank": 89, "score": 10.735202138712765 }, { "content": " /// use modbus::Transport;\n\n /// \n\n /// let mut mb = modbus::tcp::Tcp::new();\n\n /// mb.start_slave(10).unwrap();\n\n /// let (req, stream) = mb.read_req().unwrap();\n\n /// ```\n\n fn read_req(&mut self) -> Result<(RequestData, Self::Stream), Error> {\n\n let (req_pdu, stream) = self.read_req_pdu()?;\n\n let req_data = decode_req(&req_pdu)?;\n\n Ok((req_data, stream))\n\n }\n\n\n\n /// Write a response frame.\n\n /// \n\n /// Call to this method shall follow [Transport::read_req] in the Modbus slave mode.\n\n /// \n\n /// # Examples\n\n /// ```no_run\n\n /// use modbus::Transport;\n\n /// \n", "file_path": "src/transport/mod.rs", "rank": 90, "score": 10.60679879298003 }, { "content": " if let Err(error) = res {\n\n panic!(\"Error during writing response: {:?}\", error);\n\n }\n\n println!(\"{:?}\", req);\n\n assert_eq!(false, true);\n\n }\n\n */\n\n\n\n #[test]\n\n fn test_reading_coils() {\n\n let exc_fn_code = ReadCoilsResponse::get_exc_function_code();\n\n assert_eq!(0x81, exc_fn_code);\n\n\n\n let err = ReadCoilsResponse::decode_response(&[0x81, 0x01]);\n\n if let Err(error) = err {\n\n match error {\n\n Error::ExceptionResponse(_) => {}\n\n _ => panic!(\"Invalid error reported\"),\n\n }\n\n }\n\n else {\n\n panic!(\"Expected error, but got Ok result\");\n\n }\n\n }\n\n}\n", "file_path": "src/transport/mod.rs", "rank": 91, "score": 10.531698191694577 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn encode_request() {\n\n let req = Request::new(0x0102, 0x0001);\n\n let pdu = req.encode().unwrap();\n\n assert_eq!(pdu, vec![0x04 as u8, 0x01, 0x02, 0x00, 0x01]);\n\n }\n\n\n\n #[test]\n\n fn decode_request() {\n\n let pdu: [u8; 5] = [0x04, 0xab, 0xcd, 0x00, 0x18];\n\n let req = Request::decode(&pdu).unwrap();\n\n assert_eq!(req.get_address(), 0xabcd);\n\n assert_eq!(req.get_quantity(), 0x0018);\n\n }\n", "file_path": "src/pdu/hex_access/read_in_reg.rs", "rank": 92, "score": 10.52041058613942 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn encode_request() {\n\n let req = Request::new(0x0102, 0x0001);\n\n let pdu = req.encode().unwrap();\n\n assert_eq!(pdu, vec![0x03 as u8, 0x01, 0x02, 0x00, 0x01]);\n\n }\n\n\n\n #[test]\n\n fn decode_request() {\n\n let pdu: [u8; 5] = [0x03, 0xab, 0xcd, 0x00, 0x18];\n\n let req = Request::decode(&pdu).unwrap();\n\n assert_eq!(req.get_address(), 
0xabcd);\n\n assert_eq!(req.get_quantity(), 0x0018);\n\n }\n", "file_path": "src/pdu/hex_access/read_hld_reg.rs", "rank": 93, "score": 10.52041058613942 }, { "content": "\n\n #[test]\n\n fn test_decode_read_coils_request() {\n\n let pdu = [0x01, 0x12, 0x34, 0xab, 0xcd];\n\n let result = Request::decode(&pdu).unwrap();\n\n assert_eq!(result.address, 0x1234);\n\n assert_eq!(result.quantity, 0xabcd);\n\n }\n\n\n\n #[test]\n\n fn test_decode_read_coils_response() {\n\n let pdu = [0x01, 0x03, 0xCD, 0x6B, 0x05];\n\n let result = Response::decode(&pdu).unwrap();\n\n for (i, expected_value) in [true, false, true, true, false, false, true, true,\n\n true, true, false, true, false, true, true, false,\n\n true, false, true].iter().enumerate() {\n\n assert_eq!(result.coils[i], *expected_value);\n\n }\n\n }\n\n}\n", "file_path": "src/pdu/bit_access/read_coils.rs", "rank": 94, "score": 10.232323967665824 }, { "content": " x if x == ExceptionCode::IllegalDataValue as u8 => Ok(ExceptionCode::IllegalDataValue),\n\n x if x == ExceptionCode::ServerDeviceFailure as u8 => Ok(ExceptionCode::ServerDeviceFailure),\n\n x if x == ExceptionCode::Acknowledge as u8 => Ok(ExceptionCode::Acknowledge),\n\n x if x == ExceptionCode::ServerDeviceBusy as u8 => Ok(ExceptionCode::ServerDeviceBusy),\n\n x if x == ExceptionCode::MemoryParityError as u8 => Ok(ExceptionCode::MemoryParityError),\n\n x if x == ExceptionCode::GatewayPathUnavailable as u8 => Ok(ExceptionCode::GatewayPathUnavailable),\n\n x if x == ExceptionCode::GatewayTargetDeviceFailedToRespond as u8 => Ok(ExceptionCode::GatewayTargetDeviceFailedToRespond),\n\n _ => Err(Error::InvalidData),\n\n }\n\n }\n\n}\n\n\n\n/// Enumeration of Modbus request functions.\n\n/// \n\n/// This enumeration is used to report received request in the Modbus slave mode.\n\n#[derive(Debug)]\n\npub enum RequestData {\n\n ReadCoils(bit_access::read_coils::Request),\n\n ReadDscrIn(bit_access::read_dscr_in::Request),\n\n ReadHldReg(hex_access::read_hld_reg::Request),\n\n ReadInReg(hex_access::read_in_reg::Request),\n\n WriteSingleCoil(bit_access::write_single_coil::Message),\n\n WriteSingleReg(hex_access::write_single_reg::Message),\n\n WriteMultiReg(hex_access::write_multi_reg::Request),\n\n}\n\n\n", "file_path": "src/pdu/mod.rs", "rank": 95, "score": 10.175888767843729 }, { "content": "use crate::error::Error;\n\nuse std::convert::TryInto;\n\nuse std::sync::atomic::{AtomicU16, Ordering};\n\n\n\nconst MODBUS_ID: u16 = 0;\n\nstatic TRANSACTION_ID: AtomicU16 = AtomicU16::new(0);\n\n\n", "file_path": "src/transport/tcp/frame.rs", "rank": 96, "score": 9.573402694062867 }, { "content": " Ok(())\n\n } else {\n\n let rsp_pdu = self.read_rsp_pdu(&mut stream, dst)?;\n\n let rsp = Req::Rsp::decode_response(&rsp_pdu)?;\n\n let exp_rsp = req.create_expected_response();\n\n\n\n if exp_rsp == rsp {\n\n Ok(())\n\n } else {\n\n Err(Error::InvalidData)\n\n }\n\n }\n\n }\n\n\n\n /// Read a request frame.\n\n /// \n\n /// This method with [Transport::write_rsp] are the main functionality in the Modbus slave mode.\n\n /// \n\n /// # Examples\n\n /// ```no_run\n", "file_path": "src/transport/mod.rs", "rank": 97, "score": 9.047009322262241 }, { "content": " }\n\n\n\n let expected_crc = crc16::State::<crc16::MODBUS>::calculate(&data[0..len-2]);\n\n let crc = u16::from_le_bytes(data[len-2..len].try_into().unwrap());\n\n\n\n if expected_crc != crc {\n\n return Err(Error::InvalidData);\n\n }\n\n\n\n Ok(Self{address: data[0], pdu: &data[1..len-2]})\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use 
super::*;\n\n\n\n #[test]\n\n fn test_encode() {\n\n let frame = Frame::new(2, &[0x07]).encode().unwrap();\n", "file_path": "src/transport/rtu/frame.rs", "rank": 98, "score": 8.86171229110593 }, { "content": " }\n\n}\n\n\n\nimpl ReqT for Request {\n\n type Rsp = Response;\n\n}\n\n\n\n/// Read Coils function response\n\n#[derive(Debug, PartialEq)]\n\npub struct Response {\n\n coils: Vec<bool>,\n\n}\n\n\n\nimpl Response {\n\n /// Create a new Read Coils response.\n\n /// \n\n /// # Examples\n\n /// ```\n\n /// let response = modbus::ReadCoilsResponse::new(&[true, false]);\n\n /// ```\n", "file_path": "src/pdu/bit_access/read_coils.rs", "rank": 99, "score": 8.84441834200329 } ]
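Editor's note on the retrieved Modbus snippets above: their `encode_request` tests only show the request PDU layout indirectly. The following is a minimal hand-rolled sketch, not taken from the modbus crate itself; it assumes nothing beyond the byte layout those tests assert (one function-code byte, then the start address and the register quantity in big-endian order), and the helper name `encode_read_regs_pdu` is hypothetical.

// Editor's illustration (hedged): not part of the snippets above.
fn encode_read_regs_pdu(function_code: u8, address: u16, quantity: u16) -> Vec<u8> {
    // Function code first, then big-endian address and quantity.
    let mut pdu = vec![function_code];
    pdu.extend_from_slice(&address.to_be_bytes());
    pdu.extend_from_slice(&quantity.to_be_bytes());
    pdu
}

fn main() {
    // Matches the `encode_request` test vectors shown in the retrieved snippets:
    // 0x03 = Read Holding Registers, 0x04 = Read Input Registers.
    assert_eq!(encode_read_regs_pdu(0x03, 0x0102, 0x0001), vec![0x03, 0x01, 0x02, 0x00, 0x01]);
    assert_eq!(encode_read_regs_pdu(0x04, 0x0102, 0x0001), vec![0x04, 0x01, 0x02, 0x00, 0x01]);
}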
Rust
projects/rivium/src/lib.rs
chykon/rivium-mono
f7d45afbf94441a5f925ac8e3a2087f18f02e20c
/* const MEMORY_SIZE: usize = 1024 * 1024 * 512; const REGISTERS_COUNT: usize = 32 + 1; const CODE_RANGE_BEGIN: usize = 0; const CODE_RANGE_END: usize = CODE_RANGE_BEGIN + (1024 * 1024 * 128) - 1; static mut MEMORY: [u8; MEMORY_SIZE] = [0; MEMORY_SIZE]; static mut REGISTERS: [u32; REGISTERS_COUNT] = [0; REGISTERS_COUNT]; static mut ERROR_MESSAGE: &str = ""; const X0: usize = 0; const X1: usize = 1; const X2: usize = 2; const X3: usize = 3; const X4: usize = 4; const X5: usize = 5; const X6: usize = 6; const X7: usize = 7; const X8: usize = 8; const X9: usize = 9; const X10: usize = 10; const X11: usize = 11; const X12: usize = 12; const X13: usize = 13; const X14: usize = 14; const X15: usize = 15; const X16: usize = 16; const X17: usize = 17; const X18: usize = 18; const X19: usize = 19; const X20: usize = 20; const X21: usize = 21; const X22: usize = 22; const X23: usize = 23; const X24: usize = 24; const X25: usize = 25; const X26: usize = 26; const X27: usize = 27; const X28: usize = 28; const X29: usize = 29; const X30: usize = 30; const X31: usize = 31; const PC: usize = 32; const INST_20: u32 = 0b00000000000100000000000000000000; const INST_24_21: u32 = 0b00000001111000000000000000000000; const INST_30_25: u32 = 0b01111110000000000000000000000000; const INST_31: u32 = 0b10000000000000000000000000000000; const INST_7: u32 = 0b00000000000000000000000010000000; const INST_11_8: u32 = 0b00000000000000000000111100000000; const INST_19_12: u32 = 0b00000000000011111111000000000000; const INST_30_20: u32 = 0b01111111111100000000000000000000; const LOAD: u32 = 0b0000011; const LOAD_FP: u32 = 0b0000111; const CUSTOM_0: u32 = 0b0001011; const MISC_MEM: u32 = 0b0001111; const OP_IMM: u32 = 0b0010011; const AUIPC: u32 = 0b0010111; const OP_IMM_32: u32 = 0b0011011; const STORE: u32 = 0b0100011; const STORE_FP: u32 = 0b0100111; const CUSTOM_1: u32 = 0b0101011; const AMO: u32 = 0b0101111; const OP: u32 = 0b0110011; const LUI: u32 = 0b0110111; const OP_32: u32 = 0b0111011; const MADD: u32 = 0b1000011; const MSUB: u32 = 0b1000111; const NMSUB: u32 = 0b1001011; const NMADD: u32 = 0b1001111; const OP_FP: u32 = 0b1010011; const RESERVED_0: u32 = 0b1010111; const CUSTOM_2: u32 = 0b1011011; const BRANCH: u32 = 0b1100011; const JALR: u32 = 0b1100111; const RESERVED_1: u32 = 0b1101011; const JAL: u32 = 0b1101111; const SYSTEM: u32 = 0b1110011; const RESERVED_2: u32 = 0b1110111; const CUSTOM_3: u32 = 0b1111011; const BEQ: u32 = 0b000; const BNE: u32 = 0b001; const BLT: u32 = 0b100; const BGE: u32 = 0b101; const BLTU: u32 = 0b110; const BGEU: u32 = 0b111; const LB: u32 = 0b000; const LH: u32 = 0b001; const LW: u32 = 0b010; const LBU: u32 = 0b100; const LHU: u32 = 0b101; const SB: u32 = 0b000; const SH: u32 = 0b001; const SW: u32 = 0b010; const ADDI: u32 = 0b000; const SLTI: u32 = 0b010; const SLTIU: u32 = 0b011; const XORI: u32 = 0b100; const ORI: u32 = 0b110; const ANDI: u32 = 0b111; const SLLI: u32 = 0b0000000001; const SRLI: u32 = 0b0000000101; const SRAI: u32 = 0b0100000101; const ADD: u32 = 0b0100000000; const SUB: u32 = 0b0000000000; const SLL: u32 = 0b0000000001; const SLT: u32 = 0b0000000010; const SLTU: u32 = 0b0000000011; const XOR: u32 = 0b0000000100; const SRL: u32 = 0b0000000101; const SRA: u32 = 0b0100000101; const OR: u32 = 0b0000000110; const AND: u32 = 0b0000000111; const FENCE: u32 = 0b000; const ECALL: u32 = 0b0000000000000000000000000; const EBREAK: u32 = 0b0000000000010000000000000; */ #[wasm_bindgen::prelude::wasm_bindgen] pub fn set_panic_hook() { 
std::panic::set_hook(Box::new(console_error_panic_hook::hook)) } #[wasm_bindgen::prelude::wasm_bindgen] pub fn jsonify_intermediate(string: &str) -> String { let mut map = serde_json::Map::new(); let data: serde_json::Value = serde_json::from_str(string).unwrap(); let array = data.as_array().unwrap(); for (i, element) in array.iter().enumerate() { let mut strings: Vec<String> = Vec::new(); let element_array = element.as_array().unwrap(); for elem in element_array { let elem_str = elem.as_str().unwrap().to_lowercase(); strings.push(elem_str); } map.insert(i.to_string(), serde_json::json!(strings)); } serde_json::to_string(&map).unwrap() } #[wasm_bindgen::prelude::wasm_bindgen] pub fn intermediate_to_text(string: &str) -> String { let data: serde_json::Value = serde_json::from_str(string).unwrap(); let array = data.as_object().unwrap(); let mut output_string = String::new(); for element in array { let element_array = element.1.as_array().unwrap(); let elem = &element_array[0]; let elem_str = elem.as_str().unwrap(); if element_array.len() == 1 { output_string.push_str(elem_str); output_string.push('\n'); continue; } else if element_array.len() == 3 { let elem_str_2 = element_array[1].as_str().unwrap(); let elem_str_3 = element_array[2].as_str().unwrap(); let mut str_2 = String::new(); str_2.push_str(" x"); str_2.push_str(elem_str_2); str_2.push_str(", "); let mut str_3 = String::new(); str_3.push_str(elem_str_3); output_string.push_str(elem_str); output_string.push_str(&str_2); output_string.push_str(&str_3); output_string.push('\n'); continue; } match elem_str { "add" | "sub" | "sll" | "slt" | "sltu" | "xor" | "srl" | "sra" | "or" | "and" => { let elem_str_2 = element_array[1].as_str().unwrap(); let elem_str_3 = element_array[2].as_str().unwrap(); let elem_str_4 = element_array[3].as_str().unwrap(); let mut str_2 = String::new(); str_2.push_str(" x"); str_2.push_str(elem_str_2); str_2.push_str(", "); let mut str_3 = String::new(); str_3.push('x'); str_3.push_str(elem_str_3); str_3.push_str(", "); let mut str_4 = String::new(); str_4.push('x'); str_4.push_str(elem_str_4); str_4.push_str(", "); output_string.push_str(elem_str); output_string.push_str(&str_2); output_string.push_str(&str_3); output_string.push_str(&str_4); } _ => { let elem_str_2 = element_array[1].as_str().unwrap(); let elem_str_3 = element_array[2].as_str().unwrap(); let elem_str_4 = element_array[3].as_str().unwrap(); let mut str_2 = String::new(); str_2.push_str(" x"); str_2.push_str(elem_str_2); str_2.push_str(", "); let mut str_3 = String::new(); str_3.push('x'); str_3.push_str(elem_str_3); str_3.push_str(", "); let mut str_4 = String::new(); str_4.push_str(elem_str_4); output_string.push_str(elem_str); output_string.push_str(&str_2); output_string.push_str(&str_3); output_string.push_str(&str_4); } } output_string.push('\n'); } output_string } #[wasm_bindgen::prelude::wasm_bindgen] pub fn analyze(string: &str) -> bool { let data: serde_json::Value = serde_json::from_str(string).unwrap(); let obj = data.as_object().unwrap(); let arr = obj.get("0").unwrap().as_array().unwrap(); let mut result = true; match arr[0].as_str().unwrap() { "lui" | "auipc" => { let imm = arr[2].as_str().unwrap().parse::<i64>().unwrap(); if (imm & 0b111111111111) != 0b000000000000 { result = false; } } "jal" => { let imm = arr[2].as_str().unwrap().parse::<i64>().unwrap(); if (imm & 0b11111111111000000000000000000001) != 0b00000000000000000000000000000000 { result = false; } } "jalr" | "lb" | "lh" | "lw" | "lbu" | "lhu" | "sb" | "sh" | "sw" | "addi" 
| "slti" | "sltiu" | "xori" | "ori" | "andi" => { let imm = arr[3].as_str().unwrap().parse::<i64>().unwrap(); if (imm & 0b11111111111111111111000000000000) != 0b00000000000000000000000000000000 { result = false; } } "beq" | "bne" | "blt" | "bge" | "bltu" | "bgeu" => { let imm = arr[3].as_str().unwrap().parse::<i64>().unwrap(); if (imm & 0b11111111111111111110000000000001) != 0b00000000000000000000000000000000 { result = false; } } "slli" | "srli" => { let imm = arr[3].as_str().unwrap().parse::<i64>().unwrap(); if (imm & 0b11111111111000000000000000000001) != 0b00000000000000000000000000000000 { result = false; } } "srai" => { let imm = arr[3].as_str().unwrap().parse::<i64>().unwrap(); if (imm & 0b11111111111000000000000000000001) != 0b01000000000000000000000000000000 { result = false; } } _ => {} } result } /* #[wasm_bindgen::prelude::wasm_bindgen] pub fn load_machine_code(byte: u8, address: usize) -> bool { if address > CODE_RANGE_END { false } else { unsafe { MEMORY[address] = byte } true } } #[wasm_bindgen::prelude::wasm_bindgen] pub fn set_to_register(register: usize, value: u32) { if register == X0 { unsafe { REGISTERS[register] = 0 } } else { unsafe { REGISTERS[register] = value } } } #[wasm_bindgen::prelude::wasm_bindgen] pub fn get_from_register(register: usize) -> u32 { if register == X0 { unsafe { REGISTERS[register] = 0; REGISTERS[register] } } else { unsafe { REGISTERS[register] } } } fn create_instruction(byte_1: u8, byte_2: u8, byte_3: u8, byte_4: u8) -> u32 { let mut instruction = 0; instruction |= byte_1 as u32; instruction |= (byte_2 as u32) << 8; instruction |= (byte_3 as u32) << 16; instruction |= (byte_4 as u32) << 24; instruction } fn iimm_to_value(instruction: u32) -> u32 { let val_1 = (instruction & INST_20) >> 20; let val_2 = (instruction & INST_24_21) >> 21; let val_3 = (instruction & INST_30_25) >> 25; let val_4 = (instruction & INST_31) >> 31; (val_4 << 31) | (val_3 << 5) | (val_2 << 1) | val_1 } fn value_to_iimm(value: u32) -> u32 { let imm_1 = (value & INST_20) >> 20; let imm_2 = (value & INST_24_21) >> 21; let imm_3 = (value & INST_30_25) >> 25; let imm_4 = (value & INST_31) >> 31; (imm_4 << 31) | (imm_3 << 5) | (imm_2 << 1) | imm_1 } // simm // bimm // uimm // jimm fn get_opcode(instruction: u32) -> u32 { instruction & 0b1111111 } fn get_rd(instruction: u32) -> u32 { (instruction >> 7) & 0b11111 } fn get_funct3(instruction: u32) -> u32 { (instruction >> 12) & 0b111 } fn get_rs1(instruction: u32) -> u32 { (instruction >> 15) & 0b11111 } // Disassembler fn decode_instruction(instruction: u32) -> serde_json::Value { if (instruction & 0b11) != 0b11 { return serde_json::json!("DASM:DECODE_0B11_END") } else if (instruction & 0b11100) == 0b11100 { return serde_json::json!("DASM:DECODE_0B11100_END") } let mut operation = ""; let mut operand_1 = ""; let mut operand_2 = ""; let mut operand_3 = ""; let mut opcode = get_opcode(instruction); if opcode == OP_IMM { let rd = get_rd(instruction); let funct3 = get_funct3(instruction); let rs1 = get_rs1(instruction); let iimm = iimm_to_value(instruction); match funct3 { ADDI => operation = "ADDI", _ => return serde_json::json!("DASM:DECODE_0B11100_END") // a }; } serde_json::json!({ operation: [ operand_1, operand_2, operand_3 ] }) } // Emulator #[wasm_bindgen::prelude::wasm_bindgen] pub fn execute_instruction() -> bool { unsafe { ERROR_MESSAGE = ""; } // Fetch let current_pc = get_from_register(PC) as usize; if (current_pc + 3) > CODE_RANGE_END { unsafe { ERROR_MESSAGE = "VM:CODE_RANGE_END"; } return false } let 
instruction_byte_1; let instruction_byte_2; let instruction_byte_3; let instruction_byte_4; unsafe { instruction_byte_1 = MEMORY[current_pc]; instruction_byte_2 = MEMORY[current_pc + 1]; instruction_byte_3 = MEMORY[current_pc + 2]; instruction_byte_4 = MEMORY[current_pc + 3]; } let instruction = create_instruction( instruction_byte_1, instruction_byte_2, instruction_byte_3, instruction_byte_4 ); let next_pc = (current_pc as u32) + 4; // Decode /* if (instruction & 0b11) != 0b11 { unsafe { ERROR_MESSAGE = "VM:DECODE_0B11_END"; } return false } else if (instruction & 0b11100) == 0b11100 { unsafe { ERROR_MESSAGE = "VM:DECODE_0B11100_END"; } return false } */ // move to dis ams //???? // fail -> error // execute // debug signal -> do // check pc-register increment if branch // ... set_to_register(PC, next_pc); true } */
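Editor's note on the `analyze` function in the file above: it inspects only entry "0" of the object and rejects immediates the named instruction cannot encode (for `lui`/`auipc` the low 12 bits must be zero, branch offsets must have bit 0 clear, and so on). The sketch below is a usage illustration, not part of the original file; it assumes the JSON shape that `jsonify_intermediate` produces (an object keyed by instruction index, each value an array of lower-cased strings ending with the immediate).

// Editor's illustration (hedged): example calls into `analyze` above.
fn analyze_examples() {
    // LUI is only accepted when the low 12 bits of the immediate are zero.
    assert!(analyze(r#"{"0": ["lui", "1", "4096"]}"#));  // 4096 & 0xFFF == 0
    assert!(!analyze(r#"{"0": ["lui", "1", "100"]}"#));  // 100 & 0xFFF != 0

    // Branch offsets must be even (bit 0 clear).
    assert!(analyze(r#"{"0": ["beq", "1", "2", "8"]}"#));
    assert!(!analyze(r#"{"0": ["beq", "1", "2", "7"]}"#));
}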
/* const MEMORY_SIZE: usize = 1024 * 1024 * 512; const REGISTERS_COUNT: usize = 32 + 1; const CODE_RANGE_BEGIN: usize = 0; const CODE_RANGE_END: usize = CODE_RANGE_BEGIN + (1024 * 1024 * 128) - 1; static mut MEMORY: [u8; MEMORY_SIZE] = [0; MEMORY_SIZE]; static mut REGISTERS: [u32; REGISTERS_COUNT] = [0; REGISTERS_COUNT]; static mut ERROR_MESSAGE: &str = ""; const X0: usize = 0; const X1: usize = 1; const X2: usize = 2; const X3: usize = 3; const X4: usize = 4; const X5: usize = 5; const X6: usize = 6; const X7: usize = 7; const X8: usize = 8; const X9: usize = 9; const X10: usize = 10; const X11: usize = 11; const X12: usize = 12; const X13: usize = 13; const X14: usize = 14; const X15: usize = 15; const X16: usize = 16; const X17: usize = 17; const X18: usize = 18; const X19: usize = 19; const X20: usize = 20; const X21: usize = 21; const X22: usize = 22; const X23: usize = 23; const X24: usize = 24; const X25: usize = 25; const X26: usize = 26; const X27: usize = 27; const X28: usize = 28; const X29: usize = 29; const X30: usize = 30; const X31: usize = 31; const PC: usize = 32; const INST_20: u32 = 0b00000000000100000000000000000000; const INST_24_21: u32 = 0b00000001111000000000000000000000; const INST_30_25: u32 = 0b01111110000000000000000000000000; const INST_31: u32 = 0b10000000000000000000000000000000; const INST_7: u32 = 0b00000000000000000000000010000000; const INST_11_8: u32 = 0b00000000000000000000111100000000; const INST_19_12: u32 = 0b00000000000011111111000000000000; const INST_30_20: u32 = 0b01111111111100000000000000000000; const LOAD: u32 = 0b0000011; const LOAD_FP: u32 = 0b0000111; const CUSTOM_0: u32 = 0b0001011; const MISC_MEM: u32 = 0b0001111; const OP_IMM: u32 = 0b0010011; const AUIPC: u32 = 0b0010111; const OP_IMM_32: u32 = 0b0011011; const STORE: u32 = 0b0100011; const STORE_FP: u32 = 0b0100111; const CUSTOM_1: u32 = 0b0101011; const AMO: u32 = 0b0101111; const OP: u32 = 0b0110011; const LUI: u32 = 0b0110111; const OP_32: u32 = 0b0111011; const MADD: u32 = 0b1000011; const MSUB: u32 = 0b1000111; const NMSUB: u32 = 0b1001011; const NMADD: u32 = 0b1001111; const OP_FP: u32 = 0b1010011; const RESERVED_0: u32 = 0b1010111; const CUSTOM_2: u32 = 0b1011011; const BRANCH: u32 = 0b1100011; const JALR: u32 = 0b1100111; const RESERVED_1: u32 = 0b1101011; const JAL: u32 = 0b1101111; const SYSTEM: u32 = 0b1110011; const RESERVED_2: u32 = 0b1110111; const CUSTOM_3: u32 = 0b1111011; const BEQ: u32 = 0b000; const BNE: u32 = 0b001; const BLT: u32 = 0b100; const BGE: u32 = 0b101; const BLTU: u32 = 0b110; const BGEU: u32 = 0b111; const LB: u32 = 0b000; const LH: u32 = 0b001; const LW: u32 = 0b010; const LBU: u32 = 0b100; const LHU: u32 = 0b101; const SB: u32 = 0b000; const SH: u32 = 0b001; const SW: u32 = 0b010; const ADDI: u32 = 0b000; const SLTI: u32 = 0b010; const SLTIU: u32 = 0b011; const XORI: u32 = 0b100; const ORI: u32 = 0b110; const ANDI: u32 = 0b111; const SLLI: u32 = 0b0000000001; const SRLI: u32 = 0b0000000101; const SRAI: u32 = 0b0100000101; const ADD: u32 = 0b0100000000; const SUB: u32 = 0b0000000000; const SLL: u32 = 0b0000000001; const SLT: u32 = 0b0000000010; const SLTU: u32 = 0b0000000011; const XOR: u32 = 0b0000000100; const SRL: u32 = 0b0000000101; const SRA: u32 = 0b0100000101; const OR: u32 = 0b0000000110; const AND: u32 = 0b0000000111; const FENCE: u32 = 0b000; const ECALL: u32 = 0b0000000000000000000000000; const EBREAK: u32 = 0b0000000000010000000000000; */ #[wasm_bindgen::prelude::wasm_bindgen] pub fn set_panic_hook() { 
std::panic::set_hook(Box::new(console_error_panic_hook::hook)) } #[wasm_bindgen::prelude::wasm_bindgen] pub fn jsonify_intermediate(string: &str) -> String { let mut map = serde_json::Map::new(); let data: serde_json::Value = serde_json::from_str(string).unwrap(); let array = data.as_array().unwrap(); for (i, element) in array.iter().enumerate() { let mut strings: Vec<String> = Vec::new(); let element_array = element.as_array().unwrap(); for elem in element_array { let elem_str = elem.as_str().unwrap().to_lowercase(); strings.push(elem_str); } map.insert(i.to_string(), serde_json::json!(strings)); } serde_json::to_string(&map).unwrap() } #[wasm_bindgen::prelude::wasm_bindgen] pub fn intermediate_to_text(string: &str) -> String { let data: serde_json::Value = serde_json::from_str(string).unwrap(); let array = data.as_object().unwrap(); let mut output_string = String::new(); for element in array { let element_array = element.1.as_array().unwrap(); let elem = &element_array[0]; let elem_str = elem.as_str().unwrap(); if element_array.len() == 1 { output_string.push_str(elem_str); output_string.push('\n'); continue; } else if element_array.len() == 3 { let elem_str_2 = element_array[1].as_str().unwrap(); let elem_str_3 = element_array[2].as_str().unwrap(); let mut str_2 = String::new(); str_2.push_str(" x"); str_2.push_str(elem_str_2); str_2.push_str(", "); let mut str_3 = String::new(); str_3.push_str(elem_str_3); output_string.push_str(elem_str); output_string.push_str(&str_2); output_string.push_str(&str_3); output_string.push('\n'); continue; } match elem_str { "add" | "sub" | "sll" | "slt" | "sltu" | "xor" | "srl" | "sra" | "or" | "and" => { let elem_str_2 = element_array[1].as_str().unwrap(); let elem_str_3 = element_array[2].as_str().unwrap(); let elem_str_4 = element_array[3].as_str().unwrap(); let mut str_2 = String::new(); str
nstruction & 0b11100) == 0b11100 { unsafe { ERROR_MESSAGE = "VM:DECODE_0B11100_END"; } return false } */ // move to dis ams //???? // fail -> error // execute // debug signal -> do // check pc-register increment if branch // ... set_to_register(PC, next_pc); true } */
_2.push_str(" x"); str_2.push_str(elem_str_2); str_2.push_str(", "); let mut str_3 = String::new(); str_3.push('x'); str_3.push_str(elem_str_3); str_3.push_str(", "); let mut str_4 = String::new(); str_4.push('x'); str_4.push_str(elem_str_4); str_4.push_str(", "); output_string.push_str(elem_str); output_string.push_str(&str_2); output_string.push_str(&str_3); output_string.push_str(&str_4); } _ => { let elem_str_2 = element_array[1].as_str().unwrap(); let elem_str_3 = element_array[2].as_str().unwrap(); let elem_str_4 = element_array[3].as_str().unwrap(); let mut str_2 = String::new(); str_2.push_str(" x"); str_2.push_str(elem_str_2); str_2.push_str(", "); let mut str_3 = String::new(); str_3.push('x'); str_3.push_str(elem_str_3); str_3.push_str(", "); let mut str_4 = String::new(); str_4.push_str(elem_str_4); output_string.push_str(elem_str); output_string.push_str(&str_2); output_string.push_str(&str_3); output_string.push_str(&str_4); } } output_string.push('\n'); } output_string } #[wasm_bindgen::prelude::wasm_bindgen] pub fn analyze(string: &str) -> bool { let data: serde_json::Value = serde_json::from_str(string).unwrap(); let obj = data.as_object().unwrap(); let arr = obj.get("0").unwrap().as_array().unwrap(); let mut result = true; match arr[0].as_str().unwrap() { "lui" | "auipc" => { let imm = arr[2].as_str().unwrap().parse::<i64>().unwrap(); if (imm & 0b111111111111) != 0b000000000000 { result = false; } } "jal" => { let imm = arr[2].as_str().unwrap().parse::<i64>().unwrap(); if (imm & 0b11111111111000000000000000000001) != 0b00000000000000000000000000000000 { result = false; } } "jalr" | "lb" | "lh" | "lw" | "lbu" | "lhu" | "sb" | "sh" | "sw" | "addi" | "slti" | "sltiu" | "xori" | "ori" | "andi" => { let imm = arr[3].as_str().unwrap().parse::<i64>().unwrap(); if (imm & 0b11111111111111111111000000000000) != 0b00000000000000000000000000000000 { result = false; } } "beq" | "bne" | "blt" | "bge" | "bltu" | "bgeu" => { let imm = arr[3].as_str().unwrap().parse::<i64>().unwrap(); if (imm & 0b11111111111111111110000000000001) != 0b00000000000000000000000000000000 { result = false; } } "slli" | "srli" => { let imm = arr[3].as_str().unwrap().parse::<i64>().unwrap(); if (imm & 0b11111111111000000000000000000001) != 0b00000000000000000000000000000000 { result = false; } } "srai" => { let imm = arr[3].as_str().unwrap().parse::<i64>().unwrap(); if (imm & 0b11111111111000000000000000000001) != 0b01000000000000000000000000000000 { result = false; } } _ => {} } result } /* #[wasm_bindgen::prelude::wasm_bindgen] pub fn load_machine_code(byte: u8, address: usize) -> bool { if address > CODE_RANGE_END { false } else { unsafe { MEMORY[address] = byte } true } } #[wasm_bindgen::prelude::wasm_bindgen] pub fn set_to_register(register: usize, value: u32) { if register == X0 { unsafe { REGISTERS[register] = 0 } } else { unsafe { REGISTERS[register] = value } } } #[wasm_bindgen::prelude::wasm_bindgen] pub fn get_from_register(register: usize) -> u32 { if register == X0 { unsafe { REGISTERS[register] = 0; REGISTERS[register] } } else { unsafe { REGISTERS[register] } } } fn create_instruction(byte_1: u8, byte_2: u8, byte_3: u8, byte_4: u8) -> u32 { let mut instruction = 0; instruction |= byte_1 as u32; instruction |= (byte_2 as u32) << 8; instruction |= (byte_3 as u32) << 16; instruction |= (byte_4 as u32) << 24; instruction } fn iimm_to_value(instruction: u32) -> u32 { let val_1 = (instruction & INST_20) >> 20; let val_2 = (instruction & INST_24_21) >> 21; let val_3 = (instruction & INST_30_25) >> 25; 
let val_4 = (instruction & INST_31) >> 31; (val_4 << 31) | (val_3 << 5) | (val_2 << 1) | val_1 } fn value_to_iimm(value: u32) -> u32 { let imm_1 = (value & INST_20) >> 20; let imm_2 = (value & INST_24_21) >> 21; let imm_3 = (value & INST_30_25) >> 25; let imm_4 = (value & INST_31) >> 31; (imm_4 << 31) | (imm_3 << 5) | (imm_2 << 1) | imm_1 } // simm // bimm // uimm // jimm fn get_opcode(instruction: u32) -> u32 { instruction & 0b1111111 } fn get_rd(instruction: u32) -> u32 { (instruction >> 7) & 0b11111 } fn get_funct3(instruction: u32) -> u32 { (instruction >> 12) & 0b111 } fn get_rs1(instruction: u32) -> u32 { (instruction >> 15) & 0b11111 } // Disassembler fn decode_instruction(instruction: u32) -> serde_json::Value { if (instruction & 0b11) != 0b11 { return serde_json::json!("DASM:DECODE_0B11_END") } else if (instruction & 0b11100) == 0b11100 { return serde_json::json!("DASM:DECODE_0B11100_END") } let mut operation = ""; let mut operand_1 = ""; let mut operand_2 = ""; let mut operand_3 = ""; let mut opcode = get_opcode(instruction); if opcode == OP_IMM { let rd = get_rd(instruction); let funct3 = get_funct3(instruction); let rs1 = get_rs1(instruction); let iimm = iimm_to_value(instruction); match funct3 { ADDI => operation = "ADDI", _ => return serde_json::json!("DASM:DECODE_0B11100_END") // a }; } serde_json::json!({ operation: [ operand_1, operand_2, operand_3 ] }) } // Emulator #[wasm_bindgen::prelude::wasm_bindgen] pub fn execute_instruction() -> bool { unsafe { ERROR_MESSAGE = ""; } // Fetch let current_pc = get_from_register(PC) as usize; if (current_pc + 3) > CODE_RANGE_END { unsafe { ERROR_MESSAGE = "VM:CODE_RANGE_END"; } return false } let instruction_byte_1; let instruction_byte_2; let instruction_byte_3; let instruction_byte_4; unsafe { instruction_byte_1 = MEMORY[current_pc]; instruction_byte_2 = MEMORY[current_pc + 1]; instruction_byte_3 = MEMORY[current_pc + 2]; instruction_byte_4 = MEMORY[current_pc + 3]; } let instruction = create_instruction( instruction_byte_1, instruction_byte_2, instruction_byte_3, instruction_byte_4 ); let next_pc = (current_pc as u32) + 4; // Decode /* if (instruction & 0b11) != 0b11 { unsafe { ERROR_MESSAGE = "VM:DECODE_0B11_END"; } return false } else if (i
random
[ { "content": "class Memory {\n\n xlen;\n\n space; // maximum length in bytes, 2^XLEN\n\n vcount;\n\n mrcount;\n\n regres; // reserve for registers, MRCOUNT*XLEN*VCOUNT\n\n mempow;\n\n length; // actual length, (2^MEMPOW)-REGRES\n\n buffer; // memory data\n\n\n\n constructor(xlen, vcount, mrcount, mempow) {\n\n this.xlen = xlen;\n\n this.vcount = vcount;\n\n this.mrcount = mrcount;\n\n this.mempow = mempow;\n\n this.space = 2 ** this.xlen;\n\n this.regres = this.mrcount * this.xlen * this.vcount;\n\n this.length = (2 ** this.mempow) - this.regres;\n\n const wasmMemoryPageSize = 65536;\n\n const pages = Math.ceil(this.length / wasmMemoryPageSize);\n\n const wasmMemory = new WebAssembly.Memory({ initial: pages, maximum: pages, shared: false });\n\n this.buffer = wasmMemory.buffer;\n\n }\n\n\n\n setByte(address, byte) {\n\n const u8arr = new Uint8Array(this.buffer);\n\n u8arr[this.computeAddress(address)] = byte;\n\n }\n\n\n\n getByte(address) {\n\n const u8arr = new Uint8Array(this.buffer);\n\n const byte = u8arr[this.computeAddress(address)];\n\n if (byte === undefined) {\n\n throw Error('byte === undefined');\n\n }\n\n else {\n\n return byte;\n\n }\n\n }\n\n\n\n computeAddress(address) {\n\n return address % this.length;\n\n }\n", "file_path": "projects/rivium/web/old/memory.js", "rank": 16, "score": 29931.836429485113 }, { "content": " constructor(xlen, vcount, mrcount, mempow) {\n\n this.xlen = xlen;\n\n this.vcount = vcount;\n\n this.mrcount = mrcount;\n\n this.mempow = mempow;\n\n this.space = 2 ** this.xlen;\n\n this.regres = this.mrcount * this.xlen * this.vcount;\n\n this.length = (2 ** this.mempow) - this.regres;\n\n const wasmMemoryPageSize = 65536;\n\n const pages = Math.ceil(this.length / wasmMemoryPageSize);\n\n const wasmMemory = new WebAssembly.Memory({ initial: pages, maximum: pages, shared: false });\n\n this.buffer = wasmMemory.buffer;\n", "file_path": "projects/rivium/web/old/memory.js", "rank": 17, "score": 29375.494960998225 }, { "content": " setByte(address, byte) {\n\n const u8arr = new Uint8Array(this.buffer);\n\n u8arr[this.computeAddress(address)] = byte;\n", "file_path": "projects/rivium/web/old/memory.js", "rank": 18, "score": 28829.86126866914 }, { "content": " getByte(address) {\n\n const u8arr = new Uint8Array(this.buffer);\n\n const byte = u8arr[this.computeAddress(address)];\n\n if (byte === undefined) {\n\n throw Error('byte === undefined');\n\n }\n\n else {\n\n return byte;\n\n }\n", "file_path": "projects/rivium/web/old/memory.js", "rank": 19, "score": 28829.86126866914 }, { "content": " computeAddress(address) {\n\n return address % this.length;\n", "file_path": "projects/rivium/web/old/memory.js", "rank": 20, "score": 28829.86126866914 }, { "content": "const memory = require('./old/memory.js')\n", "file_path": "projects/rivium/web/main.js", "rank": 21, "score": 23174.802580358446 }, { "content": "const memory = require('./memory.js')\n", "file_path": "projects/rivium/web/old/test.js", "rank": 22, "score": 22507.489265234737 }, { "content": " loadMachineCode (machineCode) {\n\n for (let i = 0; i < machineCode.length; ++i) {\n\n this.mem?.setByte(i, machineCode[i])\n\n }\n\n return machineCode\n", "file_path": "projects/rivium/web/main.js", "rank": 23, "score": 21659.99571539528 }, { "content": " testingMemory () {\n\n const mem = new memory.Memory(common.Xlen.word, common.Vcount.one, common.Mrcount.default, common.Mempow.med)\n\n mem.setByte(1, 257)\n\n mem.setByte(mem.length, 2)\n\n if (mem.getByte(0) !== 2) {\n\n throw Error('testing memory 1')\n\n 
}\n\n if (mem.getByte(mem.length + 1) !== 1) {\n\n throw Error('testing memory 2')\n\n }\n", "file_path": "projects/rivium/web/old/test.js", "rank": 24, "score": 21286.618013962197 }, { "content": " setRegisterValue(regnum, value, counter = false) {\n\n if (!counter && ((regnum === Regs.pc) || (regnum === Regs.ppc))) {\n\n throw Error('set-access to counters');\n\n }\n\n else if (regnum === Regs.x0) {\n\n return;\n\n }\n\n const i32arr = new Int32Array(this.mem.buffer);\n\n const regOffset = this.mem.mrcount * this.vnum;\n\n i32arr[(i32arr.length - 1) - (regOffset + regnum)] = value;\n", "file_path": "projects/rivium/web/old/vcore.js", "rank": 25, "score": 20985.283628304856 }, { "content": " getRegisterValue(regnum, counter = false) {\n\n if (!counter && ((regnum === Regs.pc) || (regnum === Regs.ppc))) {\n\n throw Error('get-access to counters');\n\n }\n\n else if (regnum === Regs.x0) {\n\n return 0;\n\n }\n\n const i32arr = new Int32Array(this.mem.buffer);\n\n const regOffset = this.mem.mrcount * this.vnum;\n\n const value = i32arr[(i32arr.length - 1) - (regOffset + regnum)];\n\n if (value === undefined) {\n\n throw Error('value === undefined');\n\n }\n\n return value;\n", "file_path": "projects/rivium/web/old/vcore.js", "rank": 26, "score": 20982.82486729641 }, { "content": "class Memory {\n\n xlen;\n\n space; // maximum length in bytes, 2^XLEN\n\n vcount;\n\n mrcount;\n\n regres; // reserve for registers, MRCOUNT*XLEN*VCOUNT\n\n mempow;\n\n length; // actual length, (2^MEMPOW)-REGRES\n\n buffer; // memory data\n\n\n\n constructor(xlen, vcount, mrcount, mempow) {\n\n this.xlen = xlen;\n\n this.vcount = vcount;\n\n this.mrcount = mrcount;\n\n this.mempow = mempow;\n\n this.space = 2 ** this.xlen;\n\n this.regres = this.mrcount * this.xlen * this.vcount;\n\n this.length = (2 ** this.mempow) - this.regres;\n\n const wasmMemoryPageSize = 65536;\n\n const pages = Math.ceil(this.length / wasmMemoryPageSize);\n\n const wasmMemory = new WebAssembly.Memory({ initial: pages, maximum: pages, shared: false });\n\n this.buffer = wasmMemory.buffer;\n\n }\n\n\n\n setByte(address, byte) {\n\n const u8arr = new Uint8Array(this.buffer);\n\n u8arr[this.computeAddress(address)] = byte;\n\n }\n\n\n\n getByte(address) {\n\n const u8arr = new Uint8Array(this.buffer);\n\n const byte = u8arr[this.computeAddress(address)];\n\n if (byte === undefined) {\n\n throw Error('byte === undefined');\n\n }\n\n else {\n\n return byte;\n\n }\n\n }\n\n\n\n computeAddress(address) {\n\n return address % this.length;\n\n }\n\n}\n\n\n\nmodule.exports = {\n\n Memory\n\n}\n", "file_path": "projects/rivium/web/old/memory.js", "rank": 27, "score": 18737.554102007663 }, { "content": "class Test {\n\n // Memory //\n\n testingMemory () {\n\n const mem = new memory.Memory(common.Xlen.word, common.Vcount.one, common.Mrcount.default, common.Mempow.med)\n\n mem.setByte(1, 257)\n\n mem.setByte(mem.length, 2)\n\n if (mem.getByte(0) !== 2) {\n\n throw Error('testing memory 1')\n\n }\n\n if (mem.getByte(mem.length + 1) !== 1) {\n\n throw Error('testing memory 2')\n\n }\n\n }\n\n\n\n // Vcore //\n\n testingVcore () {\n\n const mem = new memory.Memory(common.Xlen.word, common.Vcount.one, common.Mrcount.default, common.Mempow.med)\n\n const core = new vcore.Vcore(mem, common.Vnum.zero, common.Ialign.word, common.Ilen.word)\n\n // PC-register\n\n try {\n\n core.fetchInstruction()\n\n } catch (error) {\n\n const instruction = ((error.message).split(':')).pop()\n\n if (instruction !== '00000000000000000000000000000000') {\n\n 
console.log(error.message)\n\n throw Error('testing vcore 1')\n\n }\n\n }\n\n mem.setByte(4, 255)\n\n mem.setByte(5, 255)\n\n mem.setByte(6, 255)\n\n mem.setByte(7, 255)\n\n try {\n\n core.fetchInstruction()\n\n } catch (error) {\n\n const instruction = ((error.message).split(':')).pop()\n\n if (instruction !== '11111111111111111111111111111111') {\n\n throw Error('testing vcore 2')\n\n }\n\n }\n\n mem.setByte(8, 1)\n\n mem.setByte(9, 3)\n\n mem.setByte(10, 7)\n\n mem.setByte(11, 15)\n\n try {\n\n core.fetchInstruction()\n\n } catch (error) {\n\n const instruction = ((error.message).split(':')).pop()\n\n if (instruction !== '00001111000001110000001100000001') {\n\n throw Error('testing vcore 3')\n\n }\n\n }\n\n // ADDI\n\n let instruction = 0 | vcore.Opcode.OP_IMM // opcode\n\n instruction |= vcore.Regs.x31 * (2 ** 7) // rd\n\n instruction |= 0b000 * (2 ** 12) // funct3\n\n instruction |= vcore.Regs.x30 * (2 ** 15) // rs1\n\n instruction |= 64 * (2 ** 20) // I-imm\n\n mem.setByte(12, instruction)\n\n mem.setByte(13, instruction >>> 8)\n\n mem.setByte(14, instruction >>> 16)\n\n mem.setByte(15, instruction >>> 24)\n\n core.fetchInstruction()\n\n if (core.getRegisterValue(vcore.Regs.x31) !== 64) {\n\n throw Error('testing vcore 4')\n\n }\n\n if (core.getRegisterValue(vcore.Regs.x30) !== 64) {\n\n throw Error('testing vcore 5')\n\n }\n\n instruction = 0 | vcore.Opcode.OP_IMM // opcode\n\n instruction |= vcore.Regs.x31 * (2 ** 7) // rd\n\n instruction |= 0b000 * (2 ** 12) // funct3\n\n instruction |= vcore.Regs.x30 * (2 ** 15) // rs1\n\n instruction |= -70 * (2 ** 20) // I-imm\n\n mem.setByte(16, instruction)\n\n mem.setByte(17, instruction >>> 8)\n\n mem.setByte(18, instruction >>> 16)\n\n mem.setByte(19, instruction >>> 24)\n\n core.fetchInstruction()\n\n if (core.getRegisterValue(vcore.Regs.x31) !== -6) {\n\n throw Error('testing vcore 6')\n\n }\n\n if (core.getRegisterValue(vcore.Regs.x30) !== -6) {\n\n throw Error('testing vcore 7')\n\n }\n\n // SLTI\n\n instruction = 0 | vcore.Opcode.OP_IMM // opcode\n\n instruction |= vcore.Regs.x20 * (2 ** 7) // rd\n\n instruction |= 0b010 * (2 ** 12) // funct3\n\n instruction |= vcore.Regs.x10 * (2 ** 15) // rs1\n\n instruction |= 64 * (2 ** 20) // I-imm\n\n mem.setByte(20, instruction)\n\n mem.setByte(21, instruction >>> 8)\n\n mem.setByte(22, instruction >>> 16)\n\n mem.setByte(23, instruction >>> 24)\n\n core.fetchInstruction()\n\n if (core.getRegisterValue(vcore.Regs.x20) !== 1) {\n\n throw Error('testing vcore 8')\n\n }\n\n if (core.getRegisterValue(vcore.Regs.x10) !== 0) {\n\n throw Error('testing vcore 9')\n\n }\n\n instruction = 0 | vcore.Opcode.OP_IMM // opcode\n\n instruction |= vcore.Regs.x20 * (2 ** 7) // rd\n\n instruction |= 0b010 * (2 ** 12) // funct3\n\n instruction |= vcore.Regs.x20 * (2 ** 15) // rs1\n\n instruction |= -1 * (2 ** 20) // I-imm\n\n mem.setByte(24, instruction)\n\n mem.setByte(25, instruction >>> 8)\n\n mem.setByte(26, instruction >>> 16)\n\n mem.setByte(27, instruction >>> 24)\n\n core.fetchInstruction()\n\n if (core.getRegisterValue(vcore.Regs.x20) !== 0) {\n\n throw Error('testing vcore 10')\n\n }\n\n // SLTIU\n\n instruction = 0 | vcore.Opcode.OP_IMM // opcode\n\n instruction |= vcore.Regs.x30 * (2 ** 7) // rd\n\n instruction |= 0b011 * (2 ** 12) // funct3\n\n instruction |= vcore.Regs.x31 * (2 ** 15) // rs1\n\n instruction |= 64 * (2 ** 20) // I-imm\n\n mem.setByte(28, instruction)\n\n mem.setByte(29, instruction >>> 8)\n\n mem.setByte(30, instruction >>> 16)\n\n mem.setByte(31, instruction >>> 24)\n\n 
core.fetchInstruction()\n\n if (core.getRegisterValue(vcore.Regs.x30) !== 0) {\n\n throw Error('testing vcore 11')\n\n }\n\n // ANDI\n\n instruction = 0 | vcore.Opcode.OP_IMM // opcode\n\n instruction |= vcore.Regs.x30 * (2 ** 7) // rd\n\n instruction |= 0b111 * (2 ** 12) // funct3\n\n instruction |= vcore.Regs.x31 * (2 ** 15) // rs1\n\n instruction |= 6 * (2 ** 20) // I-imm\n\n mem.setByte(32, instruction)\n\n mem.setByte(33, instruction >>> 8)\n\n mem.setByte(34, instruction >>> 16)\n\n mem.setByte(35, instruction >>> 24)\n\n core.fetchInstruction()\n\n if (core.getRegisterValue(vcore.Regs.x30) !== 2) {\n\n throw Error('testing vcore 12')\n\n }\n\n // ORI\n\n instruction = 0 | vcore.Opcode.OP_IMM // opcode\n\n instruction |= vcore.Regs.x30 * (2 ** 7) // rd\n\n instruction |= 0b110 * (2 ** 12) // funct3\n\n instruction |= vcore.Regs.x31 * (2 ** 15) // rs1\n\n instruction |= 6 * (2 ** 20) // I-imm\n\n mem.setByte(36, instruction)\n\n mem.setByte(37, instruction >>> 8)\n\n mem.setByte(38, instruction >>> 16)\n\n mem.setByte(39, instruction >>> 24)\n\n core.fetchInstruction()\n\n if (core.getRegisterValue(vcore.Regs.x30) !== -2) {\n\n throw Error('testing vcore 13')\n\n }\n\n // XORI\n\n instruction = 0 | vcore.Opcode.OP_IMM // opcode\n\n instruction |= vcore.Regs.x30 * (2 ** 7) // rd\n\n instruction |= 0b100 * (2 ** 12) // funct3\n\n instruction |= vcore.Regs.x31 * (2 ** 15) // rs1\n\n instruction |= 6 * (2 ** 20) // I-imm\n\n mem.setByte(40, instruction)\n\n mem.setByte(41, instruction >>> 8)\n\n mem.setByte(42, instruction >>> 16)\n\n mem.setByte(43, instruction >>> 24)\n\n core.fetchInstruction()\n\n if (core.getRegisterValue(vcore.Regs.x30) !== -4) {\n\n throw Error('testing vcore 14')\n\n }\n\n // SLLI\n\n instruction = 0 | vcore.Opcode.OP_IMM // opcode\n\n instruction |= vcore.Regs.x30 * (2 ** 7) // rd\n\n instruction |= 0b001 * (2 ** 12) // funct3\n\n instruction |= vcore.Regs.x31 * (2 ** 15) // rs1\n\n let shamt = 1 * (2 ** 20)\n\n let upperImm = 0 * (2 ** 25)\n\n instruction |= shamt | upperImm // I-imm\n\n mem.setByte(44, instruction)\n\n mem.setByte(45, instruction >>> 8)\n\n mem.setByte(46, instruction >>> 16)\n\n mem.setByte(47, instruction >>> 24)\n\n core.fetchInstruction()\n\n if (core.getRegisterValue(vcore.Regs.x30) !== -12) {\n\n throw Error('testing vcore 15')\n\n }\n\n // SRAI\n\n instruction = 0 | vcore.Opcode.OP_IMM // opcode\n\n instruction |= vcore.Regs.x30 * (2 ** 7) // rd\n\n instruction |= 0b101 * (2 ** 12) // funct3\n\n instruction |= vcore.Regs.x31 * (2 ** 15) // rs1\n\n shamt = 1 * (2 ** 20)\n\n upperImm = 32 * (2 ** 25)\n\n instruction |= shamt | upperImm // I-imm\n\n mem.setByte(48, instruction)\n\n mem.setByte(49, instruction >>> 8)\n\n mem.setByte(50, instruction >>> 16)\n\n mem.setByte(51, instruction >>> 24)\n\n core.fetchInstruction()\n\n if (core.getRegisterValue(vcore.Regs.x30) !== -3) {\n\n throw Error('testing vcore 16')\n\n }\n\n // SRLI\n\n instruction = 0 | vcore.Opcode.OP_IMM // opcode\n\n instruction |= vcore.Regs.x30 * (2 ** 7) // rd\n\n instruction |= 0b101 * (2 ** 12) // funct3\n\n instruction |= vcore.Regs.x31 * (2 ** 15) // rs1\n\n shamt = 1 * (2 ** 20)\n\n upperImm = 0 * (2 ** 25)\n\n instruction |= shamt | upperImm // I-imm\n\n mem.setByte(52, instruction)\n\n mem.setByte(53, instruction >>> 8)\n\n mem.setByte(54, instruction >>> 16)\n\n mem.setByte(55, instruction >>> 24)\n\n core.fetchInstruction()\n\n if (core.getRegisterValue(vcore.Regs.x30) !== 0b01111111111111111111111111111101) {\n\n throw Error('testing vcore 17')\n\n }\n\n // 
LUI\n\n instruction = 0 | vcore.Opcode.LUI // opcode\n\n instruction |= vcore.Regs.x10 * (2 ** 7) // rd\n\n instruction |= 1 * (2 ** 31) // U-imm\n\n mem.setByte(56, instruction)\n\n mem.setByte(57, instruction >>> 8)\n\n mem.setByte(58, instruction >>> 16)\n\n mem.setByte(59, instruction >>> 24)\n\n core.fetchInstruction()\n\n if ((core.getRegisterValue(vcore.Regs.x10) >>> 0) !== 0b10000000000000000000000000000000) {\n\n throw Error('testing vcore 18')\n\n }\n\n // AUIPC\n\n instruction = 0 | vcore.Opcode.AUIPC // opcode\n\n instruction |= vcore.Regs.x10 * (2 ** 7) // rd\n\n instruction |= 1 * (2 ** 31) // U-imm\n\n mem.setByte(60, instruction)\n\n mem.setByte(61, instruction >>> 8)\n\n mem.setByte(62, instruction >>> 16)\n\n mem.setByte(63, instruction >>> 24)\n\n core.fetchInstruction()\n\n if ((core.getRegisterValue(vcore.Regs.x10) >>> 0) !== 0b10000000000000000000000000111100) {\n\n throw Error('testing vcore 19')\n\n }\n\n // ADD\n\n instruction = 0 | vcore.Opcode.OP // opcode\n\n instruction |= vcore.Regs.x9 * (2 ** 7) // rd\n\n instruction |= 0b000 * (2 ** 12) // funct3\n\n instruction |= vcore.Regs.x30 * (2 ** 15) // rs1\n\n instruction |= vcore.Regs.x31 * (2 ** 20) // rs2\n\n instruction |= 0 * (2 ** 25) // funct7\n\n mem.setByte(64, instruction)\n\n mem.setByte(65, instruction >>> 8)\n\n mem.setByte(66, instruction >>> 16)\n\n mem.setByte(67, instruction >>> 24)\n\n core.fetchInstruction()\n\n if ((core.getRegisterValue(vcore.Regs.x9) >>> 0) !== 0b01111111111111111111111111110111) {\n\n throw Error('testing vcore 20')\n\n }\n\n // SLT\n\n instruction = 0 | vcore.Opcode.OP // opcode\n\n instruction |= vcore.Regs.x9 * (2 ** 7) // rd\n\n instruction |= 0b010 * (2 ** 12) // funct3\n\n instruction |= vcore.Regs.x30 * (2 ** 15) // rs1\n\n instruction |= vcore.Regs.x31 * (2 ** 20) // rs2\n\n instruction |= 0 * (2 ** 25) // funct7\n\n mem.setByte(68, instruction)\n\n mem.setByte(69, instruction >>> 8)\n\n mem.setByte(70, instruction >>> 16)\n\n mem.setByte(71, instruction >>> 24)\n\n core.fetchInstruction()\n\n if ((core.getRegisterValue(vcore.Regs.x9) >>> 0) !== 0) {\n\n throw Error('testing vcore 21')\n\n }\n\n // SLTU\n\n instruction = 0 | vcore.Opcode.OP // opcode\n\n instruction |= vcore.Regs.x9 * (2 ** 7) // rd\n\n instruction |= 0b011 * (2 ** 12) // funct3\n\n instruction |= vcore.Regs.x30 * (2 ** 15) // rs1\n\n instruction |= vcore.Regs.x31 * (2 ** 20) // rs2\n\n instruction |= 0 * (2 ** 25) // funct7\n\n mem.setByte(72, instruction)\n\n mem.setByte(73, instruction >>> 8)\n\n mem.setByte(74, instruction >>> 16)\n\n mem.setByte(75, instruction >>> 24)\n\n core.fetchInstruction()\n\n if ((core.getRegisterValue(vcore.Regs.x9) >>> 0) !== 1) {\n\n throw Error('testing vcore 22')\n\n }\n\n // AND\n\n instruction = 0 | vcore.Opcode.OP // opcode\n\n instruction |= vcore.Regs.x9 * (2 ** 7) // rd\n\n instruction |= 0b111 * (2 ** 12) // funct3\n\n instruction |= vcore.Regs.x30 * (2 ** 15) // rs1\n\n instruction |= vcore.Regs.x31 * (2 ** 20) // rs2\n\n instruction |= 0 * (2 ** 25) // funct7\n\n mem.setByte(76, instruction)\n\n mem.setByte(77, instruction >>> 8)\n\n mem.setByte(78, instruction >>> 16)\n\n mem.setByte(79, instruction >>> 24)\n\n core.fetchInstruction()\n\n if ((core.getRegisterValue(vcore.Regs.x9) >>> 0) !== 0b01111111111111111111111111111000) {\n\n throw Error('testing vcore 23')\n\n }\n\n // OR\n\n instruction = 0 | vcore.Opcode.OP // opcode\n\n instruction |= vcore.Regs.x9 * (2 ** 7) // rd\n\n instruction |= 0b110 * (2 ** 12) // funct3\n\n instruction |= vcore.Regs.x30 * 
(2 ** 15) // rs1\n\n instruction |= vcore.Regs.x31 * (2 ** 20) // rs2\n\n instruction |= 0 * (2 ** 25) // funct7\n\n mem.setByte(80, instruction)\n\n mem.setByte(81, instruction >>> 8)\n\n mem.setByte(82, instruction >>> 16)\n\n mem.setByte(83, instruction >>> 24)\n\n core.fetchInstruction()\n\n if ((core.getRegisterValue(vcore.Regs.x9) >>> 0) !== 0b11111111111111111111111111111111) {\n\n throw Error('testing vcore 24')\n\n }\n\n // XOR\n\n instruction = 0 | vcore.Opcode.OP // opcode\n\n instruction |= vcore.Regs.x9 * (2 ** 7) // rd\n\n instruction |= 0b100 * (2 ** 12) // funct3\n\n instruction |= vcore.Regs.x30 * (2 ** 15) // rs1\n\n instruction |= vcore.Regs.x31 * (2 ** 20) // rs2\n\n instruction |= 0 * (2 ** 25) // funct7\n\n mem.setByte(84, instruction)\n\n mem.setByte(85, instruction >>> 8)\n\n mem.setByte(86, instruction >>> 16)\n\n mem.setByte(87, instruction >>> 24)\n\n core.fetchInstruction()\n\n if ((core.getRegisterValue(vcore.Regs.x9) >>> 0) !== 0b10000000000000000000000000000111) {\n\n throw Error('testing vcore 25')\n\n }\n\n // SLL\n\n instruction = 0 | vcore.Opcode.OP // opcode\n\n instruction |= vcore.Regs.x9 * (2 ** 7) // rd\n\n instruction |= 0b001 * (2 ** 12) // funct3\n\n instruction |= vcore.Regs.x30 * (2 ** 15) // rs1\n\n instruction |= vcore.Regs.x31 * (2 ** 20) // rs2\n\n instruction |= 0 * (2 ** 25) // funct7\n\n mem.setByte(88, instruction)\n\n mem.setByte(89, instruction >>> 8)\n\n mem.setByte(90, instruction >>> 16)\n\n mem.setByte(91, instruction >>> 24)\n\n core.fetchInstruction()\n\n if ((core.getRegisterValue(vcore.Regs.x9) >>> 0) !== 0b11110100000000000000000000000000) {\n\n throw Error('testing vcore 26')\n\n }\n\n // SRL\n\n instruction = 0 | vcore.Opcode.OP // opcode\n\n instruction |= vcore.Regs.x9 * (2 ** 7) // rd\n\n instruction |= 0b101 * (2 ** 12) // funct3\n\n instruction |= vcore.Regs.x30 * (2 ** 15) // rs1\n\n instruction |= vcore.Regs.x31 * (2 ** 20) // rs2\n\n instruction |= 0 * (2 ** 25) // funct7\n\n mem.setByte(92, instruction)\n\n mem.setByte(93, instruction >>> 8)\n\n mem.setByte(94, instruction >>> 16)\n\n mem.setByte(95, instruction >>> 24)\n\n core.fetchInstruction()\n\n if ((core.getRegisterValue(vcore.Regs.x9) >>> 0) !== 0b00000000000000000000000000011111) {\n\n throw Error('testing vcore 27')\n\n }\n\n // SUB\n\n instruction = 0 | vcore.Opcode.OP // opcode\n\n instruction |= vcore.Regs.x9 * (2 ** 7) // rd\n\n instruction |= 0b000 * (2 ** 12) // funct3\n\n instruction |= vcore.Regs.x30 * (2 ** 15) // rs1\n\n instruction |= vcore.Regs.x31 * (2 ** 20) // rs2\n\n instruction |= 32 * (2 ** 25) // funct7\n\n mem.setByte(96, instruction)\n\n mem.setByte(97, instruction >>> 8)\n\n mem.setByte(98, instruction >>> 16)\n\n mem.setByte(99, instruction >>> 24)\n\n core.fetchInstruction()\n\n if ((core.getRegisterValue(vcore.Regs.x9) >>> 0) !== 0b10000000000000000000000000000011) {\n\n throw Error('testing vcore 28')\n\n }\n\n // SRA\n\n instruction = 0 | vcore.Opcode.OP // opcode\n\n instruction |= vcore.Regs.x9 * (2 ** 7) // rd\n\n instruction |= 0b101 * (2 ** 12) // funct3\n\n instruction |= vcore.Regs.x30 * (2 ** 15) // rs1\n\n instruction |= vcore.Regs.x31 * (2 ** 20) // rs2\n\n instruction |= 32 * (2 ** 25) // funct7\n\n mem.setByte(100, instruction)\n\n mem.setByte(101, instruction >>> 8)\n\n mem.setByte(102, instruction >>> 16)\n\n mem.setByte(103, instruction >>> 24)\n\n core.fetchInstruction()\n\n if ((core.getRegisterValue(vcore.Regs.x9) >>> 0) !== 0b00000000000000000000000000011111) {\n\n throw Error('testing vcore 29')\n\n }\n\n // 
JAL\n\n instruction = 0 | vcore.Opcode.JAL // opcode\n\n instruction |= vcore.Regs.x15 * (2 ** 7) // rd\n\n instruction |= core.valueToJimm(8) // imm\n\n mem.setByte(104, instruction)\n\n mem.setByte(105, instruction >>> 8)\n\n mem.setByte(106, instruction >>> 16)\n\n mem.setByte(107, instruction >>> 24)\n\n mem.setByte(112, 0b11111111)\n\n mem.setByte(113, 0b11111111)\n\n mem.setByte(114, 0b11111111)\n\n mem.setByte(115, 0b11111111)\n\n core.fetchInstruction()\n\n if ((core.getRegisterValue(vcore.Regs.x15) >>> 0) !== 108) {\n\n throw Error('testing vcore 30')\n\n }\n\n try {\n\n core.fetchInstruction()\n\n } catch (error) {\n\n const instruction = ((error.message).split(':')).pop()\n\n if (instruction !== '11111111111111111111111111111111') {\n\n throw Error('testing vcore 31')\n\n }\n\n }\n\n // JALR\n\n instruction = 0 | vcore.Opcode.JALR // opcode\n\n instruction |= vcore.Regs.x31 * (2 ** 7) // rd\n\n instruction |= 0b000 * (2 ** 12) // funct3\n\n instruction |= vcore.Regs.x0 * (2 ** 15) // rs1\n\n instruction |= 2 * (2 ** 20) // I-imm\n\n mem.setByte(116, instruction)\n\n mem.setByte(117, instruction >>> 8)\n\n mem.setByte(118, instruction >>> 16)\n\n mem.setByte(119, instruction >>> 24)\n\n core.fetchInstruction()\n\n if (core.getRegisterValue(vcore.Regs.x31) !== 120) {\n\n throw Error('testing vcore 32')\n\n }\n\n try {\n\n core.fetchInstruction()\n\n } catch (error) {\n\n const msg = (error.message).split(':')\n\n const ppcValue = msg.pop()\n\n msg.pop()\n\n const pcValue = msg.pop()\n\n if ((ppcValue !== '116') || (pcValue !== '118')) {\n\n throw Error('testing vcore 33')\n\n }\n\n }\n\n core.setRegisterValue(vcore.Regs.pc, 120, true)\n\n // BEQ\n\n instruction = 0 | vcore.Opcode.BRANCH // opcode\n\n instruction |= 0b000 * (2 ** 12) // funct3\n\n instruction |= vcore.Regs.x0 * (2 ** 15) // rs1\n\n instruction |= vcore.Regs.x0 * (2 ** 20) // rs2\n\n instruction |= core.valueToBimm(8) // imm\n\n mem.setByte(120, instruction)\n\n mem.setByte(121, instruction >>> 8)\n\n mem.setByte(122, instruction >>> 16)\n\n mem.setByte(123, instruction >>> 24)\n\n core.fetchInstruction()\n\n let pcValue = core.getRegisterValue(vcore.Regs.pc, true) >>> 0\n\n let ppcValue = core.getRegisterValue(vcore.Regs.ppc, true) >>> 0\n\n if ((pcValue !== 128) || (ppcValue !== 120)) {\n\n throw Error('testing vcore 34')\n\n }\n\n // BNE\n\n instruction = 0 | vcore.Opcode.BRANCH // opcode\n\n instruction |= 0b001 * (2 ** 12) // funct3\n\n instruction |= vcore.Regs.x0 * (2 ** 15) // rs1\n\n instruction |= vcore.Regs.x0 * (2 ** 20) // rs2\n\n instruction |= core.valueToBimm(8) // imm\n\n mem.setByte(128, instruction)\n\n mem.setByte(129, instruction >>> 8)\n\n mem.setByte(130, instruction >>> 16)\n\n mem.setByte(131, instruction >>> 24)\n\n core.fetchInstruction()\n\n pcValue = core.getRegisterValue(vcore.Regs.pc, true) >>> 0\n\n ppcValue = core.getRegisterValue(vcore.Regs.ppc, true) >>> 0\n\n if ((pcValue !== 132) || (ppcValue !== 128)) {\n\n throw Error('testing vcore 35')\n\n }\n\n // BLT\n\n instruction = 0 | vcore.Opcode.BRANCH // opcode\n\n instruction |= 0b100 * (2 ** 12) // funct3\n\n instruction |= vcore.Regs.x31 * (2 ** 15) // rs1\n\n instruction |= vcore.Regs.x0 * (2 ** 20) // rs2\n\n instruction |= core.valueToBimm(100) // imm\n\n mem.setByte(132, instruction)\n\n mem.setByte(133, instruction >>> 8)\n\n mem.setByte(134, instruction >>> 16)\n\n mem.setByte(135, instruction >>> 24)\n\n core.fetchInstruction()\n\n pcValue = core.getRegisterValue(vcore.Regs.pc, true) >>> 0\n\n ppcValue = 
core.getRegisterValue(vcore.Regs.ppc, true) >>> 0\n\n if ((pcValue !== 136) || (ppcValue !== 132)) {\n\n throw Error('testing vcore 36')\n\n }\n\n core.setRegisterValue(vcore.Regs.x1, -1)\n\n // BLTU\n\n instruction = 0 | vcore.Opcode.BRANCH // opcode\n\n instruction |= 0b110 * (2 ** 12) // funct3\n\n instruction |= vcore.Regs.x31 * (2 ** 15) // rs1\n\n instruction |= vcore.Regs.x1 * (2 ** 20) // rs2\n\n instruction |= core.valueToBimm(100) // imm\n\n mem.setByte(136, instruction)\n\n mem.setByte(137, instruction >>> 8)\n\n mem.setByte(138, instruction >>> 16)\n\n mem.setByte(139, instruction >>> 24)\n\n core.fetchInstruction()\n\n pcValue = core.getRegisterValue(vcore.Regs.pc, true) >>> 0\n\n ppcValue = core.getRegisterValue(vcore.Regs.ppc, true) >>> 0\n\n if ((pcValue !== 236) || (ppcValue !== 136)) {\n\n throw Error('testing vcore 37')\n\n }\n\n // BGE\n\n instruction = 0 | vcore.Opcode.BRANCH // opcode\n\n instruction |= 0b101 * (2 ** 12) // funct3\n\n instruction |= vcore.Regs.x31 * (2 ** 15) // rs1\n\n instruction |= vcore.Regs.x1 * (2 ** 20) // rs2\n\n instruction |= core.valueToBimm(8) // imm\n\n mem.setByte(236, instruction)\n\n mem.setByte(237, instruction >>> 8)\n\n mem.setByte(238, instruction >>> 16)\n\n mem.setByte(239, instruction >>> 24)\n\n core.fetchInstruction()\n\n pcValue = core.getRegisterValue(vcore.Regs.pc, true) >>> 0\n\n ppcValue = core.getRegisterValue(vcore.Regs.ppc, true) >>> 0\n\n if ((pcValue !== 244) || (ppcValue !== 236)) {\n\n throw Error('testing vcore 38')\n\n }\n\n // BGEU\n\n instruction = 0 | vcore.Opcode.BRANCH // opcode\n\n instruction |= 0b111 * (2 ** 12) // funct3\n\n instruction |= vcore.Regs.x31 * (2 ** 15) // rs1\n\n instruction |= vcore.Regs.x1 * (2 ** 20) // rs2\n\n instruction |= core.valueToBimm(8) // imm\n\n mem.setByte(244, instruction)\n\n mem.setByte(245, instruction >>> 8)\n\n mem.setByte(246, instruction >>> 16)\n\n mem.setByte(247, instruction >>> 24)\n\n core.fetchInstruction()\n\n pcValue = core.getRegisterValue(vcore.Regs.pc, true) >>> 0\n\n ppcValue = core.getRegisterValue(vcore.Regs.ppc, true) >>> 0\n\n if ((pcValue !== 248) || (ppcValue !== 244)) {\n\n throw Error('testing vcore 39')\n\n }\n\n // LB\n\n instruction = 0 | vcore.Opcode.LOAD // opcode\n\n instruction |= vcore.Regs.x0 * (2 ** 7) // rd\n\n instruction |= 0b000 * (2 ** 12) // funct3\n\n instruction |= vcore.Regs.x31 * (2 ** 15) // rs1\n\n instruction |= core.valueToIimm(1) // I-imm\n\n mem.setByte(248, instruction)\n\n mem.setByte(249, instruction >>> 8)\n\n mem.setByte(250, instruction >>> 16)\n\n mem.setByte(251, instruction >>> 24)\n\n try {\n\n core.fetchInstruction()\n\n } catch (error) {\n\n const msg = (error.message).split(':')\n\n if (msg.pop() !== '00000000000111111000000000000011') {\n\n throw Error('testing vcore 40')\n\n }\n\n }\n\n instruction = 0 | vcore.Opcode.LOAD // opcode\n\n instruction |= vcore.Regs.x1 * (2 ** 7) // rd\n\n instruction |= 0b000 * (2 ** 12) // funct3\n\n instruction |= vcore.Regs.x31 * (2 ** 15) // rs1 (120)\n\n instruction |= core.valueToIimm(-3) // I-imm\n\n mem.setByte(252, instruction)\n\n mem.setByte(253, instruction >>> 8)\n\n mem.setByte(254, instruction >>> 16)\n\n mem.setByte(255, instruction >>> 24)\n\n core.fetchInstruction()\n\n if ((core.getRegisterValue(vcore.Regs.x1)) !== 0b1111) {\n\n throw Error('testing vcore 41')\n\n }\n\n // LH\n\n instruction = 0 | vcore.Opcode.LOAD // opcode\n\n instruction |= vcore.Regs.x1 * (2 ** 7) // rd\n\n instruction |= 0b001 * (2 ** 12) // funct3\n\n instruction |= vcore.Regs.x31 * (2 
** 15) // rs1 (120)\n\n instruction |= core.valueToIimm(-113) // I-imm\n\n mem.setByte(256, instruction)\n\n mem.setByte(257, instruction >>> 8)\n\n mem.setByte(258, instruction >>> 16)\n\n mem.setByte(259, instruction >>> 24)\n\n core.fetchInstruction()\n\n if ((core.getRegisterValue(vcore.Regs.x1)) !== 0b111111111) {\n\n throw Error('testing vcore 42')\n\n }\n\n // LW\n\n instruction = 0 | vcore.Opcode.LOAD // opcode\n\n instruction |= vcore.Regs.x1 * (2 ** 7) // rd\n\n instruction |= 0b010 * (2 ** 12) // funct3\n\n instruction |= vcore.Regs.x31 * (2 ** 15) // rs1 (120)\n\n instruction |= core.valueToIimm(-113) // I-imm\n\n mem.setByte(260, instruction)\n\n mem.setByte(261, instruction >>> 8)\n\n mem.setByte(262, instruction >>> 16)\n\n mem.setByte(263, instruction >>> 24)\n\n core.fetchInstruction()\n\n if ((core.getRegisterValue(vcore.Regs.x1)) !== 0b111000000110000000111111111) {\n\n throw Error('testing vcore 43')\n\n }\n\n // LBU\n\n instruction = 0 | vcore.Opcode.LOAD // opcode\n\n instruction |= vcore.Regs.x1 * (2 ** 7) // rd\n\n instruction |= 0b100 * (2 ** 12) // funct3\n\n instruction |= vcore.Regs.x31 * (2 ** 15) // rs1 (120)\n\n instruction |= core.valueToIimm(-113) // I-imm\n\n mem.setByte(264, instruction)\n\n mem.setByte(265, instruction >>> 8)\n\n mem.setByte(266, instruction >>> 16)\n\n mem.setByte(267, instruction >>> 24)\n\n mem.setByte(7, -2)\n\n core.fetchInstruction()\n\n if ((core.getRegisterValue(vcore.Regs.x1)) !== 0b11111110) {\n\n throw Error('testing vcore 44')\n\n }\n\n // LHU\n\n instruction = 0 | vcore.Opcode.LOAD // opcode\n\n instruction |= vcore.Regs.x1 * (2 ** 7) // rd\n\n instruction |= 0b101 * (2 ** 12) // funct3\n\n instruction |= vcore.Regs.x31 * (2 ** 15) // rs1 (120)\n\n instruction |= core.valueToIimm(-113) // I-imm\n\n mem.setByte(268, instruction)\n\n mem.setByte(269, instruction >>> 8)\n\n mem.setByte(270, instruction >>> 16)\n\n mem.setByte(271, instruction >>> 24)\n\n mem.setByte(8, -2)\n\n core.fetchInstruction()\n\n if ((core.getRegisterValue(vcore.Regs.x1)) !== 0b1111111011111110) {\n\n throw Error('testing vcore 45')\n\n }\n\n // SB\n\n core.setRegisterValue(vcore.Regs.x30, -7)\n\n instruction = 0 | vcore.Opcode.STORE // opcode\n\n instruction |= 0b000 * (2 ** 12) // funct3\n\n instruction |= vcore.Regs.x31 * (2 ** 15) // rs1 (120)\n\n instruction |= vcore.Regs.x30 * (2 ** 20) // rs2 (-7)\n\n instruction |= core.valueToSimm(180) // S-imm\n\n mem.setByte(272, instruction)\n\n mem.setByte(273, instruction >>> 8)\n\n mem.setByte(274, instruction >>> 16)\n\n mem.setByte(275, instruction >>> 24)\n\n core.fetchInstruction()\n\n if (mem.getByte(300) !== 0b11111001) {\n\n throw Error('testing vcore 46')\n\n }\n\n // SH\n\n mem.setByte(300, 0)\n\n core.setRegisterValue(vcore.Regs.x30, -7)\n\n instruction = 0 | vcore.Opcode.STORE // opcode\n\n instruction |= 0b001 * (2 ** 12) // funct3\n\n instruction |= vcore.Regs.x31 * (2 ** 15) // rs1 (120)\n\n instruction |= vcore.Regs.x30 * (2 ** 20) // rs2 (-7)\n\n instruction |= core.valueToSimm(180) // S-imm\n\n mem.setByte(276, instruction)\n\n mem.setByte(277, instruction >>> 8)\n\n mem.setByte(278, instruction >>> 16)\n\n mem.setByte(279, instruction >>> 24)\n\n core.fetchInstruction()\n\n let memVal = mem.getByte(300 + 0)\n\n memVal |= (mem.getByte(300 + 1)) * (2 ** 8)\n\n if (memVal !== 0b1111111111111001) {\n\n throw Error('testing vcore 47')\n\n }\n\n // SW\n\n mem.setByte(300, 0)\n\n mem.setByte(301, 0)\n\n core.setRegisterValue(vcore.Regs.x30, -7)\n\n instruction = 0 | vcore.Opcode.STORE // 
opcode\n\n instruction |= 0b010 * (2 ** 12) // funct3\n\n instruction |= vcore.Regs.x31 * (2 ** 15) // rs1 (120)\n\n instruction |= vcore.Regs.x30 * (2 ** 20) // rs2 (-7)\n\n instruction |= core.valueToSimm(180) // S-imm\n\n mem.setByte(280, instruction)\n\n mem.setByte(281, instruction >>> 8)\n\n mem.setByte(282, instruction >>> 16)\n\n mem.setByte(283, instruction >>> 24)\n\n core.fetchInstruction()\n\n memVal = mem.getByte(300 + 0)\n\n memVal |= (mem.getByte(300 + 1)) * (2 ** 8)\n\n memVal |= (mem.getByte(300 + 2)) * (2 ** 16)\n\n memVal |= (mem.getByte(300 + 3)) * (2 ** 24)\n\n if ((memVal >>> 0) !== 0b11111111111111111111111111111001) {\n\n throw Error('testing vcore 48')\n\n }\n\n // FENCE = NOP\n\n }\n\n\n\n // All //\n\n testingAll () {\n\n console.log('BEGIN TESTING ALL')\n\n this.testingMemory()\n\n this.testingVcore()\n\n console.log('END TESTING ALL')\n\n }\n", "file_path": "projects/rivium/web/old/test.js", "rank": 35, "score": 9.043516760929794 }, { "content": "class Vcore {\n\n mem;\n\n vnum;\n\n ialign;\n\n ialignByte; // IALIGN/8\n\n ilen;\n\n ilenByte; // ILEN/8\n\n\n\n constructor(mem, vnum, ialign, ilen) {\n\n this.mem = mem;\n\n this.vnum = vnum;\n\n this.ialign = ialign;\n\n this.ilen = ilen;\n\n this.ialignByte = this.ialign / 8;\n\n this.ilenByte = this.ilen / 8;\n\n }\n\n\n\n // Register value //\n\n setRegisterValue(regnum, value, counter = false) {\n\n if (!counter && ((regnum === Regs.pc) || (regnum === Regs.ppc))) {\n\n throw Error('set-access to counters');\n\n }\n\n else if (regnum === Regs.x0) {\n\n return;\n\n }\n\n const i32arr = new Int32Array(this.mem.buffer);\n\n const regOffset = this.mem.mrcount * this.vnum;\n\n i32arr[(i32arr.length - 1) - (regOffset + regnum)] = value;\n\n }\n\n\n\n getRegisterValue(regnum, counter = false) {\n\n if (!counter && ((regnum === Regs.pc) || (regnum === Regs.ppc))) {\n\n throw Error('get-access to counters');\n\n }\n\n else if (regnum === Regs.x0) {\n\n return 0;\n\n }\n\n const i32arr = new Int32Array(this.mem.buffer);\n\n const regOffset = this.mem.mrcount * this.vnum;\n\n const value = i32arr[(i32arr.length - 1) - (regOffset + regnum)];\n\n if (value === undefined) {\n\n throw Error('value === undefined');\n\n }\n\n return value;\n\n }\n\n\n\n // Fetcher //\n\n fetchInstruction(intermediate) {\n\n if (intermediate === undefined) {\n\n intermediate = []\n\n }\n\n const pcValue = this.getRegisterValue(Regs.pc, true);\n\n const ppcValue = this.getRegisterValue(Regs.ppc, true);\n\n if ((pcValue % 4) !== 0) {\n\n this.throwInstructionAddressMisalignedException(pcValue, ppcValue);\n\n return;\n\n }\n\n this.setRegisterValue(Regs.ppc, this.getRegisterValue(Regs.pc, true), true);\n\n const byte1 = (this.mem.getByte(this.getRegisterValue(Regs.pc, true)) * (2 ** 0)) >>> 0;\n\n this.setRegisterValue(Regs.pc, this.getRegisterValue(Regs.pc, true) + 1, true);\n\n const byte2 = (this.mem.getByte(this.getRegisterValue(Regs.pc, true)) * (2 ** 8)) >>> 0;\n\n this.setRegisterValue(Regs.pc, this.getRegisterValue(Regs.pc, true) + 1, true);\n\n const byte3 = (this.mem.getByte(this.getRegisterValue(Regs.pc, true)) * (2 ** 16)) >>> 0;\n\n this.setRegisterValue(Regs.pc, this.getRegisterValue(Regs.pc, true) + 1, true);\n\n const byte4 = (this.mem.getByte(this.getRegisterValue(Regs.pc, true)) * (2 ** 24)) >>> 0;\n\n this.setRegisterValue(Regs.pc, this.getRegisterValue(Regs.pc, true) + 1, true);\n\n const instruction = (0 | byte1 | byte2 | byte3 | byte4) >>> 0;\n\n const opcode = instruction & 0b00000000000000000000000001111111;\n\n if (opcode 
=== Opcode.OP_IMM) {\n\n // Integer Register-Immediate Instructions\n\n const rd = (instruction >>> 7) & 0b11111;\n\n const rs1 = (instruction >>> 15) & 0b11111;\n\n const funct3 = (instruction >>> 12) & 0b111;\n\n if (funct3 === 0b000) {\n\n // ADDI; NOP (ADDI x0, x0, 0)\n\n // rd = rs1 = rs1 + imm\n\n const immValue = this.iimmToValue(instruction) >> 0;\n\n this.setRegisterValue(rs1, this.getRegisterValue(rs1) + immValue);\n\n this.setRegisterValue(rd, this.getRegisterValue(rs1));\n\n intermediate.push('ADDI')\n\n intermediate.push(rd)\n\n intermediate.push(rs1)\n\n intermediate.push(immValue)\n\n }\n\n else if (funct3 === 0b010) {\n\n // SLTI\n\n // if (rs1 < imm) {rd = 1} else {rd = 0}\n\n const immValue = this.iimmToValue(instruction) >> 0;\n\n let result = 0;\n\n if (this.getRegisterValue(rs1) < immValue) {\n\n result = 1;\n\n }\n\n else {\n\n result = 0;\n\n }\n\n this.setRegisterValue(rd, result);\n\n intermediate.push('SLTI')\n\n intermediate.push(rd)\n\n intermediate.push(rs1)\n\n intermediate.push(immValue)\n\n }\n\n else if (funct3 === 0b011) {\n\n // SLTIU\n\n // if ((rs1>>>0) < (imm>>>0)) {rd = 1} else {rd = 0}\n\n const immValue = this.iimmToValue(instruction);\n\n let result = 0;\n\n if ((this.getRegisterValue(rs1) >>> 0) < (immValue >>> 0)) {\n\n result = 1;\n\n }\n\n else {\n\n result = 0;\n\n }\n\n this.setRegisterValue(rd, result);\n\n intermediate.push('SLTIU')\n\n intermediate.push(rd)\n\n intermediate.push(rs1)\n\n intermediate.push(immValue)\n\n }\n\n else if (funct3 === 0b111) {\n\n // ANDI\n\n // rd = rs1 AND imm\n\n const immValue = this.iimmToValue(instruction);\n\n this.setRegisterValue(rd, this.getRegisterValue(rs1) & immValue);\n\n intermediate.push('ANDI')\n\n intermediate.push(rd)\n\n intermediate.push(rs1)\n\n intermediate.push(immValue)\n\n }\n\n else if (funct3 === 0b110) {\n\n // ORI\n\n // rd = rs1 OR imm\n\n const immValue = this.iimmToValue(instruction);\n\n this.setRegisterValue(rd, this.getRegisterValue(rs1) | immValue);\n\n intermediate.push('ORI')\n\n intermediate.push(rd)\n\n intermediate.push(rs1)\n\n intermediate.push(immValue)\n\n }\n\n else if (funct3 === 0b100) {\n\n // XORI\n\n // rd = rs1 XOR imm\n\n const immValue = this.iimmToValue(instruction);\n\n this.setRegisterValue(rd, this.getRegisterValue(rs1) ^ immValue);\n\n intermediate.push('XORI')\n\n intermediate.push(rd)\n\n intermediate.push(rs1)\n\n intermediate.push(immValue)\n\n }\n\n else if (funct3 === 0b001) {\n\n // SLLI\n\n // rd = rs1 << (imm & 0b11111)\n\n const immValue = this.iimmToValue(instruction);\n\n const shamt = immValue & 0b11111;\n\n this.setRegisterValue(rd, this.getRegisterValue(rs1) * (2 ** shamt));\n\n intermediate.push('SLLI')\n\n intermediate.push(rd)\n\n intermediate.push(rs1)\n\n intermediate.push(immValue)\n\n }\n\n else if (funct3 === 0b101) {\n\n const immValue = this.iimmToValue(instruction);\n\n const shamt = immValue & 0b11111;\n\n const shiftType = immValue & 0b010000000000;\n\n if (shiftType === 0) {\n\n // SRLI\n\n // rd = rs1 >>> shamt\n\n this.setRegisterValue(rd, this.getRegisterValue(rs1) >>> shamt);\n\n intermediate.push('SRLI')\n\n intermediate.push(rd)\n\n intermediate.push(rs1)\n\n intermediate.push(immValue)\n\n }\n\n else if (shiftType === 0b010000000000) {\n\n // SRAI\n\n // rd = rs1 >> shamt\n\n this.setRegisterValue(rd, this.getRegisterValue(rs1) >> shamt);\n\n intermediate.push('SRAI')\n\n intermediate.push(rd)\n\n intermediate.push(rs1)\n\n intermediate.push(immValue)\n\n }\n\n else {\n\n let instructionBinary = (instruction >>> 
0).toString(2);\n\n instructionBinary = instructionBinary.padStart(this.ilen, '0');\n\n this.throwIllegalInstructionException(instructionBinary);\n\n }\n\n }\n\n else {\n\n let instructionBinary = (instruction >>> 0).toString(2);\n\n instructionBinary = instructionBinary.padStart(this.ilen, '0');\n\n this.throwIllegalInstructionException(instructionBinary);\n\n }\n\n }\n\n else if (opcode === Opcode.LUI) {\n\n // LUI\n\n // rd = imm & 0b11111111111111111111000000000000\n\n const rd = (instruction >>> 7) & 0b11111;\n\n const immValue = this.uimmToValue(instruction) >> 0;\n\n this.setRegisterValue(rd, immValue);\n\n intermediate.push('LUI')\n\n intermediate.push(rd)\n\n intermediate.push(immValue)\n\n }\n\n else if (opcode === Opcode.AUIPC) {\n\n // AUIPC\n\n // rd = imm + ppc\n\n const rd = (instruction >>> 7) & 0b11111;\n\n const immValue = this.uimmToValue(instruction) >> 0;\n\n this.setRegisterValue(rd, (immValue + this.getRegisterValue(Regs.ppc, true)));\n\n intermediate.push('AUIPC')\n\n intermediate.push(rd)\n\n intermediate.push(immValue)\n\n }\n\n else if (opcode === Opcode.OP) {\n\n // Integer Register-Register Operations\n\n const rd = (instruction >>> 7) & 0b11111;\n\n const rs1 = (instruction >>> 15) & 0b11111;\n\n const rs2 = (instruction >>> 20) & 0b11111;\n\n const funct3 = (instruction >>> 12) & 0b111;\n\n const funct7 = (instruction >>> 25) & 0b1111111;\n\n if (funct7 === 0) {\n\n if (funct3 === 0b000) {\n\n // ADD\n\n // rd = rs1 + rs2\n\n const result = this.getRegisterValue(rs1) + this.getRegisterValue(rs2);\n\n this.setRegisterValue(rd, result);\n\n intermediate.push('ADD')\n\n intermediate.push(rd)\n\n intermediate.push(rs1)\n\n intermediate.push(rs2)\n\n }\n\n else if (funct3 === 0b010) {\n\n // SLT\n\n // rd = 1 if (rs1 < rs2) else 0\n\n let result = 0;\n\n if (this.getRegisterValue(rs1) < this.getRegisterValue(rs2)) {\n\n result = 1;\n\n }\n\n else {\n\n result = 0;\n\n }\n\n this.setRegisterValue(rd, result);\n\n intermediate.push('SLT')\n\n intermediate.push(rd)\n\n intermediate.push(rs1)\n\n intermediate.push(rs2)\n\n }\n\n else if (funct3 === 0b011) {\n\n // SLTU\n\n // rd = 1 if ((rs1>>>0) < (rs2>>>0)) else 0\n\n let result = 0;\n\n if ((this.getRegisterValue(rs1) >>> 0) < (this.getRegisterValue(rs2) >>> 0)) {\n\n result = 1;\n\n }\n\n else {\n\n result = 0;\n\n }\n\n this.setRegisterValue(rd, result);\n\n intermediate.push('SLTU')\n\n intermediate.push(rd)\n\n intermediate.push(rs1)\n\n intermediate.push(rs2)\n\n }\n\n else if (funct3 === 0b111) {\n\n // AND\n\n // rd = rs1 & rs2\n\n const result = this.getRegisterValue(rs1) & this.getRegisterValue(rs2);\n\n this.setRegisterValue(rd, result);\n\n intermediate.push('AND')\n\n intermediate.push(rd)\n\n intermediate.push(rs1)\n\n intermediate.push(rs2)\n\n }\n\n else if (funct3 === 0b110) {\n\n // OR\n\n // rd = rs1 | rs2\n\n const result = this.getRegisterValue(rs1) | this.getRegisterValue(rs2);\n\n this.setRegisterValue(rd, result);\n\n intermediate.push('OR')\n\n intermediate.push(rd)\n\n intermediate.push(rs1)\n\n intermediate.push(rs2)\n\n }\n\n else if (funct3 === 0b100) {\n\n // XOR\n\n // rd = rs1 ^ rs2\n\n const result = this.getRegisterValue(rs1) ^ this.getRegisterValue(rs2);\n\n this.setRegisterValue(rd, result);\n\n intermediate.push('XOR')\n\n intermediate.push(rd)\n\n intermediate.push(rs1)\n\n intermediate.push(rs2)\n\n }\n\n else if (funct3 === 0b001) {\n\n // SLL\n\n // rd = rs1 << (rs2 & 0b11111)\n\n const result = this.getRegisterValue(rs1) * (2 ** (this.getRegisterValue(rs2) & 0b11111));\n\n 
this.setRegisterValue(rd, result);\n\n intermediate.push('SLL')\n\n intermediate.push(rd)\n\n intermediate.push(rs1)\n\n intermediate.push(rs2)\n\n }\n\n else if (funct3 === 0b101) {\n\n // SRL\n\n // rd = rs1 >>> (rs2 & 0b11111)\n\n const result = this.getRegisterValue(rs1) >>> (this.getRegisterValue(rs2) & 0b11111);\n\n this.setRegisterValue(rd, result);\n\n intermediate.push('SRL')\n\n intermediate.push(rd)\n\n intermediate.push(rs1)\n\n intermediate.push(rs2)\n\n }\n\n else {\n\n let instructionBinary = (instruction >>> 0).toString(2);\n\n instructionBinary = instructionBinary.padStart(this.ilen, '0');\n\n this.throwIllegalInstructionException(instructionBinary);\n\n }\n\n }\n\n else if (funct7 === 0b0100000) {\n\n if (funct3 === 0b000) {\n\n // SUB\n\n // rd = rs1 - rs2\n\n const result = this.getRegisterValue(rs1) - this.getRegisterValue(rs2);\n\n this.setRegisterValue(rd, result);\n\n intermediate.push('SUB')\n\n intermediate.push(rd)\n\n intermediate.push(rs1)\n\n intermediate.push(rs2)\n\n }\n\n else if (funct3 === 0b101) {\n\n // SRA\n\n // rd = rs1 >> (rs2 & 0b11111)\n\n const result = this.getRegisterValue(rs1) >> (this.getRegisterValue(rs2) & 0b11111);\n\n this.setRegisterValue(rd, result);\n\n intermediate.push('SRA')\n\n intermediate.push(rd)\n\n intermediate.push(rs1)\n\n intermediate.push(rs2)\n\n }\n\n else {\n\n let instructionBinary = (instruction >>> 0).toString(2);\n\n instructionBinary = instructionBinary.padStart(this.ilen, '0');\n\n this.throwIllegalInstructionException(instructionBinary);\n\n }\n\n }\n\n else {\n\n let instructionBinary = (instruction >>> 0).toString(2);\n\n instructionBinary = instructionBinary.padStart(this.ilen, '0');\n\n this.throwIllegalInstructionException(instructionBinary);\n\n }\n\n }\n\n else if (opcode === Opcode.JAL) {\n\n // Control Transfer Instructions\n\n // Unconditional Jumps\n\n // JAL\n\n // rd = pc\n\n // pc = ppc + imm\n\n const rd = (instruction >>> 7) & 0b11111;\n\n const immValue = this.jimmToValue(instruction) >> 0;\n\n this.setRegisterValue(rd, this.getRegisterValue(Regs.pc, true));\n\n this.setRegisterValue(Regs.pc, this.getRegisterValue(Regs.ppc, true) + immValue, true);\n\n intermediate.push('JAL')\n\n intermediate.push(rd)\n\n intermediate.push(immValue)\n\n }\n\n else if (opcode === Opcode.JALR) {\n\n // JALR\n\n // rd = pc\n\n // pc = ppc + (0b11111111111111111111111111111110 & (rs1 + imm))\n\n const rd = (instruction >>> 7) & 0b11111;\n\n const rs1 = (instruction >>> 15) & 0b11111;\n\n const funct3 = (instruction >>> 12) & 0b111;\n\n if (funct3 === 0b000) {\n\n const immValue = this.iimmToValue(instruction) >> 0;\n\n let result = this.getRegisterValue(rs1) + immValue;\n\n result &= 0b11111111111111111111111111111110;\n\n result += this.getRegisterValue(Regs.ppc, true);\n\n this.setRegisterValue(rd, this.getRegisterValue(Regs.pc, true));\n\n this.setRegisterValue(Regs.pc, result, true);\n\n intermediate.push('JALR')\n\n intermediate.push(rd)\n\n intermediate.push(rs1)\n\n intermediate.push(immValue)\n\n }\n\n else {\n\n let instructionBinary = (instruction >>> 0).toString(2);\n\n instructionBinary = instructionBinary.padStart(this.ilen, '0');\n\n this.throwIllegalInstructionException(instructionBinary);\n\n }\n\n }\n\n else if (opcode === Opcode.BRANCH) {\n\n // Conditional Branches\n\n // branch: pc = ppc + imm\n\n const rs1 = (instruction >>> 15) & 0b11111;\n\n const rs2 = (instruction >>> 20) & 0b11111;\n\n const funct3 = (instruction >>> 12) & 0b111;\n\n const immValue = this.bimmToValue(instruction) >>> 0;\n\n 
if (funct3 === 0b000) {\n\n // BEQ\n\n // if (rs1 === rs2) {branch}\n\n if (this.getRegisterValue(rs1) === this.getRegisterValue(rs2)) {\n\n const adr = this.getRegisterValue(Regs.ppc, true) + immValue;\n\n this.setRegisterValue(Regs.pc, adr, true);\n\n }\n\n intermediate.push('BEQ')\n\n intermediate.push(rs1)\n\n intermediate.push(rs2)\n\n intermediate.push(immValue)\n\n }\n\n else if (funct3 === 0b001) {\n\n // BNE\n\n // if (rs1 !== rs2) {branch}\n\n if (this.getRegisterValue(rs1) !== this.getRegisterValue(rs2)) {\n\n const adr = this.getRegisterValue(Regs.ppc, true) + immValue;\n\n this.setRegisterValue(Regs.pc, adr, true);\n\n }\n\n intermediate.push('BNE')\n\n intermediate.push(rs1)\n\n intermediate.push(rs2)\n\n intermediate.push(immValue)\n\n }\n\n else if (funct3 === 0b100) {\n\n // BLT\n\n // if (rs1 < rs2) {branch}\n\n if (this.getRegisterValue(rs1) < this.getRegisterValue(rs2)) {\n\n const adr = this.getRegisterValue(Regs.ppc, true) + immValue;\n\n this.setRegisterValue(Regs.pc, adr, true);\n\n }\n\n intermediate.push('BLT')\n\n intermediate.push(rs1)\n\n intermediate.push(rs2)\n\n intermediate.push(immValue)\n\n }\n\n else if (funct3 === 0b110) {\n\n // BLTU\n\n // if ((rs1>>>0) < (rs2>>>0)) {branch}\n\n if ((this.getRegisterValue(rs1) >>> 0) < (this.getRegisterValue(rs2) >>> 0)) {\n\n const adr = this.getRegisterValue(Regs.ppc, true) + immValue;\n\n this.setRegisterValue(Regs.pc, adr, true);\n\n }\n\n intermediate.push('BLTU')\n\n intermediate.push(rs1)\n\n intermediate.push(rs2)\n\n intermediate.push(immValue)\n\n }\n\n else if (funct3 === 0b101) {\n\n // BGE\n\n // if (rs1 >= rs2) {branch}\n\n if (this.getRegisterValue(rs1) >= this.getRegisterValue(rs2)) {\n\n const adr = this.getRegisterValue(Regs.ppc, true) + immValue;\n\n this.setRegisterValue(Regs.pc, adr, true);\n\n }\n\n intermediate.push('BGE')\n\n intermediate.push(rs1)\n\n intermediate.push(rs2)\n\n intermediate.push(immValue)\n\n }\n\n else if (funct3 === 0b111) {\n\n // BGEU\n\n // if ((rs1>>>0) >= (rs2>>>0)) {branch}\n\n if ((this.getRegisterValue(rs1) >>> 0) >= (this.getRegisterValue(rs2) >>> 0)) {\n\n const adr = this.getRegisterValue(Regs.ppc, true) + immValue;\n\n this.setRegisterValue(Regs.pc, adr, true);\n\n }\n\n intermediate.push('BGEU')\n\n intermediate.push(rs1)\n\n intermediate.push(rs2)\n\n intermediate.push(immValue)\n\n }\n\n else {\n\n let instructionBinary = (instruction >>> 0).toString(2);\n\n instructionBinary = instructionBinary.padStart(this.ilen, '0');\n\n this.throwIllegalInstructionException(instructionBinary);\n\n }\n\n }\n\n else if (opcode === Opcode.LOAD) {\n\n // Load and Store Instructions\n\n // adr = rs1 + imm\n\n const rd = (instruction >>> 7) & 0b11111;\n\n if (rd === Regs.x0) {\n\n let instructionBinary = (instruction >>> 0).toString(2);\n\n instructionBinary = instructionBinary.padStart(this.ilen, '0');\n\n this.throwIllegalInstructionException(instructionBinary);\n\n }\n\n const rs1 = (instruction >>> 15) & 0b11111;\n\n const funct3 = (instruction >>> 12) & 0b111;\n\n const immValue = this.iimmToValue(instruction) >> 0;\n\n const adr = this.getRegisterValue(rs1) + immValue;\n\n if (funct3 === 0b000) {\n\n // LB\n\n // rd = signExt(mem[adr])\n\n const memData = this.mem.getByte(adr);\n\n this.setRegisterValue(rd, memData);\n\n intermediate.push('LB')\n\n intermediate.push(rd)\n\n intermediate.push(rs1)\n\n intermediate.push(immValue)\n\n }\n\n else if (funct3 === 0b001) {\n\n // LH\n\n // rd = signExt(mem[adr..adr+1])\n\n let memData = this.mem.getByte(adr);\n\n memData |= 
this.mem.getByte(adr + 1) * (2 ** 8);\n\n if (((this.mem.getByte(adr + 1) >>> 7) & 0b1) === 0b1) {\n\n memData |= 0b11111111111111110000000000000000;\n\n }\n\n else {\n\n memData &= 0b00000000000000001111111111111111;\n\n }\n\n this.setRegisterValue(rd, memData);\n\n intermediate.push('LH')\n\n intermediate.push(rd)\n\n intermediate.push(rs1)\n\n intermediate.push(immValue)\n\n }\n\n else if (funct3 === 0b010) {\n\n // LW\n\n // rd = signExt(mem[adr..adr+3])\n\n let memData = this.mem.getByte(adr);\n\n memData |= this.mem.getByte(adr + 1) * (2 ** 8);\n\n memData |= this.mem.getByte(adr + 2) * (2 ** 16);\n\n memData |= this.mem.getByte(adr + 3) * (2 ** 24);\n\n this.setRegisterValue(rd, memData);\n\n intermediate.push('LW')\n\n intermediate.push(rd)\n\n intermediate.push(rs1)\n\n intermediate.push(immValue)\n\n }\n\n else if (funct3 === 0b100) {\n\n // LBU\n\n // rd = zeroExt(mem[adr])\n\n const memData = (this.mem.getByte(adr) >>> 0);\n\n this.setRegisterValue(rd, memData);\n\n intermediate.push('LBU')\n\n intermediate.push(rd)\n\n intermediate.push(rs1)\n\n intermediate.push(immValue)\n\n }\n\n else if (funct3 === 0b101) {\n\n // LHU\n\n // rd = zeroExt(mem[adr..adr+1])\n\n let memData = (this.mem.getByte(adr)) >>> 0;\n\n memData |= (this.mem.getByte(adr + 1) * (2 ** 8)) >>> 0;\n\n memData >>>= 0;\n\n this.setRegisterValue(rd, memData);\n\n intermediate.push('LHU')\n\n intermediate.push(rd)\n\n intermediate.push(rs1)\n\n intermediate.push(immValue)\n\n }\n\n else {\n\n let instructionBinary = (instruction >>> 0).toString(2);\n\n instructionBinary = instructionBinary.padStart(this.ilen, '0');\n\n this.throwIllegalInstructionException(instructionBinary);\n\n }\n\n }\n\n else if (opcode === Opcode.STORE) {\n\n const rs1 = (instruction >>> 15) & 0b11111;\n\n const rs2 = (instruction >>> 20) & 0b11111;\n\n const funct3 = (instruction >>> 12) & 0b111;\n\n const immValue = this.simmToValue(instruction) >> 0;\n\n const adr = this.getRegisterValue(rs1) + immValue;\n\n if (funct3 === 0b000) {\n\n // SB\n\n // mem[adr] = rs2[0]\n\n this.mem.setByte(adr, this.getRegisterValue(rs2));\n\n intermediate.push('SB')\n\n intermediate.push(rs1)\n\n intermediate.push(rs2)\n\n intermediate.push(immValue)\n\n }\n\n else if (funct3 === 0b001) {\n\n // SH\n\n // mem[adr..adr+1] = rs2[0..1]\n\n this.mem.setByte(adr + 0, this.getRegisterValue(rs2));\n\n this.mem.setByte(adr + 1, this.getRegisterValue(rs2) >> 8);\n\n intermediate.push('SH')\n\n intermediate.push(rs1)\n\n intermediate.push(rs2)\n\n intermediate.push(immValue)\n\n }\n\n else if (funct3 === 0b010) {\n\n // SW\n\n // mem[adr..adr+3] = rs2[0..3]\n\n this.mem.setByte(adr + 0, this.getRegisterValue(rs2));\n\n this.mem.setByte(adr + 1, this.getRegisterValue(rs2) >> 8);\n\n this.mem.setByte(adr + 2, this.getRegisterValue(rs2) >> 16);\n\n this.mem.setByte(adr + 3, this.getRegisterValue(rs2) >> 24);\n\n intermediate.push('SW')\n\n intermediate.push(rs1)\n\n intermediate.push(rs2)\n\n intermediate.push(immValue)\n\n }\n\n else {\n\n let instructionBinary = (instruction >>> 0).toString(2);\n\n instructionBinary = instructionBinary.padStart(this.ilen, '0');\n\n this.throwIllegalInstructionException(instructionBinary);\n\n }\n\n }\n\n else if (opcode === Opcode.MISC_MEM) {\n\n // Memory Ordering Instructions\n\n const funct3 = (instruction >>> 12) & 0b111;\n\n if (funct3 === 0b000) {\n\n // FENCE = NOP\n\n const rd = (instruction >>> 7) & 0b11111;\n\n const rs1 = (instruction >>> 15) & 0b11111;\n\n const immValue = this.iimmToValue(instruction) >> 0;\n\n 
intermediate.push('FENCE')\n\n intermediate.push(rd)\n\n intermediate.push(rs1)\n\n intermediate.push(immValue)\n\n }\n\n else {\n\n let instructionBinary = (instruction >>> 0).toString(2);\n\n instructionBinary = instructionBinary.padStart(this.ilen, '0');\n\n this.throwIllegalInstructionException(instructionBinary);\n\n }\n\n }\n\n else if (opcode === Opcode.SYSTEM) {\n\n // Environment Call and Breakpoints\n\n const rd = (instruction >>> 7) & 0b11111;\n\n const rs1 = (instruction >>> 15) & 0b11111;\n\n const funct3 = (instruction >>> 12) & 0b111;\n\n const funct12 = (instruction >>> 20) & 0b111111111111;\n\n if ((rd !== 0) || (rs1 !== 0) || (funct3 !== 0)) {\n\n let instructionBinary = (instruction >>> 0).toString(2);\n\n instructionBinary = instructionBinary.padStart(this.ilen, '0');\n\n this.throwIllegalInstructionException(instructionBinary);\n\n }\n\n if (funct12 === 0) {\n\n // ECALL\n\n intermediate.push('ECALL')\n\n }\n\n else if (funct12 === 1) {\n\n // EBREAK\n\n intermediate.push('EBREAK')\n\n }\n\n else {\n\n let instructionBinary = (instruction >>> 0).toString(2);\n\n instructionBinary = instructionBinary.padStart(this.ilen, '0');\n\n this.throwIllegalInstructionException(instructionBinary);\n\n }\n\n }\n\n else {\n\n let instructionBinary = (instruction >>> 0).toString(2);\n\n instructionBinary = instructionBinary.padStart(this.ilen, '0');\n\n this.throwIllegalInstructionException(instructionBinary);\n\n }\n\n for (let i = 0; i < intermediate.length; ++i) {\n\n intermediate[i] = intermediate[i].toString()\n\n }\n\n console.info(intermediate)\n\n }\n\n\n\n // Immediates //\n\n // I-immediate\n\n valueToIimm(value) {\n\n const part1 = (value * (2 ** 20)) & 0b00000000000100000000000000000000;\n\n const part2 = (value * (2 ** 20)) & 0b00000001111000000000000000000000;\n\n const part3 = (value * (2 ** 20)) & 0b01111110000000000000000000000000;\n\n const part4 = value & 0b10000000000000000000000000000000;\n\n const imm = 0 | part1 | part2 | part3 | part4;\n\n return imm;\n\n }\n\n\n\n iimmToValue(instruction) {\n\n const part1 = (instruction >>> 20) & 0b00000000000000000000000000000001;\n\n const part2 = (instruction >>> 20) & 0b00000000000000000000000000011110;\n\n const part3 = (instruction >>> 20) & 0b00000000000000000000011111100000;\n\n let part4 = 0;\n\n const sign = (instruction & 0b10000000000000000000000000000000) >>> 0;\n\n if (sign === 0b10000000000000000000000000000000) {\n\n part4 = 0b11111111111111111111100000000000;\n\n }\n\n else {\n\n part4 = 0b00000000000000000000000000000000;\n\n }\n\n const value = 0 | part1 | part2 | part3 | part4;\n\n return value;\n\n }\n\n\n\n // S-immediate\n\n valueToSimm(value) {\n\n const part1 = (value * (2 ** 7)) & 0b00000000000000000000000010000000;\n\n const part2 = (value * (2 ** 7)) & 0b00000000000000000000111100000000;\n\n const part3 = (value * (2 ** (7 + 13))) & 0b01111110000000000000000000000000;\n\n const part4 = value & 0b10000000000000000000000000000000;\n\n const imm = 0 | part1 | part2 | part3 | part4;\n\n return imm;\n\n }\n\n\n\n simmToValue(instruction) {\n\n const part1 = (instruction >>> 7) & 0b00000000000000000000000000000001;\n\n const part2 = (instruction >>> 7) & 0b00000000000000000000000000011110;\n\n const part3 = (instruction >>> (7 + 13)) & 0b00000000000000000000011111100000;\n\n let part4 = 0;\n\n const sign = (instruction & 0b10000000000000000000000000000000) >>> 0;\n\n if (sign === 0b10000000000000000000000000000000) {\n\n part4 = 0b11111111111111111111100000000000;\n\n }\n\n else {\n\n part4 = 
0b00000000000000000000000000000000;\n\n }\n\n const value = 0 | part1 | part2 | part3 | part4;\n\n return value;\n\n }\n\n\n\n // B-immediate\n\n valueToBimm(value) {\n\n const part1 = 0;\n\n const part2 = (value * (2 ** 7)) & 0b00000000000000000000111100000000;\n\n const part3 = (value * (2 ** (7 + 13))) & 0b01111110000000000000000000000000;\n\n const part4 = (value >>> 4) & 0b00000000000000000000000010000000;\n\n const part5 = value & 0b10000000000000000000000000000000;\n\n const imm = 0 | part1 | part2 | part3 | part4 | part5;\n\n return imm;\n\n }\n\n\n\n bimmToValue(instruction) {\n\n const part1 = 0;\n\n const part2 = (instruction >>> 7) & 0b00000000000000000000000000011110;\n\n const part3 = (instruction >>> (7 + 13)) & 0b00000000000000000000011111100000;\n\n const part4 = (instruction * (2 ** 4)) & 0b00000000000000000000100000000000;\n\n let part5 = 0;\n\n const sign = (instruction & 0b10000000000000000000000000000000) >>> 0;\n\n if (sign === 0b10000000000000000000000000000000) {\n\n part5 = 0b11111111111111111111000000000000;\n\n }\n\n else {\n\n part5 = 0b00000000000000000000000000000000;\n\n }\n\n const value = 0 | part1 | part2 | part3 | part4 | part5;\n\n return value;\n\n }\n\n\n\n // U-immediate\n\n valueToUimm(value) {\n\n const part1 = 0;\n\n const part2 = value & 0b00000000000011111111000000000000;\n\n const part3 = value & 0b01111111111100000000000000000000;\n\n const part4 = value & 0b10000000000000000000000000000000;\n\n const imm = 0 | part1 | part2 | part3 | part4;\n\n return imm;\n\n }\n\n\n\n uimmToValue(instruction) {\n\n const part1 = 0;\n\n const part2 = instruction & 0b00000000000011111111000000000000;\n\n const part3 = instruction & 0b01111111111100000000000000000000;\n\n let part4 = 0;\n\n const sign = (instruction & 0b10000000000000000000000000000000) >>> 0;\n\n if (sign === 0b10000000000000000000000000000000) {\n\n part4 = 0b10000000000000000000000000000000;\n\n }\n\n else {\n\n part4 = 0b00000000000000000000000000000000;\n\n }\n\n const value = 0 | part1 | part2 | part3 | part4;\n\n return value;\n\n }\n\n\n\n // J-immediate\n\n valueToJimm(value) {\n\n const part1 = 0;\n\n const part2 = (value * (2 ** 20)) & 0b00000001111000000000000000000000;\n\n const part3 = (value * (2 ** 20)) & 0b01111110000000000000000000000000;\n\n const part4 = (value * (2 ** 9)) & 0b00000000000100000000000000000000;\n\n const part5 = value & 0b00000000000011111111000000000000;\n\n const part6 = value & 0b10000000000000000000000000000000;\n\n const imm = 0 | part1 | part2 | part3 | part4 | part5 | part6;\n\n return imm;\n\n }\n\n\n\n jimmToValue(instruction) {\n\n const part1 = 0;\n\n const part2 = (instruction >>> 20) & 0b00000000000000000000000000011110;\n\n const part3 = (instruction >>> 20) & 0b00000000000000000000011111100000;\n\n const part4 = (instruction >>> 9) & 0b00000000000000000000100000000000;\n\n const part5 = instruction & 0b00000000000011111111000000000000;\n\n let part6 = 0;\n\n const sign = (instruction & 0b10000000000000000000000000000000) >>> 0;\n\n if (sign === 0b10000000000000000000000000000000) {\n\n part6 = 0b11111111111100000000000000000000;\n\n }\n\n else {\n\n part6 = 0b00000000000000000000000000000000;\n\n }\n\n const value = 0 | part1 | part2 | part3 | part4 | part5 | part6;\n\n return value;\n\n }\n\n\n\n // Exceptions //\n\n throwInstructionAddressMisalignedException(pcValue, ppcValue) {\n\n // fatal trap\n\n const info = ':pc:' + pcValue.toString() + ':ppc:' + ppcValue.toString();\n\n throw Error('exception:instruction-address-misaligned' + 
info);\n\n }\n\n\n\n throwIllegalInstructionException(instruction) {\n\n // fatal trap\n\n throw new Error('exception:illegal-instruction:' + instruction);\n\n }\n", "file_path": "projects/rivium/web/old/vcore.js", "rank": 36, "score": 8.306045499972408 }, { "content": "var Xlen;\n", "file_path": "projects/rivium/web/old/common.js", "rank": 42, "score": 2.2922987954863157 }, { "content": "// XLEN - register width\n\nvar Xlen;\n\n(function (Xlen) {\n\n Xlen[Xlen.word = 32] = 'word' // bits\n\n})(Xlen || (Xlen = {}))\n\n\n\n// VCOUNT - vcore count\n\nvar Vcount;\n\n(function (Vcount) {\n\n Vcount[Vcount.one = 1] = 'one'\n\n})(Vcount || (Vcount = {}))\n\n\n\n// MRCOUNT - max registers count\n\nvar Mrcount;\n\n(function (Mrcount) {\n\n Mrcount[Mrcount.default = 128] = 'default'\n\n})(Mrcount || (Mrcount = {}))\n\n\n\n// MEMPOW - powers of 2 for memory length\n\nvar Mempow;\n\n(function (Mempow) {\n\n Mempow[Mempow.min = 16] = 'min'\n\n Mempow[Mempow.med = 24] = 'med'\n\n Mempow[Mempow.max = 32] = 'max'\n\n})(Mempow || (Mempow = {}))\n\n\n\n// VNUM - vcore number\n\nvar Vnum;\n\n(function (Vnum) {\n\n Vnum[Vnum.zero = 0] = 'zero'\n\n})(Vnum || (Vnum = {}))\n\n\n\n// IALIGN - instruction alignment\n\nvar Ialign;\n\n(function (Ialign) {\n\n Ialign[Ialign.word = 32] = 'word'\n\n})(Ialign || (Ialign = {}))\n\n\n\n// ILEN - instruction length\n\nvar Ilen;\n\n(function (Ilen) {\n\n Ilen[Ilen.word = 32] = 'word'\n\n})(Ilen || (Ilen = {}))\n\n\n\nmodule.exports = {\n\n Xlen,\n\n Vcount,\n\n Mrcount,\n\n Mempow,\n\n Vnum,\n\n Ialign,\n\n Ilen\n\n}\n", "file_path": "projects/rivium/web/old/common.js", "rank": 43, "score": 2.2922987954863157 }, { "content": "var Mrcount;\n", "file_path": "projects/rivium/web/old/common.js", "rank": 44, "score": 2.146946915634613 }, { "content": "var Mempow;\n", "file_path": "projects/rivium/web/old/common.js", "rank": 45, "score": 2.1329940938557814 }, { "content": "class Rivium {\n\n constructor () {\n\n this.mem = null\n\n this.core = null\n\n }\n\n\n\n async init () {\n\n rivium.set_panic_hook()\n\n this.mem = new memory.Memory(\n\n common.Xlen.word,\n\n common.Vcount.one,\n\n common.Mrcount.default,\n\n common.Mempow.min\n\n )\n\n this.core = new vcore.Vcore(\n\n this.mem,\n\n common.Vnum.zero,\n\n common.Ialign.word,\n\n common.Ilen.word\n\n )\n\n }\n\n\n\n test () {\n\n new test.Test().testingAll()\n\n }\n\n\n\n assemblySourceCode (sourceCode) {\n\n return assembler.assembly(sourceCode, this.core)\n\n }\n\n\n\n loadMachineCode (machineCode) {\n\n for (let i = 0; i < machineCode.length; ++i) {\n\n this.mem?.setByte(i, machineCode[i])\n\n }\n\n return machineCode\n\n }\n\n\n\n translateIntermediate (arrayCode) {\n\n return rivium.jsonify_intermediate(arrayCode)\n\n }\n\n\n\n fetchInstruction () {\n\n this.core?.fetchInstruction()\n\n }\n\n\n\n disassembly (byte1, byte2, byte3, byte4) {\n\n const mem = new memory.Memory(\n\n common.Xlen.word,\n\n common.Vcount.one,\n\n common.Mrcount.default,\n\n common.Mempow.min\n\n )\n\n const core = new vcore.Vcore(\n\n mem,\n\n common.Vnum.zero,\n\n common.Ialign.word,\n\n common.Ilen.word\n\n )\n\n mem.setByte(0, byte1)\n\n mem.setByte(1, byte2)\n\n mem.setByte(2, byte3)\n\n mem.setByte(3, byte4)\n\n const intermediate = []\n\n core.fetchInstruction(intermediate)\n\n return intermediate.pop()\n\n }\n\n\n\n intermediateToText (string) {\n\n return rivium.intermediate_to_text(string)\n\n }\n\n\n\n analyze (string) {\n\n return rivium.analyze(string)\n\n }\n", "file_path": "projects/rivium/web/main.js", "rank": 46, "score": 
1.6754459314439847 }, { "content": "/*\n\nimport * as memory from './old/memory.js'\n\nimport * as common from './old/common.js'\n\nimport * as vcore from './old/vcore.js'\n\nimport * as assembler from './old/assembler.js'\n\nimport * as test from './old/test.js'\n\nimport * as rivium from './rust/rivium.js'\n\nimport * as vscode from 'vscode'\n\n*/\n\n\n\nconst memory = require('./old/memory.js')\n\nconst common = require('./old/common.js')\n\nconst vcore = require('./old/vcore.js')\n\nconst assembler = require('./old/assembler.js')\n\nconst test = require('./old/test.js')\n\nconst rivium = require('./rust/rivium.js')\n\nconst vscode = require('vscode')\n\n\n\nclass Rivium {\n\n constructor () {\n\n this.mem = null\n\n this.core = null\n\n }\n\n\n\n async init () {\n\n rivium.set_panic_hook()\n\n this.mem = new memory.Memory(\n\n common.Xlen.word,\n\n common.Vcount.one,\n\n common.Mrcount.default,\n\n common.Mempow.min\n\n )\n\n this.core = new vcore.Vcore(\n\n this.mem,\n\n common.Vnum.zero,\n\n common.Ialign.word,\n\n common.Ilen.word\n\n )\n\n }\n\n\n\n test () {\n\n new test.Test().testingAll()\n\n }\n\n\n\n assemblySourceCode (sourceCode) {\n\n return assembler.assembly(sourceCode, this.core)\n\n }\n\n\n\n loadMachineCode (machineCode) {\n\n for (let i = 0; i < machineCode.length; ++i) {\n\n this.mem?.setByte(i, machineCode[i])\n\n }\n\n return machineCode\n\n }\n\n\n\n translateIntermediate (arrayCode) {\n\n return rivium.jsonify_intermediate(arrayCode)\n\n }\n\n\n\n fetchInstruction () {\n\n this.core?.fetchInstruction()\n\n }\n\n\n\n disassembly (byte1, byte2, byte3, byte4) {\n\n const mem = new memory.Memory(\n\n common.Xlen.word,\n\n common.Vcount.one,\n\n common.Mrcount.default,\n\n common.Mempow.min\n\n )\n\n const core = new vcore.Vcore(\n\n mem,\n\n common.Vnum.zero,\n\n common.Ialign.word,\n\n common.Ilen.word\n\n )\n\n mem.setByte(0, byte1)\n\n mem.setByte(1, byte2)\n\n mem.setByte(2, byte3)\n\n mem.setByte(3, byte4)\n\n const intermediate = []\n\n core.fetchInstruction(intermediate)\n\n return intermediate.pop()\n\n }\n\n\n\n intermediateToText (string) {\n\n return rivium.intermediate_to_text(string)\n\n }\n\n\n\n analyze (string) {\n\n return rivium.analyze(string)\n\n }\n\n}\n\n\n\n/**\n\n * @param {vscode.ExtensionContext} context\n\n */\n\nfunction activate (context) {\n\n const output = vscode.window.createOutputChannel('rivium')\n\n /**\n\n * @type {Rivium}\n\n */\n\n let rv\n\n\n\n const init = vscode.commands.registerCommand('rivium.init', async () => {\n\n rv = new Rivium()\n\n await rv.init()\n\n output.appendLine('init')\n\n })\n\n\n\n const execute = vscode.commands.registerCommand('rivium.execute', () => {\n\n const editor = vscode.window.activeTextEditor\n\n const document = editor?.document\n\n const text = document?.getText()\n\n const mcode = rv.assemblySourceCode(text)\n\n for (let i = 0; i < mcode.length; ++i) {\n\n rv.mem?.setByte(i, mcode[i])\n\n }\n\n while (true) {\n\n try {\n\n rv.fetchInstruction()\n\n } catch (e) {\n\n output.appendLine(e)\n\n break\n\n }\n\n }\n\n output.appendLine('executed')\n\n })\n\n\n\n const getMemory = vscode.commands.registerCommand('rivium.getMemory', () => {\n\n for (let i = 0; i < 2048; ++i) {\n\n output.appendLine('mem[' + i + ']: ' + rv.mem?.getByte(i))\n\n }\n\n })\n\n\n\n const getRegisters = vscode.commands.registerCommand('rivium.getRegisters', () => {\n\n for (let i = 0; i < 33; ++i) {\n\n let value\n\n let str\n\n if (i === 32) {\n\n value = rv.core?.getRegisterValue(i, true)\n\n str = 'reg[pc]: ' + value\n\n 
} else {\n\n value = rv.core?.getRegisterValue(i)\n\n str = 'reg[x' + i + ']: ' + value\n\n }\n\n output.appendLine(str)\n\n }\n\n })\n\n\n\n context.subscriptions.push(init, execute, getMemory, getRegisters)\n\n}\n\n\n\nmodule.exports = {\n\n activate\n\n}\n", "file_path": "projects/rivium/web/main.js", "rank": 47, "score": 1.2499300421742725 } ]
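The vcore.js excerpt above packs and unpacks RV32I immediates with multi-part masks and power-of-two multiplications. As a minimal standalone sketch of the same idea, assuming plain Node.js and nothing from the rivium sources (the function name decodeIimm and the sample ADDI word below are illustrative, not part of that repository):

// Illustrative only, not from the rivium repo: decode the signed I-type immediate.
// Bits 31..20 of an RV32I instruction hold imm[11:0]; bit 11 of that field is the sign.
function decodeIimm (instruction) {
  const raw = (instruction >>> 20) & 0xFFF   // extract imm[11:0] as an unsigned field
  return (raw & 0x800) ? raw - 0x1000 : raw  // sign-extend from 12 bits
}

// ADDI x1, x0, -3  ->  imm = 0xFFD, rs1 = x0, funct3 = 000, rd = x1, opcode = 0010011
const addi = ((0xFFD << 20) | (1 << 7) | 0b0010011) >>> 0
console.log(decodeIimm(addi)) // prints -3

The same shift-mask-and-sign-extend pattern underlies the iimmToValue / simmToValue / bimmToValue helpers quoted above; they differ only in where each immediate's bits sit inside the instruction word.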
Rust
src/env/blocking.rs
quietboil/sibyl
bbb7cb28686d9d743252f1f28fb365eaaaad21a0
use super::Environment; use crate::{Session, ConnectionPool, Result, SessionPool}; impl Environment { /** Creates and begins a session for the given server. # Parameters * `dbname` - The TNS alias of the database to connect to. * `username` - The user ID with which to start the sessions. * `password` - The password for the corresponding `username`. # Example ``` let oracle = sibyl::env()?; let dbname = std::env::var("DBNAME")?; let dbuser = std::env::var("DBUSER")?; let dbpass = std::env::var("DBPASS")?; let session = oracle.connect(&dbname, &dbuser, &dbpass)?; assert!(!session.is_async()?); assert!(session.is_connected()?); assert!(session.ping().is_ok()); let stmt = session.prepare(" SELECT DISTINCT client_driver FROM v$session_connect_info WHERE sid = SYS_CONTEXT('USERENV', 'SID') ")?; let row = stmt.query_single(())?.unwrap(); let client_driver : &str = row.get_not_null(0)?; assert_eq!(client_driver, "sibyl"); # Ok::<(),Box<dyn std::error::Error>>(()) ``` */ pub fn connect(&self, dbname: &str, username: &str, password: &str) -> Result<Session> { Session::new(self, dbname, username, password) } /** Creates new session pool. # Parameters * `dbname` - The TNS alias of the database to connect to. * `username` - The username with which to start the sessions. * `password` - The password for the corresponding `username`. * `min` - The minimum number of sessions in the session pool. This number of sessions will be started during pool creation. After `min` sessions are started, sessions are opened only when necessary. * `inc` - The next increment for sessions to be started if the current number of sessions is less than `max`. The valid values are 0 and higher. * `max` - The maximum number of sessions that can be opened in the session pool. After this value is reached, no more sessions are opened. The valid values are 1 and higher. # Example ``` let oracle = sibyl::env()?; let dbname = std::env::var("DBNAME")?; let dbuser = std::env::var("DBUSER")?; let dbpass = std::env::var("DBPASS")?; // Create a session pool where each session will connect to the database // `dbname` and authenticate itself as `dbuser` with password `dbpass`. // Pool will have no open sessions initially. It will create 1 new session // at a time, up to the maximum of 10 sessions, when they are requested // and there are no idle sessions in the pool. let pool = oracle.create_session_pool(&dbname, &dbuser, &dbpass, 0, 1, 10)?; let session = pool.get_session()?; let stmt = session.prepare(" SELECT DISTINCT client_driver FROM v$session_connect_info WHERE sid = SYS_CONTEXT('USERENV', 'SID') ")?; let row = stmt.query_single(())?.unwrap(); let client_driver : &str = row.get_not_null(0)?; assert_eq!(client_driver, "sibyl"); # Ok::<(),Box<dyn std::error::Error>>(()) ``` */ pub fn create_session_pool(&self, dbname: &str, username: &str, password: &str, min: usize, inc: usize, max: usize) -> Result<SessionPool> { SessionPool::new(self, dbname, username, password, min, inc, max) } /** Creates new connection pool. # Parameters * `dbname` - The TNS alias of the database to connect to. * `username` - The username with which to start the sessions. * `password` - The password for the corresponding `username`. * `min` - The minimum number of connections to be opened when the pool is created. After the connection pool is created, connections are opened only when necessary. Generally, this parameter should be set to the number of concurrent statements that the application is planning or expecting to run. 
* `inc` - incremental number of connections to be opened when all the connections are busy and a call needs a connection. This increment is used only when the total number of open connections is less than the maximum number of connections that can be opened in that pool. * `max` - The maximum number of connections that can be opened to the database. When the maximum number of connections are open and all the connections are busy, if a call needs a connection, it waits until it gets one. # Example ``` let oracle = sibyl::env()?; let dbname = std::env::var("DBNAME")?; let dbuser = std::env::var("DBUSER")?; let dbpass = std::env::var("DBPASS")?; let pool = oracle.create_connection_pool(&dbname, &dbuser, &dbpass, 1, 1, 10)?; let session = pool.get_session(&dbuser, &dbpass)?; let stmt = session.prepare(" SELECT DISTINCT client_driver FROM v$session_connect_info WHERE sid = SYS_CONTEXT('USERENV', 'SID') ")?; let row = stmt.query_single(())?.unwrap(); let client_driver : &str = row.get_not_null(0)?; assert_eq!(client_driver, "sibyl"); # Ok::<(),Box<dyn std::error::Error>>(()) ``` */ pub fn create_connection_pool(&self, dbname: &str, username: &str, password: &str, min: usize, inc: usize, max: usize) -> Result<ConnectionPool> { ConnectionPool::new(self, dbname, username, password, min, inc, max) } }
use super::Environment; use crate::{Session, ConnectionPool, Result, SessionPool}; impl Environment { /** Creates and begins a session for the given server. # Parameters * `dbname` - The TNS alias of the database to connect to. * `username` - The user ID with which to start the sessions. * `password` - The password for the corresponding `username`. # Example ``` let oracle = sibyl::env()?; let dbname = std::env::var("DBNAME")?; let dbuser = std::env::var("DBUSER")?; let dbpass = std::env::var("DBPASS")?; let session = oracle.connect(&dbname, &dbuser, &dbpass)?; assert!(!session.is_async()?); assert!(session.is_connected()?); assert!(session.ping().is_ok()); let stmt = session.prepare(" SELECT DISTINCT client_driver
t_eq!(client_driver, "sibyl"); # Ok::<(),Box<dyn std::error::Error>>(()) ``` */ pub fn connect(&self, dbname: &str, username: &str, password: &str) -> Result<Session> { Session::new(self, dbname, username, password) } /** Creates new session pool. # Parameters * `dbname` - The TNS alias of the database to connect to. * `username` - The username with which to start the sessions. * `password` - The password for the corresponding `username`. * `min` - The minimum number of sessions in the session pool. This number of sessions will be started during pool creation. After `min` sessions are started, sessions are opened only when necessary. * `inc` - The next increment for sessions to be started if the current number of sessions is less than `max`. The valid values are 0 and higher. * `max` - The maximum number of sessions that can be opened in the session pool. After this value is reached, no more sessions are opened. The valid values are 1 and higher. # Example ``` let oracle = sibyl::env()?; let dbname = std::env::var("DBNAME")?; let dbuser = std::env::var("DBUSER")?; let dbpass = std::env::var("DBPASS")?; // Create a session pool where each session will connect to the database // `dbname` and authenticate itself as `dbuser` with password `dbpass`. // Pool will have no open sessions initially. It will create 1 new session // at a time, up to the maximum of 10 sessions, when they are requested // and there are no idle sessions in the pool. let pool = oracle.create_session_pool(&dbname, &dbuser, &dbpass, 0, 1, 10)?; let session = pool.get_session()?; let stmt = session.prepare(" SELECT DISTINCT client_driver FROM v$session_connect_info WHERE sid = SYS_CONTEXT('USERENV', 'SID') ")?; let row = stmt.query_single(())?.unwrap(); let client_driver : &str = row.get_not_null(0)?; assert_eq!(client_driver, "sibyl"); # Ok::<(),Box<dyn std::error::Error>>(()) ``` */ pub fn create_session_pool(&self, dbname: &str, username: &str, password: &str, min: usize, inc: usize, max: usize) -> Result<SessionPool> { SessionPool::new(self, dbname, username, password, min, inc, max) } /** Creates new connection pool. # Parameters * `dbname` - The TNS alias of the database to connect to. * `username` - The username with which to start the sessions. * `password` - The password for the corresponding `username`. * `min` - The minimum number of connections to be opened when the pool is created. After the connection pool is created, connections are opened only when necessary. Generally, this parameter should be set to the number of concurrent statements that the application is planning or expecting to run. * `inc` - incremental number of connections to be opened when all the connections are busy and a call needs a connection. This increment is used only when the total number of open connections is less than the maximum number of connections that can be opened in that pool. * `max` - The maximum number of connections that can be opened to the database. When the maximum number of connections are open and all the connections are busy, if a call needs a connection, it waits until it gets one. 
# Example ``` let oracle = sibyl::env()?; let dbname = std::env::var("DBNAME")?; let dbuser = std::env::var("DBUSER")?; let dbpass = std::env::var("DBPASS")?; let pool = oracle.create_connection_pool(&dbname, &dbuser, &dbpass, 1, 1, 10)?; let session = pool.get_session(&dbuser, &dbpass)?; let stmt = session.prepare(" SELECT DISTINCT client_driver FROM v$session_connect_info WHERE sid = SYS_CONTEXT('USERENV', 'SID') ")?; let row = stmt.query_single(())?.unwrap(); let client_driver : &str = row.get_not_null(0)?; assert_eq!(client_driver, "sibyl"); # Ok::<(),Box<dyn std::error::Error>>(()) ``` */ pub fn create_connection_pool(&self, dbname: &str, username: &str, password: &str, min: usize, inc: usize, max: usize) -> Result<ConnectionPool> { ConnectionPool::new(self, dbname, username, password, min, inc, max) } }
FROM v$session_connect_info WHERE sid = SYS_CONTEXT('USERENV', 'SID') ")?; let row = stmt.query_single(())?.unwrap(); let client_driver : &str = row.get_not_null(0)?; asser
random
[ { "content": "#[cfg(feature=\"blocking\")]\n\nfn main() -> sibyl::Result<()> {\n\n use std::{env, thread, sync::Arc};\n\n use once_cell::sync::OnceCell;\n\n use sibyl::*;\n\n\n\n static ORACLE : OnceCell<Environment> = OnceCell::new();\n\n let oracle = ORACLE.get_or_try_init(|| {\n\n env()\n\n })?;\n\n\n\n let dbname = env::var(\"DBNAME\").expect(\"database name\");\n\n let dbuser = env::var(\"DBUSER\").expect(\"user name\");\n\n let dbpass = env::var(\"DBPASS\").expect(\"password\");\n\n\n\n let pool = oracle.create_connection_pool(&dbname, &dbuser, &dbpass, 1, 1, 10)?;\n\n let pool = Arc::new(pool);\n\n\n\n let mut workers = Vec::with_capacity(98);\n\n for _i in 0..workers.capacity() {\n\n let pool = pool.clone();\n", "file_path": "examples/pooled_connections.rs", "rank": 0, "score": 117506.14516864455 }, { "content": "#[cfg(feature=\"blocking\")]\n\nfn main() -> sibyl::Result<()> {\n\n use std::{env, thread, sync::Arc};\n\n use once_cell::sync::OnceCell;\n\n use sibyl::*;\n\n\n\n static ORACLE : OnceCell<Environment> = OnceCell::new();\n\n let oracle = ORACLE.get_or_try_init(|| {\n\n Environment::new()\n\n })?;\n\n\n\n let dbname = env::var(\"DBNAME\").expect(\"database name\");\n\n let dbuser = env::var(\"DBUSER\").expect(\"user name\");\n\n let dbpass = env::var(\"DBPASS\").expect(\"password\");\n\n\n\n let pool = oracle.create_session_pool(&dbname, &dbuser, &dbpass, 0, 1, 10)?;\n\n let pool = Arc::new(pool);\n\n\n\n let mut workers = Vec::with_capacity(98);\n\n for _i in 0..workers.capacity() {\n\n let pool = pool.clone();\n", "file_path": "examples/pooled_sessions.rs", "rank": 1, "score": 117129.00648241834 }, { "content": "fn oracle() -> Result<&'static Environment> {\n\n static OCI_ENV: OnceCell<Environment> = OnceCell::new();\n\n OCI_ENV.get_or_try_init(||\n\n sibyl::env()\n\n )\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 2, "score": 115900.27079822478 }, { "content": "#[cfg(feature=\"blocking\")]\n\nfn main() -> sibyl::Result<()> {\n\n use std::{env, thread, sync::Arc};\n\n use sibyl::*;\n\n\n\n let oracle = sibyl::env()?;\n\n let oracle = Arc::new(oracle);\n\n\n\n // Start 100 \"worker\" threads\n\n let mut workers = Vec::with_capacity(100);\n\n for _i in 0..workers.capacity() {\n\n let oracle = oracle.clone();\n\n let handle = thread::spawn(move || -> Result<Option<(String,String)>> {\n\n let dbname = env::var(\"DBNAME\").expect(\"database name\");\n\n let dbuser = env::var(\"DBUSER\").expect(\"user name\");\n\n let dbpass = env::var(\"DBPASS\").expect(\"password\");\n\n\n\n let session = oracle.connect(&dbname, &dbuser, &dbpass)?;\n\n let stmt = session.prepare(\"\n\n SELECT first_name, last_name, hire_date\n\n FROM (\n", "file_path": "examples/connection_per_thread.rs", "rank": 3, "score": 112933.48210802281 }, { "content": "#[cfg(feature=\"nonblocking\")]\n\nfn main() -> sibyl::Result<()> {\n\n sibyl::block_on(async {\n\n use std::{env, sync::Arc};\n\n use once_cell::sync::OnceCell;\n\n use sibyl::*;\n\n\n\n static ORACLE : OnceCell<Environment> = OnceCell::new();\n\n let oracle = ORACLE.get_or_try_init(|| {\n\n Environment::new()\n\n })?;\n\n\n\n let dbname = env::var(\"DBNAME\").expect(\"database name\");\n\n let dbuser = env::var(\"DBUSER\").expect(\"user name\");\n\n let dbpass = env::var(\"DBPASS\").expect(\"password\");\n\n\n\n let pool = oracle.create_session_pool(&dbname, &dbuser, &dbpass, 0, 1, 10).await?;\n\n let pool = Arc::new(pool);\n\n\n\n let mut workers = Vec::with_capacity(100);\n\n for _i in 0..workers.capacity() {\n", "file_path": 
"examples/async_pooled_sessions.rs", "rank": 4, "score": 112573.01317711009 }, { "content": "#[cfg(feature=\"nonblocking\")]\n\nfn main() -> sibyl::Result<()> {\n\n sibyl::block_on(async {\n\n use std::{env, sync::Arc};\n\n use sibyl::*;\n\n\n\n let oracle = sibyl::env()?;\n\n let oracle = Arc::new(oracle);\n\n\n\n // Start 100 \"worker\" tasks\n\n let mut workers = Vec::with_capacity(100);\n\n for _i in 0..workers.capacity() {\n\n let oracle = oracle.clone();\n\n let handle = spawn(async move {\n\n let dbname = env::var(\"DBNAME\").expect(\"database name\");\n\n let dbuser = env::var(\"DBUSER\").expect(\"user name\");\n\n let dbpass = env::var(\"DBPASS\").expect(\"password\");\n\n\n\n let session = oracle.connect(&dbname, &dbuser, &dbpass).await?;\n\n let stmt = session.prepare(\"\n\n SELECT first_name, last_name, hire_date\n", "file_path": "examples/async_connection_per_task.rs", "rank": 5, "score": 108747.93707896734 }, { "content": "create user sibyl identified by Or4cl3;\n\ngrant connect, resource, unlimited tablespace, select_catalog_role to sibyl;\n\n\n\nbegin\n\n for r in (\n\n select owner, table_name\n\n from all_tables\n\n where owner in ('HR', 'OE', 'PM', 'IX', 'SH', 'BI')\n\n and nested = 'NO'\n\n and external = 'NO'\n\n and nvl(iot_type,'_') != 'IOT_OVERFLOW')\n\n loop\n\n begin\n\n execute immediate 'grant insert, select, update, delete on ' || r.owner || '.' || r.table_name || ' to sibyl';\n\n exception\n\n when others then\n\n dbms_output.put_line('ERROR: cannot grant access to table ' || r.owner || '.' || r.table_name || ' -- ' || substr(sqlerrm,1,200));\n\n end;\n\n end loop;\n\n\n", "file_path": "etc/create_sandbox.sql", "rank": 6, "score": 89308.34694496114 }, { "content": "#[cfg(feature=\"blocking\")]\n\nfn main() -> sibyl::Result<()> {\n\n use sibyl as oracle;\n\n\n\n let oracle = oracle::env()?;\n\n\n\n let dbname = std::env::var(\"DBNAME\").expect(\"database name\");\n\n let dbuser = std::env::var(\"DBUSER\").expect(\"user name\");\n\n let dbpass = std::env::var(\"DBPASS\").expect(\"password\");\n\n\n\n let session = oracle.connect(&dbname, &dbuser, &dbpass)?;\n\n let stmt = session.prepare(\"\n\n SELECT first_name, last_name, hire_date\n\n FROM hr.employees\n\n WHERE hire_date >= :hire_date\n\n ORDER BY hire_date\n\n \")?;\n\n let date = oracle::Date::from_string(\"January 1, 2005\", \"MONTH DD, YYYY\", &session)?;\n\n let rows = stmt.query(&date)?;\n\n while let Some( row ) = rows.next()? {\n\n let first_name : Option<&str> = row.get(0)?;\n", "file_path": "examples/readme.rs", "rank": 7, "score": 86372.78266246061 }, { "content": "#[cfg(feature=\"blocking\")]\n\nfn main() -> sibyl::Result<()> {\n\n let oracle = sibyl::env()?;\n\n\n\n let dbname = std::env::var(\"DBNAME\").expect(\"database name\");\n\n let dbuser = std::env::var(\"DBUSER\").expect(\"user name\");\n\n let dbpass = std::env::var(\"DBPASS\").expect(\"password\");\n\n\n\n let session = oracle.connect(&dbname, &dbuser, &dbpass)?;\n\n\n\n let stmt = session.prepare(\"\n\n SELECT c.country_name, Median(e.salary)\n\n FROM hr.employees e\n\n JOIN hr.departments d ON d.department_id = e.department_id\n\n JOIN hr.locations l ON l.location_id = d.location_id\n\n JOIN hr.countries c ON c.country_id = l.country_id\n\n JOIN hr.regions r ON r.region_id = c.region_id\n\n WHERE r.region_name = :REGION_NAME\n\n GROUP BY c.country_name\n\n \")?;\n\n\n\n let rows = stmt.query(\"Europe\")?;\n\n\n\n while let Some(row) = rows.next()? 
{\n\n let country_name : &str = row.get_not_null(0)?;\n\n let median_salary : u16 = row.get_not_null(1)?;\n\n println!(\"{:25}: {:>5}\", country_name, median_salary);\n\n }\n\n Ok(())\n\n}", "file_path": "examples/book_intro.rs", "rank": 8, "score": 83028.4095395903 }, { "content": "#[cfg(feature=\"nonblocking\")]\n\nfn main() -> sibyl::Result<()> {\n\n sibyl::block_on(async {\n\n use sibyl as oracle;\n\n\n\n let oracle = oracle::env()?;\n\n\n\n let dbname = std::env::var(\"DBNAME\").expect(\"database name\");\n\n let dbuser = std::env::var(\"DBUSER\").expect(\"user name\");\n\n let dbpass = std::env::var(\"DBPASS\").expect(\"password\");\n\n\n\n let session = oracle.connect(&dbname, &dbuser, &dbpass).await?;\n\n let stmt = session.prepare(\"\n\n SELECT first_name, last_name, hire_date\n\n FROM hr.employees\n\n WHERE hire_date >= :hire_date\n\n ORDER BY hire_date\n\n \").await?;\n\n let date = oracle::Date::from_string(\"January 1, 2005\", \"MONTH DD, YYYY\", &oracle)?;\n\n let rows = stmt.query(&date).await?;\n\n while let Some( row ) = rows.next().await? {\n", "file_path": "examples/async_readme.rs", "rank": 9, "score": 83028.4095395903 }, { "content": "pub fn env() -> Result<Environment> {\n\n Environment::new()\n\n}\n", "file_path": "src/lib.rs", "rank": 10, "score": 81308.7960347376 }, { "content": " let handle = thread::spawn(move || -> Result<Option<(String,String)>> {\n\n let dbuser = env::var(\"DBUSER\").expect(\"user name\");\n\n let dbpass = env::var(\"DBPASS\").expect(\"password\");\n\n\n\n let session = pool.get_session(&dbuser, &dbpass)?;\n\n let stmt = session.prepare(\"\n\n SELECT first_name, last_name, hire_date\n\n FROM (\n\n SELECT first_name, last_name, hire_date\n\n , Row_Number() OVER (ORDER BY hire_date DESC, last_name) AS hire_date_rank\n\n FROM hr.employees\n\n )\n\n WHERE hire_date_rank = 1\n\n \")?;\n\n if let Some( row ) = stmt.query_single(())? {\n\n let first_name : Option<&str> = row.get(0)?;\n\n let last_name : &str = row.get_not_null(1)?;\n\n let name = first_name.map_or(last_name.to_string(), |first_name| format!(\"{} {}\", first_name, last_name));\n\n let hire_date : Date = row.get_not_null(2)?;\n\n let hire_date = hire_date.to_string(\"FMMonth DD, YYYY\")?;\n", "file_path": "examples/pooled_connections.rs", "rank": 11, "score": 80050.15167012124 }, { "content": "/*!\n\nThis example is a variant of `readme` that executes its work in multiple\n\nthreads. It creates a connection pool which is then used by worker threads\n\nthat establish their own private (and most likely stateful) sessions with\n\nthe database, which share a small number of physical connections.\n\n\n\n*Note* that connection pooling is only available in `blocking` mode and\n\nthus there is no `nonblocking` example.\n\n*/\n\n#[cfg(feature=\"blocking\")]\n", "file_path": "examples/pooled_connections.rs", "rank": 12, "score": 80043.74459282355 }, { "content": "\n\n Ok(Some((name, hire_date)))\n\n } else {\n\n Ok(None)\n\n }\n\n });\n\n workers.push(handle);\n\n }\n\n for handle in workers {\n\n let worker_id = handle.thread().id();\n\n if let Some((name,hire_date)) = handle.join().expect(\"result from worker thread\")? 
{\n\n println!(\"{:?}: {} was hired on {}\", worker_id, name, hire_date);\n\n } else {\n\n println!(\"{:?}: did not find the latest hire\", worker_id);\n\n }\n\n }\n\n println!(\"There are {} open connections in the pool.\", pool.open_count()?);\n\n Ok(())\n\n}\n\n\n\n/**\n\nConnection pools are not supported in nonblocking mode.\n\n\n\nOCI returns \"ORA-03126 network driver does not support non-blocking operations\"\n\nwhen one tries to set OCI_ATTR_NONBLOCKING_MODE on a pooled connection.\n\n*/\n", "file_path": "examples/pooled_connections.rs", "rank": 13, "score": 80034.6562116608 }, { "content": "#[cfg(feature=\"nonblocking\")]\n\nfn main() -> sibyl::Result<()> {\n\n sibyl::block_on(async {\n\n let oracle = sibyl::env()?;\n\n\n\n let dbname = std::env::var(\"DBNAME\").expect(\"database name\");\n\n let dbuser = std::env::var(\"DBUSER\").expect(\"user name\");\n\n let dbpass = std::env::var(\"DBPASS\").expect(\"password\");\n\n\n\n let session = oracle.connect(&dbname, &dbuser, &dbpass).await?;\n\n\n\n let stmt = session.prepare(\"\n\n SELECT c.country_name, Median(e.salary)\n\n FROM hr.employees e\n\n JOIN hr.departments d ON d.department_id = e.department_id\n\n JOIN hr.locations l ON l.location_id = d.location_id\n\n JOIN hr.countries c ON c.country_id = l.country_id\n\n JOIN hr.regions r ON r.region_id = c.region_id\n\n WHERE r.region_name = :REGION_NAME\n\n GROUP BY c.country_name\n\n \").await?;\n", "file_path": "examples/async_book_intro.rs", "rank": 14, "score": 79979.68299805082 }, { "content": "/*!\n\nThis example is a variant of `readme` that executes its work in multiple\n\nthreads. It creates a session pool which threads then use to \"borrow\"\n\nstateless sessions to execute queries.\n\n*/\n\n#[cfg(feature=\"blocking\")]\n", "file_path": "examples/pooled_sessions.rs", "rank": 15, "score": 79602.09681909373 }, { "content": " let handle = thread::spawn(move || -> Result<Option<(String,String)>> {\n\n let session = pool.get_session()?;\n\n let stmt = session.prepare(\"\n\n SELECT first_name, last_name, hire_date\n\n FROM (\n\n SELECT first_name, last_name, hire_date\n\n , Row_Number() OVER (ORDER BY hire_date DESC, last_name) AS hire_date_rank\n\n FROM hr.employees\n\n )\n\n WHERE hire_date_rank = 1\n\n \")?;\n\n if let Some( row ) = stmt.query_single(())? {\n\n let first_name : Option<&str> = row.get(0)?;\n\n let last_name : &str = row.get_not_null(1)?;\n\n let name = first_name.map_or(last_name.to_string(), |first_name| format!(\"{} {}\", first_name, last_name));\n\n let hire_date : Date = row.get_not_null(2)?;\n\n let hire_date = hire_date.to_string(\"FMMonth DD, YYYY\")?;\n\n\n\n Ok(Some((name, hire_date)))\n\n } else {\n", "file_path": "examples/pooled_sessions.rs", "rank": 16, "score": 79598.40841991053 }, { "content": " Ok(None)\n\n }\n\n });\n\n workers.push(handle);\n\n }\n\n for handle in workers {\n\n let worker_id = handle.thread().id();\n\n if let Some((name,hire_date)) = handle.join().expect(\"result from worker thread\")? 
{\n\n println!(\"{:?}: {} was hired on {}\", worker_id, name, hire_date);\n\n } else {\n\n println!(\"{:?}: did not find the latest hire\", worker_id);\n\n }\n\n }\n\n println!(\"There are {} open sessions in the pool.\", pool.open_count()?);\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/pooled_sessions.rs", "rank": 17, "score": 79596.34915396191 }, { "content": "/*!\n\nThis example is a variant of `readme` that executes its work in multiple\n\nthreads where each thread (or task) establishes its own\n\nconnection and then uses it to execute queries.\n\n\n\nWhile this approch might work for some use cases, usually you are better\n\noff with either a session pool or a connection pool. You would use the\n\nlatter if your work need stateful sessions, but you can allow only so many\n\nactual database connections.\n\n\n\n*Note* that connection pooling is only available in `blocking` mode.\n\n*/\n\n#[cfg(feature=\"blocking\")]\n", "file_path": "examples/connection_per_thread.rs", "rank": 18, "score": 75962.85077796357 }, { "content": " for handle in workers {\n\n let worker_id = handle.thread().id();\n\n if let Some((name,hire_date)) = handle.join().expect(\"result from worker thread\")? {\n\n println!(\"{:?}: {} was hired on {}\", worker_id, name, hire_date);\n\n } else {\n\n println!(\"{:?}: did not find the latest hire\", worker_id);\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/connection_per_thread.rs", "rank": 19, "score": 75953.38545101609 }, { "content": " SELECT first_name, last_name, hire_date\n\n , Row_Number() OVER (ORDER BY hire_date DESC, last_name) AS hire_date_rank\n\n FROM hr.employees\n\n )\n\n WHERE hire_date_rank = 1\n\n \")?;\n\n if let Some( row ) = stmt.query_single(())? {\n\n let first_name : Option<&str> = row.get(0)?;\n\n let last_name : &str = row.get_not_null(1)?;\n\n let name = first_name.map_or(last_name.to_string(), |first_name| format!(\"{} {}\", first_name, last_name));\n\n let hire_date : Date = row.get_not_null(2)?;\n\n let hire_date = hire_date.to_string(\"FMMonth DD, YYYY\")?;\n\n\n\n Ok(Some((name, hire_date)))\n\n } else {\n\n Ok(None)\n\n }\n\n });\n\n workers.push(handle);\n\n }\n", "file_path": "examples/connection_per_thread.rs", "rank": 20, "score": 75949.83249695488 }, { "content": "#[cfg(feature=\"nonblocking\")]\n\nfn main() {}\n", "file_path": "examples/pooled_connections.rs", "rank": 21, "score": 75943.82568133058 }, { "content": "/*!\n\nThis example is a variant of `readme` that executes its work in multiple\n\nasync tasks. It creates a session pool which tasks then use to \"borrow\"\n\nstateless sessions to execute queries.\n\n\n\n*Note* that `block_on` used in this example abstracts `block_on` for\n\nvarious async executors and is only intended to execute Sibyl's async\n\ntests and examples. While you can certainly use it, most likely you'd\n\nwant to create your own version of it.\n\n*/\n\n#[cfg(feature=\"nonblocking\")]\n", "file_path": "examples/async_pooled_sessions.rs", "rank": 22, "score": 75548.4283468721 }, { "content": " let pool = pool.clone();\n\n let handle = spawn(async move {\n\n let session = pool.get_session().await?;\n\n let stmt = session.prepare(\"\n\n SELECT first_name, last_name, hire_date\n\n FROM (\n\n SELECT first_name, last_name, hire_date\n\n , Row_Number() OVER (ORDER BY hire_date DESC, last_name) AS hire_date_rank\n\n FROM hr.employees\n\n )\n\n WHERE hire_date_rank = 1\n\n \").await?;\n\n if let Some( row ) = stmt.query_single(()).await? 
{\n\n let first_name : Option<&str> = row.get(0)?;\n\n let last_name : &str = row.get_not_null(1)?;\n\n let name = first_name.map_or(last_name.to_string(), |first_name| format!(\"{} {}\", first_name, last_name));\n\n let hire_date : Date = row.get_not_null(2)?;\n\n let hire_date = hire_date.to_string(\"FMMonth DD, YYYY\")?;\n\n\n\n Ok::<_,Error>(Some((name, hire_date)))\n", "file_path": "examples/async_pooled_sessions.rs", "rank": 23, "score": 75540.4116657276 }, { "content": " } else {\n\n Ok(None)\n\n }\n\n });\n\n workers.push(handle);\n\n }\n\n let mut n = 1;\n\n for handle in workers {\n\n if let Some((name,hire_date)) = handle.await.expect(\"task's result\")? {\n\n println!(\"{:?}: {} was hired on {}\", n, name, hire_date);\n\n } else {\n\n println!(\"{:?}: did not find the latest hire\", n);\n\n }\n\n n += 1;\n\n }\n\n println!(\"There are {} open sessions in the pool.\", pool.open_count()?);\n\n\n\n Ok(())\n\n })\n\n}\n\n\n", "file_path": "examples/async_pooled_sessions.rs", "rank": 24, "score": 75535.72273828772 }, { "content": "#[cfg(feature=\"nonblocking\")]\n\nfn main() {}\n", "file_path": "examples/pooled_sessions.rs", "rank": 25, "score": 75528.25077244277 }, { "content": "/*!\n\nThis example is a variant of `readme` that executes its work in multiple\n\nasync tasks where each task establishes its own session and then uses it\n\nto execute queries.\n\n\n\nWhile this approch might work for some use cases, usually you are better\n\noff with a session pool.\n\n\n\n*Note* that `block_on` used in this example abstracts `block_on` for\n\nvarious async executors and is only intended to execute Sibyl's async\n\ntests and examples. While you can certainly use it, most likely you'd\n\nwant to create your own.\n\n*/\n\n#[cfg(feature=\"nonblocking\")]\n", "file_path": "examples/async_connection_per_task.rs", "rank": 26, "score": 72280.59025134322 }, { "content": " FROM (\n\n SELECT first_name, last_name, hire_date\n\n , Row_Number() OVER (ORDER BY hire_date DESC, last_name) AS hire_date_rank\n\n FROM hr.employees\n\n )\n\n WHERE hire_date_rank = 1\n\n \").await?;\n\n if let Some( row ) = stmt.query_single(()).await? {\n\n let first_name : Option<&str> = row.get(0)?;\n\n let last_name : &str = row.get_not_null(1)?;\n\n let name = first_name.map_or(last_name.to_string(), |first_name| format!(\"{} {}\", first_name, last_name));\n\n let hire_date : Date = row.get_not_null(2)?;\n\n let hire_date = hire_date.to_string(\"FMMonth DD, YYYY\")?;\n\n\n\n Ok::<_,Error>(Some((name, hire_date)))\n\n } else {\n\n Ok(None)\n\n }\n\n });\n\n workers.push(handle);\n", "file_path": "examples/async_connection_per_task.rs", "rank": 27, "score": 72267.45298033424 }, { "content": " }\n\n let mut n = 1;\n\n for handle in workers {\n\n if let Some((name,hire_date)) = handle.await.expect(\"task's result\")? 
{\n\n println!(\"{:?}: {} was hired on {}\", n, name, hire_date);\n\n } else {\n\n println!(\"{:?}: did not find the latest hire\", n);\n\n }\n\n n += 1;\n\n }\n\n Ok(())\n\n })\n\n}\n\n\n", "file_path": "examples/async_connection_per_task.rs", "rank": 28, "score": 72265.49235199484 }, { "content": "#[cfg(feature=\"nonblocking\")]\n\nfn main() {}\n", "file_path": "examples/connection_per_thread.rs", "rank": 29, "score": 72261.54520619598 }, { "content": "#[cfg(feature=\"blocking\")]\n\nfn main() {}", "file_path": "examples/async_pooled_sessions.rs", "rank": 30, "score": 71866.12023522906 }, { "content": "#[cfg(feature=\"blocking\")]\n\nfn main() {}\n", "file_path": "examples/async_connection_per_task.rs", "rank": 31, "score": 68919.83618813887 }, { "content": "fn main() -> Result<()> {\n\n let oracle = oracle()?;\n\n // ...\n\n Ok(())\n\n}\n\n```\n\n*/\n", "file_path": "src/lib.rs", "rank": 32, "score": 54651.95258588214 }, { "content": "# #[cfg(feature=\"nonblocking\")]\n\nfn main() -> sibyl::Result<()> {\n\n sibyl::block_on(async {\n\n let oracle = sibyl::env()?;\n\n\n\n let dbname = std::env::var(\"DBNAME\").expect(\"database name\");\n\n let dbuser = std::env::var(\"DBUSER\").expect(\"user name\");\n\n let dbpass = std::env::var(\"DBPASS\").expect(\"password\");\n\n\n\n let session = oracle.connect(&dbname, &dbuser, &dbpass).await?;\n\n\n\n let stmt = session.prepare(\"\n\n SELECT c.country_name, Median(e.salary)\n\n FROM hr.employees e\n\n JOIN hr.departments d ON d.department_id = e.department_id\n\n JOIN hr.locations l ON l.location_id = d.location_id\n\n JOIN hr.countries c ON c.country_id = l.country_id\n\n JOIN hr.regions r ON r.region_id = c.region_id\n\n WHERE r.region_name = :REGION_NAME\n\n GROUP BY c.country_name\n\n \").await?;\n", "file_path": "src/lib.rs", "rank": 33, "score": 50260.691511336096 }, { "content": "/*!\n\nThis example demos a single-threaded program that:\n\n- Connects to the specified database,\n\n- Prepares an SQL statement,\n\n- Executes the prepared statement,\n\n- Fetches the results.\n\n\n\nSQL in this example finds the first person that was hired after the New Year of 2005.\n\n*/\n\n#[cfg(feature=\"blocking\")]\n", "file_path": "examples/readme.rs", "rank": 34, "score": 42272.31372148412 }, { "content": " let last_name : &str = row.get_not_null(1)?;\n\n let hire_date : oracle::Date = row.get_not_null(2)?;\n\n\n\n let hire_date = hire_date.to_string(\"FMMonth DD, YYYY\")?;\n\n if first_name.is_some() {\n\n println!(\"{}: {} {}\", hire_date, first_name.unwrap(), last_name);\n\n } else {\n\n println!(\"{}: {}\", hire_date, last_name);\n\n }\n\n }\n\n if stmt.row_count()? == 0 {\n\n println!(\"No one was hired after {}\", date.to_string(\"FMMonth DD, YYYY\")?);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/readme.rs", "rank": 35, "score": 42266.013658982745 }, { "content": " /**\n\n Returns the server-side time for the preceding call in microseconds.\n\n\n\n # Example\n\n\n\n 🛈 **Note** that this example is written for `blocking` mode execution. 
Add `await`s, where needed,\n\n to convert it to a nonblocking variant (or peek at the source to see the hidden nonblocking doctest).\n\n\n\n ```\n\n # use sibyl::Result;\n\n # #[cfg(feature=\"blocking\")]\n\n # fn main() -> Result<()> {\n\n # let oracle = sibyl::env()?;\n\n # let dbname = std::env::var(\"DBNAME\").expect(\"database name\");\n\n # let dbuser = std::env::var(\"DBUSER\").expect(\"user name\");\n\n # let dbpass = std::env::var(\"DBPASS\").expect(\"password\");\n\n # let session = oracle.connect(&dbname, &dbuser, &dbpass)?;\n\n session.start_call_time_measurements()?;\n\n session.ping()?;\n\n let dt = session.call_time()?;\n", "file_path": "src/session.rs", "rank": 36, "score": 41890.58446684161 }, { "content": " # Example\n\n\n\n 🛈 **Note** that this example is written for `blocking` mode execution. Add `await`s, where needed,\n\n to convert it to a nonblocking variant (or peek at the source to see the hidden nonblocking doctest).\n\n\n\n ```\n\n # use sibyl::Result;\n\n # #[cfg(feature=\"blocking\")]\n\n # fn main() -> Result<()> {\n\n # let oracle = sibyl::env()?;\n\n # let dbname = std::env::var(\"DBNAME\").expect(\"database name\");\n\n # let dbuser = std::env::var(\"DBUSER\").expect(\"user name\");\n\n # let dbpass = std::env::var(\"DBPASS\").expect(\"password\");\n\n # let session = oracle.connect(&dbname, &dbuser, &dbpass)?;\n\n session.set_action(\"Action Name Test\");\n\n\n\n let stmt = session.prepare(\"\n\n SELECT action\n\n FROM v$session\n\n WHERE sid = SYS_CONTEXT('USERENV', 'SID')\n", "file_path": "src/session.rs", "rank": 37, "score": 41890.579989292855 }, { "content": " # Example\n\n\n\n 🛈 **Note** that this example is written for `blocking` mode execution. Add `await`s, where needed,\n\n to convert it to a nonblocking variant (or peek at the source to see the hidden nonblocking doctest).\n\n\n\n ```\n\n # use sibyl::Result;\n\n # #[cfg(feature=\"blocking\")]\n\n # fn main() -> Result<()> {\n\n # let oracle = sibyl::env()?;\n\n # let dbname = std::env::var(\"DBNAME\").expect(\"database name\");\n\n # let dbuser = std::env::var(\"DBUSER\").expect(\"user name\");\n\n # let dbpass = std::env::var(\"DBPASS\").expect(\"password\");\n\n # let session = oracle.connect(&dbname, &dbuser, &dbpass)?;\n\n session.set_client_identifier(\"Test Wielder\");\n\n\n\n let stmt = session.prepare(\"\n\n SELECT client_identifier\n\n FROM v$session\n\n WHERE sid = SYS_CONTEXT('USERENV', 'SID')\n", "file_path": "src/session.rs", "rank": 38, "score": 41890.37052109829 }, { "content": "\n\n 🛈 **Note** that this example is written for `blocking` mode execution. Add `await`s, where needed,\n\n to convert it to a nonblocking variant (or peek at the source to see the hidden nonblocking doctest).\n\n\n\n ```\n\n # use sibyl::Result;\n\n # #[cfg(feature=\"blocking\")]\n\n # fn main() -> Result<()> {\n\n # let oracle = sibyl::env()?;\n\n # let dbname = std::env::var(\"DBNAME\").expect(\"database name\");\n\n # let dbuser = std::env::var(\"DBUSER\").expect(\"user name\");\n\n # let dbpass = std::env::var(\"DBPASS\").expect(\"password\");\n\n # let session = oracle.connect(&dbname, &dbuser, &dbpass)?;\n\n session.set_module(\"Sibyl DocTest\");\n\n\n\n let stmt = session.prepare(\"\n\n SELECT module\n\n FROM v$session\n\n WHERE sid = SYS_CONTEXT('USERENV', 'SID')\n\n \")?;\n", "file_path": "src/session.rs", "rank": 39, "score": 41889.82992437769 }, { "content": "\n\n 🛈 **Note** that this example is written for `blocking` mode execution. 
Add `await`s, where needed,\n\n to convert it to a nonblocking variant (or peek at the source to see the hidden nonblocking doctest).\n\n\n\n ```\n\n # use sibyl::Result;\n\n # #[cfg(feature=\"blocking\")]\n\n # fn main() -> Result<()> {\n\n # let oracle = sibyl::env()?;\n\n # let dbname = std::env::var(\"DBNAME\").expect(\"database name\");\n\n # let dbuser = std::env::var(\"DBUSER\").expect(\"user name\");\n\n # let dbpass = std::env::var(\"DBPASS\").expect(\"password\");\n\n # let session = oracle.connect(&dbname, &dbuser, &dbpass)?;\n\n session.set_client_info(\"Nothing to see here, move along folks\");\n\n\n\n let stmt = session.prepare(\"\n\n SELECT client_info\n\n FROM v$session\n\n WHERE sid = SYS_CONTEXT('USERENV', 'SID')\n\n \")?;\n", "file_path": "src/session.rs", "rank": 40, "score": 41888.778505244234 }, { "content": " # let dbname = std::env::var(\"DBNAME\").expect(\"database name\");\n\n # let dbuser = std::env::var(\"DBUSER\").expect(\"user name\");\n\n # let dbpass = std::env::var(\"DBPASS\").expect(\"password\");\n\n # let session = oracle.connect(&dbname, &dbuser, &dbpass)?;\n\n session.set_stmt_cache_size(100)?;\n\n # let size = session.stmt_cache_size()?;\n\n # assert_eq!(size, 100);\n\n # Ok(())\n\n # }\n\n # #[cfg(feature=\"nonblocking\")]\n\n # fn main() -> Result<()> {\n\n # sibyl::block_on(async {\n\n # let oracle = sibyl::env()?;\n\n # let dbname = std::env::var(\"DBNAME\").expect(\"database name\");\n\n # let dbuser = std::env::var(\"DBUSER\").expect(\"user name\");\n\n # let dbpass = std::env::var(\"DBPASS\").expect(\"password\");\n\n # let session = oracle.connect(&dbname, &dbuser, &dbpass).await?;\n\n # session.set_stmt_cache_size(100)?;\n\n # let size = session.stmt_cache_size()?;\n\n # assert_eq!(size, 100);\n", "file_path": "src/session.rs", "rank": 41, "score": 41886.08723635886 }, { "content": " # let session = oracle.connect(&dbname, &dbuser, &dbpass)?;\n\n let size = session.stmt_cache_size()?;\n\n assert_eq!(size, 20);\n\n # Ok(())\n\n # }\n\n # #[cfg(feature=\"nonblocking\")]\n\n # fn main() -> Result<()> {\n\n # sibyl::block_on(async {\n\n # let oracle = sibyl::env()?;\n\n # let dbname = std::env::var(\"DBNAME\").expect(\"database name\");\n\n # let dbuser = std::env::var(\"DBUSER\").expect(\"user name\");\n\n # let dbpass = std::env::var(\"DBPASS\").expect(\"password\");\n\n # let session = oracle.connect(&dbname, &dbuser, &dbpass).await?;\n\n # let size = session.stmt_cache_size()?;\n\n # assert_eq!(size, 20);\n\n # Ok(()) })\n\n # }\n\n ```\n\n */\n\n pub fn stmt_cache_size(&self) -> Result<u32> {\n", "file_path": "src/session.rs", "rank": 42, "score": 41885.63050819334 }, { "content": " let row = stmt.query_single(())?.unwrap();\n\n let module : &str = row.get_not_null(0)?;\n\n assert_eq!(module, \"Sibyl DocTest\");\n\n # Ok(())\n\n # }\n\n # #[cfg(feature=\"nonblocking\")]\n\n # fn main() -> Result<()> {\n\n # sibyl::block_on(async {\n\n # let oracle = sibyl::env()?;\n\n # let dbname = std::env::var(\"DBNAME\").expect(\"database name\");\n\n # let dbuser = std::env::var(\"DBUSER\").expect(\"user name\");\n\n # let dbpass = std::env::var(\"DBPASS\").expect(\"password\");\n\n # let oracle = sibyl::env()?;\n\n # let session = oracle.connect(&dbname, &dbuser, &dbpass).await?;\n\n # session.set_module(\"Sibyl DocTest\");\n\n # let stmt = session.prepare(\"\n\n # SELECT module\n\n # FROM v$session\n\n # WHERE sid = SYS_CONTEXT('USERENV', 'SID')\n\n # \").await?;\n", "file_path": "src/session.rs", "rank": 43, "score": 41884.00180882935 }, { "content": 
" \")?;\n\n let row = stmt.query_single(())?.unwrap();\n\n let action : &str = row.get_not_null(0)?;\n\n assert_eq!(action, \"Action Name Test\");\n\n # Ok(())\n\n # }\n\n # #[cfg(feature=\"nonblocking\")]\n\n # fn main() -> Result<()> {\n\n # sibyl::block_on(async {\n\n # let oracle = sibyl::env()?;\n\n # let dbname = std::env::var(\"DBNAME\").expect(\"database name\");\n\n # let dbuser = std::env::var(\"DBUSER\").expect(\"user name\");\n\n # let dbpass = std::env::var(\"DBPASS\").expect(\"password\");\n\n # let oracle = sibyl::env()?;\n\n # let session = oracle.connect(&dbname, &dbuser, &dbpass).await?;\n\n # session.set_action(\"Action Name Test\");\n\n # let stmt = session.prepare(\"\n\n # SELECT action\n\n # FROM v$session\n\n # WHERE sid = SYS_CONTEXT('USERENV', 'SID')\n", "file_path": "src/session.rs", "rank": 44, "score": 41883.65615365845 }, { "content": " ```\n\n # use sibyl::Result;\n\n # #[cfg(feature=\"blocking\")]\n\n # fn main() -> Result<()> {\n\n # let oracle = sibyl::env()?;\n\n # let dbname = std::env::var(\"DBNAME\").expect(\"database name\");\n\n # let dbuser = std::env::var(\"DBUSER\").expect(\"user name\");\n\n # let dbpass = std::env::var(\"DBPASS\").expect(\"password\");\n\n # let session = oracle.connect(&dbname, &dbuser, &dbpass)?;\n\n session.set_call_timeout(5000)?;\n\n # let time = session.call_timeout()?;\n\n # assert_eq!(time, 5000);\n\n # Ok(())\n\n # }\n\n # #[cfg(feature=\"nonblocking\")]\n\n # fn main() -> Result<()> {\n\n # sibyl::block_on(async {\n\n # let oracle = sibyl::env()?;\n\n # let dbname = std::env::var(\"DBNAME\").expect(\"database name\");\n\n # let dbuser = std::env::var(\"DBUSER\").expect(\"user name\");\n", "file_path": "src/session.rs", "rank": 45, "score": 41883.27867698081 }, { "content": " \")?;\n\n let row = stmt.query_single(())?.unwrap();\n\n let client_identifier : &str = row.get_not_null(0)?;\n\n assert_eq!(client_identifier, \"Test Wielder\");\n\n # Ok(())\n\n # }\n\n # #[cfg(feature=\"nonblocking\")]\n\n # fn main() -> Result<()> {\n\n # sibyl::block_on(async {\n\n # let oracle = sibyl::env()?;\n\n # let dbname = std::env::var(\"DBNAME\").expect(\"database name\");\n\n # let dbuser = std::env::var(\"DBUSER\").expect(\"user name\");\n\n # let dbpass = std::env::var(\"DBPASS\").expect(\"password\");\n\n # let session = oracle.connect(&dbname, &dbuser, &dbpass).await?;\n\n # session.set_client_identifier(\"Test Wielder\");\n\n # let stmt = session.prepare(\"\n\n # SELECT client_identifier\n\n # FROM v$session\n\n # WHERE sid = SYS_CONTEXT('USERENV', 'SID')\n\n # \").await?;\n", "file_path": "src/session.rs", "rank": 46, "score": 41883.05940028559 }, { "content": " # }\n\n # #[cfg(feature=\"nonblocking\")]\n\n # fn main() -> Result<()> {\n\n # sibyl::block_on(async {\n\n # let oracle = sibyl::env()?;\n\n # let dbname = std::env::var(\"DBNAME\").expect(\"database name\");\n\n # let dbuser = std::env::var(\"DBUSER\").expect(\"user name\");\n\n # let dbpass = std::env::var(\"DBPASS\").expect(\"password\");\n\n # let session = oracle.connect(&dbname, &dbuser, &dbpass).await?;\n\n # let orig_name = session.current_schema()?;\n\n # session.set_current_schema(\"HR\")?;\n\n # assert_eq!(session.current_schema()?, \"HR\");\n\n # let stmt = session.prepare(\"\n\n # SELECT schemaname\n\n # FROM v$session\n\n # WHERE sid = SYS_CONTEXT('USERENV', 'SID')\n\n # \").await?;\n\n # let row = stmt.query_single(()).await?.unwrap();\n\n # let schema_name : &str = row.get_not_null(0)?;\n\n # assert_eq!(schema_name, \"HR\");\n", "file_path": 
"src/session.rs", "rank": 47, "score": 41882.180244335206 }, { "content": " let row = stmt.query_single(())?.unwrap();\n\n let client_info : &str = row.get_not_null(0)?;\n\n assert_eq!(client_info, \"Nothing to see here, move along folks\");\n\n # Ok(())\n\n # }\n\n # #[cfg(feature=\"nonblocking\")]\n\n # fn main() -> Result<()> {\n\n # sibyl::block_on(async {\n\n # let oracle = sibyl::env()?;\n\n # let dbname = std::env::var(\"DBNAME\").expect(\"database name\");\n\n # let dbuser = std::env::var(\"DBUSER\").expect(\"user name\");\n\n # let dbpass = std::env::var(\"DBPASS\").expect(\"password\");\n\n # let session = oracle.connect(&dbname, &dbuser, &dbpass).await?;\n\n # session.set_client_info(\"Nothing to see here, move along folks\");\n\n # let stmt = session.prepare(\"\n\n # SELECT client_info\n\n # FROM v$session\n\n # WHERE sid = SYS_CONTEXT('USERENV', 'SID')\n\n # \").await?;\n\n # let row = stmt.query_single(()).await?.unwrap();\n", "file_path": "src/session.rs", "rank": 48, "score": 41881.71739409723 }, { "content": " session.stop_call_time_measurements()?;\n\n assert!(dt > 0);\n\n # Ok(())\n\n # }\n\n # #[cfg(feature=\"nonblocking\")]\n\n # fn main() -> Result<()> {\n\n # sibyl::block_on(async {\n\n # let oracle = sibyl::env()?;\n\n # let dbname = std::env::var(\"DBNAME\").expect(\"database name\");\n\n # let dbuser = std::env::var(\"DBUSER\").expect(\"user name\");\n\n # let dbpass = std::env::var(\"DBPASS\").expect(\"password\");\n\n # let session = oracle.connect(&dbname, &dbuser, &dbpass).await?;\n\n # session.start_call_time_measurements()?;\n\n # session.ping().await?;\n\n # let dt = session.call_time()?;\n\n # session.stop_call_time_measurements()?;\n\n # assert!(dt > 0);\n\n # Ok(()) })\n\n # }\n\n ```\n", "file_path": "src/session.rs", "rank": 49, "score": 41880.862081228544 }, { "content": " # #[cfg(feature=\"blocking\")]\n\n # fn main() -> Result<()> {\n\n # let oracle = sibyl::env()?;\n\n # let dbname = std::env::var(\"DBNAME\").expect(\"database name\");\n\n # let dbuser = std::env::var(\"DBUSER\").expect(\"user name\");\n\n # let dbpass = std::env::var(\"DBPASS\").expect(\"password\");\n\n # let session = oracle.connect(&dbname, &dbuser, &dbpass)?;\n\n session.set_call_timeout(1000)?;\n\n\n\n let time = session.call_timeout()?;\n\n\n\n assert_eq!(time, 1000);\n\n # Ok(())\n\n # }\n\n # #[cfg(feature=\"nonblocking\")]\n\n # fn main() -> Result<()> {\n\n # sibyl::block_on(async {\n\n # let oracle = sibyl::env()?;\n\n # let dbname = std::env::var(\"DBNAME\").expect(\"database name\");\n\n # let dbuser = std::env::var(\"DBUSER\").expect(\"user name\");\n", "file_path": "src/session.rs", "rank": 50, "score": 41880.13709149234 }, { "content": " # let dbuser = std::env::var(\"DBUSER\").expect(\"user name\");\n\n # let dbpass = std::env::var(\"DBPASS\").expect(\"password\");\n\n # let session = oracle.connect(&dbname, &dbuser, &dbpass)?;\n\n let orig_name = session.current_schema()?;\n\n\n\n session.set_current_schema(\"HR\")?;\n\n\n\n assert_eq!(session.current_schema()?, \"HR\");\n\n let stmt = session.prepare(\"\n\n SELECT schemaname\n\n FROM v$session\n\n WHERE sid = SYS_CONTEXT('USERENV', 'SID')\n\n \")?;\n\n let row = stmt.query_single(())?.unwrap();\n\n let schema_name : &str = row.get_not_null(0)?;\n\n assert_eq!(schema_name, \"HR\");\n\n\n\n session.set_current_schema(orig_name)?;\n\n assert_eq!(session.current_schema()?, orig_name);\n\n # Ok(())\n", "file_path": "src/session.rs", "rank": 51, "score": 41877.918771727884 }, { "content": " # let dbpass = 
std::env::var(\"DBPASS\").expect(\"password\");\n\n # let session = oracle.connect(&dbname, &dbuser, &dbpass).await?;\n\n # session.set_call_timeout(5000)?;\n\n # let time = session.call_timeout()?;\n\n # assert_eq!(time, 5000);\n\n # Ok(()) })\n\n # }\n\n */\n\n pub fn set_call_timeout(&self, timeout: u32) -> Result<()> {\n\n let ctx : &OCISvcCtx = self.as_ref();\n\n attr::set(OCI_ATTR_CALL_TIMEOUT, timeout, OCI_HTYPE_SVCCTX, ctx, self.as_ref())\n\n }\n\n\n\n /**\n\n Returns time (in milliseconds) for a database round-trip call to time out.\n\n\n\n # Example\n\n\n\n ```\n\n # use sibyl::Result;\n", "file_path": "src/session.rs", "rank": 52, "score": 41877.17802235935 }, { "content": " # let dbname = std::env::var(\"DBNAME\").expect(\"database name\");\n\n # let dbuser = std::env::var(\"DBUSER\").expect(\"user name\");\n\n # let dbpass = std::env::var(\"DBPASS\").expect(\"password\");\n\n # let session = oracle.connect(&dbname, &dbuser, &dbpass)?;\n\n let orig_name = session.current_schema()?;\n\n session.set_current_schema(\"HR\")?;\n\n\n\n let current_schema = session.current_schema()?;\n\n\n\n assert_eq!(current_schema, \"HR\");\n\n session.set_current_schema(orig_name)?;\n\n let current_schema = session.current_schema()?;\n\n assert_eq!(current_schema, orig_name);\n\n # Ok(())\n\n # }\n\n # #[cfg(feature=\"nonblocking\")]\n\n # fn main() -> Result<()> {\n\n # sibyl::block_on(async {\n\n # let oracle = sibyl::env()?;\n\n # let dbname = std::env::var(\"DBNAME\").expect(\"database name\");\n", "file_path": "src/session.rs", "rank": 53, "score": 41876.01316870274 }, { "content": " # let dbpass = std::env::var(\"DBPASS\").expect(\"password\");\n\n # let session = oracle.connect(&dbname, &dbuser, &dbpass).await?;\n\n # session.set_call_timeout(1000)?;\n\n # let time = session.call_timeout()?;\n\n # assert_eq!(time, 1000);\n\n # Ok(()) })\n\n # }\n\n */\n\n pub fn call_timeout(&self) -> Result<u32> {\n\n let ctx : &OCISvcCtx = self.as_ref();\n\n attr::get(OCI_ATTR_CALL_TIMEOUT, OCI_HTYPE_SVCCTX, ctx, self.as_ref())\n\n }\n\n\n\n /**\n\n Causes the server to measure call time, in milliseconds, for each subsequent OCI call.\n\n */\n\n pub fn start_call_time_measurements(&self) -> Result<()> {\n\n self.set_attr(OCI_ATTR_COLLECT_CALL_TIME, 1u32)\n\n }\n\n\n", "file_path": "src/session.rs", "rank": 54, "score": 41876.00289685077 }, { "content": " # Ok(()) })\n\n # }\n\n ```\n\n */\n\n pub fn set_stmt_cache_size(&self, num_stmts: u32) -> Result<()> {\n\n let ctx : &OCISvcCtx = self.as_ref();\n\n attr::set(OCI_ATTR_STMTCACHESIZE, num_stmts, OCI_HTYPE_SVCCTX, ctx, self.as_ref())\n\n }\n\n\n\n /**\n\n Returns the statement cache size.\n\n\n\n ```\n\n # use sibyl::Result;\n\n # #[cfg(feature=\"blocking\")]\n\n # fn main() -> Result<()> {\n\n # let oracle = sibyl::env()?;\n\n # let dbname = std::env::var(\"DBNAME\").expect(\"database name\");\n\n # let dbuser = std::env::var(\"DBUSER\").expect(\"user name\");\n\n # let dbpass = std::env::var(\"DBPASS\").expect(\"password\");\n", "file_path": "src/session.rs", "rank": 55, "score": 41874.3142734873 }, { "content": " # let dbuser = std::env::var(\"DBUSER\").expect(\"user name\");\n\n # let dbpass = std::env::var(\"DBPASS\").expect(\"password\");\n\n # let session = oracle.connect(&dbname, &dbuser, &dbpass).await?;\n\n # let orig_name = session.current_schema()?;\n\n # session.set_current_schema(\"HR\")?;\n\n # let current_schema = session.current_schema()?;\n\n # assert_eq!(current_schema, \"HR\");\n\n # session.set_current_schema(orig_name)?;\n\n # let 
current_schema = session.current_schema()?;\n\n # assert_eq!(current_schema, orig_name);\n\n # Ok(()) })\n\n # }\n\n ```\n\n */\n\n pub fn current_schema(&self) -> Result<&str> {\n\n self.get_attr(OCI_ATTR_CURRENT_SCHEMA)\n\n }\n\n\n\n /**\n\n Sets the current schema. It has the same effect as the SQL command `ALTER SESSION SET CURRENT_SCHEMA`\n", "file_path": "src/session.rs", "rank": 56, "score": 41872.79895233882 }, { "content": " if the schema name and the session exist. The schema is altered on the next OCI call that does a\n\n round-trip to the server, avoiding an extra round-trip. If the new schema name does not exist, the\n\n same error is returned as the error returned from ALTER SESSION SET CURRENT_SCHEMA. The new schema\n\n name is placed before database objects in DML or DDL commands that you then enter.\n\n\n\n # Parameters\n\n\n\n * `schema_name` - The new schema name.\n\n\n\n # Example\n\n\n\n 🛈 **Note** that this example is written for `blocking` mode execution. Add `await`s, where needed,\n\n to convert it to a nonblocking variant (or peek at the source to see the hidden nonblocking doctest).\n\n\n\n ```\n\n # use sibyl::Result;\n\n # #[cfg(feature=\"blocking\")]\n\n # fn main() -> Result<()> {\n\n # let oracle = sibyl::env()?;\n\n # let dbname = std::env::var(\"DBNAME\").expect(\"database name\");\n", "file_path": "src/session.rs", "rank": 57, "score": 41867.61551831028 }, { "content": " # \").await?;\n\n # let row = stmt.query_single(()).await?.unwrap();\n\n # let action : &str = row.get_not_null(0)?;\n\n # assert_eq!(action, \"Action Name Test\");\n\n # Ok(()) })\n\n # }\n\n ```\n\n */\n\n pub fn set_action(&self, action: &str) -> Result<()> {\n\n self.set_attr(OCI_ATTR_ACTION, action)\n\n }\n\n\n\n /**\n\n Sets the user identifier (`V$SESSION.CLIENT_IDENTIFIER`) in the session handle.\n\n Can be up to 64 bytes long.\n\n\n\n # Parameters\n\n\n\n # `id` - The user identifier.\n\n\n", "file_path": "src/session.rs", "rank": 58, "score": 41863.654110107345 }, { "content": " Ok(mode != 0)\n\n }\n\n\n\n /**\n\n Sets the statement cache size.\n\n\n\n The default value of the statement cache size is 20 statements, for a statement cache-enabled session.\n\n Statement caching can be enabled by setting the attribute to a nonzero size and disabled by setting it to zero.\n\n\n\n # Parameters\n\n\n\n * `num_stmts` - Statement cache size\n\n\n\n # Example\n\n\n\n ```\n\n # use sibyl::Result;\n\n # #[cfg(feature=\"blocking\")]\n\n # fn main() -> Result<()> {\n\n # let oracle = sibyl::env()?;\n", "file_path": "src/session.rs", "rank": 59, "score": 41862.50567725359 }, { "content": " # let row = stmt.query_single(()).await?.unwrap();\n\n # let client_identifier : &str = row.get_not_null(0)?;\n\n # assert_eq!(client_identifier, \"Test Wielder\");\n\n # Ok(()) })\n\n # }\n\n ```\n\n */\n\n pub fn set_client_identifier(&self, id: &str) -> Result<()> {\n\n self.set_attr(OCI_ATTR_CLIENT_IDENTIFIER, id)\n\n }\n\n\n\n /**\n\n Sets additional client application information (`V$SESSION.CLIENT_INFO`).\n\n Can be up to 64 bytes long.\n\n\n\n # Parameters\n\n\n\n * `info` - Additional client application information.\n\n\n\n # Example\n", "file_path": "src/session.rs", "rank": 60, "score": 41861.84911047355 }, { "content": "//! 
User Session\n\n\n\n#[cfg(feature=\"blocking\")]\n\n#[cfg_attr(docsrs, doc(cfg(feature=\"blocking\")))]\n\nmod blocking;\n\n\n\n#[cfg(feature=\"nonblocking\")]\n\n#[cfg_attr(docsrs, doc(cfg(feature=\"nonblocking\")))]\n\nmod nonblocking;\n\n\n\nuse std::{sync::Arc, marker::PhantomData};\n\nuse crate::{Result, Environment, oci::*, types::Ctx};\n\n#[cfg(feature=\"nonblocking\")]\n\nuse crate::task;\n\n\n\n/// Representation of the service context.\n\n/// It will be behinfd `Arc` as it needs to survive the `Session`\n\n/// drop to allow statements and cursors to be dropped asynchronously.\n\npub(crate) struct SvcCtx {\n\n svc: Ptr<OCISvcCtx>,\n", "file_path": "src/session.rs", "rank": 61, "score": 41858.56101041393 }, { "content": " }\n\n}\n\n\n\n/// Represents a user session\n\npub struct Session<'a> {\n\n usr: Ptr<OCISession>,\n\n ctx: Arc<SvcCtx>,\n\n phantom_env: PhantomData<&'a Environment>\n\n}\n\n\n\nimpl AsRef<OCIEnv> for Session<'_> {\n\n fn as_ref(&self) -> &OCIEnv {\n\n self.ctx.as_ref().as_ref()\n\n }\n\n}\n\n\n\nimpl AsRef<OCIError> for Session<'_> {\n\n fn as_ref(&self) -> &OCIError {\n\n self.ctx.as_ref().as_ref()\n\n }\n", "file_path": "src/session.rs", "rank": 62, "score": 41858.521661287356 }, { "content": " */\n\n pub fn call_time(&self) -> Result<u64> {\n\n self.get_attr(OCI_ATTR_CALL_TIME)\n\n }\n\n\n\n /// Terminates call time measurements.\n\n pub fn stop_call_time_measurements(&self) -> Result<()> {\n\n self.set_attr(OCI_ATTR_COLLECT_CALL_TIME, 0u32)\n\n }\n\n\n\n /**\n\n Sets the name of the current module (`V$SESSION.MODULE`) running in the client application.\n\n When the current module terminates, call with the name of the new module, or use empty\n\n string if there is no new module. The name can be up to 48 bytes long.\n\n\n\n # Parameters\n\n\n\n * `name` - The name of the current module running in the client application.\n\n\n\n # Example\n", "file_path": "src/session.rs", "rank": 63, "score": 41857.47363764789 }, { "content": " attr::get(attr_type, OCI_HTYPE_SESSION, self.usr.as_ref(), self.as_ref())\n\n }\n\n\n\n pub(crate) fn get_svc(&self) -> Arc<SvcCtx> {\n\n self.ctx.clone()\n\n }\n\n\n\n\n\n\n\n /// Reports whether self is connected to the server\n\n pub fn is_connected(&self) -> Result<bool> {\n\n let srv : Ptr<OCIServer> = attr::get(OCI_ATTR_SERVER, OCI_HTYPE_SVCCTX, self.ctx.svc.as_ref(), self.as_ref())?;\n\n let status : u32 = attr::get(OCI_ATTR_SERVER_STATUS, OCI_HTYPE_SERVER, srv.as_ref(), self.as_ref())?;\n\n Ok(status == OCI_SERVER_NORMAL)\n\n }\n\n\n\n /// Reports whether connection is established in non-blocking mode.\n\n pub fn is_async(&self) -> Result<bool> {\n\n let srv : Ptr<OCIServer> = attr::get(OCI_ATTR_SERVER, OCI_HTYPE_SVCCTX, self.ctx.svc.as_ref(), self.as_ref())?;\n\n let mode : u8 = attr::get(OCI_ATTR_NONBLOCKING_MODE, OCI_HTYPE_SERVER, srv.as_ref(), self.as_ref())?;\n", "file_path": "src/session.rs", "rank": 64, "score": 41856.61732756631 }, { "content": " # let client_info : &str = row.get_not_null(0)?;\n\n # assert_eq!(client_info, \"Nothing to see here, move along folks\");\n\n # Ok(()) })\n\n # }\n\n ```\n\n */\n\n pub fn set_client_info(&self, info: &str) -> Result<()> {\n\n self.set_attr(OCI_ATTR_CLIENT_INFO, info)\n\n }\n\n\n\n /**\n\n Returns the current schema.\n\n\n\n # Example\n\n\n\n ```\n\n # use sibyl::Result;\n\n # #[cfg(feature=\"blocking\")]\n\n # fn main() -> Result<()> {\n\n # let oracle = sibyl::env()?;\n", "file_path": "src/session.rs", "rank": 65, "score": 41855.67805632942 }, { "content": "}\n\n\n\nimpl 
AsRef<OCISvcCtx> for Session<'_> {\n\n fn as_ref(&self) -> &OCISvcCtx {\n\n self.ctx.as_ref().as_ref()\n\n }\n\n}\n\n\n\nimpl Ctx for Session<'_> {\n\n fn try_as_session(&self) -> Option<&OCISession> {\n\n Some(&self.usr)\n\n }\n\n}\n\n\n\nimpl Session<'_> {\n\n fn set_attr<T: attr::AttrSet>(&self, attr_type: u32, attr_val: T) -> Result<()> {\n\n attr::set(attr_type, attr_val, OCI_HTYPE_SESSION, self.usr.as_ref(), self.as_ref())\n\n }\n\n\n\n fn get_attr<T: attr::AttrGet>(&self, attr_type: u32) -> Result<T> {\n", "file_path": "src/session.rs", "rank": 66, "score": 41854.969590349465 }, { "content": " # let row = stmt.query_single(()).await?.unwrap();\n\n # let module : &str = row.get_not_null(0)?;\n\n # assert_eq!(module, \"Sibyl DocTest\");\n\n # Ok(()) })\n\n # }\n\n ```\n\n */\n\n pub fn set_module(&self, name: &str) -> Result<()> {\n\n self.set_attr(OCI_ATTR_MODULE, name)\n\n }\n\n\n\n /**\n\n Sets the name of the current action (`V$SESSION.ACTION`) within the current module.\n\n When the current action terminates, set this attribute again with the name of the\n\n next action, or empty string if there is no next action. Can be up to 32 bytes long.\n\n\n\n # Parameters\n\n\n\n * `action` - The name of the current action within the current module.\n\n\n", "file_path": "src/session.rs", "rank": 67, "score": 41853.10181100473 }, { "content": " let ctx : &OCISvcCtx = self.as_ref();\n\n attr::get(OCI_ATTR_STMTCACHESIZE, OCI_HTYPE_SVCCTX, ctx, self.as_ref())\n\n }\n\n\n\n /**\n\n Sets the time (in milliseconds) for a database round-trip call to time out. When the call times out,\n\n a network timeout error is returned. Setting this value stays effective for all subsequent round-trip\n\n calls until a different value is set. To remove the timeout, the value must be set to 0.\n\n\n\n The call timeout is applied to each individual round-trip between OCI and Oracle database. Each OCI\n\n method or operation may require zero or more round-trips to Oracle database. The timeout value applies\n\n to each round-trip individually, not to the sum of all round-trips. Time spent processing in OCI before\n\n or after the completion of each round-trip is not counted.\n\n\n\n # Parameters\n\n\n\n * `timeout` - The time (in milliseconds) for a database round-trip call to time out.\n\n\n\n # Example\n\n\n", "file_path": "src/session.rs", "rank": 68, "score": 41849.58647718693 }, { "content": " let env = self.env.clone();\n\n task::spawn(futures::SessionRelease::new(svc, err, env));\n\n }\n\n}\n\n\n\nimpl AsRef<OCIEnv> for SvcCtx {\n\n fn as_ref(&self) -> &OCIEnv {\n\n &*self.env\n\n }\n\n}\n\n\n\nimpl AsRef<OCIError> for SvcCtx {\n\n fn as_ref(&self) -> &OCIError {\n\n &*self.err\n\n }\n\n}\n\n\n\nimpl AsRef<OCISvcCtx> for SvcCtx {\n\n fn as_ref(&self) -> &OCISvcCtx {\n\n &*self.svc\n", "file_path": "src/session.rs", "rank": 69, "score": 41849.07778931071 }, { "content": " # session.set_current_schema(orig_name)?;\n\n # assert_eq!(session.current_schema()?, orig_name);\n\n # Ok(()) })\n\n # }\n\n ```\n\n */\n\n pub fn set_current_schema(&self, schema_name: &str) -> Result<()> {\n\n self.set_attr(OCI_ATTR_CURRENT_SCHEMA, schema_name)\n\n }\n\n\n\n /**\n\n Sets the default prefetch buffer size for each LOB locator.\n\n\n\n This attribute value enables prefetching for all the LOB locators fetched in the session.\n\n The default value for this attribute is zero (no prefetch of LOB data). 
This option\n\n relieves the application developer from setting the prefetch LOB size for each LOB column\n\n in each prepared statement.\n\n */\n\n pub fn set_lob_prefetch_size(&self, size: u32) -> Result<()> {\n\n self.set_attr(OCI_ATTR_DEFAULT_LOBPREFETCH_SIZE, size)\n\n }\n\n\n\n /// Returns the default prefetch buffer size for each LOB locator.\n\n pub fn lob_prefetch_size(&self) -> Result<u32> {\n\n self.get_attr(OCI_ATTR_DEFAULT_LOBPREFETCH_SIZE)\n\n }\n\n}\n", "file_path": "src/session.rs", "rank": 70, "score": 41848.553245016 }, { "content": " err: Handle<OCIError>,\n\n env: Arc<Handle<OCIEnv>>,\n\n #[cfg(feature=\"nonblocking\")]\n\n active_future: std::sync::atomic::AtomicUsize,\n\n}\n\n\n\nimpl Drop for SvcCtx {\n\n #[cfg(feature=\"blocking\")]\n\n fn drop(&mut self) {\n\n let svc : &OCISvcCtx = self.as_ref();\n\n let err : &OCIError = self.as_ref();\n\n oci_trans_rollback(svc, err);\n\n oci_session_release(svc, err);\n\n }\n\n\n\n #[cfg(feature=\"nonblocking\")]\n\n fn drop(&mut self) {\n\n let mut svc = Ptr::<OCISvcCtx>::null();\n\n svc.swap(&mut self.svc);\n\n let err = Handle::take(&mut self.err);\n", "file_path": "src/session.rs", "rank": 71, "score": 41846.39012748537 }, { "content": " * `num_rows` The number of top-level rows to be prefetched\n\n\n\n # Example\n\n\n\n 🛈 **Note** that this example is written for `blocking` mode execution. Add `await`s, where needed,\n\n to convert it to a nonblocking variant (or peek at the source to see the hidden nonblocking doctest).\n\n\n\n ```\n\n # use sibyl::Result;\n\n # #[cfg(feature=\"blocking\")]\n\n # fn main() -> Result<()> {\n\n # let oracle = sibyl::env()?;\n\n # let dbname = std::env::var(\"DBNAME\").expect(\"database name\");\n\n # let dbuser = std::env::var(\"DBUSER\").expect(\"user name\");\n\n # let dbpass = std::env::var(\"DBPASS\").expect(\"password\");\n\n # let session = oracle.connect(&dbname, &dbuser, &dbpass)?;\n\n let stmt = session.prepare(\"\n\n SELECT employee_id, first_name, last_name\n\n FROM hr.employees\n\n WHERE manager_id = :id\n", "file_path": "src/stmt.rs", "rank": 72, "score": 41253.46067895293 }, { "content": " ```\n\n # use sibyl::Result;\n\n # #[cfg(feature=\"blocking\")]\n\n # fn main() -> Result<()> {\n\n # let oracle = sibyl::env()?;\n\n # let dbname = std::env::var(\"DBNAME\").expect(\"database name\");\n\n # let dbuser = std::env::var(\"DBUSER\").expect(\"user name\");\n\n # let dbpass = std::env::var(\"DBPASS\").expect(\"password\");\n\n # let session = oracle.connect(&dbname, &dbuser, &dbpass)?;\n\n let stmt = session.prepare(\"\n\n SELECT employee_id, last_name, first_name\n\n FROM hr.employees\n\n WHERE manager_id = :id\n\n \")?;\n\n let rows = stmt.query(103)?;\n\n let num_cols = stmt.column_count()?;\n\n assert_eq!(num_cols, 3);\n\n # Ok(())\n\n # }\n\n # #[cfg(feature=\"nonblocking\")]\n", "file_path": "src/stmt.rs", "rank": 73, "score": 41253.144992490685 }, { "content": " let row = stmt.query_single(&id)?.unwrap();\n\n let txt : &str = row.get_not_null(0)?;\n\n # assert_eq!(txt, TEXT);\n\n # Ok(())\n\n # }\n\n # #[cfg(feature=\"nonblocking\")]\n\n # fn main() -> Result<()> {\n\n # sibyl::block_on(async {\n\n # let oracle = sibyl::env()?;\n\n # let dbname = std::env::var(\"DBNAME\").expect(\"database name\");\n\n # let dbuser = std::env::var(\"DBUSER\").expect(\"user name\");\n\n # let dbpass = std::env::var(\"DBPASS\").expect(\"password\");\n\n # let session = oracle.connect(&dbname, &dbuser, &dbpass).await?;\n\n # let stmt = session.prepare(\"\n\n # DECLARE\n\n # name_already_used 
EXCEPTION; PRAGMA EXCEPTION_INIT(name_already_used, -955);\n\n # BEGIN\n\n # EXECUTE IMMEDIATE '\n\n # CREATE TABLE long_and_raw_test_data (\n\n # id NUMBER GENERATED ALWAYS AS IDENTITY,\n", "file_path": "src/stmt.rs", "rank": 74, "score": 41252.10306924293 }, { "content": " # use sibyl::Result;\n\n # #[cfg(feature=\"blocking\")]\n\n # fn main() -> Result<()> {\n\n # let oracle = sibyl::env()?;\n\n # let dbname = std::env::var(\"DBNAME\").expect(\"database name\");\n\n # let dbuser = std::env::var(\"DBUSER\").expect(\"user name\");\n\n # let dbpass = std::env::var(\"DBPASS\").expect(\"password\");\n\n # let session = oracle.connect(&dbname, &dbuser, &dbpass)?;\n\n let stmt = session.prepare(\"\n\n SELECT employee_id, first_name, last_name\n\n FROM hr.employees\n\n WHERE manager_id = :id\n\n ORDER BY employee_id\n\n \")?;\n\n stmt.set_prefetch_rows(5)?;\n\n let rows = stmt.query(103)?;\n\n let mut ids = Vec::new();\n\n while let Some( row ) = rows.next()? {\n\n // EMPLOYEE_ID is NOT NULL, so we can safely unwrap it\n\n let id : u32 = row.get_not_null(0)?;\n", "file_path": "src/stmt.rs", "rank": 75, "score": 41251.66492693018 }, { "content": "\n\n # use sibyl::Result;\n\n # #[cfg(feature=\"blocking\")]\n\n # fn main() -> Result<()> {\n\n # let oracle = sibyl::env()?;\n\n # let dbname = std::env::var(\"DBNAME\").expect(\"database name\");\n\n # let dbuser = std::env::var(\"DBUSER\").expect(\"user name\");\n\n # let dbpass = std::env::var(\"DBPASS\").expect(\"password\");\n\n # let session = oracle.connect(&dbname, &dbuser, &dbpass)?;\n\n let stmt = session.prepare(\"\n\n SELECT employee_id, last_name, first_name\n\n FROM hr.employees\n\n WHERE manager_id = :id\n\n \")?;\n\n let rows = stmt.query(103)?;\n\n let col = stmt.column(0).expect(\"employee_id column info\");\n\n assert_eq!(col.name()?, \"EMPLOYEE_ID\");\n\n assert_eq!(col.data_type()?, ColumnType::Number);\n\n assert_eq!(col.precision()?, 6);\n\n assert_eq!(col.scale()?, 0);\n", "file_path": "src/stmt.rs", "rank": 76, "score": 41250.36908984601 }, { "content": " \")?;\n\n stmt.set_prefetch_rows(5)?;\n\n # Ok(())\n\n # }\n\n # #[cfg(feature=\"nonblocking\")]\n\n # fn main() -> Result<()> {\n\n # sibyl::block_on(async {\n\n # let oracle = sibyl::env()?;\n\n # let dbname = std::env::var(\"DBNAME\").expect(\"database name\");\n\n # let dbuser = std::env::var(\"DBUSER\").expect(\"user name\");\n\n # let dbpass = std::env::var(\"DBPASS\").expect(\"password\");\n\n # let session = oracle.connect(&dbname, &dbuser, &dbpass).await?;\n\n # let stmt = session.prepare(\"\n\n # SELECT employee_id, first_name, last_name\n\n # FROM hr.employees\n\n # WHERE manager_id = :id\n\n # \").await?;\n\n # stmt.set_prefetch_rows(5)?;\n\n # Ok(()) })\n\n # }\n", "file_path": "src/stmt.rs", "rank": 77, "score": 41249.82443360972 }, { "content": " # fn main() -> Result<()> {\n\n # sibyl::block_on(async {\n\n # let oracle = sibyl::env()?;\n\n # let dbname = std::env::var(\"DBNAME\").expect(\"database name\");\n\n # let dbuser = std::env::var(\"DBUSER\").expect(\"user name\");\n\n # let dbpass = std::env::var(\"DBPASS\").expect(\"password\");\n\n # let session = oracle.connect(&dbname, &dbuser, &dbpass).await?;\n\n # let stmt = session.prepare(\"\n\n # SELECT employee_id, last_name, first_name\n\n # FROM hr.employees\n\n # WHERE manager_id = :id\n\n # \").await?;\n\n # let rows = stmt.query(103).await?;\n\n # let num_cols = stmt.column_count()?;\n\n # assert_eq!(num_cols, 3);\n\n # Ok(()) })\n\n # }\n\n ```\n\n */\n\n pub fn column_count(&self) -> Result<usize> 
{\n", "file_path": "src/stmt.rs", "rank": 78, "score": 41249.657537632236 }, { "content": " to convert it to a nonblocking variant (or peek at the source to see the hidden nonblocking doctest).\n\n\n\n ```\n\n # use sibyl::Result;\n\n # #[cfg(feature=\"blocking\")]\n\n # fn main() -> Result<()> {\n\n # let oracle = sibyl::env()?;\n\n # let dbname = std::env::var(\"DBNAME\").expect(\"database name\");\n\n # let dbuser = std::env::var(\"DBUSER\").expect(\"user name\");\n\n # let dbpass = std::env::var(\"DBPASS\").expect(\"password\");\n\n # let session = oracle.connect(&dbname, &dbuser, &dbpass)?;\n\n let stmt = session.prepare(\"\n\n UPDATE hr.employees\n\n SET manager_id = :new_manager_id\n\n WHERE employee_id = :employee_id\n\n RETURN commission_pct INTO :commission_pct\n\n \")?;\n\n let mut commission_pct = 0f64;\n\n stmt.execute(\n\n (\n", "file_path": "src/stmt.rs", "rank": 79, "score": 41248.73055076553 }, { "content": " ids.push(id);\n\n }\n\n assert_eq!(stmt.row_count()?, 4);\n\n assert_eq!(ids.len(), 4);\n\n assert_eq!(ids.as_slice(), &[104 as u32, 105, 106, 107]);\n\n # Ok(())\n\n # }\n\n # #[cfg(feature=\"nonblocking\")]\n\n # fn main() -> Result<()> {\n\n # sibyl::block_on(async {\n\n # let oracle = sibyl::env()?;\n\n # let dbname = std::env::var(\"DBNAME\").expect(\"database name\");\n\n # let dbuser = std::env::var(\"DBUSER\").expect(\"user name\");\n\n # let dbpass = std::env::var(\"DBPASS\").expect(\"password\");\n\n # let session = oracle.connect(&dbname, &dbuser, &dbpass).await?;\n\n # let stmt = session.prepare(\"\n\n # SELECT employee_id, first_name, last_name\n\n # FROM hr.employees\n\n # WHERE manager_id = :id\n\n # ORDER BY employee_id\n", "file_path": "src/stmt.rs", "rank": 80, "score": 41248.306796828736 }, { "content": " assert!(!col.is_null()?);\n\n assert!(col.is_visible()?);\n\n assert!(!col.is_identity()?);\n\n # Ok(())\n\n # }\n\n # #[cfg(feature=\"nonblocking\")]\n\n # fn main() -> Result<()> {\n\n # sibyl::block_on(async {\n\n # let oracle = sibyl::env()?;\n\n # let dbname = std::env::var(\"DBNAME\").expect(\"database name\");\n\n # let dbuser = std::env::var(\"DBUSER\").expect(\"user name\");\n\n # let dbpass = std::env::var(\"DBPASS\").expect(\"password\");\n\n # let session = oracle.connect(&dbname, &dbuser, &dbpass).await?;\n\n # let stmt = session.prepare(\"\n\n # SELECT employee_id, last_name, first_name\n\n # FROM hr.employees\n\n # WHERE manager_id = :id\n\n # \").await?;\n\n # let rows = stmt.query(103).await?;\n\n # let col = stmt.column(0).expect(\"employee_id column info\");\n", "file_path": "src/stmt.rs", "rank": 81, "score": 41247.83423794948 }, { "content": " (\":EMPLOYEE_ID\", 133),\n\n (\":NEW_MANAGER_ID\", 120),\n\n (\":COMMISSION_PCT\", &mut commission_pct),\n\n )\n\n )?;\n\n let commission_pct_is_null = stmt.is_null(\":COMMISSION_PCT\")?;\n\n assert!(commission_pct_is_null);\n\n # session.rollback()?;\n\n # Ok(())\n\n # }\n\n # #[cfg(feature=\"nonblocking\")]\n\n # fn main() -> Result<()> {\n\n # sibyl::block_on(async {\n\n # let oracle = sibyl::env()?;\n\n # let dbname = std::env::var(\"DBNAME\").expect(\"database name\");\n\n # let dbuser = std::env::var(\"DBUSER\").expect(\"user name\");\n\n # let dbpass = std::env::var(\"DBPASS\").expect(\"password\");\n\n # let session = oracle.connect(&dbname, &dbuser, &dbpass).await?;\n\n # let stmt = session.prepare(\"\n\n # UPDATE hr.employees\n", "file_path": "src/stmt.rs", "rank": 82, "score": 41245.17542914436 }, { "content": " to convert it to a nonblocking variant (or peek at the source to 
see the hidden nonblocking doctest).\n\n\n\n ```\n\n # use sibyl::Result;\n\n # static TEXT : &str = \"When I have fears that I may cease to be Before my pen has gleaned my teeming brain, Before high-pilèd books, in charactery, Hold like rich garners the full ripened grain; When I behold, upon the night’s starred face, Huge cloudy symbols of a high romance, And think that I may never live to trace Their shadows with the magic hand of chance; And when I feel, fair creature of an hour, That I shall never look upon thee more, Never have relish in the faery power Of unreflecting love—then on the shore Of the wide world I stand alone, and think Till love and fame to nothingness do sink.\";\n\n # #[cfg(feature=\"blocking\")]\n\n # fn main() -> Result<()> {\n\n # let oracle = sibyl::env()?;\n\n # let dbname = std::env::var(\"DBNAME\").expect(\"database name\");\n\n # let dbuser = std::env::var(\"DBUSER\").expect(\"user name\");\n\n # let dbpass = std::env::var(\"DBPASS\").expect(\"password\");\n\n # let session = oracle.connect(&dbname, &dbuser, &dbpass)?;\n\n # let stmt = session.prepare(\"\n\n # DECLARE\n\n # name_already_used EXCEPTION; PRAGMA EXCEPTION_INIT(name_already_used, -955);\n\n # BEGIN\n\n # EXECUTE IMMEDIATE '\n\n # CREATE TABLE long_and_raw_test_data (\n\n # id NUMBER GENERATED ALWAYS AS IDENTITY,\n\n # bin RAW(100),\n", "file_path": "src/stmt.rs", "rank": 83, "score": 41241.363643187455 }, { "content": " # text LONG\n\n # )\n\n # ';\n\n # EXCEPTION\n\n # WHEN name_already_used THEN NULL;\n\n # END;\n\n # \")?;\n\n # stmt.execute(())?;\n\n # let stmt = session.prepare(\"\n\n # INSERT INTO long_and_raw_test_data (text) VALUES (:TEXT)\n\n # RETURNING id INTO :ID\n\n # \")?;\n\n # let mut id = 0;\n\n # let count = stmt.execute(((\":TEXT\", &TEXT), (\":ID\", &mut id)))?;\n\n let mut stmt = session.prepare(\"\n\n SELECT text\n\n FROM long_and_raw_test_data\n\n WHERE id = :id\n\n \")?;\n\n stmt.set_max_long_size(100_000);\n", "file_path": "src/stmt.rs", "rank": 84, "score": 41223.46632635611 }, { "content": " # bin RAW(100),\n\n # text LONG\n\n # )\n\n # ';\n\n # EXCEPTION\n\n # WHEN name_already_used THEN NULL;\n\n # END;\n\n # \").await?;\n\n # stmt.execute(()).await?;\n\n # let stmt = session.prepare(\"\n\n # INSERT INTO long_and_raw_test_data (text) VALUES (:TEXT)\n\n # RETURNING id INTO :ID\n\n # \").await?;\n\n # let mut id = 0;\n\n # let count = stmt.execute(((\":TEXT\", &TEXT), (\":ID\", &mut id))).await?;\n\n # let mut stmt = session.prepare(\"\n\n # SELECT text\n\n # FROM long_and_raw_test_data\n\n # WHERE id = :id\n\n # \").await?;\n", "file_path": "src/stmt.rs", "rank": 85, "score": 41223.07221980457 }, { "content": " # SET manager_id = :new_manager_id\n\n # WHERE employee_id = :employee_id\n\n # RETURN commission_pct INTO :commission_pct\n\n # \").await?;\n\n # let mut commission_pct = 0f64;\n\n # stmt.execute(\n\n # (\n\n # (\":EMPLOYEE_ID\", 133),\n\n # (\":NEW_MANAGER_ID\", 120),\n\n # (\":COMMISSION_PCT\", &mut commission_pct)\n\n # )\n\n # ).await?;\n\n # let commission_pct_is_null = stmt.is_null(\":COMMISSION_PCT\")?;\n\n # assert!(commission_pct_is_null);\n\n # session.rollback().await?;\n\n # Ok(()) })\n\n # }\n\n ```\n\n */\n\n pub fn is_null(&self, pos: impl Position) -> Result<bool> {\n", "file_path": "src/stmt.rs", "rank": 86, "score": 41220.487264989184 }, { "content": "}\n\n\n\nimpl AsRef<OCIStmt> for Statement<'_> {\n\n fn as_ref(&self) -> &OCIStmt {\n\n self.stmt.as_ref()\n\n }\n\n}\n\n\n\nimpl Ctx for Statement<'_> {\n\n fn try_as_session(&self) -> 
Rust
botan/src/utils.rs
JustPretender/botan-rs
bdf1de579913cb0b8a07024e8a4015a3719195ee
use botan_sys::*;

use core::fmt;

#[cfg(feature = "no-std")]
pub(crate) use alloc::{borrow::ToOwned, string::String, string::ToString, vec::Vec};

#[cfg(feature = "no-std")]
pub(crate) use cstr_core::{CStr, CString};

#[cfg(not(feature = "no-std"))]
pub(crate) use std::ffi::{CStr, CString};

pub(crate) use core::mem;
pub(crate) use core::ptr;
pub(crate) use cty::{c_char, c_int, c_void};

/// The result type used throughout the crate
pub type Result<T> = ::core::result::Result<T, Error>;

pub(crate) fn make_cstr(input: &str) -> Result<CString> {
    let cstr = CString::new(input).map_err(Error::conversion_error)?;
    Ok(cstr)
}

/// Calls a Botan FFI function that writes a variable-length byte output,
/// growing the buffer and retrying once if the first guess was too small.
pub(crate) fn call_botan_ffi_returning_vec_u8(
    initial_size: usize,
    cb: &dyn Fn(*mut u8, *mut usize) -> c_int,
) -> Result<Vec<u8>> {
    let mut output = vec![0; initial_size];
    let mut out_len = output.len();

    let rc = cb(output.as_mut_ptr(), &mut out_len);

    if rc == 0 {
        assert!(out_len <= output.len());
        output.resize(out_len, 0);
        return Ok(output);
    } else if rc != BOTAN_FFI_ERROR_INSUFFICIENT_BUFFER_SPACE {
        return Err(Error::from_rc(rc));
    }

    // The call failed only because the buffer was too small; `out_len` now
    // holds the required size, so retry once with a correctly sized buffer.
    output.resize(out_len, 0);
    let rc = cb(output.as_mut_ptr(), &mut out_len);

    if rc != 0 {
        return Err(Error::from_rc(rc));
    }

    output.resize(out_len, 0);
    Ok(output)
}

fn cstr_slice_to_str(raw_cstr: &[u8]) -> Result<String> {
    let cstr = CStr::from_bytes_with_nul(raw_cstr).map_err(Error::conversion_error)?;
    Ok(cstr.to_str().map_err(Error::conversion_error)?.to_owned())
}

#[cfg(feature = "botan3")]
unsafe fn cstr_to_str(raw_cstr: *const i8) -> Result<String> {
    let cstr = CStr::from_ptr(raw_cstr);
    Ok(cstr.to_str().map_err(Error::conversion_error)?.to_owned())
}

pub(crate) fn call_botan_ffi_returning_string(
    initial_size: usize,
    cb: &dyn Fn(*mut u8, *mut usize) -> c_int,
) -> Result<String> {
    let v = call_botan_ffi_returning_vec_u8(initial_size, cb)?;
    cstr_slice_to_str(&v)
}

/// An error returned by a call into the Botan library
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Error {
    err_type: ErrorType,
    message: Option<String>,
}

impl Error {
    /// The type of error that occurred
    pub fn error_type(&self) -> ErrorType {
        self.err_type
    }

    /// An additional error message, if one is available
    pub fn error_message(&self) -> Option<&str> {
        self.message.as_deref()
    }

    pub(crate) fn from_rc(rc: c_int) -> Self {
        let err_type = ErrorType::from(rc);

        // With Botan 3 the FFI layer can report the last exception message,
        // which is attached to the error when it is non-empty.
        #[cfg(feature = "botan3")]
        let message = {
            let cptr = unsafe { botan_sys::botan_error_last_exception_message() };
            match unsafe { cstr_to_str(cptr) } {
                Err(_) => None,
                Ok(s) if !s.is_empty() => Some(s),
                Ok(_) => None,
            }
        };

        #[cfg(not(feature = "botan3"))]
        let message = None;

        Self { err_type, message }
    }

    pub(crate) fn with_message(err_type: ErrorType, message: String) -> Self {
        Self {
            err_type,
            message: Some(message),
        }
    }

    #[cfg(not(feature = "no-std"))]
    pub(crate) fn conversion_error<T: std::error::Error>(e: T) -> Self {
        Self {
            err_type: ErrorType::ConversionError,
            message: Some(format!("{}", e)),
        }
    }

    #[cfg(feature = "no-std")]
    pub(crate) fn conversion_error<T: core::fmt::Display>(e: T) -> Self {
        Self {
            err_type: ErrorType::ConversionError,
            message: Some(format!("{}", e)),
        }
    }
}

impl core::fmt::Display for Error {
    fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
        match &self.message {
            Some(m) => write!(f, "{} ({})", self.err_type, m),
            None => write!(f, "{}", self.err_type),
        }
    }
}

/// The categories of errors that the library can report
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum ErrorType {
    BadAuthCode,
    BadFlag,
    BadParameter,
    ExceptionThrown,
    InsufficientBufferSpace,
    InternalError,
    InvalidInput,
    InvalidObject,
    InvalidObjectState,
    InvalidVerifier,
    InvalidKeyLength,
    KeyNotSet,
    NotImplemented,
    NullPointer,
    OutOfMemory,
    SystemError,
    UnknownError,
    ConversionError,
    TlsError,
    HttpError,
}

impl fmt::Display for ErrorType {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let msg = match self {
            Self::BadAuthCode => "A provided authentication code was incorrect",
            Self::BadFlag => "A bad flag was passed to the library",
            Self::BadParameter => "An invalid parameter was provided to the library",
            Self::ExceptionThrown => "An exception was thrown while processing this request",
            Self::InsufficientBufferSpace => {
                "There was insufficient buffer space to write the output"
            }
            Self::InternalError => "An internal error occurred (this is a bug in the library)",
            Self::InvalidInput => "Something about the input was invalid",
            Self::InvalidObject => "An invalid object was provided to the library",
            Self::InvalidObjectState => {
                "An object was invoked in a way that is invalid for its current state"
            }
            Self::InvalidVerifier => "A verifier was incorrect",
            Self::InvalidKeyLength => "A key of invalid length was provided",
            Self::KeyNotSet => "An object was invoked without the key being set",
            Self::NotImplemented => {
                "Some functionality is not implemented in the current library version"
            }
            Self::NullPointer => "A null pointer was incorrectly provided",
            Self::OutOfMemory => "Memory exhaustion",
            Self::SystemError => "An error occurred while invoking a system API",
            Self::UnknownError => "Some unknown error occurred",
            Self::ConversionError => "An error occurred while converting data to C",
            Self::TlsError => "An error occurred in TLS",
            Self::HttpError => "An error occurred during an HTTP transaction",
        };
        write!(f, "{}", msg)
    }
}

#[cfg(not(feature = "no-std"))]
impl std::error::Error for Error {}

impl From<i32> for ErrorType {
    fn from(err: i32) -> Self {
        match err {
            BOTAN_FFI_ERROR_BAD_FLAG => Self::BadFlag,
            BOTAN_FFI_ERROR_BAD_MAC => Self::BadAuthCode,
            BOTAN_FFI_ERROR_BAD_PARAMETER => Self::BadParameter,
            BOTAN_FFI_ERROR_EXCEPTION_THROWN => Self::ExceptionThrown,
            BOTAN_FFI_ERROR_HTTP_ERROR => Self::HttpError,
            BOTAN_FFI_ERROR_INSUFFICIENT_BUFFER_SPACE => Self::InsufficientBufferSpace,
            BOTAN_FFI_ERROR_INTERNAL_ERROR => Self::InternalError,
            BOTAN_FFI_ERROR_INVALID_INPUT => Self::InvalidInput,
            BOTAN_FFI_ERROR_INVALID_KEY_LENGTH => Self::InvalidKeyLength,
            BOTAN_FFI_ERROR_INVALID_OBJECT => Self::InvalidObject,
            BOTAN_FFI_ERROR_INVALID_OBJECT_STATE => Self::InvalidObjectState,
            BOTAN_FFI_ERROR_KEY_NOT_SET => Self::KeyNotSet,
            BOTAN_FFI_ERROR_NOT_IMPLEMENTED => Self::NotImplemented,
            BOTAN_FFI_ERROR_NULL_POINTER => Self::NullPointer,
            BOTAN_FFI_ERROR_OUT_OF_MEMORY => Self::OutOfMemory,
            BOTAN_FFI_ERROR_SYSTEM_ERROR => Self::SystemError,
            BOTAN_FFI_ERROR_TLS_ERROR => Self::TlsError,
            BOTAN_FFI_ERROR_UNKNOWN_ERROR => Self::UnknownError,
            BOTAN_FFI_INVALID_VERIFIER => Self::InvalidVerifier,
            _ => Self::UnknownError,
        }
    }
}

/// Describes the key lengths accepted by an algorithm
pub struct KeySpec {
    min_keylen: usize,
    max_keylen: usize,
    mod_keylen: usize,
}

impl KeySpec {
    pub(crate) fn new(min_keylen: usize, max_keylen: usize, mod_keylen: usize) -> Result<KeySpec> {
        if min_keylen > max_keylen || mod_keylen == 0 {
            return Err(Error::with_message(
                ErrorType::ConversionError,
                "Bad key spec".to_owned(),
            ));
        }

        Ok(KeySpec {
            min_keylen,
            max_keylen,
            mod_keylen,
        })
    }

    /// Returns true if the given length is acceptable for this algorithm
    #[must_use]
    pub fn is_valid_keylength(&self, keylen: usize) -> bool {
        keylen >= self.min_keylen && keylen <= self.max_keylen && keylen % self.mod_keylen == 0
    }

    /// The minimum supported key length
    #[must_use]
    pub fn minimum_keylength(&self) -> usize {
        self.min_keylen
    }

    /// The maximum supported key length
    #[must_use]
    pub fn maximum_keylength(&self) -> usize {
        self.max_keylen
    }

    /// Supported key lengths must be a multiple of this value
    #[must_use]
    pub fn keylength_multiple(&self) -> usize {
        self.mod_keylen
    }
}
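The helper `call_botan_ffi_returning_vec_u8` above encodes the usual Botan FFI length-negotiation convention: call once with a size guess, and if the library answers with `BOTAN_FFI_ERROR_INSUFFICIENT_BUFFER_SPACE` it has written the required length back through the length pointer, so the buffer is resized and the call is retried exactly once. The following is a minimal, self-contained sketch of that pattern; `mock_ffi_call` and `ERROR_INSUFFICIENT_BUFFER_SPACE` are stand-ins invented for the example and are not part of botan-rs or the real Botan FFI.

```rust
// Stand-in for the FFI "buffer too small" error code.
const ERROR_INSUFFICIENT_BUFFER_SPACE: i32 = -10;

// Pretend C function: needs 16 bytes; when the caller's buffer is too small it
// reports the required size through `out_len`, mirroring the Botan convention.
fn mock_ffi_call(out: &mut [u8], out_len: &mut usize) -> i32 {
    const NEEDED: usize = 16;
    if *out_len < NEEDED {
        *out_len = NEEDED;
        return ERROR_INSUFFICIENT_BUFFER_SPACE;
    }
    for (i, b) in out[..NEEDED].iter_mut().enumerate() {
        *b = i as u8;
    }
    *out_len = NEEDED;
    0
}

// Same grow-and-retry logic as the crate helper, written against the mock.
fn retrying_call(initial_size: usize) -> Result<Vec<u8>, i32> {
    let mut output = vec![0u8; initial_size];
    let mut out_len = output.len();

    // First attempt with the caller's size guess.
    let rc = mock_ffi_call(&mut output, &mut out_len);
    if rc == 0 {
        output.truncate(out_len);
        return Ok(output);
    } else if rc != ERROR_INSUFFICIENT_BUFFER_SPACE {
        return Err(rc);
    }

    // The callee reported the size it needs in `out_len`; retry once.
    output.resize(out_len, 0);
    let rc = mock_ffi_call(&mut output, &mut out_len);
    if rc != 0 {
        return Err(rc);
    }
    output.truncate(out_len);
    Ok(output)
}

fn main() {
    // A deliberately undersized first guess forces the retry path.
    let bytes = retrying_call(4).expect("mock call succeeds after one retry");
    assert_eq!(bytes.len(), 16);
    println!("received {} bytes", bytes.len());
}
```

Retrying at most once keeps the helper simple and is safe because the first failing call already reports the exact size the library needs.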
random
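For context, the `Error`, `ErrorType`, and `KeySpec` types defined in utils.rs above are what callers of the crate see when an FFI call fails or when querying an algorithm's key requirements. The sketch below mirrors the usage shown in the crate's own cipher tests (AES-128/GCM with a wrong-length key); it assumes the `botan` crate and the native Botan library are available at build time.

```rust
fn main() -> Result<(), botan::Error> {
    let mut cipher = botan::Cipher::new("AES-128/GCM", botan::CipherDirection::Encrypt)?;

    // KeySpec describes which key lengths the algorithm accepts.
    let key_spec = cipher.key_spec()?;
    assert!(key_spec.is_valid_keylength(16));
    assert!(!key_spec.is_valid_keylength(42));

    // A wrong-length key is rejected with a structured error rather than a panic.
    let err = cipher.set_key(&vec![0u8; 42]).unwrap_err();
    assert_eq!(err.error_type(), botan::ErrorType::InvalidKeyLength);
    println!("rejected bad key: {}", err);

    // A valid 128-bit key is accepted.
    cipher.set_key(&vec![0u8; 16])?;
    Ok(())
}
```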
Related code excerpts retrieved alongside this file (ranked context items from the same repository): botan/src/bcrypt.rs (bcrypt_hash, bcrypt_verify), botan/src/kdf.rs (kdf), botan/src/pbkdf.rs (pbkdf, scrypt, derive_key_from_password, derive_key_from_password_timed), botan/src/memutils.rs (hex_encode, hex_decode, base64_encode, base64_decode, scrub_mem, const_time_compare), botan/src/keywrap.rs (nist_key_wrap, nist_key_unwrap), botan/src/pubkey.rs (pkcs_hash_id), botan/tests/tests.rs and botan/tests/wycheproof.rs (unit and Wycheproof tests), and build helpers from botan-src/src/lib.rs, botan-src/examples/build.rs, and botan-sys/build.rs.
test_group in test_set.test_groups {\n\n let cipher_name = botan_cipher_name(test_group.key_size);\n\n\n\n let tag_first = cipher_name.contains(\"/SIV\");\n\n\n\n for test in &test_group.tests {\n\n // Cipher object must be created each time to avoid a bug in EAX encryption\n\n let mut cipher = botan::Cipher::new(&cipher_name, botan::CipherDirection::Encrypt)?;\n\n\n\n cipher.set_key(&test.key).unwrap();\n\n cipher.set_associated_data(&test.aad).unwrap();\n\n\n\n if test.result == wycheproof::TestResult::Invalid\n\n && test\n", "file_path": "botan/tests/wycheproof.rs", "rank": 62, "score": 35409.49273664087 }, { "content": "#[test]\n\nfn test_rng() {\n\n unsafe {\n\n let mut rng = std::ptr::null_mut();\n\n botan_rng_init(&mut rng, std::ptr::null());\n\n\n\n let mut rng1 = vec![0u8; 16];\n\n let mut rng2 = vec![0u8; 16];\n\n assert_eq!(botan_rng_get(rng, rng1.as_mut_ptr(), rng1.len()), 0);\n\n assert_eq!(botan_rng_get(rng, rng2.as_mut_ptr(), rng2.len()), 0);\n\n\n\n assert!(rng1 != rng2);\n\n\n\n assert_eq!(botan_rng_destroy(rng), 0);\n\n }\n\n}\n", "file_path": "botan-sys/tests/tests.rs", "rank": 63, "score": 35409.49273664087 }, { "content": "#[test]\n\nfn test_hex() {\n\n let bin = vec![0x42, 0x23, 0x45, 0x8F];\n\n let mut out = Vec::new();\n\n out.resize(bin.len() * 2, 0);\n\n\n\n unsafe {\n\n assert_eq!(\n\n botan_hex_encode(bin.as_ptr(), bin.len(), out.as_mut_ptr(), 0),\n\n 0\n\n );\n\n }\n\n\n\n assert_eq!(out[0], '4' as _);\n\n assert_eq!(out[1], '2' as _);\n\n assert_eq!(out[2], '2' as _);\n\n assert_eq!(out[3], '3' as _);\n\n assert_eq!(out[4], '4' as _);\n\n assert_eq!(out[5], '5' as _);\n\n assert_eq!(out[6], '8' as _);\n\n assert_eq!(out[7], 'F' as _);\n", "file_path": "botan-sys/tests/tests.rs", "rank": 64, "score": 35409.49273664087 }, { "content": "#[test]\n\nfn test_version() {\n\n unsafe {\n\n let api_version = botan_ffi_api_version();\n\n\n\n assert!(botan_ffi_supports_api(api_version) == 0);\n\n assert!(botan_ffi_supports_api(api_version + 1) != 0);\n\n\n\n #[cfg(feature = \"botan3\")]\n\n {\n\n assert_eq!(botan_version_major(), 3);\n\n }\n\n\n\n #[cfg(not(feature = \"botan3\"))]\n\n {\n\n assert_eq!(botan_version_major(), 2);\n\n assert!(botan_version_minor() > 8);\n\n }\n\n }\n\n}\n\n\n", "file_path": "botan-sys/tests/tests.rs", "rank": 65, "score": 35409.49273664087 }, { "content": "use cty::{c_char, c_int};\n\n\n\n#[allow(clippy::upper_case_acronyms)]\n\n\n\npub type BOTAN_FFI_ERROR = c_int;\n\n\n\npub const BOTAN_FFI_SUCCESS: BOTAN_FFI_ERROR = 0;\n\npub const BOTAN_FFI_INVALID_VERIFIER: BOTAN_FFI_ERROR = 1;\n\npub const BOTAN_FFI_ERROR_INVALID_INPUT: BOTAN_FFI_ERROR = -1;\n\npub const BOTAN_FFI_ERROR_BAD_MAC: BOTAN_FFI_ERROR = -2;\n\npub const BOTAN_FFI_ERROR_INSUFFICIENT_BUFFER_SPACE: BOTAN_FFI_ERROR = -10;\n\npub const BOTAN_FFI_ERROR_EXCEPTION_THROWN: BOTAN_FFI_ERROR = -20;\n\npub const BOTAN_FFI_ERROR_OUT_OF_MEMORY: BOTAN_FFI_ERROR = -21;\n\npub const BOTAN_FFI_ERROR_SYSTEM_ERROR: BOTAN_FFI_ERROR = -22;\n\npub const BOTAN_FFI_ERROR_INTERNAL_ERROR: BOTAN_FFI_ERROR = -23;\n\npub const BOTAN_FFI_ERROR_BAD_FLAG: BOTAN_FFI_ERROR = -30;\n\npub const BOTAN_FFI_ERROR_NULL_POINTER: BOTAN_FFI_ERROR = -31;\n\npub const BOTAN_FFI_ERROR_BAD_PARAMETER: BOTAN_FFI_ERROR = -32;\n\npub const BOTAN_FFI_ERROR_KEY_NOT_SET: BOTAN_FFI_ERROR = -33;\n\npub const BOTAN_FFI_ERROR_INVALID_KEY_LENGTH: BOTAN_FFI_ERROR = -34;\n", "file_path": "botan-sys/src/errors.rs", "rank": 66, "score": 34017.59551148943 }, { "content": "pub const BOTAN_FFI_ERROR_INVALID_OBJECT_STATE: BOTAN_FFI_ERROR = 
-35;\n\npub const BOTAN_FFI_ERROR_NOT_IMPLEMENTED: BOTAN_FFI_ERROR = -40;\n\npub const BOTAN_FFI_ERROR_INVALID_OBJECT: BOTAN_FFI_ERROR = -50;\n\npub const BOTAN_FFI_ERROR_TLS_ERROR: BOTAN_FFI_ERROR = -75;\n\npub const BOTAN_FFI_ERROR_HTTP_ERROR: BOTAN_FFI_ERROR = -76;\n\npub const BOTAN_FFI_ERROR_UNKNOWN_ERROR: BOTAN_FFI_ERROR = -100;\n\n\n\nextern \"C\" {\n\n\n\n pub fn botan_error_description(err: BOTAN_FFI_ERROR) -> *const c_char;\n\n\n\n #[cfg(feature = \"botan3\")]\n\n pub fn botan_error_last_exception_message() -> *const c_char;\n\n\n\n}\n", "file_path": "botan-sys/src/errors.rs", "rank": 67, "score": 34016.33539441544 }, { "content": "fn botan_lib_major_version() -> i32 {\n\n #[cfg(feature = \"botan3\")]\n\n {\n\n 3\n\n }\n\n #[cfg(not(feature = \"botan3\"))]\n\n {\n\n 2\n\n }\n\n}\n\n\n", "file_path": "botan-sys/build.rs", "rank": 68, "score": 33035.61226971031 }, { "content": "use std::env;\n\nuse std::path::PathBuf;\n\nuse std::process::Command;\n\n\n\nconst BUILD_ERROR_MSG: &str = \"Unable to build botan.\";\n\nconst SRC_DIR_ERROR_MSG: &str = \"Unable to find the source directory.\";\n\nconst SRC_DIR: &str = \"botan\";\n\nconst INCLUDE_DIR: &str = \"build/include/botan\";\n\n\n\nmacro_rules! pathbuf_to_string {\n\n ($s: ident) => {\n\n $s.to_str().expect(BUILD_ERROR_MSG).to_string()\n\n };\n\n}\n\n\n", "file_path": "botan-src/src/lib.rs", "rank": 73, "score": 11.846930217922402 }, { "content": "use crate::utils::*;\n\nuse botan_sys::*;\n\n\n\n#[derive(Debug)]\n\n/// A cryptographic random number generator\n\npub struct RandomNumberGenerator {\n\n obj: botan_rng_t,\n\n}\n\n\n\nbotan_impl_drop!(RandomNumberGenerator, botan_rng_destroy);\n\n\n\nimpl RandomNumberGenerator {\n\n fn new_of_type(typ: &str) -> Result<RandomNumberGenerator> {\n\n let typ = make_cstr(typ)?;\n\n let obj = botan_init!(botan_rng_init, typ.as_ptr())?;\n\n Ok(RandomNumberGenerator { obj })\n\n }\n\n\n\n pub(crate) fn handle(&self) -> botan_rng_t {\n\n self.obj\n", "file_path": "botan/src/rng.rs", "rank": 74, "score": 11.748600721944268 }, { "content": "mod version;\n\nmod x509;\n\n\n\npub use block::*;\n\npub use cipher::*;\n\npub use errors::*;\n\npub use fpe::*;\n\npub use hash::*;\n\npub use kdf::*;\n\npub use keywrap::*;\n\npub use mac::*;\n\npub use mp::*;\n\npub use otp::*;\n\npub use passhash::*;\n\npub use pk_ops::*;\n\npub use pubkey::*;\n\npub use rng::*;\n\npub use utils::*;\n\npub use version::*;\n\npub use x509::*;\n", "file_path": "botan-sys/src/lib.rs", "rank": 75, "score": 11.704244839669093 }, { "content": " pub fn matches_hostname(&self, hostname: &str) -> Result<bool> {\n\n let hostname = make_cstr(hostname)?;\n\n let rc = unsafe { botan_x509_cert_hostname_match(self.obj, hostname.as_ptr()) };\n\n\n\n if rc == 0 {\n\n Ok(true)\n\n } else if rc == -1 {\n\n Ok(false)\n\n } else {\n\n Err(Error::from_rc(rc))\n\n }\n\n }\n\n}\n", "file_path": "botan/src/x509.rs", "rank": 76, "score": 10.879645915505343 }, { "content": " fn to_string(&self) -> String {\n\n let code = match self {\n\n CertValidationStatus::Success(x) => x,\n\n CertValidationStatus::Failed(x) => x,\n\n };\n\n\n\n unsafe {\n\n let result_str = botan_x509_cert_validation_status(*code);\n\n\n\n let cstr = CStr::from_ptr(result_str);\n\n cstr.to_str().unwrap().to_owned()\n\n }\n\n }\n\n}\n\n\n\nimpl Certificate {\n\n pub(crate) fn handle(&self) -> botan_x509_cert_t {\n\n self.obj\n\n }\n\n\n", "file_path": "botan/src/x509.rs", "rank": 78, "score": 9.953881731175471 }, { "content": " /// Crate a new MPI duplicating the value of self\n\n 
pub fn duplicate(&self) -> Result<MPI> {\n\n let mpi = MPI::new()?;\n\n botan_call!(botan_mp_set_from_mp, mpi.obj, self.obj)?;\n\n Ok(mpi)\n\n }\n\n\n\n /// Set self to value specified with an i32\n\n pub fn set_i32(&mut self, val: i32) -> Result<()> {\n\n botan_call!(botan_mp_set_from_int, self.obj, val)\n\n }\n\n\n\n /// Set self to value specified with a string\n\n pub fn set_str(&mut self, val: &str) -> Result<()> {\n\n let cstr = make_cstr(val)?;\n\n botan_call!(botan_mp_set_from_str, self.obj, cstr.as_ptr())\n\n }\n\n\n\n /// Set self to value specified with an array of bytes (big-endian)\n\n pub fn set_bytes(&mut self, val: &[u8]) -> Result<()> {\n", "file_path": "botan/src/mp.rs", "rank": 79, "score": 9.749042532927554 }, { "content": "pub use crate::mp::*;\n\npub use crate::rng::*;\n\npub use crate::utils::*;\n\npub use bcrypt::*;\n\npub use block::*;\n\npub use cipher::*;\n\npub use fpe::*;\n\npub use hash::*;\n\npub use kdf::*;\n\npub use keywrap::*;\n\npub use mac::*;\n\npub use memutils::*;\n\npub use otp::*;\n\npub use pbkdf::*;\n\npub use pk_ops::*;\n\npub use pubkey::*;\n\npub use version::*;\n\npub use x509::*;\n", "file_path": "botan/src/lib.rs", "rank": 81, "score": 9.571791823555834 }, { "content": "\n\nbotan_impl_drop!(Privkey, botan_privkey_destroy);\n\n\n\nimpl Privkey {\n\n pub(crate) fn handle(&self) -> botan_privkey_t {\n\n self.obj\n\n }\n\n\n\n /// Create a new private key\n\n ///\n\n pub fn create(alg: &str, params: &str, rng: &mut RandomNumberGenerator) -> Result<Privkey> {\n\n let obj = botan_init!(\n\n botan_privkey_create,\n\n make_cstr(alg)?.as_ptr(),\n\n make_cstr(params)?.as_ptr(),\n\n rng.handle()\n\n )?;\n\n\n\n Ok(Privkey { obj })\n\n }\n", "file_path": "botan/src/pubkey.rs", "rank": 82, "score": 9.439301947717638 }, { "content": "use crate::utils::*;\n\nuse botan_sys::*;\n\n\n\nuse crate::rng::RandomNumberGenerator;\n\n\n\nuse core::cmp::{Eq, Ord, Ordering};\n\nuse core::fmt;\n\nuse core::str::FromStr;\n\n\n\nuse core::ops::{\n\n Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Rem, RemAssign, Shl, ShlAssign, Shr,\n\n ShrAssign, Sub, SubAssign,\n\n};\n\n\n\n/// A big integer type\n\n#[allow(clippy::upper_case_acronyms)]\n\npub struct MPI {\n\n obj: botan_mp_t,\n\n}\n\n\n", "file_path": "botan/src/mp.rs", "rank": 83, "score": 9.19287837419099 }, { "content": " /// PEM encode the key (encrypted), specifying cipher/hash options\n\n pub fn pem_encode_encrypted_with_options(\n\n &self,\n\n passphrase: &str,\n\n cipher: &str,\n\n pbkdf: &str,\n\n pbkdf_iter: usize,\n\n rng: &mut RandomNumberGenerator,\n\n ) -> Result<String> {\n\n let pem_len = 4096; // fixme\n\n\n\n let passphrase = make_cstr(passphrase)?;\n\n let cipher = make_cstr(cipher)?;\n\n let pbkdf = make_cstr(pbkdf)?;\n\n\n\n call_botan_ffi_returning_string(pem_len, &|out_buf, out_len| unsafe {\n\n botan_privkey_export_encrypted_pbkdf_iter(\n\n self.obj,\n\n out_buf,\n\n out_len,\n", "file_path": "botan/src/pubkey.rs", "rank": 84, "score": 8.984070622724943 }, { "content": "\n\nimpl Version {\n\n /// Read the version information of the currently linked lib\n\n pub fn current() -> Result<Version> {\n\n unsafe {\n\n let version_str = CStr::from_ptr(botan_version_string())\n\n .to_str()\n\n .map_err(Error::conversion_error)?;\n\n\n\n Ok(Version {\n\n major: botan_version_major(),\n\n minor: botan_version_minor(),\n\n patch: botan_version_patch(),\n\n release_date: botan_version_datestamp(),\n\n ffi_api: botan_ffi_api_version(),\n\n string: version_str.to_string(),\n\n })\n\n }\n\n }\n\n\n", 
"file_path": "botan/src/version.rs", "rank": 85, "score": 8.97642034348269 }, { "content": " r => Err(Error::with_message(\n\n ErrorType::ConversionError,\n\n format!(\"Unexpected botan_mp_cmp result {}\", r),\n\n )),\n\n }\n\n }\n\n\n\n /// Flip the sign of self\n\n pub fn flip_sign(&mut self) -> Result<()> {\n\n botan_call!(botan_mp_flip_sign, self.obj)\n\n }\n\n\n\n /// Addition operator\n\n pub fn mp_add(&self, other: &MPI) -> Result<MPI> {\n\n let r = MPI::new()?;\n\n botan_call!(botan_mp_add, r.obj, self.obj, other.obj)?;\n\n Ok(r)\n\n }\n\n\n\n /// Addition operator, assignment version\n", "file_path": "botan/src/mp.rs", "rank": 86, "score": 8.928640992469633 }, { "content": " Ok(out)\n\n }\n\n\n\n /// Encrypt a message using the specified padding method\n\n pub fn encrypt(\n\n &self,\n\n message: &[u8],\n\n padding: &str,\n\n rng: &mut RandomNumberGenerator,\n\n ) -> Result<Vec<u8>> {\n\n let mut op = Encryptor::new(self, padding)?;\n\n op.encrypt(message, rng)\n\n }\n\n\n\n /// Verify a message that was signed using the specified padding method\n\n pub fn verify(&self, message: &[u8], signature: &[u8], padding: &str) -> Result<bool> {\n\n let mut op = Verifier::new(self, padding)?;\n\n op.update(message)?;\n\n op.finish(signature)\n\n }\n\n}\n\n\n\n/// Return the identifier used for PKCS1 v1.5 signatures for the specified hash\n", "file_path": "botan/src/pubkey.rs", "rank": 87, "score": 8.894984359087807 }, { "content": "\n\n /// Sign a message using the specified padding method\n\n pub fn sign(\n\n &self,\n\n message: &[u8],\n\n padding: &str,\n\n rng: &mut RandomNumberGenerator,\n\n ) -> Result<Vec<u8>> {\n\n let mut signer = Signer::new(self, padding)?;\n\n signer.update(message)?;\n\n signer.finish(rng)\n\n }\n\n\n\n /// Decrypt a message that was encrypted using the specified padding method\n\n pub fn decrypt(&self, ctext: &[u8], padding: &str) -> Result<Vec<u8>> {\n\n let mut decryptor = Decryptor::new(self, padding)?;\n\n decryptor.decrypt(ctext)\n\n }\n\n\n\n /// Perform key agreement\n", "file_path": "botan/src/pubkey.rs", "rank": 88, "score": 8.773654389002365 }, { "content": "\n\n /// Load PEM string as an unencrypted PKCS#8 private key\n\n pub fn load_pem(pem: &str) -> Result<Privkey> {\n\n let cpem = make_cstr(pem)?;\n\n let obj = botan_init!(\n\n botan_privkey_load,\n\n ptr::null_mut(),\n\n cpem.as_ptr() as *const u8,\n\n pem.len(),\n\n ptr::null()\n\n )?;\n\n\n\n Ok(Privkey { obj })\n\n }\n\n\n\n /// Load DER bytes as an encrypted PKCS#8 private key\n\n pub fn load_encrypted_der(der: &[u8], passphrase: &str) -> Result<Privkey> {\n\n let passphrase = make_cstr(passphrase)?;\n\n let obj = botan_init!(\n\n botan_privkey_load,\n", "file_path": "botan/src/pubkey.rs", "rank": 89, "score": 8.725914188623111 }, { "content": " /// ```\n\n pub fn is_prime(&self, rng: &mut RandomNumberGenerator, test_prob: usize) -> Result<bool> {\n\n botan_bool_in_rc!(botan_mp_is_prime, self.obj, rng.handle(), test_prob)\n\n }\n\n\n\n /// Return the greatest common divisor of x and y\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use core::str::FromStr;\n\n /// let x = botan::MPI::from_str(\"1111111111111111\").unwrap();\n\n /// let y = botan::MPI::from_str(\"111111111111\").unwrap();\n\n /// assert_eq!(botan::MPI::gcd(&x, &y).unwrap(), botan::MPI::from_str(\"1111\").unwrap());\n\n /// ```\n\n pub fn gcd(x: &MPI, y: &MPI) -> Result<MPI> {\n\n let r = MPI::new()?;\n\n botan_call!(botan_mp_gcd, r.obj, x.obj, y.obj)?;\n\n Ok(r)\n\n }\n\n\n", "file_path": "botan/src/mp.rs", "rank": 90, "score": 
8.538331679982752 }, { "content": "\n\n /// Load an RSA private key (p,q,e)\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use std::str::FromStr;\n\n /// let p = botan::MPI::from_str(\"289698020102256958291511331409682926199\").unwrap();\n\n /// let q = botan::MPI::from_str(\"293497288893125842977275290547344412783\").unwrap();\n\n /// let e = botan::MPI::from_str(\"65537\").unwrap();\n\n /// let rsa = botan::Privkey::load_rsa(&p, &q, &e).unwrap();\n\n /// ```\n\n pub fn load_rsa(p: &MPI, q: &MPI, e: &MPI) -> Result<Privkey> {\n\n let obj = botan_init!(botan_privkey_load_rsa, p.handle(), q.handle(), e.handle())?;\n\n Ok(Privkey { obj })\n\n }\n\n\n\n /// Load an Ed25519 private key\n\n ///\n\n /// # Examples\n", "file_path": "botan/src/pubkey.rs", "rank": 92, "score": 8.48948768173775 }, { "content": " ptr::null_mut(),\n\n der.as_ptr(),\n\n der.len(),\n\n passphrase.as_ptr()\n\n )?;\n\n Ok(Privkey { obj })\n\n }\n\n\n\n /// Load PEM string as an encrypted PKCS#8 private key\n\n pub fn load_encrypted_pem(pem: &str, passphrase: &str) -> Result<Privkey> {\n\n let passphrase = make_cstr(passphrase)?;\n\n let cpem = make_cstr(pem)?;\n\n let obj = botan_init!(\n\n botan_privkey_load,\n\n ptr::null_mut(),\n\n cpem.as_ptr() as *const u8,\n\n pem.len(),\n\n passphrase.as_ptr()\n\n )?;\n\n\n", "file_path": "botan/src/pubkey.rs", "rank": 93, "score": 8.468520855897419 }, { "content": " /// Decrypt in place\n\n ///\n\n /// # Errors\n\n ///\n\n /// Fails if the input is not a multiple of the block size, or if the\n\n /// key was not set on the object.\n\n pub fn decrypt_in_place(&self, buf: &mut [u8]) -> Result<()> {\n\n if buf.len() % self.block_size != 0 {\n\n return Err(Error::with_message(\n\n ErrorType::InvalidInput,\n\n \"Invalid input size\".to_string(),\n\n ));\n\n }\n\n\n\n let blocks = buf.len() / self.block_size;\n\n\n\n botan_call!(\n\n botan_block_cipher_decrypt_blocks,\n\n self.obj,\n\n buf.as_ptr(),\n", "file_path": "botan/src/block.rs", "rank": 94, "score": 8.270526392643777 }, { "content": " Err(Error::from_rc(rc))\n\n }\n\n }\n\n\n\n /// Return hash of the public key data\n\n pub fn fingerprint(&self, hash: &str) -> Result<String> {\n\n let hash = make_cstr(hash)?;\n\n let fprint_len = 64; // hashes > 512 bits are rare\n\n call_botan_ffi_returning_string(fprint_len, &|out_buf, out_len| unsafe {\n\n botan_pubkey_fingerprint(self.obj, hash.as_ptr(), out_buf, out_len)\n\n })\n\n }\n\n\n\n /// DER encode this public key\n\n pub fn der_encode(&self) -> Result<Vec<u8>> {\n\n let der_len = 4096; // fixme\n\n call_botan_ffi_returning_vec_u8(der_len, &|out_buf, out_len| unsafe {\n\n botan_pubkey_export(self.obj, out_buf, out_len, 0u32)\n\n })\n\n }\n", "file_path": "botan/src/pubkey.rs", "rank": 95, "score": 8.245825048535357 }, { "content": "extern crate botan;\n\n\n\nuse std::str::FromStr;\n\n\n\n#[test]\n", "file_path": "botan/tests/tests.rs", "rank": 96, "score": 8.196574222505877 }, { "content": "use cty::{c_char, c_int};\n\n\n\nuse rng::botan_rng_t;\n\n\n\npub enum botan_mp_struct {}\n\npub type botan_mp_t = *mut botan_mp_struct;\n\n\n\nextern \"C\" {\n\n\n\n pub fn botan_mp_init(mp: *mut botan_mp_t) -> c_int;\n\n pub fn botan_mp_destroy(mp: botan_mp_t) -> c_int;\n\n pub fn botan_mp_to_hex(mp: botan_mp_t, out: *mut c_char) -> c_int;\n\n pub fn botan_mp_to_str(\n\n mp: botan_mp_t,\n\n base: u8,\n\n out: *mut c_char,\n\n out_len: *mut usize,\n\n ) -> c_int;\n\n pub fn botan_mp_clear(mp: botan_mp_t) -> c_int;\n\n pub fn botan_mp_set_from_int(mp: botan_mp_t, initial_value: c_int) -> 
c_int;\n", "file_path": "botan-sys/src/mp.rs", "rank": 97, "score": 8.180743205958844 }, { "content": " /// assert!(cipher.encrypt_blocks(&vec![0; 17]).is_err());\n\n /// // Key is set and multiple of block size - ok\n\n /// assert!(cipher.encrypt_blocks(&vec![0; 16]).is_ok());\n\n /// ```\n\n pub fn encrypt_blocks(&self, input: &[u8]) -> Result<Vec<u8>> {\n\n let mut ivec = input.to_vec();\n\n self.encrypt_in_place(&mut ivec)?;\n\n Ok(ivec)\n\n }\n\n\n\n /// Encrypt in place\n\n ///\n\n /// # Errors\n\n ///\n\n /// Fails if the input is not a multiple of the block size, or if the\n\n /// key was not set on the object.\n\n pub fn encrypt_in_place(&self, buf: &mut [u8]) -> Result<()> {\n\n if buf.len() % self.block_size != 0 {\n\n return Err(Error::with_message(\n\n ErrorType::InvalidInput,\n", "file_path": "botan/src/block.rs", "rank": 98, "score": 8.157062970967914 }, { "content": " /// # Examples\n\n ///\n\n /// ```\n\n /// let cipher = botan::BlockCipher::new(\"AES-128\");\n\n /// assert!(cipher.is_ok());\n\n /// let no_such_cipher = botan::BlockCipher::new(\"SuperCipher9000\");\n\n /// assert!(no_such_cipher.is_err());\n\n /// ```\n\n pub fn new(name: &str) -> Result<BlockCipher> {\n\n let obj = botan_init!(botan_block_cipher_init, make_cstr(name)?.as_ptr())?;\n\n\n\n let block_size = {\n\n let rc = unsafe { botan_block_cipher_block_size(obj) };\n\n if rc < 0 {\n\n return Err(Error::from_rc(rc));\n\n }\n\n rc as usize\n\n };\n\n\n\n let (min_keylen, max_keylen, mod_keylen) =\n", "file_path": "botan/src/block.rs", "rank": 99, "score": 8.061866051408854 } ]
Rust
src/ledc/lstimer3_conf.rs
ForsakenHarmony/esp32c3-pac
7d9eb9a5b5a51077d1d1eb6c6efd186064b7149b
#[doc = "Reader of register LSTIMER3_CONF"]
pub type R = crate::R<u32, super::LSTIMER3_CONF>;
#[doc = "Writer for register LSTIMER3_CONF"]
pub type W = crate::W<u32, super::LSTIMER3_CONF>;
#[doc = "Register LSTIMER3_CONF `reset()`'s with value 0"]
impl crate::ResetValue for super::LSTIMER3_CONF {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Write proxy for field `LSTIMER3_PARA_UP`"]
pub struct LSTIMER3_PARA_UP_W<'a> {
    w: &'a mut W,
}
impl<'a> LSTIMER3_PARA_UP_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 25)) | (((value as u32) & 0x01) << 25);
        self.w
    }
}
#[doc = "Reader of field `TICK_SEL_LSTIMER3`"]
pub type TICK_SEL_LSTIMER3_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `TICK_SEL_LSTIMER3`"]
pub struct TICK_SEL_LSTIMER3_W<'a> {
    w: &'a mut W,
}
impl<'a> TICK_SEL_LSTIMER3_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 24)) | (((value as u32) & 0x01) << 24);
        self.w
    }
}
#[doc = "Reader of field `LSTIMER3_RST`"]
pub type LSTIMER3_RST_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `LSTIMER3_RST`"]
pub struct LSTIMER3_RST_W<'a> {
    w: &'a mut W,
}
impl<'a> LSTIMER3_RST_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 23)) | (((value as u32) & 0x01) << 23);
        self.w
    }
}
#[doc = "Reader of field `LSTIMER3_PAUSE`"]
pub type LSTIMER3_PAUSE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `LSTIMER3_PAUSE`"]
pub struct LSTIMER3_PAUSE_W<'a> {
    w: &'a mut W,
}
impl<'a> LSTIMER3_PAUSE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 22)) | (((value as u32) & 0x01) << 22);
        self.w
    }
}
#[doc = "Reader of field `CLK_DIV_LSTIMER3`"]
pub type CLK_DIV_LSTIMER3_R = crate::R<u32, u32>;
#[doc = "Write proxy for field `CLK_DIV_LSTIMER3`"]
pub struct CLK_DIV_LSTIMER3_W<'a> {
    w: &'a mut W,
}
impl<'a> CLK_DIV_LSTIMER3_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u32) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x0003_ffff << 4)) | (((value as u32) & 0x0003_ffff) << 4);
        self.w
    }
}
#[doc = "Reader of field `LSTIMER3_DUTY_RES`"]
pub type LSTIMER3_DUTY_RES_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `LSTIMER3_DUTY_RES`"]
pub struct LSTIMER3_DUTY_RES_W<'a> {
    w: &'a mut W,
}
impl<'a> LSTIMER3_DUTY_RES_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !0x0f) | ((value as u32) & 0x0f);
        self.w
    }
}
impl R {
    #[doc = "Bit 24"]
    #[inline(always)]
    pub fn tick_sel_lstimer3(&self) -> TICK_SEL_LSTIMER3_R {
        TICK_SEL_LSTIMER3_R::new(((self.bits >> 24) & 0x01) != 0)
    }
    #[doc = "Bit 23"]
    #[inline(always)]
    pub fn lstimer3_rst(&self) -> LSTIMER3_RST_R {
        LSTIMER3_RST_R::new(((self.bits >> 23) & 0x01) != 0)
    }
    #[doc = "Bit 22"]
    #[inline(always)]
    pub fn lstimer3_pause(&self) -> LSTIMER3_PAUSE_R {
        LSTIMER3_PAUSE_R::new(((self.bits >> 22) & 0x01) != 0)
    }
    #[doc = "Bits 4:21"]
    #[inline(always)]
    pub fn clk_div_lstimer3(&self) -> CLK_DIV_LSTIMER3_R {
        CLK_DIV_LSTIMER3_R::new(((self.bits >> 4) & 0x0003_ffff) as u32)
    }
    #[doc = "Bits 0:3"]
    #[inline(always)]
    pub fn lstimer3_duty_res(&self) -> LSTIMER3_DUTY_RES_R {
        LSTIMER3_DUTY_RES_R::new((self.bits & 0x0f) as u8)
    }
}
impl W {
    #[doc = "Bit 25"]
    #[inline(always)]
    pub fn lstimer3_para_up(&mut self) -> LSTIMER3_PARA_UP_W {
        LSTIMER3_PARA_UP_W { w: self }
    }
    #[doc = "Bit 24"]
    #[inline(always)]
    pub fn tick_sel_lstimer3(&mut self) -> TICK_SEL_LSTIMER3_W {
        TICK_SEL_LSTIMER3_W { w: self }
    }
    #[doc = "Bit 23"]
    #[inline(always)]
    pub fn lstimer3_rst(&mut self) -> LSTIMER3_RST_W {
        LSTIMER3_RST_W { w: self }
    }
    #[doc = "Bit 22"]
    #[inline(always)]
    pub fn lstimer3_pause(&mut self) -> LSTIMER3_PAUSE_W {
        LSTIMER3_PAUSE_W { w: self }
    }
    #[doc = "Bits 4:21"]
    #[inline(always)]
    pub fn clk_div_lstimer3(&mut self) -> CLK_DIV_LSTIMER3_W {
        CLK_DIV_LSTIMER3_W { w: self }
    }
    #[doc = "Bits 0:3"]
    #[inline(always)]
    pub fn lstimer3_duty_res(&mut self) -> LSTIMER3_DUTY_RES_W {
        LSTIMER3_DUTY_RES_W { w: self }
    }
}
#[doc = "Reader of register LSTIMER3_CONF"] pub type R = crate::R<u32, super::LSTIMER3_CONF>; #[doc = "Writer for register LSTIMER3_CONF"] pub type W = crate::W<u32, super::LSTIMER3_CONF>; #[doc = "Register LSTIMER3_CONF `reset()`'s with value 0"] impl crate::ResetValue for super::LSTIMER3_CONF { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Write proxy for field `LSTIMER3_PARA_UP`"] pub struct LSTIMER3_PARA_UP_W<'a> { w: &'a mut W, } impl<'a> LSTIMER3_PARA_UP_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn
[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 22)) | (((value as u32) & 0x01) << 22); self.w } } #[doc = "Reader of field `CLK_DIV_LSTIMER3`"] pub type CLK_DIV_LSTIMER3_R = crate::R<u32, u32>; #[doc = "Write proxy for field `CLK_DIV_LSTIMER3`"] pub struct CLK_DIV_LSTIMER3_W<'a> { w: &'a mut W, } impl<'a> CLK_DIV_LSTIMER3_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u32) -> &'a mut W { self.w.bits = (self.w.bits & !(0x0003_ffff << 4)) | (((value as u32) & 0x0003_ffff) << 4); self.w } } #[doc = "Reader of field `LSTIMER3_DUTY_RES`"] pub type LSTIMER3_DUTY_RES_R = crate::R<u8, u8>; #[doc = "Write proxy for field `LSTIMER3_DUTY_RES`"] pub struct LSTIMER3_DUTY_RES_W<'a> { w: &'a mut W, } impl<'a> LSTIMER3_DUTY_RES_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !0x0f) | ((value as u32) & 0x0f); self.w } } impl R { #[doc = "Bit 24"] #[inline(always)] pub fn tick_sel_lstimer3(&self) -> TICK_SEL_LSTIMER3_R { TICK_SEL_LSTIMER3_R::new(((self.bits >> 24) & 0x01) != 0) } #[doc = "Bit 23"] #[inline(always)] pub fn lstimer3_rst(&self) -> LSTIMER3_RST_R { LSTIMER3_RST_R::new(((self.bits >> 23) & 0x01) != 0) } #[doc = "Bit 22"] #[inline(always)] pub fn lstimer3_pause(&self) -> LSTIMER3_PAUSE_R { LSTIMER3_PAUSE_R::new(((self.bits >> 22) & 0x01) != 0) } #[doc = "Bits 4:21"] #[inline(always)] pub fn clk_div_lstimer3(&self) -> CLK_DIV_LSTIMER3_R { CLK_DIV_LSTIMER3_R::new(((self.bits >> 4) & 0x0003_ffff) as u32) } #[doc = "Bits 0:3"] #[inline(always)] pub fn lstimer3_duty_res(&self) -> LSTIMER3_DUTY_RES_R { LSTIMER3_DUTY_RES_R::new((self.bits & 0x0f) as u8) } } impl W { #[doc = "Bit 25"] #[inline(always)] pub fn lstimer3_para_up(&mut self) -> LSTIMER3_PARA_UP_W { LSTIMER3_PARA_UP_W { w: self } } #[doc = "Bit 24"] #[inline(always)] pub fn tick_sel_lstimer3(&mut self) -> TICK_SEL_LSTIMER3_W { TICK_SEL_LSTIMER3_W { w: self } } #[doc = "Bit 23"] #[inline(always)] pub fn lstimer3_rst(&mut self) -> LSTIMER3_RST_W { LSTIMER3_RST_W { w: self } } #[doc = "Bit 22"] #[inline(always)] pub fn lstimer3_pause(&mut self) -> LSTIMER3_PAUSE_W { LSTIMER3_PAUSE_W { w: self } } #[doc = "Bits 4:21"] #[inline(always)] pub fn clk_div_lstimer3(&mut self) -> CLK_DIV_LSTIMER3_W { CLK_DIV_LSTIMER3_W { w: self } } #[doc = "Bits 0:3"] #[inline(always)] pub fn lstimer3_duty_res(&mut self) -> LSTIMER3_DUTY_RES_W { LSTIMER3_DUTY_RES_W { w: self } } }
bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 25)) | (((value as u32) & 0x01) << 25); self.w } } #[doc = "Reader of field `TICK_SEL_LSTIMER3`"] pub type TICK_SEL_LSTIMER3_R = crate::R<bool, bool>; #[doc = "Write proxy for field `TICK_SEL_LSTIMER3`"] pub struct TICK_SEL_LSTIMER3_W<'a> { w: &'a mut W, } impl<'a> TICK_SEL_LSTIMER3_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 24)) | (((value as u32) & 0x01) << 24); self.w } } #[doc = "Reader of field `LSTIMER3_RST`"] pub type LSTIMER3_RST_R = crate::R<bool, bool>; #[doc = "Write proxy for field `LSTIMER3_RST`"] pub struct LSTIMER3_RST_W<'a> { w: &'a mut W, } impl<'a> LSTIMER3_RST_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 23)) | (((value as u32) & 0x01) << 23); self.w } } #[doc = "Reader of field `LSTIMER3_PAUSE`"] pub type LSTIMER3_PAUSE_R = crate::R<bool, bool>; #[doc = "Write proxy for field `LSTIMER3_PAUSE`"] pub struct LSTIMER3_PAUSE_W<'a> { w: &'a mut W, } impl<'a> LSTIMER3_PAUSE_W<'a> { #[doc = r"Sets the field bit"] #
random
[ { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "src/generic.rs", "rank": 0, "score": 153083.51901650874 }, { "content": "fn main() {\n\n if env::var_os(\"CARGO_FEATURE_RT\").is_some() {\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out.join(\"device.x\"))\n\n .unwrap()\n\n .write_all(include_bytes!(\"device.x\"))\n\n .unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n println!(\"cargo:rerun-if-changed=device.x\");\n\n }\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n}\n", "file_path": "build.rs", "rank": 1, "score": 58583.6630925218 }, { "content": "#[doc = \"This trait shows that register has `write`, `write_with_zero` and `reset` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Readable` can be also `modify`'ed\"]\n\npub trait Writable {}\n", "file_path": "src/generic.rs", "rank": 2, "score": 53767.823776449186 }, { "content": "#[doc = \"This trait shows that register has `read` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Writable` can be also `modify`'ed\"]\n\npub trait Readable {}\n", "file_path": "src/generic.rs", "rank": 3, "score": 53756.44927678981 }, { "content": "#[doc = \"Reader of register RESET_STATE\"]\n\npub type R = crate::R<u32, super::RESET_STATE>;\n\n#[doc = \"Writer for register RESET_STATE\"]\n\npub type W = crate::W<u32, super::RESET_STATE>;\n\n#[doc = \"Register RESET_STATE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::RESET_STATE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DRESET_MASK_PROCPU`\"]\n\npub type DRESET_MASK_PROCPU_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `DRESET_MASK_PROCPU`\"]\n\npub struct DRESET_MASK_PROCPU_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DRESET_MASK_PROCPU_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/rtccntl/reset_state.rs", "rank": 4, "score": 52553.76983904776 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 19)) | (((value as u32) & 0x01) << 19);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `OCD_HALT_ON_RESET_APPCPU`\"]\n\npub type OCD_HALT_ON_RESET_APPCPU_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `OCD_HALT_ON_RESET_APPCPU`\"]\n\npub struct OCD_HALT_ON_RESET_APPCPU_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> OCD_HALT_ON_RESET_APPCPU_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/rtccntl/reset_state.rs", "rank": 5, 
"score": 52543.79243576746 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 0x01) << 12);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `RESET_CAUSE_APPCPU`\"]\n\npub type RESET_CAUSE_APPCPU_R = crate::R<u8, u8>;\n\n#[doc = \"Reader of field `RESET_CAUSE_PROCPU`\"]\n\npub type RESET_CAUSE_PROCPU_R = crate::R<u8, u8>;\n\nimpl R {\n\n #[doc = \"Bit 25\"]\n\n #[inline(always)]\n\n pub fn dreset_mask_procpu(&self) -> DRESET_MASK_PROCPU_R {\n", "file_path": "src/rtccntl/reset_state.rs", "rank": 6, "score": 52539.4226859637 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 13)) | (((value as u32) & 0x01) << 13);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `STAT_VECTOR_SEL_APPCPU`\"]\n\npub type STAT_VECTOR_SEL_APPCPU_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `STAT_VECTOR_SEL_APPCPU`\"]\n\npub struct STAT_VECTOR_SEL_APPCPU_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> STAT_VECTOR_SEL_APPCPU_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/rtccntl/reset_state.rs", "rank": 7, "score": 52537.54650596362 }, { "content": " self.w.bits = (self.w.bits & !(0x01 << 22)) | (((value as u32) & 0x01) << 22);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `JTAG_RESET_FLAG_APPCPU`\"]\n\npub type JTAG_RESET_FLAG_APPCPU_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `JTAG_RESET_FLAG_PROCPU`\"]\n\npub type JTAG_RESET_FLAG_PROCPU_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `OCD_HALT_ON_RESET_PROCPU`\"]\n\npub type OCD_HALT_ON_RESET_PROCPU_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `OCD_HALT_ON_RESET_PROCPU`\"]\n\npub struct OCD_HALT_ON_RESET_PROCPU_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> OCD_HALT_ON_RESET_PROCPU_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/rtccntl/reset_state.rs", "rank": 8, "score": 52537.53629328451 }, { "content": "impl<'a> ALL_RESET_FLAG_CLR_APPCPU_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 17)) | (((value as u32) & 0x01) << 17);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `ALL_RESET_FLAG_CLR_PROCPU`\"]\n\npub struct ALL_RESET_FLAG_CLR_PROCPU_W<'a> {\n", "file_path": "src/rtccntl/reset_state.rs", "rank": 9, "score": 52537.00239890746 }, { "content": "#[doc = \"Write proxy for field `JTAG_RESET_FLAG_CLR_APPCPU`\"]\n\npub struct JTAG_RESET_FLAG_CLR_APPCPU_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> JTAG_RESET_FLAG_CLR_APPCPU_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn 
clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 23)) | (((value as u32) & 0x01) << 23);\n\n self.w\n", "file_path": "src/rtccntl/reset_state.rs", "rank": 10, "score": 52536.60390921234 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 25)) | (((value as u32) & 0x01) << 25);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `DRESET_MASK_APPCPU`\"]\n\npub type DRESET_MASK_APPCPU_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `DRESET_MASK_APPCPU`\"]\n\npub struct DRESET_MASK_APPCPU_W<'a> {\n", "file_path": "src/rtccntl/reset_state.rs", "rank": 11, "score": 52534.930134467686 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 18)) | (((value as u32) & 0x01) << 18);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `ALL_RESET_FLAG_CLR_APPCPU`\"]\n\npub struct ALL_RESET_FLAG_CLR_APPCPU_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "src/rtccntl/reset_state.rs", "rank": 12, "score": 52534.5268334017 }, { "content": "#[doc = \"Reader of register LSTIMER3_VALUE\"]\n\npub type R = crate::R<u32, super::LSTIMER3_VALUE>;\n\n#[doc = \"Reader of field `LSTIMER3_CNT`\"]\n\npub type LSTIMER3_CNT_R = crate::R<u16, u16>;\n\nimpl R {\n\n #[doc = \"Bits 0:13\"]\n\n #[inline(always)]\n\n pub fn lstimer3_cnt(&self) -> LSTIMER3_CNT_R {\n\n LSTIMER3_CNT_R::new((self.bits & 0x3fff) as u16)\n\n }\n\n}\n", "file_path": "src/ledc/lstimer3_value.rs", "rank": 13, "score": 52533.81184300065 }, { "content": "#[doc = \"Reader of register LSTIMER1_VALUE\"]\n\npub type R = crate::R<u32, super::LSTIMER1_VALUE>;\n\n#[doc = \"Reader of field `LSTIMER1_CNT`\"]\n\npub type LSTIMER1_CNT_R = crate::R<u16, u16>;\n\nimpl R {\n\n #[doc = \"Bits 0:13\"]\n\n #[inline(always)]\n\n pub fn lstimer1_cnt(&self) -> LSTIMER1_CNT_R {\n\n LSTIMER1_CNT_R::new((self.bits & 0x3fff) as u16)\n\n }\n\n}\n", "file_path": "src/ledc/lstimer1_value.rs", "rank": 14, "score": 52533.81184300065 }, { "content": "#[doc = \"Reader of register LSTIMER2_VALUE\"]\n\npub type R = crate::R<u32, super::LSTIMER2_VALUE>;\n\n#[doc = \"Reader of field `LSTIMER2_CNT`\"]\n\npub type LSTIMER2_CNT_R = crate::R<u16, u16>;\n\nimpl R {\n\n #[doc = \"Bits 0:13\"]\n\n #[inline(always)]\n\n pub fn lstimer2_cnt(&self) -> LSTIMER2_CNT_R {\n\n LSTIMER2_CNT_R::new((self.bits & 0x3fff) as u16)\n\n }\n\n}\n", "file_path": "src/ledc/lstimer2_value.rs", "rank": 15, "score": 52533.81184300065 }, { "content": "#[doc = \"Reader of register LSTIMER0_VALUE\"]\n\npub type R = crate::R<u32, super::LSTIMER0_VALUE>;\n\n#[doc = \"Reader of field `LSTIMER0_CNT`\"]\n\npub type LSTIMER0_CNT_R = crate::R<u16, u16>;\n\nimpl R {\n\n #[doc = \"Bits 0:13\"]\n\n #[inline(always)]\n\n pub fn lstimer0_cnt(&self) -> LSTIMER0_CNT_R {\n\n 
LSTIMER0_CNT_R::new((self.bits & 0x3fff) as u16)\n\n }\n\n}\n", "file_path": "src/ledc/lstimer0_value.rs", "rank": 16, "score": 52533.81184300065 }, { "content": " }\n\n}\n\n#[doc = \"Write proxy for field `JTAG_RESET_FLAG_CLR_PROCPU`\"]\n\npub struct JTAG_RESET_FLAG_CLR_PROCPU_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> JTAG_RESET_FLAG_CLR_PROCPU_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "src/rtccntl/reset_state.rs", "rank": 17, "score": 52532.78007568858 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> ALL_RESET_FLAG_CLR_PROCPU_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16);\n\n self.w\n\n }\n\n}\n", "file_path": "src/rtccntl/reset_state.rs", "rank": 18, "score": 52532.10544363614 }, { "content": "#[doc = \"Reader of field `ALL_RESET_FLAG_APPCPU`\"]\n\npub type ALL_RESET_FLAG_APPCPU_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `ALL_RESET_FLAG_PROCPU`\"]\n\npub type ALL_RESET_FLAG_PROCPU_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `STAT_VECTOR_SEL_PROCPU`\"]\n\npub type STAT_VECTOR_SEL_PROCPU_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `STAT_VECTOR_SEL_PROCPU`\"]\n\npub struct STAT_VECTOR_SEL_PROCPU_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> STAT_VECTOR_SEL_PROCPU_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n", "file_path": "src/rtccntl/reset_state.rs", "rank": 19, "score": 52531.37456772528 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> DRESET_MASK_APPCPU_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 24)) | (((value as u32) & 0x01) << 24);\n\n self.w\n\n }\n\n}\n", "file_path": "src/rtccntl/reset_state.rs", "rank": 20, "score": 52528.04757901661 }, { "content": " STAT_VECTOR_SEL_PROCPU_R::new(((self.bits >> 13) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 12\"]\n\n #[inline(always)]\n\n pub fn stat_vector_sel_appcpu(&self) -> STAT_VECTOR_SEL_APPCPU_R {\n\n STAT_VECTOR_SEL_APPCPU_R::new(((self.bits >> 12) & 0x01) != 0)\n\n }\n\n #[doc = \"Bits 6:11\"]\n\n #[inline(always)]\n\n pub fn reset_cause_appcpu(&self) -> RESET_CAUSE_APPCPU_R {\n\n RESET_CAUSE_APPCPU_R::new(((self.bits >> 6) & 0x3f) as u8)\n\n }\n\n #[doc = \"Bits 0:5\"]\n\n #[inline(always)]\n\n pub fn reset_cause_procpu(&self) -> RESET_CAUSE_PROCPU_R {\n\n 
RESET_CAUSE_PROCPU_R::new((self.bits & 0x3f) as u8)\n\n }\n\n}\n\nimpl W {\n\n #[doc = \"Bit 25\"]\n", "file_path": "src/rtccntl/reset_state.rs", "rank": 21, "score": 52514.47528120327 }, { "content": " #[inline(always)]\n\n pub fn ocd_halt_on_reset_procpu(&mut self) -> OCD_HALT_ON_RESET_PROCPU_W {\n\n OCD_HALT_ON_RESET_PROCPU_W { w: self }\n\n }\n\n #[doc = \"Bit 18\"]\n\n #[inline(always)]\n\n pub fn ocd_halt_on_reset_appcpu(&mut self) -> OCD_HALT_ON_RESET_APPCPU_W {\n\n OCD_HALT_ON_RESET_APPCPU_W { w: self }\n\n }\n\n #[doc = \"Bit 17\"]\n\n #[inline(always)]\n\n pub fn all_reset_flag_clr_appcpu(&mut self) -> ALL_RESET_FLAG_CLR_APPCPU_W {\n\n ALL_RESET_FLAG_CLR_APPCPU_W { w: self }\n\n }\n\n #[doc = \"Bit 16\"]\n\n #[inline(always)]\n\n pub fn all_reset_flag_clr_procpu(&mut self) -> ALL_RESET_FLAG_CLR_PROCPU_W {\n\n ALL_RESET_FLAG_CLR_PROCPU_W { w: self }\n\n }\n\n #[doc = \"Bit 13\"]\n", "file_path": "src/rtccntl/reset_state.rs", "rank": 22, "score": 52513.55006972652 }, { "content": " #[inline(always)]\n\n pub fn dreset_mask_procpu(&mut self) -> DRESET_MASK_PROCPU_W {\n\n DRESET_MASK_PROCPU_W { w: self }\n\n }\n\n #[doc = \"Bit 24\"]\n\n #[inline(always)]\n\n pub fn dreset_mask_appcpu(&mut self) -> DRESET_MASK_APPCPU_W {\n\n DRESET_MASK_APPCPU_W { w: self }\n\n }\n\n #[doc = \"Bit 23\"]\n\n #[inline(always)]\n\n pub fn jtag_reset_flag_clr_appcpu(&mut self) -> JTAG_RESET_FLAG_CLR_APPCPU_W {\n\n JTAG_RESET_FLAG_CLR_APPCPU_W { w: self }\n\n }\n\n #[doc = \"Bit 22\"]\n\n #[inline(always)]\n\n pub fn jtag_reset_flag_clr_procpu(&mut self) -> JTAG_RESET_FLAG_CLR_PROCPU_W {\n\n JTAG_RESET_FLAG_CLR_PROCPU_W { w: self }\n\n }\n\n #[doc = \"Bit 19\"]\n", "file_path": "src/rtccntl/reset_state.rs", "rank": 23, "score": 52512.83079287005 }, { "content": " OCD_HALT_ON_RESET_PROCPU_R::new(((self.bits >> 19) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 18\"]\n\n #[inline(always)]\n\n pub fn ocd_halt_on_reset_appcpu(&self) -> OCD_HALT_ON_RESET_APPCPU_R {\n\n OCD_HALT_ON_RESET_APPCPU_R::new(((self.bits >> 18) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 15\"]\n\n #[inline(always)]\n\n pub fn all_reset_flag_appcpu(&self) -> ALL_RESET_FLAG_APPCPU_R {\n\n ALL_RESET_FLAG_APPCPU_R::new(((self.bits >> 15) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 14\"]\n\n #[inline(always)]\n\n pub fn all_reset_flag_procpu(&self) -> ALL_RESET_FLAG_PROCPU_R {\n\n ALL_RESET_FLAG_PROCPU_R::new(((self.bits >> 14) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 13\"]\n\n #[inline(always)]\n\n pub fn stat_vector_sel_procpu(&self) -> STAT_VECTOR_SEL_PROCPU_R {\n", "file_path": "src/rtccntl/reset_state.rs", "rank": 24, "score": 52508.77244111089 }, { "content": " DRESET_MASK_PROCPU_R::new(((self.bits >> 25) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 24\"]\n\n #[inline(always)]\n\n pub fn dreset_mask_appcpu(&self) -> DRESET_MASK_APPCPU_R {\n\n DRESET_MASK_APPCPU_R::new(((self.bits >> 24) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 21\"]\n\n #[inline(always)]\n\n pub fn jtag_reset_flag_appcpu(&self) -> JTAG_RESET_FLAG_APPCPU_R {\n\n JTAG_RESET_FLAG_APPCPU_R::new(((self.bits >> 21) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 20\"]\n\n #[inline(always)]\n\n pub fn jtag_reset_flag_procpu(&self) -> JTAG_RESET_FLAG_PROCPU_R {\n\n JTAG_RESET_FLAG_PROCPU_R::new(((self.bits >> 20) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 19\"]\n\n #[inline(always)]\n\n pub fn ocd_halt_on_reset_procpu(&self) -> OCD_HALT_ON_RESET_PROCPU_R {\n", "file_path": "src/rtccntl/reset_state.rs", "rank": 25, "score": 52508.47685936761 }, { "content": " #[inline(always)]\n\n pub fn stat_vector_sel_procpu(&mut self) -> 
STAT_VECTOR_SEL_PROCPU_W {\n\n STAT_VECTOR_SEL_PROCPU_W { w: self }\n\n }\n\n #[doc = \"Bit 12\"]\n\n #[inline(always)]\n\n pub fn stat_vector_sel_appcpu(&mut self) -> STAT_VECTOR_SEL_APPCPU_W {\n\n STAT_VECTOR_SEL_APPCPU_W { w: self }\n\n }\n\n}\n", "file_path": "src/rtccntl/reset_state.rs", "rank": 26, "score": 52504.01901501371 }, { "content": "#[doc = \"Reader of register INT_RAW\"]\n\npub type R = crate::R<u32, super::INT_RAW>;\n\n#[doc = \"Writer for register INT_RAW\"]\n\npub type W = crate::W<u32, super::INT_RAW>;\n\n#[doc = \"Register INT_RAW `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::INT_RAW {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `WAKEUP_INT_RAW`\"]\n\npub type WAKEUP_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `WAKEUP_INT_RAW`\"]\n\npub struct WAKEUP_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> WAKEUP_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/uart/int_raw.rs", "rank": 27, "score": 52342.56391807873 }, { "content": "#[doc = \"Reader of register INT_RAW\"]\n\npub type R = crate::R<u32, super::INT_RAW>;\n\n#[doc = \"Writer for register INT_RAW\"]\n\npub type W = crate::W<u32, super::INT_RAW>;\n\n#[doc = \"Register INT_RAW `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::INT_RAW {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `GENERAL_CALL_INT_RAW`\"]\n\npub type GENERAL_CALL_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `GENERAL_CALL_INT_RAW`\"]\n\npub struct GENERAL_CALL_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> GENERAL_CALL_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/i2c/int_raw.rs", "rank": 28, "score": 52341.26932567825 }, { "content": "#[doc = \"Reader of register INT_RAW\"]\n\npub type R = crate::R<u32, super::INT_RAW>;\n\n#[doc = \"Writer for register INT_RAW\"]\n\npub type W = crate::W<u32, super::INT_RAW>;\n\n#[doc = \"Register INT_RAW `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::INT_RAW {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `APP_CTRL1_INT_RAW`\"]\n\npub type APP_CTRL1_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `APP_CTRL1_INT_RAW`\"]\n\npub struct APP_CTRL1_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> APP_CTRL1_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/uhci/int_raw.rs", "rank": 29, "score": 52341.26932567825 }, { "content": "#[doc = \"Reader of register INT_RAW\"]\n\npub type R = crate::R<u32, super::INT_RAW>;\n\n#[doc = \"Writer for register INT_RAW\"]\n\npub type W = crate::W<u32, super::INT_RAW>;\n\n#[doc = \"Register INT_RAW `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::INT_RAW {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `CH1_TX_LOOP_INT_RAW`\"]\n\npub type CH1_TX_LOOP_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `CH1_TX_LOOP_INT_RAW`\"]\n\npub struct CH1_TX_LOOP_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CH1_TX_LOOP_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/rmt/int_raw.rs", "rank": 30, "score": 52340.02688219789 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n 
pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TIME_OUT_INT_RAW`\"]\n\npub type TIME_OUT_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TIME_OUT_INT_RAW`\"]\n\npub struct TIME_OUT_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TIME_OUT_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/i2c/int_raw.rs", "rank": 31, "score": 52324.910192425894 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `END_DETECT_INT_RAW`\"]\n\npub type END_DETECT_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `END_DETECT_INT_RAW`\"]\n\npub struct END_DETECT_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> END_DETECT_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/i2c/int_raw.rs", "rank": 32, "score": 52323.83631708117 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `CH0_ERR_INT_RAW`\"]\n\npub type CH0_ERR_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `CH0_ERR_INT_RAW`\"]\n\npub struct CH0_ERR_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CH0_ERR_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/rmt/int_raw.rs", "rank": 33, "score": 52323.83631708117 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `DSR_CHG_INT_RAW`\"]\n\npub type DSR_CHG_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `DSR_CHG_INT_RAW`\"]\n\npub struct DSR_CHG_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DSR_CHG_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/uart/int_raw.rs", "rank": 34, "score": 52323.83631708117 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `SW_XOFF_INT_RAW`\"]\n\npub type SW_XOFF_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `SW_XOFF_INT_RAW`\"]\n\npub struct SW_XOFF_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SW_XOFF_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/uart/int_raw.rs", "rank": 35, "score": 52323.83631708117 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) 
-> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 14)) | (((value as u32) & 0x01) << 14);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `SCL_ST_TO_INT_RAW`\"]\n\npub type SCL_ST_TO_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `SCL_ST_TO_INT_RAW`\"]\n\npub struct SCL_ST_TO_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SCL_ST_TO_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/i2c/int_raw.rs", "rank": 36, "score": 52323.83631708117 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `RXFIFO_FULL_INT_RAW`\"]\n\npub type RXFIFO_FULL_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `RXFIFO_FULL_INT_RAW`\"]\n\npub struct RXFIFO_FULL_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RXFIFO_FULL_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/uart/int_raw.rs", "rank": 37, "score": 52323.83631708117 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TRANS_COMPLETE_INT_RAW`\"]\n\npub type TRANS_COMPLETE_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TRANS_COMPLETE_INT_RAW`\"]\n\npub struct TRANS_COMPLETE_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TRANS_COMPLETE_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/i2c/int_raw.rs", "rank": 38, "score": 52323.32979761053 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 13)) | (((value as u32) & 0x01) << 13);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `RXFIFO_UDF_INT_RAW`\"]\n\npub type RXFIFO_UDF_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `RXFIFO_UDF_INT_RAW`\"]\n\npub struct RXFIFO_UDF_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RXFIFO_UDF_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/i2c/int_raw.rs", "rank": 39, "score": 52323.32979761053 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `RXFIFO_OVF_INT_RAW`\"]\n\npub type RXFIFO_OVF_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `RXFIFO_OVF_INT_RAW`\"]\n\npub struct RXFIFO_OVF_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RXFIFO_OVF_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/uart/int_raw.rs", "rank": 
40, "score": 52323.32979761053 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `RXFIFO_OVF_INT_RAW`\"]\n\npub type RXFIFO_OVF_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `RXFIFO_OVF_INT_RAW`\"]\n\npub struct RXFIFO_OVF_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RXFIFO_OVF_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/i2c/int_raw.rs", "rank": 41, "score": 52323.32979761053 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TX_HUNG_INT_RAW`\"]\n\npub type TX_HUNG_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TX_HUNG_INT_RAW`\"]\n\npub struct TX_HUNG_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TX_HUNG_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/uhci/int_raw.rs", "rank": 42, "score": 52323.32979761053 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `SW_XON_INT_RAW`\"]\n\npub type SW_XON_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `SW_XON_INT_RAW`\"]\n\npub struct SW_XON_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SW_XON_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/uart/int_raw.rs", "rank": 43, "score": 52323.32979761053 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 15)) | (((value as u32) & 0x01) << 15);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TX_DONE_INT_RAW`\"]\n\npub type TX_DONE_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TX_DONE_INT_RAW`\"]\n\npub struct TX_DONE_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TX_DONE_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/uart/int_raw.rs", "rank": 44, "score": 52323.32979761053 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `RS485_PARITY_ERR_INT_RAW`\"]\n\npub type RS485_PARITY_ERR_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `RS485_PARITY_ERR_INT_RAW`\"]\n\npub struct RS485_PARITY_ERR_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RS485_PARITY_ERR_INT_RAW_W<'a> {\n\n #[doc = 
r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/uart/int_raw.rs", "rank": 45, "score": 52322.81130667641 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `SEND_S_Q_INT_RAW`\"]\n\npub type SEND_S_Q_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `SEND_S_Q_INT_RAW`\"]\n\npub struct SEND_S_Q_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SEND_S_Q_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/uhci/int_raw.rs", "rank": 46, "score": 52322.81130667641 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `CH3_RX_END_INT_RAW`\"]\n\npub type CH3_RX_END_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `CH3_RX_END_INT_RAW`\"]\n\npub struct CH3_RX_END_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CH3_RX_END_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/rmt/int_raw.rs", "rank": 47, "score": 52322.33667171628 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `CH1_TX_THR_EVENT_INT_RAW`\"]\n\npub type CH1_TX_THR_EVENT_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `CH1_TX_THR_EVENT_INT_RAW`\"]\n\npub struct CH1_TX_THR_EVENT_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CH1_TX_THR_EVENT_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/rmt/int_raw.rs", "rank": 48, "score": 52321.83153230491 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `CH0_TX_THR_EVENT_INT_RAW`\"]\n\npub type CH0_TX_THR_EVENT_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `CH0_TX_THR_EVENT_INT_RAW`\"]\n\npub struct CH0_TX_THR_EVENT_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CH0_TX_THR_EVENT_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/rmt/int_raw.rs", "rank": 49, "score": 52321.38663386117 }, { "content": " self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 0x01) << 12);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `GLITCH_DET_INT_RAW`\"]\n\npub type GLITCH_DET_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `GLITCH_DET_INT_RAW`\"]\n\npub struct GLITCH_DET_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> 
GLITCH_DET_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n", "file_path": "src/uart/int_raw.rs", "rank": 50, "score": 52321.04353693204 }, { "content": " self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `CH1_ERR_INT_RAW`\"]\n\npub type CH1_ERR_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `CH1_ERR_INT_RAW`\"]\n\npub struct CH1_ERR_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CH1_ERR_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n", "file_path": "src/rmt/int_raw.rs", "rank": 51, "score": 52321.04353693204 }, { "content": " self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TXFIFO_EMPTY_INT_RAW`\"]\n\npub type TXFIFO_EMPTY_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TXFIFO_EMPTY_INT_RAW`\"]\n\npub struct TXFIFO_EMPTY_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TXFIFO_EMPTY_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n", "file_path": "src/uart/int_raw.rs", "rank": 52, "score": 52321.04353693204 }, { "content": " self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `SEND_A_Q_INT_RAW`\"]\n\npub type SEND_A_Q_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `SEND_A_Q_INT_RAW`\"]\n\npub struct SEND_A_Q_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SEND_A_Q_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n", "file_path": "src/uhci/int_raw.rs", "rank": 53, "score": 52321.04353693204 }, { "content": " self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `RX_START_INT_RAW`\"]\n\npub type RX_START_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `RX_START_INT_RAW`\"]\n\npub struct RX_START_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RX_START_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n", "file_path": "src/uhci/int_raw.rs", "rank": 54, "score": 52321.04353693204 }, { "content": " self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `CTS_CHG_INT_RAW`\"]\n\npub type CTS_CHG_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `CTS_CHG_INT_RAW`\"]\n\npub struct CTS_CHG_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> 
CTS_CHG_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n", "file_path": "src/uart/int_raw.rs", "rank": 55, "score": 52321.04353693204 }, { "content": " self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TRANS_START_INT_RAW`\"]\n\npub type TRANS_START_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TRANS_START_INT_RAW`\"]\n\npub struct TRANS_START_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TRANS_START_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n", "file_path": "src/i2c/int_raw.rs", "rank": 56, "score": 52321.04353693204 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 0x01) << 12);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TXFIFO_OVF_INT_RAW`\"]\n\npub type TXFIFO_OVF_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TXFIFO_OVF_INT_RAW`\"]\n\npub struct TXFIFO_OVF_INT_RAW_W<'a> {\n", "file_path": "src/i2c/int_raw.rs", "rank": 57, "score": 52320.202012592876 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `CH3_ERR_INT_RAW`\"]\n\npub type CH3_ERR_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `CH3_ERR_INT_RAW`\"]\n\npub struct CH3_ERR_INT_RAW_W<'a> {\n", "file_path": "src/rmt/int_raw.rs", "rank": 58, "score": 52320.202012592876 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TXFIFO_WM_INT_RAW`\"]\n\npub type TXFIFO_WM_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TXFIFO_WM_INT_RAW`\"]\n\npub struct TXFIFO_WM_INT_RAW_W<'a> {\n", "file_path": "src/i2c/int_raw.rs", "rank": 59, "score": 52320.202012592876 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the 
field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 17)) | (((value as u32) & 0x01) << 17);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `SLAVE_STRETCH_INT_RAW`\"]\n\npub type SLAVE_STRETCH_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `SLAVE_STRETCH_INT_RAW`\"]\n\npub struct SLAVE_STRETCH_INT_RAW_W<'a> {\n", "file_path": "src/i2c/int_raw.rs", "rank": 60, "score": 52320.202012592876 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `RXFIFO_TOUT_INT_RAW`\"]\n\npub type RXFIFO_TOUT_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `RXFIFO_TOUT_INT_RAW`\"]\n\npub struct RXFIFO_TOUT_INT_RAW_W<'a> {\n", "file_path": "src/uart/int_raw.rs", "rank": 61, "score": 52320.202012592876 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `APP_CTRL0_INT_RAW`\"]\n\npub type APP_CTRL0_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `APP_CTRL0_INT_RAW`\"]\n\npub struct APP_CTRL0_INT_RAW_W<'a> {\n", "file_path": "src/uhci/int_raw.rs", "rank": 62, "score": 52320.202012592876 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `FRM_ERR_INT_RAW`\"]\n\npub type FRM_ERR_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `FRM_ERR_INT_RAW`\"]\n\npub struct FRM_ERR_INT_RAW_W<'a> {\n", "file_path": "src/uart/int_raw.rs", "rank": 63, "score": 52320.202012592876 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `RX_HUNG_INT_RAW`\"]\n\npub type RX_HUNG_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `RX_HUNG_INT_RAW`\"]\n\npub struct RX_HUNG_INT_RAW_W<'a> {\n", "file_path": "src/uhci/int_raw.rs", "rank": 64, "score": 52320.202012592876 }, { "content": " self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);\n\n self.w\n\n }\n\n}\n\n#[doc = 
\"Reader of field `BYTE_TRANS_DONE_INT_RAW`\"]\n\npub type BYTE_TRANS_DONE_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `BYTE_TRANS_DONE_INT_RAW`\"]\n\npub struct BYTE_TRANS_DONE_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> BYTE_TRANS_DONE_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n", "file_path": "src/i2c/int_raw.rs", "rank": 65, "score": 52320.00871537642 }, { "content": " self.w.bits = (self.w.bits & !(0x01 << 15)) | (((value as u32) & 0x01) << 15);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `SCL_MAIN_ST_TO_INT_RAW`\"]\n\npub type SCL_MAIN_ST_TO_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `SCL_MAIN_ST_TO_INT_RAW`\"]\n\npub struct SCL_MAIN_ST_TO_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SCL_MAIN_ST_TO_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n", "file_path": "src/i2c/int_raw.rs", "rank": 66, "score": 52320.00871537642 }, { "content": " self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `CH0_TX_END_INT_RAW`\"]\n\npub type CH0_TX_END_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `CH0_TX_END_INT_RAW`\"]\n\npub struct CH0_TX_END_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CH0_TX_END_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n", "file_path": "src/rmt/int_raw.rs", "rank": 67, "score": 52320.00871537642 }, { "content": " self.w.bits = (self.w.bits & !(0x01 << 17)) | (((value as u32) & 0x01) << 17);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `RS485_FRM_ERR_INT_RAW`\"]\n\npub type RS485_FRM_ERR_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `RS485_FRM_ERR_INT_RAW`\"]\n\npub struct RS485_FRM_ERR_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RS485_FRM_ERR_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n", "file_path": "src/uart/int_raw.rs", "rank": 68, "score": 52320.00871537642 }, { "content": "#[doc = \"Reader of field `NACK_INT_RAW`\"]\n\npub type NACK_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `NACK_INT_RAW`\"]\n\npub struct NACK_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> NACK_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "src/i2c/int_raw.rs", "rank": 69, "score": 52319.78220853642 }, { "content": " #[inline(always)]\n\n pub fn 
set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `CH2_RX_END_INT_RAW`\"]\n\npub type CH2_RX_END_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `CH2_RX_END_INT_RAW`\"]\n\npub struct CH2_RX_END_INT_RAW_W<'a> {\n", "file_path": "src/rmt/int_raw.rs", "rank": 70, "score": 52319.462191538325 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `MST_TXFIFO_UDF_INT_RAW`\"]\n\npub type MST_TXFIFO_UDF_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `MST_TXFIFO_UDF_INT_RAW`\"]\n\npub struct MST_TXFIFO_UDF_INT_RAW_W<'a> {\n", "file_path": "src/i2c/int_raw.rs", "rank": 71, "score": 52319.462191538325 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 13)) | (((value as u32) & 0x01) << 13);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `CH0_TX_LOOP_INT_RAW`\"]\n\npub type CH0_TX_LOOP_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `CH0_TX_LOOP_INT_RAW`\"]\n\npub struct CH0_TX_LOOP_INT_RAW_W<'a> {\n", "file_path": "src/rmt/int_raw.rs", "rank": 72, "score": 52319.462191538325 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 19)) | (((value as u32) & 0x01) << 19);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `AT_CMD_CHAR_DET_INT_RAW`\"]\n\npub type AT_CMD_CHAR_DET_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `AT_CMD_CHAR_DET_INT_RAW`\"]\n\npub struct AT_CMD_CHAR_DET_INT_RAW_W<'a> {\n", "file_path": "src/uart/int_raw.rs", "rank": 73, "score": 52319.462191538325 }, { "content": " self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `CH2_RX_THR_EVENT_INT_RAW`\"]\n\npub type CH2_RX_THR_EVENT_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `CH2_RX_THR_EVENT_INT_RAW`\"]\n\npub struct CH2_RX_THR_EVENT_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CH2_RX_THR_EVENT_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n 
#[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n", "file_path": "src/rmt/int_raw.rs", "rank": 74, "score": 52319.02334423624 }, { "content": "#[doc = \"Reader of field `ARBITRATION_LOST_INT_RAW`\"]\n\npub type ARBITRATION_LOST_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `ARBITRATION_LOST_INT_RAW`\"]\n\npub struct ARBITRATION_LOST_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ARBITRATION_LOST_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "src/i2c/int_raw.rs", "rank": 75, "score": 52318.89117799336 }, { "content": "#[doc = \"Reader of field `RXFIFO_WM_INT_RAW`\"]\n\npub type RXFIFO_WM_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `RXFIFO_WM_INT_RAW`\"]\n\npub struct RXFIFO_WM_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RXFIFO_WM_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "src/i2c/int_raw.rs", "rank": 76, "score": 52318.89117799336 }, { "content": "#[doc = \"Reader of field `BRK_DET_INT_RAW`\"]\n\npub type BRK_DET_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `BRK_DET_INT_RAW`\"]\n\npub struct BRK_DET_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> BRK_DET_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "src/uart/int_raw.rs", "rank": 77, "score": 52318.89117799336 }, { "content": "#[doc = \"Reader of field `CH2_ERR_INT_RAW`\"]\n\npub type CH2_ERR_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `CH2_ERR_INT_RAW`\"]\n\npub struct CH2_ERR_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CH2_ERR_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "src/rmt/int_raw.rs", "rank": 78, "score": 52318.89117799336 }, { "content": "#[doc = \"Reader of field `PARITY_ERR_INT_RAW`\"]\n\npub type PARITY_ERR_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `PARITY_ERR_INT_RAW`\"]\n\npub struct PARITY_ERR_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> PARITY_ERR_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n 
#[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "src/uart/int_raw.rs", "rank": 79, "score": 52318.89117799336 }, { "content": "#[doc = \"Reader of field `RS485_CLASH_INT_RAW`\"]\n\npub type RS485_CLASH_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `RS485_CLASH_INT_RAW`\"]\n\npub struct RS485_CLASH_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RS485_CLASH_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "src/uart/int_raw.rs", "rank": 80, "score": 52318.89117799336 }, { "content": "#[doc = \"Reader of field `TX_START_INT_RAW`\"]\n\npub type TX_START_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TX_START_INT_RAW`\"]\n\npub struct TX_START_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TX_START_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "src/uhci/int_raw.rs", "rank": 81, "score": 52318.89117799336 }, { "content": "#[doc = \"Reader of field `DET_START_INT_RAW`\"]\n\npub type DET_START_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `DET_START_INT_RAW`\"]\n\npub struct DET_START_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DET_START_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "src/i2c/int_raw.rs", "rank": 82, "score": 52318.89117799336 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 14)) | (((value as u32) & 0x01) << 14);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TX_BRK_IDLE_DONE_INT_RAW`\"]\n\npub type TX_BRK_IDLE_DONE_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TX_BRK_IDLE_DONE_INT_RAW`\"]\n\npub struct TX_BRK_IDLE_DONE_INT_RAW_W<'a> {\n", "file_path": "src/uart/int_raw.rs", "rank": 83, "score": 52318.74772349692 }, { "content": "#[doc = \"Reader of field `CH1_TX_END_INT_RAW`\"]\n\npub type CH1_TX_END_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `CH1_TX_END_INT_RAW`\"]\n\npub struct CH1_TX_END_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CH1_TX_END_INT_RAW_W<'a> {\n\n #[doc = r\"Sets 
the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "src/rmt/int_raw.rs", "rank": 84, "score": 52318.03912017424 }, { "content": "#[doc = \"Reader of field `OUTLINK_EOF_ERR_INT_RAW`\"]\n\npub type OUTLINK_EOF_ERR_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `OUTLINK_EOF_ERR_INT_RAW`\"]\n\npub struct OUTLINK_EOF_ERR_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> OUTLINK_EOF_ERR_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "src/uhci/int_raw.rs", "rank": 85, "score": 52318.03912017424 }, { "content": "#[doc = \"Reader of field `TX_BRK_DONE_INT_RAW`\"]\n\npub type TX_BRK_DONE_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TX_BRK_DONE_INT_RAW`\"]\n\npub struct TX_BRK_DONE_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TX_BRK_DONE_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "src/uart/int_raw.rs", "rank": 86, "score": 52318.03912017424 }, { "content": "#[doc = \"Reader of field `CH3_RX_THR_EVENT_INT_RAW`\"]\n\npub type CH3_RX_THR_EVENT_INT_RAW_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `CH3_RX_THR_EVENT_INT_RAW`\"]\n\npub struct CH3_RX_THR_EVENT_INT_RAW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CH3_RX_THR_EVENT_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "src/rmt/int_raw.rs", "rank": 87, "score": 52317.22318408221 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);\n\n self.w\n\n }\n\n}\n\nimpl R {\n\n #[doc = \"Bit 19\"]\n\n #[inline(always)]\n\n pub fn wakeup_int_raw(&self) -> WAKEUP_INT_RAW_R {\n\n WAKEUP_INT_RAW_R::new(((self.bits >> 19) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 18\"]\n\n #[inline(always)]\n", "file_path": "src/uart/int_raw.rs", "rank": 88, "score": 52316.1623948121 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 
0x01);\n\n self.w\n\n }\n\n}\n\nimpl R {\n\n #[doc = \"Bit 8\"]\n\n #[inline(always)]\n\n pub fn app_ctrl1_int_raw(&self) -> APP_CTRL1_INT_RAW_R {\n\n APP_CTRL1_INT_RAW_R::new(((self.bits >> 8) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 7\"]\n\n #[inline(always)]\n\n pub fn app_ctrl0_int_raw(&self) -> APP_CTRL0_INT_RAW_R {\n\n APP_CTRL0_INT_RAW_R::new(((self.bits >> 7) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 6\"]\n", "file_path": "src/uhci/int_raw.rs", "rank": 89, "score": 52313.09253957422 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> RX_HUNG_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);\n\n self.w\n\n }\n\n}\n", "file_path": "src/uhci/int_raw.rs", "rank": 90, "score": 52312.38535683998 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> TXFIFO_WM_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);\n\n self.w\n\n }\n\n}\n", "file_path": "src/i2c/int_raw.rs", "rank": 91, "score": 52312.38535683998 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> CH3_ERR_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);\n\n self.w\n\n }\n\n}\n", "file_path": "src/rmt/int_raw.rs", "rank": 92, "score": 52312.38535683998 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> TXFIFO_OVF_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11);\n\n self.w\n\n }\n\n}\n", "file_path": "src/i2c/int_raw.rs", "rank": 93, "score": 52312.38535683998 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> APP_CTRL0_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);\n\n self.w\n\n }\n\n}\n", 
"file_path": "src/uhci/int_raw.rs", "rank": 94, "score": 52312.38535683998 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> SLAVE_STRETCH_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16);\n\n self.w\n\n }\n\n}\n", "file_path": "src/i2c/int_raw.rs", "rank": 95, "score": 52312.38535683998 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> FRM_ERR_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);\n\n self.w\n\n }\n\n}\n", "file_path": "src/uart/int_raw.rs", "rank": 96, "score": 52312.38535683998 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> RXFIFO_TOUT_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);\n\n self.w\n\n }\n\n}\n", "file_path": "src/uart/int_raw.rs", "rank": 97, "score": 52312.38535683998 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> CH2_RX_END_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);\n\n self.w\n\n }\n\n}\n", "file_path": "src/rmt/int_raw.rs", "rank": 98, "score": 52312.20832489181 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> CH0_TX_LOOP_INT_RAW_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 0x01) << 12);\n\n self.w\n\n }\n\n}\n", "file_path": "src/rmt/int_raw.rs", "rank": 99, "score": 52312.20832489181 } ]
Rust
src/node_state/mod.rs
frugalos/raftlog
25d663b3e8eda35224cd666e1f1ef05b44ace884
use futures::{Async, Poll, Stream};
use std::time::Instant;

pub use self::common::Common;

use self::candidate::Candidate;
use self::common::HandleMessageResult;
use self::follower::Follower;
use self::leader::Leader;
use self::loader::Loader;
use crate::cluster::ClusterConfig;
use crate::message::Message;
use crate::metrics::NodeStateMetrics;
use crate::node::NodeId;
use crate::{Error, Event, Io, Result};

mod candidate;
mod common;
mod follower;
mod leader;
mod loader;

type NextState<IO> = Option<RoleState<IO>>;

pub struct NodeState<IO: Io> {
    pub common: Common<IO>,
    pub role: RoleState<IO>,
    started_at: Instant,
    pub metrics: NodeStateMetrics,
}
impl<IO: Io> NodeState<IO> {
    pub fn load(node_id: NodeId, config: ClusterConfig, io: IO, metrics: NodeStateMetrics) -> Self {
        let mut common = Common::new(node_id, io, config, metrics.clone());
        let role = RoleState::Loader(Loader::new(&mut common));
        let started_at = Instant::now();
        NodeState {
            common,
            role,
            started_at,
            metrics,
        }
    }
    pub fn is_loading(&self) -> bool {
        self.role.is_loader()
    }
    pub fn start_election(&mut self) {
        if let RoleState::Follower(follower) = &mut self.role {
            let next = follower.handle_timeout(&mut self.common);
            let next = track_try_unwrap!(next);
            if let Some(next) = next {
                self.handle_role_change(next);
            }
        }
    }
    fn handle_timeout(&mut self) -> Result<Option<RoleState<IO>>> {
        match self.role {
            RoleState::Loader(ref mut t) => track!(t.handle_timeout(&mut self.common)),
            RoleState::Follower(ref mut t) => track!(t.handle_timeout(&mut self.common)),
            RoleState::Candidate(ref mut t) => track!(t.handle_timeout(&mut self.common)),
            RoleState::Leader(ref mut t) => track!(t.handle_timeout(&mut self.common)),
        }
    }
    fn handle_message(&mut self, message: Message) -> Result<Option<RoleState<IO>>> {
        if let RoleState::Loader(_) = self.role {
            return Ok(None);
        }
        match self.common.handle_message(message) {
            HandleMessageResult::Handled(next) => Ok(next),
            HandleMessageResult::Unhandled(message) => match self.role {
                RoleState::Loader(_) => unreachable!(),
                RoleState::Follower(ref mut t) => {
                    track!(t.handle_message(&mut self.common, message))
                }
                RoleState::Candidate(ref mut t) => {
                    track!(t.handle_message(&mut self.common, &message))
                }
                RoleState::Leader(ref mut t) => track!(t.handle_message(&mut self.common, message)),
            },
        }
    }
    fn handle_role_change(&mut self, next: RoleState<IO>) {
        match (&self.role, &next) {
            (RoleState::Candidate(_), RoleState::Leader(_)) => {
                let elapsed = prometrics::timestamp::duration_to_seconds(self.started_at.elapsed());
                self.metrics
                    .candidate_to_leader_duration_seconds
                    .observe(elapsed);
                self.started_at = Instant::now();
            }
            (RoleState::Candidate(_), RoleState::Follower(_)) => {
                let elapsed = prometrics::timestamp::duration_to_seconds(self.started_at.elapsed());
                self.metrics
                    .candidate_to_follower_duration_seconds
                    .observe(elapsed);
                self.started_at = Instant::now();
            }
            (RoleState::Loader(_), RoleState::Candidate(_)) => {
                let elapsed = prometrics::timestamp::duration_to_seconds(self.started_at.elapsed());
                self.metrics
                    .loader_to_candidate_duration_seconds
                    .observe(elapsed);
                self.started_at = Instant::now();
            }
            (RoleState::Leader(_), RoleState::Leader(_))
            | (RoleState::Follower(_), RoleState::Follower(_))
            | (RoleState::Candidate(_), RoleState::Candidate(_))
            | (RoleState::Loader(_), RoleState::Loader(_)) => {}
            _ => self.started_at = Instant::now(),
        }
        self.role = next;
    }
}
impl<IO: Io> Stream for NodeState<IO> {
    type Item = Event;
    type Error = Error;
    fn poll(&mut self) -> Poll<Option<Self::Item>, Self::Error> {
        let mut did_something = true;
        while did_something {
            did_something = false;
            if let Some(e) = self.common.next_event() {
                return Ok(Async::Ready(Some(e)));
            }
            if let Async::Ready(()) = track!(self.common.poll_timeout())? {
                did_something = true;
                self.metrics.poll_timeout_total.increment();
                if let Some(next) = track!(self.handle_timeout())? {
                    self.handle_role_change(next);
                }
                if let Some(e) = self.common.next_event() {
                    return Ok(Async::Ready(Some(e)));
                }
            }
            if let Some(next) = track!(self.common.run_once())? {
                did_something = true;
                self.handle_role_change(next);
            }
            if let Some(e) = self.common.next_event() {
                return Ok(Async::Ready(Some(e)));
            }
            let result = match self.role {
                RoleState::Loader(ref mut t) => track!(t.run_once(&mut self.common))?,
                RoleState::Follower(ref mut t) => track!(t.run_once(&mut self.common))?,
                RoleState::Candidate(ref mut t) => track!(t.run_once(&mut self.common))?,
                RoleState::Leader(ref mut t) => track!(t.run_once(&mut self.common))?,
            };
            if let Some(next) = result {
                did_something = true;
                self.handle_role_change(next);
            }
            if let Some(e) = self.common.next_event() {
                return Ok(Async::Ready(Some(e)));
            }
            if let Some(message) = track!(self.common.try_recv_message())? {
                did_something = true;
                if let Some(next) = track!(self.handle_message(message))? {
                    self.handle_role_change(next);
                }
                if let Some(e) = self.common.next_event() {
                    return Ok(Async::Ready(Some(e)));
                }
            }
        }
        Ok(Async::NotReady)
    }
}
pub enum RoleState<IO: Io> {
    Loader(Loader<IO>),
    Follower(Follower<IO>),
    Candidate(Candidate<IO>),
    Leader(Leader<IO>),
}
impl<IO: Io> RoleState<IO> {
    pub fn is_loader(&self) -> bool {
        matches!(self, RoleState::Loader(_))
    }
    #[cfg(test)]
    pub fn is_candidate(&self) -> bool {
        matches!(self, RoleState::Candidate(_))
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use prometrics::metrics::MetricBuilder;

    use crate::test_util::tests::TestIoBuilder;

    #[test]
    fn node_state_is_loading_works() {
        let metrics = NodeStateMetrics::new(&MetricBuilder::new()).expect("Never fails");
        let io = TestIoBuilder::new().finish();
        let cluster = io.cluster.clone();
        let node = NodeState::load("test".into(), cluster, io, metrics);
        assert!(node.is_loading());
    }

    #[test]
    fn role_state_is_loader_works() {
        let metrics = NodeStateMetrics::new(&MetricBuilder::new()).expect("Never fails");
        let io = TestIoBuilder::new().finish();
        let cluster = io.cluster.clone();
        let mut common = Common::new("test".into(), io, cluster, metrics);
        let state = RoleState::Loader(Loader::new(&mut common));
        assert!(state.is_loader());
        assert!(!state.is_candidate());
    }

    #[test]
    fn role_state_is_candidate_works() {
        let metrics = NodeStateMetrics::new(&MetricBuilder::new()).expect("Never fails");
        let io = TestIoBuilder::new().finish();
        let cluster = io.cluster.clone();
        let mut common = Common::new("test".into(), io, cluster, metrics);
        let state = RoleState::Candidate(Candidate::new(&mut common));
        assert!(!state.is_loader());
        assert!(state.is_candidate());
    }
}
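As the `poll` loop above shows, `NodeState` surfaces all Raft progress through its futures-0.1 `Stream` implementation: a caller keeps polling and reacts to each yielded `Event`. Below is a minimal, hypothetical consumption sketch; it is not taken from the repository and assumes only the generic `futures = "0.1"` `Stream` API, so it applies to any such stream, `NodeState` included.

use futures::{Async, Stream};

// Drain every item the stream can yield right now without blocking.
// For `NodeState`, each item would be an `Event` describing Raft progress.
fn drain_ready<S: Stream>(stream: &mut S) -> Result<Vec<S::Item>, S::Error> {
    let mut items = Vec::new();
    loop {
        match stream.poll()? {
            Async::Ready(Some(item)) => items.push(item),
            // Either the stream has terminated or it cannot make progress yet.
            Async::Ready(None) | Async::NotReady => break,
        }
    }
    Ok(items)
}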
use futures::{Async, Poll, Stream}; use std::time::Instant; pub use self::common::Common; use self::candidate::Candidate; use self::common::HandleMessageResult; use self::follower::Follower; use self::leader::Leader; use self::loader::Loader; use crate::cluster::ClusterConfig; use crate::message::Message; use crate::metrics::NodeStateMetrics; use crate::node::NodeId; use crate::{Error, Event, Io, Result}; mod candidate; mod common; mod follower; mod leader; mod loader; type NextState<IO> = Option<RoleState<IO>>; pub struct NodeState<IO: Io> { pub common: Common<IO>, pub role: RoleState<IO>, started_at: Instant, pub metrics: NodeStateMetrics, } impl<IO: Io> NodeState<IO> { pub fn load(node_id: NodeId, config: ClusterConfig, io: IO, metrics: NodeStateMetrics) -> Self { let mut common = Common::new(node_id, io, config, metrics.clone()); let role = RoleState::Loader(Loader::new(&mut common)); let started_at = Instant::now(); NodeState { common, role, started_at, metrics, } } pub fn is_loading(&self) -> bool { self.role.is_loader() } pub fn start_election(&mut self) { if let RoleState::Follower(follower) = &mut self.role { let next = follower.handle_timeout(&mut self.common); let next = track_try_unwrap!(next); if let Some(next) = next { self.handle_role_change(next); } } } fn handle_timeout(&mut self) -> Result<Option<RoleState<IO>>> { match self.role { RoleState::Loader(ref mut t) => track!(t.handle_timeout(&mut self.common)), RoleState::Follower(ref mut t) => track!(t.handle_timeout(&mut self.common)), RoleState::Candidate(ref mut t) => track!(t.handle_timeout(&mut self.common)), RoleState::Leader(ref mut t) => track!(t.handle_timeout(&mut self.common)), } } fn handle_message(&mut self, message: Message) -> Result<Option<RoleState<IO>>> { if let RoleState::Loader(_) = self.role { return Ok(None); } match self.common.handle_message(message) { HandleMessageResult::Handled(next) => Ok(next), HandleMessageResult::Unhandled(message) => match self.role { RoleState::Loader(_) => unreachable!(), RoleState::Follower(ref mut t) => { track!(t.handle_message(&mut self.common, message)) } RoleState::Candidate(ref mut t) => { track!(t.handle_message(&mut self.common, &message)) } RoleState::Leader(ref mut t) => track!(t.handle_message(&mut self.common, message)), }, } } fn handle_role_change(&mut self, next: RoleState<IO>) { match (&self.role, &next) { (RoleState::Candidate(_), RoleState::Leader(_)) => { let elapsed = prometrics::timestamp::duration_to_seconds(self.started_at.elapsed()); self.metrics .candidate_to_leader_duration_seconds .observe(elapsed); self.started_at = Instant::now(); } (RoleState::Candidate(_), RoleState::Follower(_)) => { let elapsed = prometrics::timestamp::duration_to_seconds(self.started_at.elapsed()); self.metrics .candidate_to_follower_duration_seconds .observe(elapsed); self.started_at = Instant::now(); } (RoleState::Loader(_), RoleState::Candidate(_)) => { let elapsed = prometrics::timestamp::duration_to_seconds(self.started_at.elapsed()); self.metrics .loader_to_candidate_duration_seconds .observe(elapsed); self.started_at = Instant::now(); } (RoleState::Leader(_), RoleState::Leader(_)) | (RoleState::Follower(_), RoleState::Follower(_)) | (RoleState::Candidate(_), RoleState::Candidate(_)) | (RoleState::Loader(_), RoleState::Loader(_)) => {} _ => self.started_at = Instant::now(), } self.role = next; } } impl<IO: Io> Stream for NodeState<IO> { type Item = Event; type Error = Error; fn poll(&mut self) -> Poll<Option<Self::Item>, Self::Error> { let mut did_something = true; 
while did_something { did_something = false; if let Some(e) = self.common.next_event() { return Ok(Async::Ready(Some(e))); } if let Async::Ready(()) = track!(self.common.poll_timeout())? { did_something = true; self.metrics.poll_timeout_total.increment(); if let Some(next) = track!(self.handle_timeout())? { self.handle_role_change(next); } if let Some(e) = self.common.next_event() { return Ok(Async::Ready(Some(e))); } } if let Some(next) = track!(self.common.run_once())? { did_something = true; self.handle_role_change(next); } if let Some(e) = self.common.next_event() { return Ok(Async::Ready(Some(e))); } let result = match self.role { RoleState::Loader(ref mut t) => track!(t.run_once(&mut self.common))?, RoleState::Follower(ref mut t) => track!(t.run_once(&mut self.common))?, RoleState::Candidate(ref mut t) => track!(t.run_once(&mut self.common))?, RoleState::Leader(ref mut t) => track!(t.run_once(&mut self.common))?, }; if let Some(next) = result { did_something = true; self.handle_role_change(next); } if let Some(e) = self.common.next_event() { return Ok(Async::Ready(Some(e))); } if l
Leader(Leader<IO>), } impl<IO: Io> RoleState<IO> { pub fn is_loader(&self) -> bool { matches!(self, RoleState::Loader(_)) } #[cfg(test)] pub fn is_candidate(&self) -> bool { matches!(self, RoleState::Candidate(_)) } } #[cfg(test)] mod tests { use super::*; use prometrics::metrics::MetricBuilder; use crate::test_util::tests::TestIoBuilder; #[test] fn node_state_is_loading_works() { let metrics = NodeStateMetrics::new(&MetricBuilder::new()).expect("Never fails"); let io = TestIoBuilder::new().finish(); let cluster = io.cluster.clone(); let node = NodeState::load("test".into(), cluster, io, metrics); assert!(node.is_loading()); } #[test] fn role_state_is_loader_works() { let metrics = NodeStateMetrics::new(&MetricBuilder::new()).expect("Never fails"); let io = TestIoBuilder::new().finish(); let cluster = io.cluster.clone(); let mut common = Common::new("test".into(), io, cluster, metrics); let state = RoleState::Loader(Loader::new(&mut common)); assert!(state.is_loader()); assert!(!state.is_candidate()); } #[test] fn role_state_is_candidate_works() { let metrics = NodeStateMetrics::new(&MetricBuilder::new()).expect("Never fails"); let io = TestIoBuilder::new().finish(); let cluster = io.cluster.clone(); let mut common = Common::new("test".into(), io, cluster, metrics); let state = RoleState::Candidate(Candidate::new(&mut common)); assert!(!state.is_loader()); assert!(state.is_candidate()); } }
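// Editor's note: the `poll` implementation split across the chunks above drives the node with a
// "repeat until nothing made progress" drain loop. Below is an illustrative, dependency-free
// sketch of that pattern only; `PollResult`, `Node`, and the step logic are stand-ins and are
// not raftlog or futures types.
#[derive(Debug, PartialEq)]
enum PollResult<T> {
    Ready(T),
    NotReady,
}

struct Node {
    pending_events: Vec<&'static str>,
    timer_fired: bool,
}

impl Node {
    // One poll pass: run sub-steps until none of them makes progress, returning
    // early as soon as an event is available for the caller.
    fn poll(&mut self) -> PollResult<Option<&'static str>> {
        let mut did_something = true;
        while did_something {
            did_something = false;

            if let Some(event) = self.pending_events.pop() {
                return PollResult::Ready(Some(event));
            }
            if self.timer_fired {
                // Handling a timeout may enqueue new events, so record the
                // progress and let the loop run again.
                self.timer_fired = false;
                self.pending_events.push("timeout handled");
                did_something = true;
            }
        }
        PollResult::NotReady
    }
}

fn main() {
    let mut node = Node {
        pending_events: vec![],
        timer_fired: true,
    };
    assert_eq!(node.poll(), PollResult::Ready(Some("timeout handled")));
    assert_eq!(node.poll(), PollResult::NotReady);
}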
et Some(message) = track!(self.common.try_recv_message())? { did_something = true; if let Some(next) = track!(self.handle_message(message))? { self.handle_role_change(next); } if let Some(e) = self.common.next_event() { return Ok(Async::Ready(Some(e))); } } } Ok(Async::NotReady) } } pub enum RoleState<IO: Io> { Loader(Loader<IO>), Follower(Follower<IO>), Candidate(Candidate<IO>),
random
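// Editor's note: `handle_role_change` above times role transitions (candidate -> leader,
// candidate -> follower, loader -> candidate) and resets the clock on every change. The
// following is an illustrative, dependency-free sketch of that timing pattern; the `Role`
// enum and the recording sink are stand-ins, not the raftlog types or prometrics histograms.
use std::time::Instant;

#[derive(Clone, Copy, Debug, PartialEq)]
enum Role {
    Loader,
    Candidate,
    Follower,
    Leader,
}

struct Transitions {
    role: Role,
    since: Instant,
    // (from, to, seconds) records; a real implementation would observe a histogram instead.
    observed: Vec<(Role, Role, f64)>,
}

impl Transitions {
    fn new() -> Self {
        Transitions {
            role: Role::Loader,
            since: Instant::now(),
            observed: Vec::new(),
        }
    }

    fn change_role(&mut self, next: Role) {
        if next != self.role {
            // Record how long the previous role lasted and restart the clock,
            // mirroring the per-pair match arms in `handle_role_change`.
            let elapsed = self.since.elapsed().as_secs_f64();
            self.observed.push((self.role, next, elapsed));
            self.since = Instant::now();
        }
        self.role = next;
    }
}

fn main() {
    let mut t = Transitions::new();
    t.change_role(Role::Candidate);
    t.change_role(Role::Leader);
    for (from, to, secs) in &t.observed {
        println!("{:?} -> {:?} took {:.6}s", from, to, secs);
    }
}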
[ { "content": "fn make_role_change_histogram(builder: &mut HistogramBuilder) -> Result<Histogram> {\n\n builder\n\n .bucket(0.001)\n\n .bucket(0.005)\n\n .bucket(0.01)\n\n .bucket(0.05)\n\n .bucket(0.1)\n\n .bucket(0.2)\n\n .bucket(0.4)\n\n .bucket(0.6)\n\n .bucket(0.8)\n\n .bucket(1.0)\n\n .bucket(2.0)\n\n .bucket(4.0)\n\n .bucket(6.0)\n\n .bucket(8.0)\n\n .bucket(10.0)\n\n .bucket(20.0)\n\n .bucket(50.0)\n\n .bucket(80.0)\n\n .bucket(320.0)\n\n .bucket(640.0)\n\n .finish()\n\n .map_err(|e| track!(Error::from(e)))\n\n}\n", "file_path": "src/metrics.rs", "rank": 1, "score": 139025.8377108673 }, { "content": "struct InstallSnapshot<IO: Io> {\n\n future: IO::SaveLog,\n\n summary: SnapshotSummary,\n\n}\n\nimpl<IO: Io> InstallSnapshot<IO> {\n\n pub fn new(common: &mut Common<IO>, prefix: LogPrefix) -> Self {\n\n let summary = SnapshotSummary {\n\n tail: prefix.tail,\n\n config: prefix.config.clone(),\n\n };\n\n let future = common.io.save_log_prefix(prefix);\n\n InstallSnapshot { future, summary }\n\n }\n\n}\n\nimpl<IO: Io> Future for InstallSnapshot<IO> {\n\n type Item = SnapshotSummary;\n\n type Error = Error;\n\n fn poll(&mut self) -> Poll<Self::Item, Self::Error> {\n\n Ok(track!(self.future.poll())?.map(|()| self.summary.clone()))\n\n }\n", "file_path": "src/node_state/common/mod.rs", "rank": 2, "score": 137201.41226334873 }, { "content": "#[derive(Debug)]\n\nstruct Follower {\n\n pub obsolete_seq_no: SequenceNumber,\n\n\n\n pub log_tail: LogIndex,\n\n pub last_seq_no: SequenceNumber,\n\n pub synced: bool,\n\n}\n\nimpl Follower {\n\n pub fn new() -> Self {\n\n Follower {\n\n obsolete_seq_no: SequenceNumber::new(0),\n\n\n\n log_tail: LogIndex::new(0),\n\n last_seq_no: SequenceNumber::new(0),\n\n synced: false,\n\n }\n\n }\n\n}\n", "file_path": "src/node_state/leader/follower.rs", "rank": 3, "score": 108950.49814097074 }, { "content": "/// Raftの実行に必要なI/O機能を提供するためのトレイト.\n\n///\n\n/// 機能としてはおおまかに以下の三つに区分される:\n\n///\n\n/// - **ストレージ**\n\n/// - ローカルノードの状態やログを保存するための永続ストレージ\n\n/// - Raftが完全に正しく動作するためには、このストレージは完全に信頼できるものである必要がある\n\n/// - 一度書き込まれたデータは(明示的に削除されない限り)失われたり、壊れたりすることは無い\n\n/// - 実際には、それを達成するのは困難なので、信頼性とストレージコストのトレードオフとなる\n\n/// - **チャンネル**\n\n/// - ノード間通信(RPC)用のメッセージ送受信チャンネル\n\n/// - このチャンネルの信頼性はある程度低くても良い\n\n/// - メッセージ群の順番の入れ替わりや、欠損、重複配送、は許容される\n\n/// - ただし、メッセージの改竄や捏造、はNG\n\n/// - **タイマー**\n\n/// - タイムアウト管理用のタイマー\n\npub trait Io {\n\n /// ローカルノードの投票状況を保存するための`Future`.\n\n type SaveBallot: Future<Item = (), Error = Error>;\n\n\n\n /// ノーカルノードの投票情報を取得ための`Future`.\n\n type LoadBallot: Future<Item = Option<Ballot>, Error = Error>;\n\n\n\n /// ローカルログを保存するための`Future`.\n\n type SaveLog: Future<Item = (), Error = Error>;\n\n\n\n /// ローカルログを取得するための`Future`.\n\n type LoadLog: Future<Item = Log, Error = Error>;\n\n\n\n /// ローカルログの末尾部分を削除するための`Future`.\n\n type DeleteLog: Future<Item = (), Error = Error>;\n\n\n\n /// タイムアウトを表現するための`Future`.\n\n type Timeout: Future<Item = (), Error = Error>;\n\n\n\n /// ローカルノードに対して送信されたメッセージの受信を試みる.\n", "file_path": "src/io.rs", "rank": 4, "score": 101459.91179784937 }, { "content": "/// DSLのコマンド列(プログラム)を実行する関数\n\npub fn interpret(cs: &[Command], service: &mut Service) {\n\n for c in cs {\n\n interpret_command(c.clone(), service);\n\n }\n\n}\n\n\n", "file_path": "src/test_dsl/dsl.rs", "rank": 5, "score": 95593.68786313906 }, { "content": "struct DelayedMessage {\n\n arrival_time: u64,\n\n message: Message,\n\n}\n\nimpl Ord for DelayedMessage {\n\n fn cmp(&self, other: &Self) -> Ordering {\n\n other.arrival_time.cmp(&self.arrival_time)\n\n }\n\n}\n\nimpl 
PartialOrd for DelayedMessage {\n\n fn partial_cmp(&self, other: &Self) -> Option<Ordering> {\n\n other.arrival_time.partial_cmp(&self.arrival_time)\n\n }\n\n}\n\nimpl Eq for DelayedMessage {}\n\nimpl PartialEq for DelayedMessage {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.arrival_time == other.arrival_time\n\n }\n\n}\n", "file_path": "raftlog_simu/src/io/transport.rs", "rank": 6, "score": 94971.0941576854 }, { "content": "#[derive(Debug, Clone)]\n\nstruct SnapshotSummary {\n\n tail: LogPosition,\n\n config: ClusterConfig,\n\n}\n\n\n", "file_path": "src/node_state/common/mod.rs", "rank": 7, "score": 94499.0613769824 }, { "content": "fn message_to_string(m: &Message) -> String {\n\n use Message::*;\n\n match m {\n\n RequestVoteCall(vcall) => {\n\n format!(\n\n \"[Vcall] {:?} {:?} {:?}\",\n\n vcall.header.seq_no, vcall.header.term, vcall.log_tail\n\n )\n\n }\n\n RequestVoteReply(vreply) => {\n\n format!(\n\n \"[Vrep] {:?} {:?} voted={:?}\",\n\n vreply.header.seq_no, vreply.header.term, vreply.voted\n\n )\n\n }\n\n AppendEntriesCall(ecall) => {\n\n format!(\n\n \"[AEcall] {:?} {:?} commited={:?}, suffix={:?}\",\n\n ecall.header.seq_no, ecall.header.term, ecall.committed_log_tail, ecall.suffix\n\n )\n", "file_path": "src/test_dsl/impl_io.rs", "rank": 8, "score": 93605.68174149326 }, { "content": "#[derive(Default, Serialize, Deserialize)]\n\nstruct Config {\n\n simulator: raftlog_simu::SimulatorConfig,\n\n}\n\n\n", "file_path": "raftlog_simu/src/main.rs", "rank": 9, "score": 92714.62722308128 }, { "content": "fn over_write(now: &mut LogSuffix, new: &LogSuffix) {\n\n /*\n\n * 次のような上書き不能な形をしていないか検査する\n\n * now = [...)\n\n * [...) = new\n\n */\n\n assert!(new.head.index <= now.tail().index);\n\n\n\n let (offset, entries_offset) = if now.head.index <= new.head.index {\n\n /*\n\n * [ self ...\n\n * [ next ...\n\n * ^--offset\n\n */\n\n\n\n (new.head.index - now.head.index, 0)\n\n } else {\n\n /*\n\n * Strange case:\n\n * [ self ...\n", "file_path": "src/test_dsl/impl_io.rs", "rank": 10, "score": 81672.86930744267 }, { "content": "/// 引数`rlog`で表される特定のノードが、述語`pred`を満たすかどうかを調べる\n\n/// `pred`を満足する場合は`true`を返し、\n\n/// そうでない場合は`false`を返す。\n\nfn check(rlog: &ReplicatedLog<TestIo>, pred: Pred) -> bool {\n\n use Pred::*;\n\n\n\n match pred {\n\n Not(pred) => !check(rlog, *pred),\n\n IsLeader => rlog.local_node().role == Role::Leader,\n\n IsFollower => rlog.local_node().role == Role::Follower,\n\n LogTermConsistency => {\n\n let mut valid_glue = true;\n\n\n\n // snapshotとrawlogが両方ある場合は、\n\n // snapshotに続く形でrawlogが存在することを確認する\n\n if let Some(snapshot) = &rlog.io().snapshot() {\n\n if let Some(rawlog) = &rlog.io().rawlog() {\n\n valid_glue = snapshot.tail.prev_term == rawlog.head.prev_term;\n\n }\n\n }\n\n\n\n // rawlogが存在する場合は、termが昇順になっていることを確認する\n\n let is_sorted = if let Some(rawlog) = &rlog.io().rawlog() {\n", "file_path": "src/test_dsl/dsl.rs", "rank": 11, "score": 80660.72681458906 }, { "content": "struct Channel {\n\n clock: u64,\n\n queue: BinaryHeap<DelayedMessage>,\n\n}\n\nimpl Channel {\n\n pub fn new() -> Self {\n\n Channel {\n\n clock: 0,\n\n queue: BinaryHeap::new(),\n\n }\n\n }\n\n pub fn try_recv_message(&mut self) -> Option<Message> {\n\n self.clock += 1;\n\n if let Some(m) = self.queue.pop() {\n\n if m.arrival_time <= self.clock {\n\n Some(m.message)\n\n } else {\n\n self.queue.push(m);\n\n None\n\n }\n", "file_path": "raftlog_simu/src/io/transport.rs", "rank": 12, "score": 70896.14353440289 }, { "content": "/// DSLの一つのコマンドを実行する関数\n\nfn interpret_command(c: Command, service: &mut 
Service) {\n\n use futures::Stream;\n\n use Command::*;\n\n\n\n println!(\"\\n Now executing {:?} ...\", &c);\n\n\n\n match c {\n\n TakeSnapshot(node) => {\n\n let rlog = service.get_mut(&node).unwrap();\n\n let index = rlog.local_history().tail().index;\n\n rlog.install_snapshot(index, Vec::new()).unwrap();\n\n }\n\n Check(node, pred) => {\n\n let rlog = service.get_mut(&node).unwrap();\n\n assert!(check(rlog, pred));\n\n }\n\n Heartbeat(node) => {\n\n let rlog = service.get_mut(&node).unwrap();\n\n rlog.heartbeat().unwrap();\n\n }\n", "file_path": "src/test_dsl/dsl.rs", "rank": 13, "score": 66570.51447399502 }, { "content": "/// ノードネームの列が与えられた時に\n\n/// 丁度それらを構成要素として含むようなraft clusterを構成する。\n\n///\n\n/// 全点間通信ができる状態にしてあるので\n\n/// complete graph(w.r.t. ネットワークトポロジー)という語を用いている。\n\npub fn build_complete_graph(names: &[NodeName]) -> (Service, ClusterMembers) {\n\n let nodes: BTreeSet<NodeId> = names.iter().map(|s| NodeId::new(s.to_string())).collect();\n\n\n\n let mut ios = BTreeMap::new();\n\n let mut service = BTreeMap::new();\n\n\n\n for node in &nodes {\n\n ios.insert(node.clone(), TestIo::new(node.clone(), false));\n\n }\n\n\n\n for src in &nodes {\n\n let mut io_src = ios.remove(src).unwrap();\n\n for dst in &nodes {\n\n if src != dst {\n\n let io_dst = ios.get(dst).unwrap();\n\n io_src.set_channel(dst.clone(), io_dst.copy_sender());\n\n }\n\n }\n\n ios.insert(src.clone(), io_src);\n\n }\n", "file_path": "src/test_dsl/dsl.rs", "rank": 14, "score": 60192.87269735464 }, { "content": "#[allow(dead_code)]\n\nfn ballot_to_str(b: &Ballot) -> String {\n\n format!(\n\n \"ballot(term: {}, for: {})\",\n\n b.term.as_u64(),\n\n b.voted_for.as_str()\n\n )\n\n}\n\n\n", "file_path": "src/test_dsl/impl_io.rs", "rank": 15, "score": 59088.17132434524 }, { "content": "use futures::{Async, Future};\n\nuse std::collections::BTreeMap;\n\nuse std::mem;\n\nuse trackable::error::ErrorKindExt;\n\n\n\nuse super::super::Common;\n\nuse crate::cluster::ClusterConfig;\n\nuse crate::log::{Log, LogIndex};\n\nuse crate::message::{AppendEntriesReply, SequenceNumber};\n\nuse crate::node::NodeId;\n\nuse crate::{ErrorKind, Io, Result};\n\n\n\n/// フォロワーの管理者.\n\n///\n\n/// フォロワー一覧と、それぞれのローカルログの状態の把握が主責務.\n\n/// フォロワーのローカルログがリーダのものよりも遅れている場合には、\n\n/// その同期(差分送信)も実施する.\n\npub struct FollowersManager<IO: Io> {\n\n followers: BTreeMap<NodeId, Follower>,\n\n config: ClusterConfig,\n", "file_path": "src/node_state/leader/follower.rs", "rank": 16, "score": 58041.78424847584 }, { "content": " }\n\n pub fn run_once(&mut self, common: &mut Common<IO>) -> Result<()> {\n\n // バックグランドタスク(ログ同期用の読み込み処理)を実行する.\n\n let mut dones = Vec::new();\n\n for (follower, task) in &mut self.tasks {\n\n if let Async::Ready(log) = track!(task.poll())? 
{\n\n dones.push((follower.clone(), log));\n\n }\n\n }\n\n for (follower, log) in dones {\n\n let rpc = common.rpc_caller();\n\n match log {\n\n Log::Prefix(snapshot) => rpc.send_install_snapshot(&follower, snapshot),\n\n Log::Suffix(slice) => rpc.send_append_entries(&follower, slice),\n\n }\n\n self.tasks.remove(&follower);\n\n }\n\n Ok(())\n\n }\n\n pub fn latest_hearbeat_ack(&self) -> SequenceNumber {\n", "file_path": "src/node_state/leader/follower.rs", "rank": 17, "score": 58037.94537748795 }, { "content": " .filter(|&(ref id, _)| config.is_known_node(id))\n\n .collect();\n\n\n\n self.config = config.clone();\n\n }\n\n\n\n fn update_follower_state(&mut self, common: &Common<IO>, reply: &AppendEntriesReply) -> bool {\n\n let follower = &mut self\n\n .followers\n\n .get_mut(&reply.header.sender)\n\n .expect(\"Never fails\");\n\n if follower.last_seq_no < reply.header.seq_no {\n\n follower.last_seq_no = reply.header.seq_no;\n\n }\n\n match *reply {\n\n AppendEntriesReply { busy: true, .. } => false,\n\n AppendEntriesReply { log_tail, .. } if follower.synced => {\n\n let updated = follower.log_tail < log_tail.index;\n\n if updated {\n\n follower.log_tail = log_tail.index;\n", "file_path": "src/node_state/leader/follower.rs", "rank": 18, "score": 58036.92544949854 }, { "content": " /// 基本的には`committed_log_tail`と同じ動作となるが、\n\n /// 「構成変更中」かつ「`ClusterState`の値が`CatchUp`」の場合でも、\n\n /// こちらの関数は常に新旧両方から「過半数以上」を要求する点が異なる.\n\n pub fn joint_committed_log_tail(&self) -> LogIndex {\n\n self.config.full_consensus_value(|node_id| {\n\n let f = &self.followers[node_id];\n\n if f.synced {\n\n f.log_tail\n\n } else {\n\n LogIndex::new(0)\n\n }\n\n })\n\n }\n\n\n\n pub fn handle_append_entries_reply(\n\n &mut self,\n\n common: &Common<IO>,\n\n reply: &AppendEntriesReply,\n\n ) -> bool {\n\n let updated = self.update_follower_state(common, reply);\n", "file_path": "src/node_state/leader/follower.rs", "rank": 19, "score": 58034.96700987258 }, { "content": " if self.latest_hearbeat_ack < reply.header.seq_no {\n\n self.latest_hearbeat_ack = self\n\n .config\n\n .consensus_value(|node_id| self.followers[node_id].last_seq_no);\n\n }\n\n updated\n\n }\n\n\n\n pub fn set_last_broadcast_seq_no(&mut self, seq_no: SequenceNumber) {\n\n self.last_broadcast_seq_no = seq_no;\n\n }\n\n\n\n /// フォロワーのローカルログとの同期処理を実行する.\n\n pub fn log_sync(&mut self, common: &mut Common<IO>, reply: &AppendEntriesReply) -> Result<()> {\n\n if reply.busy || self.tasks.contains_key(&reply.header.sender) {\n\n // フォロワーが忙しい or 既に同期処理が進行中\n\n return Ok(());\n\n }\n\n\n\n let follower = track!(self\n", "file_path": "src/node_state/leader/follower.rs", "rank": 20, "score": 58034.42714041459 }, { "content": " // フォロワーのログとリーダのログの同期(合流)点を探索中\n\n follower.log_tail\n\n };\n\n let future = common.load_log(follower.log_tail, Some(end));\n\n self.tasks.insert(reply.header.sender.clone(), future);\n\n Ok(())\n\n }\n\n\n\n /// クラスタ構成の変更に追従する.\n\n pub fn handle_config_updated(&mut self, config: &ClusterConfig) {\n\n // Add\n\n for id in config.members() {\n\n if !self.followers.contains_key(id) {\n\n self.followers.insert(id.clone(), Follower::new());\n\n }\n\n }\n\n\n\n // Delete\n\n self.followers = mem::take(&mut self.followers)\n\n .into_iter()\n", "file_path": "src/node_state/leader/follower.rs", "rank": 21, "score": 58032.26113010061 }, { "content": " latest_hearbeat_ack: SequenceNumber,\n\n last_broadcast_seq_no: SequenceNumber,\n\n\n\n // `raft_test_simu`のために非決定的な要素は排除したいので、\n\n // `HashMap`ではなく`BTreeMap`を使用している.\n\n tasks: BTreeMap<NodeId, 
IO::LoadLog>,\n\n}\n\nimpl<IO: Io> FollowersManager<IO> {\n\n pub fn new(config: ClusterConfig) -> Self {\n\n let followers = config\n\n .members()\n\n .map(|n| (n.clone(), Follower::new()))\n\n .collect();\n\n FollowersManager {\n\n followers,\n\n config,\n\n tasks: BTreeMap::new(),\n\n latest_hearbeat_ack: SequenceNumber::new(0),\n\n last_broadcast_seq_no: SequenceNumber::new(0),\n\n }\n", "file_path": "src/node_state/leader/follower.rs", "rank": 22, "score": 58031.09723659044 }, { "content": " .followers\n\n .get_mut(&reply.header.sender)\n\n .ok_or_else(|| ErrorKind::InconsistentState.error()))?;\n\n if reply.header.seq_no <= follower.obsolete_seq_no {\n\n // 平行度が高くなりすぎるのを防止するために、\n\n // propose(broadcast)が重なった場合には、\n\n // `obsolete_seq_no`以前のbroadcastに対する応答は古いものとして処理を省く.\n\n return Ok(());\n\n }\n\n follower.obsolete_seq_no = self.last_broadcast_seq_no;\n\n\n\n if common.log().tail().index <= follower.log_tail {\n\n // The follower is up-to-date\n\n return Ok(());\n\n }\n\n\n\n let end = if follower.synced {\n\n // フォロワーのログとリーダのログの差分を送信\n\n common.log().tail().index\n\n } else {\n", "file_path": "src/node_state/leader/follower.rs", "rank": 23, "score": 58027.88016646529 }, { "content": " self.latest_hearbeat_ack\n\n }\n\n\n\n /// コミット済みログ領域の終端を返す.\n\n ///\n\n /// \"コミット済み\"とは「投票権を有するメンバの過半数以上のローカルログに存在する」ということを意味する.\n\n /// (構成変更中で、新旧構成の両方に投票権が存在する場合には、そのそれぞれの過半数以上)\n\n pub fn committed_log_tail(&self) -> LogIndex {\n\n self.config.consensus_value(|node_id| {\n\n let f = &self.followers[node_id];\n\n if f.synced {\n\n f.log_tail\n\n } else {\n\n LogIndex::new(0)\n\n }\n\n })\n\n }\n\n\n\n /// ジョイントコミット済みのログ領域の終端を返す.\n\n ///\n", "file_path": "src/node_state/leader/follower.rs", "rank": 24, "score": 58024.77206584015 }, { "content": " } else if log_tail.index.as_u64() == 0 && follower.log_tail.as_u64() != 0 {\n\n // NOTE: followerのデータがクリアされたものと判断する\n\n // FIXME: ちゃんとした実装にする(e.g., ノードに再起動毎に替わるようなIDを付与して、その一致を確認する)\n\n follower.synced = false;\n\n }\n\n updated\n\n }\n\n AppendEntriesReply { log_tail, .. 
} => {\n\n let leader_term = common\n\n .log()\n\n .get_record(log_tail.index)\n\n .map(|r| r.head.prev_term);\n\n follower.synced = leader_term == Some(log_tail.prev_term);\n\n if follower.synced {\n\n follower.log_tail = log_tail.index;\n\n } else {\n\n follower.log_tail = log_tail.index.as_u64().saturating_sub(1).into();\n\n }\n\n follower.synced\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n", "file_path": "src/node_state/leader/follower.rs", "rank": 25, "score": 58024.44264087765 }, { "content": " Probability { prob: 0.05 }\n\n }\n\n\n\n /// `0.01`\n\n pub fn default_duplicate() -> Probability {\n\n Probability { prob: 0.01 }\n\n }\n\n}\n\nimpl Default for ChannelConfig {\n\n fn default() -> Self {\n\n ChannelConfig {\n\n delay: ChannelConfig::default_delay(),\n\n drop: ChannelConfig::default_drop(),\n\n duplicate: ChannelConfig::default_duplicate(),\n\n }\n\n }\n\n}\n", "file_path": "raftlog_simu/src/io/configs.rs", "rank": 26, "score": 57661.509198472544 }, { "content": " pub fn default_election_timeout() -> LogicalDuration {\n\n 1000\n\n }\n\n\n\n /// `heartbeat_interval`フィールドのデフォルト値 (`100`).\n\n pub fn default_heartbeat_interval() -> LogicalDuration {\n\n 100\n\n }\n\n}\n\nimpl Default for TimerConfig {\n\n fn default() -> Self {\n\n TimerConfig {\n\n election_timeout: TimerConfig::default_election_timeout(),\n\n heartbeat_interval: TimerConfig::default_heartbeat_interval(),\n\n }\n\n }\n\n}\n\n\n\n/// `Storage`用の構成設定.\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n", "file_path": "raftlog_simu/src/io/configs.rs", "rank": 27, "score": 57661.36362689659 }, { "content": "//! I/O関連の構成設定を集めたモジュール.\n\nuse crate::types::{LogicalDuration, Probability, Range};\n\n\n\n/// `Timer`用の構成設定.\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct TimerConfig {\n\n /// 一つの選挙期間のタイムアウト尺.\n\n ///\n\n /// リーダからのハートビートを受信しない期間が、\n\n /// ここで指定された尺を超えた場合には、\n\n /// リーダがダウンしたものと判断されて、次の選挙が始まる.\n\n #[serde(default = \"TimerConfig::default_election_timeout\")]\n\n pub election_timeout: LogicalDuration,\n\n\n\n /// リーダがハートビートを発行する間隔.\n\n #[serde(default = \"TimerConfig::default_heartbeat_interval\")]\n\n pub heartbeat_interval: LogicalDuration,\n\n}\n\nimpl TimerConfig {\n\n /// `election_timeout`フィールドのデフォルト値 (`1000`).\n", "file_path": "raftlog_simu/src/io/configs.rs", "rank": 28, "score": 57660.99409767755 }, { "content": " pub drop: Probability,\n\n\n\n /// メッセージの重複率.\n\n ///\n\n /// `1.0`なら(消失しなかった)全てのメッセージが複製される.\n\n #[serde(default = \"ChannelConfig::default_duplicate\")]\n\n pub duplicate: Probability,\n\n\n\n /// メッセージ遅延.\n\n #[serde(default = \"ChannelConfig::default_delay\")]\n\n pub delay: Range<LogicalDuration>,\n\n}\n\nimpl ChannelConfig {\n\n /// `10..50`\n\n pub fn default_delay() -> Range<LogicalDuration> {\n\n Range { min: 10, max: 50 }\n\n }\n\n\n\n /// `0.05`\n\n pub fn default_drop() -> Probability {\n", "file_path": "raftlog_simu/src/io/configs.rs", "rank": 29, "score": 57659.6748234707 }, { "content": "impl Default for StorageConfig {\n\n fn default() -> Self {\n\n StorageConfig {\n\n save_ballot_time: StorageConfig::default_save_ballot_time(),\n\n load_ballot_time: StorageConfig::default_load_ballot_time(),\n\n save_log_entry_time: StorageConfig::default_save_log_entry_time(),\n\n load_log_entry_time: StorageConfig::default_load_log_entry_time(),\n\n save_log_snapshot_time: StorageConfig::default_save_log_snapshot_time(),\n\n load_log_snapshot_time: StorageConfig::default_load_log_snapshot_time(),\n\n }\n\n }\n\n}\n\n\n\n/// 通信チャンネルの構成設定.\n\n#[derive(Debug, Clone, 
Serialize, Deserialize)]\n\npub struct ChannelConfig {\n\n /// メッセージの消失率.\n\n ///\n\n /// `1.0`なら全てのメッセージが相手に届くことなく消失する.\n\n #[serde(default = \"ChannelConfig::default_drop\")]\n", "file_path": "raftlog_simu/src/io/configs.rs", "rank": 30, "score": 57659.40527873209 }, { "content": "\n\n /// スナップショットの保存に要する時間.\n\n #[serde(default = \"StorageConfig::default_save_log_snapshot_time\")]\n\n pub save_log_snapshot_time: Range<LogicalDuration>,\n\n\n\n /// スナップショットの読み込みに要する時間.\n\n #[serde(default = \"StorageConfig::default_load_log_snapshot_time\")]\n\n pub load_log_snapshot_time: Range<LogicalDuration>,\n\n}\n\nimpl StorageConfig {\n\n /// `1...5`\n\n pub fn default_save_ballot_time() -> Range<LogicalDuration> {\n\n Range { min: 1, max: 5 }\n\n }\n\n\n\n /// `1...5`\n\n pub fn default_load_ballot_time() -> Range<LogicalDuration> {\n\n Range { min: 1, max: 5 }\n\n }\n\n\n", "file_path": "raftlog_simu/src/io/configs.rs", "rank": 31, "score": 57658.746207539225 }, { "content": "pub struct StorageConfig {\n\n /// 投票状況の保存に要する時間.\n\n #[serde(default = \"StorageConfig::default_save_ballot_time\")]\n\n pub save_ballot_time: Range<LogicalDuration>,\n\n\n\n /// 投票状況の復元に要する時間.\n\n #[serde(default = \"StorageConfig::default_load_ballot_time\")]\n\n pub load_ballot_time: Range<LogicalDuration>,\n\n\n\n /// 個々のログエントリの保存に要する時間.\n\n ///\n\n /// 対象のエントリ数がNの場合には、時間はN倍になる.\n\n #[serde(default = \"StorageConfig::default_save_log_entry_time\")]\n\n pub save_log_entry_time: Range<LogicalDuration>,\n\n\n\n /// 個々のログエントリの読み込みに要する時間.\n\n ///\n\n /// 対象のエントリ数がNの場合には、時間はN倍になる.\n\n #[serde(default = \"StorageConfig::default_load_log_entry_time\")]\n\n pub load_log_entry_time: Range<LogicalDuration>,\n", "file_path": "raftlog_simu/src/io/configs.rs", "rank": 32, "score": 57658.67265777346 }, { "content": " /// `1...5`\n\n pub fn default_save_log_entry_time() -> Range<LogicalDuration> {\n\n Range { min: 1, max: 5 }\n\n }\n\n\n\n /// `1...5`\n\n pub fn default_load_log_entry_time() -> Range<LogicalDuration> {\n\n Range { min: 1, max: 5 }\n\n }\n\n\n\n /// `100...500`\n\n pub fn default_save_log_snapshot_time() -> Range<LogicalDuration> {\n\n Range { min: 100, max: 500 }\n\n }\n\n\n\n /// `100...500`\n\n pub fn default_load_log_snapshot_time() -> Range<LogicalDuration> {\n\n Range { min: 100, max: 500 }\n\n }\n\n}\n", "file_path": "raftlog_simu/src/io/configs.rs", "rank": 33, "score": 57655.34737015035 }, { "content": " term: common.term(),\n\n };\n\n appender.append(common, vec![noop]);\n\n\n\n Leader {\n\n followers,\n\n appender,\n\n commit_lower_bound: term_start_index,\n\n }\n\n }\n\n pub fn handle_timeout(&mut self, common: &mut Common<IO>) -> Result<NextState<IO>> {\n\n self.broadcast_empty_entries(common);\n\n Ok(None)\n\n }\n\n pub fn handle_message(\n\n &mut self,\n\n common: &mut Common<IO>,\n\n message: Message,\n\n ) -> Result<NextState<IO>> {\n\n if let Message::AppendEntriesReply(reply) = message {\n", "file_path": "src/node_state/leader/mod.rs", "rank": 34, "score": 57425.70600549238 }, { "content": "use self::appender::LogAppender;\n\nuse self::follower::FollowersManager;\n\nuse super::{Common, NextState};\n\nuse crate::election::Role;\n\nuse crate::log::{LogEntry, LogIndex, LogSuffix, ProposalId};\n\nuse crate::message::{Message, SequenceNumber};\n\nuse crate::{ErrorKind, Io, Result};\n\n\n\nmod appender;\n\nmod follower;\n\n\n\n/// 選挙で選ばれたリーダ.\n\n///\n\n/// 主に、以下のようなことを行う:\n\n///\n\n/// - フォロワーとのログ同期:\n\n/// - フォロワーのログが遅れている場合には、適宜`AppendEntriesCall`を使って差分を送信\n\n/// - 
リーダのログ先頭よりも遅れている場合には、`InstallSnapshotCast`を送信\n\n/// - 新規ログエントリの処理:\n\n/// - ローカルログへの追記およびフォロワーへのブロードキャスト、を行う\n", "file_path": "src/node_state/leader/mod.rs", "rank": 35, "score": 57424.408984412126 }, { "content": "/// - 過半数からの承認(ログ保存)を得られた時点で、コミット済みとなる\n\n/// - クラスタ構成変更対応:\n\n/// - 整合性を維持しながらの動的クラスタ構成変更用の処理諸々\n\n/// - e.g., join-consensusを間に挟んだ段階的な構成移行\n\n/// - 定期的なハートビートメッセージのブロードキャストによるリーダ維持\n\npub struct Leader<IO: Io> {\n\n followers: FollowersManager<IO>,\n\n appender: LogAppender<IO>,\n\n commit_lower_bound: LogIndex,\n\n}\n\nimpl<IO: Io> Leader<IO> {\n\n pub fn new(common: &mut Common<IO>) -> Self {\n\n common.set_timeout(Role::Leader);\n\n let term_start_index = common.log().tail().index;\n\n let followers = FollowersManager::new(common.config().clone());\n\n let mut appender = LogAppender::new();\n\n\n\n // 新しいリーダ選出直後に追加されるログエントリ.\n\n // 詳細は、論文の「8 Client interaction」参照.\n\n let noop = LogEntry::Noop {\n", "file_path": "src/node_state/leader/mod.rs", "rank": 36, "score": 57421.49114426277 }, { "content": " let updated = self.followers.handle_append_entries_reply(common, &reply);\n\n\n\n track!(self.followers.log_sync(common, &reply))?;\n\n\n\n if updated {\n\n track!(self.handle_committed_log(common))?;\n\n }\n\n }\n\n Ok(None)\n\n }\n\n pub fn run_once(&mut self, common: &mut Common<IO>) -> Result<NextState<IO>> {\n\n while let Some(appended) = track!(self.appender.run_once(common))? {\n\n for e in &appended.entries {\n\n if let LogEntry::Config { ref config, .. } = *e {\n\n self.followers.handle_config_updated(config);\n\n\n\n // 構成変更のタイミングによっては、\n\n // 一時的にコミット済み領域が巻き戻る可能性があるので、\n\n // それを防ぐために、下限を更新する.\n\n //\n", "file_path": "src/node_state/leader/mod.rs", "rank": 37, "score": 57420.86947310856 }, { "content": " // 新構成のメンバのローカルログが、旧構成のものに追い付いた\n\n // => 構成変更の次のフェーズに遷移\n\n let term = common.term();\n\n let config = common.config().to_next_state();\n\n let entry = LogEntry::Config { term, config };\n\n self.propose(common, entry);\n\n }\n\n Ok(())\n\n }\n\n fn next_proposal_id(&self, common: &Common<IO>) -> ProposalId {\n\n let term = common.term();\n\n let index = self.appender.unappended_log_tail(common);\n\n ProposalId { term, index }\n\n }\n\n fn broadcast_slice(&mut self, common: &mut Common<IO>, slice: LogSuffix) {\n\n self.followers\n\n .set_last_broadcast_seq_no(common.next_seq_no());\n\n common.set_timeout(Role::Leader);\n\n common.rpc_caller().broadcast_append_entries(slice);\n\n }\n", "file_path": "src/node_state/leader/mod.rs", "rank": 38, "score": 57419.1552699235 }, { "content": " }\n\n track!(self.handle_change_config(common))?;\n\n track!(self.followers.run_once(common))?;\n\n Ok(None)\n\n }\n\n pub fn propose(&mut self, common: &mut Common<IO>, entry: LogEntry) -> ProposalId {\n\n let proposal_id = self.next_proposal_id(common);\n\n self.appender.append(common, vec![entry]);\n\n proposal_id\n\n }\n\n pub fn heartbeat_syn(&mut self, common: &mut Common<IO>) -> SequenceNumber {\n\n let seq_no = common.next_seq_no();\n\n self.broadcast_empty_entries(common);\n\n seq_no\n\n }\n\n pub fn proposal_queue_len(&self, common: &Common<IO>) -> usize {\n\n self.appender.unappended_log_tail(common) - common.log().tail().index\n\n }\n\n pub fn last_heartbeat_ack(&self) -> SequenceNumber {\n\n self.followers.latest_hearbeat_ack()\n", "file_path": "src/node_state/leader/mod.rs", "rank": 39, "score": 57417.46443358406 }, { "content": " }\n\n\n\n fn handle_change_config(&mut self, common: &mut Common<IO>) -> Result<()> {\n\n if common.config().state().is_stable() {\n\n return 
Ok(());\n\n }\n\n\n\n if self.appender.is_busy() {\n\n // 前回の構成変更用に追記したログエントリがまだ処理されていない可能性がある\n\n return Ok(());\n\n }\n\n\n\n let committed = self.followers.committed_log_tail();\n\n if committed < common.log().last_record().head.index {\n\n // まだ新構成がコミットされていない可能性がある\n\n return Ok(());\n\n }\n\n\n\n let joint_committed = self.followers.joint_committed_log_tail();\n\n if joint_committed == committed {\n", "file_path": "src/node_state/leader/mod.rs", "rank": 40, "score": 57415.14404024563 }, { "content": " fn broadcast_empty_entries(&mut self, common: &mut Common<IO>) {\n\n let head = common.log().tail();\n\n let entries = Vec::new();\n\n let slice = LogSuffix { head, entries };\n\n self.broadcast_slice(common, slice);\n\n }\n\n fn handle_committed_log(&mut self, common: &mut Common<IO>) -> Result<()> {\n\n let committed = self.followers.committed_log_tail();\n\n if committed < self.commit_lower_bound {\n\n // コミット済みのログ領域でも、現在のtermよりも前に追加されたものはまだコミットできない.\n\n // 詳細は論文の「5.4.2 Committing entries from previous terms」を参照のこと.\n\n return Ok(());\n\n }\n\n\n\n let old = common.log().committed_tail();\n\n if old.index == committed {\n\n // 未処理していないコミット済みログ領域は無い.\n\n return Ok(());\n\n }\n\n track_assert!(\n", "file_path": "src/node_state/leader/mod.rs", "rank": 41, "score": 57409.25657579174 }, { "content": " old.index < committed,\n\n ErrorKind::InconsistentState,\n\n \"old={:?}, committed={:?}\",\n\n old,\n\n committed\n\n );\n\n\n\n // 履歴に新しいコミット済み領域を記録する.\n\n // 新規コミット済み領域の処理は`Common::run_once`関数の中で行われる.\n\n track!(common.handle_log_committed(committed))?;\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/node_state/leader/mod.rs", "rank": 42, "score": 57403.13685803951 }, { "content": " // e.g., `Joint構成への遷移時に巻き戻りが発生するケース\n\n //\n\n // - 1. 新旧のコミット地点が一致したので`Joint`に遷移\n\n // - 2. `Joint`用の構成をリーダのローカルログに追記開始\n\n // - 3. 追記が完了する前に、旧構成のコミット地点のみが進む\n\n // - 4. `Joint`構成の追記が完了 => 以後はJoint合意が使用される\n\n // - 5. 
この時点で新構成のコミット地点の方が遅れているので、\n\n // Joint合意を取ると最悪で、1の地点までコミットが巻き戻る\n\n //\n\n // 上のケース以外にも、複数の構成変更が並行して実行された場合にも、\n\n // 一時的な巻き戻りが発生する可能性がある.\n\n if self.commit_lower_bound < common.log().committed_tail().index {\n\n // NOTE:\n\n // `commit_lower_bound`は、`Term`の開始地点を記録するためにも使っているため、\n\n // 上の条件式が必要.\n\n self.commit_lower_bound = common.log().committed_tail().index;\n\n }\n\n }\n\n }\n\n self.broadcast_slice(common, appended);\n", "file_path": "src/node_state/leader/mod.rs", "rank": 43, "score": 57399.16737011383 }, { "content": "use futures::{Async, Future, Poll};\n\nuse std::collections::VecDeque;\n\n\n\nuse self::rpc_builder::{RpcCallee, RpcCaller};\n\nuse super::candidate::Candidate;\n\nuse super::follower::Follower;\n\nuse super::leader::Leader;\n\nuse super::{NextState, RoleState};\n\nuse crate::cluster::ClusterConfig;\n\nuse crate::election::{Ballot, Role, Term};\n\nuse crate::log::{Log, LogHistory, LogIndex, LogPosition, LogPrefix, LogSuffix};\n\nuse crate::message::{Message, MessageHeader, SequenceNumber};\n\nuse crate::metrics::NodeStateMetrics;\n\nuse crate::node::{Node, NodeId};\n\nuse crate::{Error, ErrorKind, Event, Io, Result};\n\n\n\nmod rpc_builder;\n\n\n\n/// 全ての状態に共通する処理をまとめた構造体.\n\npub struct Common<IO: Io> {\n", "file_path": "src/node_state/common/mod.rs", "rank": 44, "score": 57237.09832725095 }, { "content": " pub fn set_timeout(&mut self, role: Role) {\n\n self.timeout = self.io.create_timeout(role);\n\n }\n\n\n\n /// タイムアウトに達していないかを確認する.\n\n pub fn poll_timeout(&mut self) -> Result<Async<()>> {\n\n track!(self.timeout.poll())\n\n }\n\n\n\n /// ユーザに通知するイベントがある場合には、それを返す.\n\n pub fn next_event(&mut self) -> Option<Event> {\n\n self.metrics.event_queue_len.decrement();\n\n self.events.pop_front()\n\n }\n\n\n\n /// 受信メッセージがある場合には、それを返す.\n\n pub fn try_recv_message(&mut self) -> Result<Option<Message>> {\n\n if let Some(message) = self.unread_message.take() {\n\n Ok(Some(message))\n\n } else {\n", "file_path": "src/node_state/common/mod.rs", "rank": 45, "score": 57232.48685915258 }, { "content": " // * FollowerDelete(Followerのsubstate)\n\n log_is_being_deleted: bool,\n\n}\n\nimpl<IO> Common<IO>\n\nwhere\n\n IO: Io,\n\n{\n\n /// 新しい`Common`インスタンスを生成する.\n\n pub fn new(\n\n node_id: NodeId,\n\n mut io: IO,\n\n config: ClusterConfig,\n\n metrics: NodeStateMetrics,\n\n ) -> Self {\n\n // 最初は(仮に)フォロワーだとしておく\n\n let timeout = io.create_timeout(Role::Follower);\n\n Common {\n\n local_node: Node::new(node_id),\n\n io,\n\n history: LogHistory::new(config),\n", "file_path": "src/node_state/common/mod.rs", "rank": 46, "score": 57228.70933558046 }, { "content": " // そのスナップショットのロードが行われるまでの間には、上の条件が`false`になる可能性がある.\n\n track!(self.history.record_consumed(new_tail.index))?;\n\n }\n\n Ok(())\n\n }\n\n fn set_role(&mut self, new_role: Role) {\n\n if self.local_node.role != new_role {\n\n self.local_node.role = new_role;\n\n self.events.push_back(Event::RoleChanged { new_role });\n\n }\n\n }\n\n fn is_following_sender(&self, message: &Message) -> bool {\n\n self.local_node.ballot.voted_for == message.header().sender\n\n }\n\n}\n\n\n\npub enum HandleMessageResult<IO: Io> {\n\n Handled(Option<RoleState<IO>>),\n\n Unhandled(Message),\n\n}\n\n\n\n#[derive(Debug, Clone)]\n", "file_path": "src/node_state/common/mod.rs", "rank": 47, "score": 57224.151634763264 }, { "content": " track!(self.io.try_recv_message())\n\n }\n\n }\n\n\n\n /// ローカルログのスナップショットのインストールを開始する.\n\n pub fn install_snapshot(&mut self, snapshot: LogPrefix) -> Result<()> {\n\n track_assert!(\n\n self.history.head().index <= 
snapshot.tail.index,\n\n ErrorKind::InconsistentState\n\n );\n\n track_assert!(self.install_snapshot.is_none(), ErrorKind::Busy);\n\n\n\n let future = InstallSnapshot::new(self, snapshot);\n\n self.install_snapshot = Some(future);\n\n Ok(())\n\n }\n\n\n\n /// 受信メッセージに対する共通的な処理を実行する.\n\n pub fn handle_message(&mut self, message: Message) -> HandleMessageResult<IO> {\n\n if self.local_node.role == Role::Leader\n", "file_path": "src/node_state/common/mod.rs", "rank": 48, "score": 57223.090954929794 }, { "content": " self.set_ballot(new_ballot);\n\n self.set_role(Role::Candidate);\n\n RoleState::Candidate(Candidate::new(self))\n\n }\n\n\n\n /// `Follower`状態に遷移する.\n\n pub fn transit_to_follower(\n\n &mut self,\n\n followee: NodeId,\n\n pending_vote: Option<MessageHeader>,\n\n ) -> RoleState<IO> {\n\n self.metrics.transit_to_follower_total.increment();\n\n let new_ballot = Ballot {\n\n term: self.local_node.ballot.term,\n\n voted_for: followee,\n\n };\n\n self.set_ballot(new_ballot);\n\n self.set_role(Role::Follower);\n\n self.notify_new_leader_elected();\n\n RoleState::Follower(Follower::new(self, pending_vote))\n", "file_path": "src/node_state/common/mod.rs", "rank": 49, "score": 57222.98100245178 }, { "content": " return self.log().tail().index < snapshot.summary.tail.index;\n\n }\n\n false\n\n }\n\n\n\n /// `Leader`状態に遷移する.\n\n pub fn transit_to_leader(&mut self) -> RoleState<IO> {\n\n self.metrics.transit_to_leader_total.increment();\n\n self.set_role(Role::Leader);\n\n self.notify_new_leader_elected();\n\n RoleState::Leader(Leader::new(self))\n\n }\n\n\n\n /// `Candidate`状態に遷移する.\n\n pub fn transit_to_candidate(&mut self) -> RoleState<IO> {\n\n self.metrics.transit_to_candidate_total.increment();\n\n let new_ballot = Ballot {\n\n term: (self.local_node.ballot.term.as_u64() + 1).into(),\n\n voted_for: self.local_node.id.clone(),\n\n };\n", "file_path": "src/node_state/common/mod.rs", "rank": 50, "score": 57222.70058097575 }, { "content": " * 最初の AE-call を受け取ってから開始するので、\n\n * T になった\"以降\"も、一度もlogに対する変更は行われていない。\n\n */\n\n debug_assert!(!self.log_is_being_deleted);\n\n\n\n // リーダが確定したので、フォロー先を変更する\n\n let leader = message.header().sender.clone();\n\n self.unread_message = Some(message);\n\n let next = self.transit_to_follower(leader, None);\n\n HandleMessageResult::Handled(Some(next))\n\n }\n\n _ => HandleMessageResult::Unhandled(message), // 個別のロールに処理を任せる\n\n }\n\n }\n\n }\n\n\n\n /// バックグランド処理を一単位実行する.\n\n pub fn run_once(&mut self) -> Result<NextState<IO>> {\n\n loop {\n\n // スナップショットのインストール処理\n", "file_path": "src/node_state/common/mod.rs", "rank": 51, "score": 57222.15232197213 }, { "content": " let leader = message.header().sender.clone();\n\n self.unread_message = Some(message);\n\n self.transit_to_follower(leader, None)\n\n } else if self.local_node.role == Role::Leader {\n\n self.transit_to_candidate()\n\n } else {\n\n let local = self.local_node.id.clone();\n\n self.transit_to_follower(local, None)\n\n };\n\n HandleMessageResult::Handled(Some(next_state))\n\n } else if message.header().term < self.local_node.ballot.term {\n\n // c) 自分のtermの方が大きい => 選挙期間が古くなっていることを送信元の通知\n\n\n\n // NOTE: 返信メッセージの中身は重要ではないので、一番害の無さそうなものを送っておく\n\n self.rpc_callee(message.header()).reply_request_vote(false);\n\n HandleMessageResult::Handled(None)\n\n } else {\n\n // d) 同じ選挙期間に属するノードからのメッセージ\n\n match message {\n\n Message::RequestVoteCall { .. 
} if !self.is_following_sender(&message) => {\n", "file_path": "src/node_state/common/mod.rs", "rank": 52, "score": 57221.69103105065 }, { "content": " }\n\n\n\n /// 新しいリーダーが選出されたことを通知する.\n\n pub fn notify_new_leader_elected(&mut self) {\n\n self.events.push_back(Event::NewLeaderElected);\n\n }\n\n\n\n /// 次のメッセージ送信に使用されるシーケンス番号を返す.\n\n ///\n\n /// このメソッド自体は単に値を返すのみであり、\n\n /// 番号のインクリメントを行うことはない.\n\n pub fn next_seq_no(&self) -> SequenceNumber {\n\n self.seq_no\n\n }\n\n\n\n /// `IO`への参照を返す.\n\n pub fn io(&self) -> &IO {\n\n &self.io\n\n }\n\n\n", "file_path": "src/node_state/common/mod.rs", "rank": 53, "score": 57221.0145405834 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use prometrics::metrics::MetricBuilder;\n\n use trackable::result::TestResult;\n\n\n\n use crate::log::{LogEntry, LogPrefix};\n\n use crate::metrics::NodeStateMetrics;\n\n use crate::test_util::tests::TestIoBuilder;\n\n\n\n #[test]\n\n fn is_snapshot_installing_works() -> TestResult {\n\n let node_id: NodeId = \"node1\".into();\n\n let metrics = track!(NodeStateMetrics::new(&MetricBuilder::new()))?;\n\n let io = TestIoBuilder::new()\n\n .add_member(node_id.clone())\n\n .add_member(\"node2\".into())\n\n .add_member(\"node3\".into())\n", "file_path": "src/node_state/common/mod.rs", "rank": 54, "score": 57215.92866989018 }, { "content": " .finish();\n\n let cluster = io.cluster.clone();\n\n let mut common = Common::new(node_id, io, cluster.clone(), metrics);\n\n let prefix = LogPrefix {\n\n tail: LogPosition::default(),\n\n config: cluster,\n\n snapshot: Vec::default(),\n\n };\n\n\n\n assert!(!common.is_snapshot_installing());\n\n common.install_snapshot(prefix)?;\n\n assert!(common.is_snapshot_installing());\n\n\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn is_focusing_on_installing_snapshot_works() -> TestResult {\n\n let node_id: NodeId = \"node1\".into();\n\n let metrics = track!(NodeStateMetrics::new(&MetricBuilder::new()))?;\n", "file_path": "src/node_state/common/mod.rs", "rank": 55, "score": 57215.759213475096 }, { "content": " pub fn rpc_callee<'a>(&'a mut self, caller: &'a MessageHeader) -> RpcCallee<IO> {\n\n RpcCallee::new(self, caller)\n\n }\n\n\n\n /// ストレージにあるlogに対する削除処理の開始・終了を管理する。\n\n pub fn set_if_log_is_being_deleted(&mut self, deleting: bool) {\n\n self.log_is_being_deleted = deleting;\n\n }\n\n\n\n fn handle_committed(&mut self, suffix: LogSuffix) -> Result<()> {\n\n let new_tail = suffix.tail();\n\n for (index, entry) in (suffix.head.index.as_u64()..)\n\n .map(LogIndex::new)\n\n .zip(suffix.entries.into_iter())\n\n {\n\n let event = Event::Committed { index, entry };\n\n self.events.push_back(event);\n\n }\n\n if new_tail.index >= self.log().head().index {\n\n // 「ローカルログの終端よりも先の地点のスナップショット」をインストールした後、\n", "file_path": "src/node_state/common/mod.rs", "rank": 56, "score": 57214.5635053075 }, { "content": " unread_message: None,\n\n seq_no: SequenceNumber::new(0),\n\n timeout,\n\n events: VecDeque::new(),\n\n load_committed: None,\n\n install_snapshot: None,\n\n metrics,\n\n log_is_being_deleted: false,\n\n }\n\n }\n\n\n\n /// 現在のクラスタの構成情報を返す.\n\n pub fn config(&self) -> &ClusterConfig {\n\n self.history.config()\n\n }\n\n\n\n /// ローカルログ(の歴史)を返す.\n\n pub fn log(&self) -> &LogHistory {\n\n &self.history\n\n }\n", "file_path": "src/node_state/common/mod.rs", "rank": 57, "score": 57214.418439526686 }, { "content": " local_node: Node,\n\n history: LogHistory,\n\n timeout: IO::Timeout,\n\n events: VecDeque<Event>,\n\n io: IO,\n\n unread_message: Option<Message>,\n\n seq_no: 
SequenceNumber,\n\n load_committed: Option<IO::LoadLog>,\n\n install_snapshot: Option<InstallSnapshot<IO>>,\n\n metrics: NodeStateMetrics,\n\n\n\n // ストレージ中のlogに対する削除処理が\n\n // 進行中であるかどうかを表すフラグ。\n\n //\n\n // このフラグが true である場合は\n\n // ストレージ中のlogと\n\n // メモリ中のcacheに相当する`history`とでズレが生じている。\n\n // false である場合は、2つは一致している。\n\n //\n\n // 削除処理を行う箇所:\n", "file_path": "src/node_state/common/mod.rs", "rank": 58, "score": 57214.20900638116 }, { "content": " ///\n\n /// このメソッドが`true`を返している間は、\n\n /// 新しいスナップショットのインストールを行うことはできない.\n\n pub fn is_snapshot_installing(&self) -> bool {\n\n self.install_snapshot.is_some()\n\n }\n\n\n\n /// Returns `true` if and only if a node is installing snapshot and should not do\n\n /// anything else until the running snapshot installation completes.\n\n /// This method should be used to determine the next state of a node.\n\n ///\n\n /// The difference between `is_snapshot_installing` and `is_focusing_on_installing_snapshot` is\n\n /// that a node can concurrently process multiple tasks while installing snapshot.\n\n ///\n\n /// Calls `is_snapshot_installing` if you want to confirm whether another snapshot installation\n\n /// is running or not.\n\n pub fn is_focusing_on_installing_snapshot(&self) -> bool {\n\n if let Some(ref snapshot) = self.install_snapshot {\n\n // This condition is a bit complicated.\n\n // See https://github.com/frugalos/raftlog/pull/16#discussion_r250061583.\n", "file_path": "src/node_state/common/mod.rs", "rank": 59, "score": 57213.18070240559 }, { "content": "\n\n /// ログのコミットイベントを処理する.\n\n pub fn handle_log_committed(&mut self, new_tail: LogIndex) -> Result<()> {\n\n track!(self.history.record_committed(new_tail))\n\n }\n\n\n\n /// ローカルログのロールバックイベントを処理する.\n\n pub fn handle_log_rollbacked(&mut self, new_tail: LogPosition) -> Result<()> {\n\n track!(self.history.record_rollback(new_tail))\n\n }\n\n\n\n /// ログのスナップショットインストール完了イベントを処理する.\n\n pub fn handle_log_snapshot_installed(\n\n &mut self,\n\n new_head: LogPosition,\n\n config: ClusterConfig,\n\n ) -> Result<()> {\n\n track!(self.history.record_snapshot_installed(new_head, config))\n\n }\n\n\n", "file_path": "src/node_state/common/mod.rs", "rank": 60, "score": 57212.76831112967 }, { "content": " track!(self.history.record_snapshot_loaded(&prefix))?;\n\n let event = Event::SnapshotLoaded {\n\n new_head: prefix.tail,\n\n snapshot: prefix.snapshot,\n\n };\n\n self.metrics.event_queue_len.increment();\n\n self.events.push_back(event);\n\n Ok(())\n\n }\n\n\n\n /// ローカルノードの投票状況を更新する.\n\n pub fn set_ballot(&mut self, new_ballot: Ballot) {\n\n if self.local_node.ballot != new_ballot {\n\n self.local_node.ballot = new_ballot.clone();\n\n self.metrics.event_queue_len.increment();\n\n self.events.push_back(Event::TermChanged { new_ballot });\n\n }\n\n }\n\n\n\n /// スナップショットをインストール中の場合には`true`を返す.\n", "file_path": "src/node_state/common/mod.rs", "rank": 61, "score": 57212.59613209462 }, { "content": " && !self.config().is_known_node(&message.header().sender)\n\n {\n\n // a) リーダは、不明なノードからのメッセージは無視\n\n //\n\n // リーダ以外は、クラスタの構成変更を跨いで再起動が発生した場合に、\n\n // 停止時には知らなかった新構成を把握するために、\n\n // 不明なノードからもメッセージも受信する必要がある.\n\n HandleMessageResult::Handled(None)\n\n } else if message.header().term > self.local_node.ballot.term {\n\n // b) 相手のtermの方が大きい => 新しい選挙が始まっているので追従する\n\n let is_follower = self.local_node.ballot.voted_for != self.local_node.id;\n\n if is_follower && self.local_node.ballot.voted_for != message.header().sender {\n\n // リーダをフォロー中(i.e., 定期的にハートビートを受信できている)の場合には、\n\n // そのリーダを信じて、現在の選挙を維持する.\n\n //\n\n // 
これはクラスタ構成変更時に、旧構成のメンバによって、延々と新選挙の開始が繰り返されてしまう\n\n // 可能性がある問題への対処となる.\n\n // この問題の詳細は論文の「6 Cluster membership changes」の\"The third issue is ...\"部分を参照のこと.\n\n return HandleMessageResult::Handled(None);\n\n }\n", "file_path": "src/node_state/common/mod.rs", "rank": 62, "score": 57211.57792565879 }, { "content": " /// `IO`への破壊的な参照を返す.\n\n ///\n\n /// 使い方を間違えるとデータの整合性を破壊してしまう可能性があるので、\n\n /// 注意を喚起する意味を込めて`unsafe`とする.\n\n pub unsafe fn io_mut(&mut self) -> &mut IO {\n\n &mut self.io\n\n }\n\n\n\n /// 自身の所有権を放棄して、\n\n /// 代わりに`IO`のインスタンスを返す.\n\n pub fn release_io(self) -> IO {\n\n self.io\n\n }\n\n\n\n /// 指定範囲のローカルログをロードする.\n\n pub fn load_log(&mut self, start: LogIndex, end: Option<LogIndex>) -> IO::LoadLog {\n\n self.io.load_log(start, end)\n\n }\n\n\n\n /// `from`以降のsuffixエントリ [from..) を削除する\n", "file_path": "src/node_state/common/mod.rs", "rank": 63, "score": 57211.53164499063 }, { "content": " let io = TestIoBuilder::new()\n\n .add_member(node_id.clone())\n\n .add_member(\"node2\".into())\n\n .add_member(\"node3\".into())\n\n .finish();\n\n let cluster = io.cluster.clone();\n\n let mut common = Common::new(node_id, io, cluster.clone(), metrics);\n\n let prev_term = Term::new(0);\n\n let node_prefix = LogPrefix {\n\n tail: LogPosition {\n\n prev_term,\n\n index: LogIndex::new(3),\n\n },\n\n config: cluster.clone(),\n\n snapshot: vec![0],\n\n };\n\n let log_suffix = LogSuffix {\n\n head: LogPosition {\n\n prev_term,\n\n index: LogIndex::new(3),\n", "file_path": "src/node_state/common/mod.rs", "rank": 64, "score": 57211.1126137158 }, { "content": " pub fn delete_suffix_from(&mut self, from: LogIndex) -> IO::DeleteLog {\n\n self.io.delete_suffix_from(from)\n\n }\n\n\n\n /// ローカルログの末尾部分に`suffix`を追記する.\n\n pub fn save_log_suffix(&mut self, suffix: &LogSuffix) -> IO::SaveLog {\n\n self.io.save_log_suffix(suffix)\n\n }\n\n\n\n /// 現在の投票状況を保存する.\n\n pub fn save_ballot(&mut self) -> IO::SaveBallot {\n\n self.io.save_ballot(self.local_node.ballot.clone())\n\n }\n\n\n\n /// 以前の投票状況を復元する.\n\n pub fn load_ballot(&mut self) -> IO::LoadBallot {\n\n self.io.load_ballot()\n\n }\n\n\n\n /// 指定されたロール用のタイムアウトを設定する.\n", "file_path": "src/node_state/common/mod.rs", "rank": 65, "score": 57210.699202917385 }, { "content": "\n\n // このif文以降の計算では、historyに基づき状態を遷移させる。\n\n // 一方で、ストレージ上のlogと食い違ったhistoryで遷移を行うと問題が生じるため、\n\n // それを阻止するべく、logに対する削除中の場合には何もしない。\n\n if self.log_is_being_deleted {\n\n return HandleMessageResult::Handled(None);\n\n }\n\n\n\n self.local_node.ballot.term = message.header().term;\n\n let next_state = if let Message::RequestVoteCall(m) = message {\n\n if m.log_tail.is_newer_or_equal_than(self.history.tail()) {\n\n // 送信者(候補者)のログは十分に新しいので、その人を支持する\n\n let candidate = m.header.sender.clone();\n\n self.transit_to_follower(candidate, Some(m.header))\n\n } else {\n\n // ローカルログの方が新しいので、自分で立候補する\n\n self.transit_to_candidate()\n\n }\n\n } else if let Message::AppendEntriesCall { .. } = message {\n\n // 新リーダが当選していたので、その人のフォロワーとなる\n", "file_path": "src/node_state/common/mod.rs", "rank": 66, "score": 57210.49418523717 }, { "content": " if let Async::Ready(Some(summary)) = track!(self.install_snapshot.poll())? {\n\n let SnapshotSummary {\n\n tail: new_head,\n\n config,\n\n } = summary;\n\n self.install_snapshot = None;\n\n self.events.push_back(Event::SnapshotInstalled { new_head });\n\n track!(self.history.record_snapshot_installed(new_head, config))?;\n\n }\n\n\n\n // コミット済みログの処理.\n\n if let Async::Ready(Some(log)) = track!(self.load_committed.poll())? 
{\n\n // コミット済みのログを取得したので、ユーザに(イベント経由で)通知する.\n\n self.load_committed = None;\n\n match log {\n\n Log::Prefix(snapshot) => track!(self.handle_log_snapshot_loaded(snapshot))?,\n\n Log::Suffix(slice) => track!(self.handle_committed(slice))?,\n\n }\n\n }\n\n\n", "file_path": "src/node_state/common/mod.rs", "rank": 67, "score": 57209.984655994354 }, { "content": " /// ログのスナップショットロードイベントを処理する.\n\n pub fn handle_log_snapshot_loaded(&mut self, prefix: LogPrefix) -> Result<()> {\n\n if self.history.committed_tail().index < prefix.tail.index {\n\n // タイミング次第では、進行中のスナップショットインストールを追い越して、\n\n // ロードが発生してしまうことがあるので、その場合でも`LogHistory`の整合性が崩れないように、\n\n // 先にインストールが完了したものとして処理してしまう.\n\n // (`consumed_tail.index <= committed_tail.index`の不変項を維持するため)\n\n //\n\n // NOTE: \"タイミング次第\"の例\n\n // - 1. インストールが物理的には完了\n\n // - スナップショット地点以前のログは削除された\n\n // - raftlog層への通知はまだ\n\n // - 2. スナップショット地点以前へのロード要求が発行された\n\n // - 3. ログは残っていないので、1のスナップショットをロードする\n\n // => このメソッドに入ってくる\n\n // - 4. インストール完了が通知される\n\n track!(self\n\n .history\n\n .record_snapshot_installed(prefix.tail, prefix.config.clone(),))?;\n\n }\n", "file_path": "src/node_state/common/mod.rs", "rank": 68, "score": 57207.76662120826 }, { "content": "\n\n /// ローカルログのコミット済み領域の終端位置を返す.\n\n pub fn log_committed_tail(&self) -> LogPosition {\n\n self.history.committed_tail()\n\n }\n\n\n\n /// 現在の`Term` (選挙番号) を返す.\n\n pub fn term(&self) -> Term {\n\n self.local_node.ballot.term\n\n }\n\n\n\n /// ローカルノードの情報を返す.\n\n pub fn local_node(&self) -> &Node {\n\n &self.local_node\n\n }\n\n\n\n /// ローカルログへの追記イベントを処理する.\n\n pub fn handle_log_appended(&mut self, suffix: &LogSuffix) -> Result<()> {\n\n track!(self.history.record_appended(suffix))\n\n }\n", "file_path": "src/node_state/common/mod.rs", "rank": 69, "score": 57207.59698100298 }, { "content": " index: LogIndex::new(5),\n\n },\n\n config: cluster,\n\n snapshot: vec![1],\n\n };\n\n\n\n assert!(!common.is_focusing_on_installing_snapshot());\n\n // Applies a prefix before tests.\n\n common.handle_log_snapshot_loaded(node_prefix)?;\n\n common.install_snapshot(leader_prefix)?;\n\n // The node is installing a snapshot and focusing on the installation.\n\n assert!(common.is_focusing_on_installing_snapshot());\n\n // Appends new log entries.\n\n // Now `committed_tail` < `the tail of a prefix(snapshot)` < `appended_tail`\n\n common.handle_log_appended(&log_suffix)?;\n\n assert_eq!(\n\n common.log().tail(),\n\n LogPosition {\n\n prev_term,\n\n index: LogIndex::new(6)\n\n }\n\n );\n\n // The node is not focusing on the installation.\n\n assert!(!common.is_focusing_on_installing_snapshot());\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/node_state/common/mod.rs", "rank": 70, "score": 57206.654237490766 }, { "content": " // 別の人をフォロー中に投票依頼が来た場合ので拒否\n\n self.rpc_callee(message.header()).reply_request_vote(false);\n\n HandleMessageResult::Handled(None)\n\n }\n\n Message::AppendEntriesCall { .. } if !self.is_following_sender(&message) => {\n\n /*\n\n * この節に入るときには削除処理中ではない。なぜなら……\n\n *\n\n * 1. 自分と同じTerm Tからメッセージが届き\n\n * かつ、それがAppendEntriesCall (AE-call) であるということは\n\n * そのメッセージの送り主 N が T のリーダーである。\n\n *\n\n * 2. 非リーダーである自分については、\n\n * いま T にいる以上、Term S (S < T) から遷移してTになっている。\n\n * logに対する処理中でhistoryとズレている場合は遷移を遅延させるので、\n\n * T になった\"時点\"では、logに対する変更は行われていない。\n\n *\n\n * 3. 
S から T になって以降は AE-call は受け取っていない。\n\n * (受け取っているなら N をfollowしている筈なので矛盾)\n\n * Term T での主な計算(logへの変更も含む)は\n", "file_path": "src/node_state/common/mod.rs", "rank": 71, "score": 57205.46351657166 }, { "content": " if self.load_committed.is_some()\n\n || self.history.consumed_tail().index == self.history.committed_tail().index\n\n {\n\n // コミット済みのログの読み込み中 or 未処理のコミット済みログ領域がない\n\n break;\n\n }\n\n\n\n let start = self.history.consumed_tail().index;\n\n let end = self.history.committed_tail().index;\n\n self.load_committed = Some(self.load_log(start, Some(end)));\n\n }\n\n Ok(None)\n\n }\n\n\n\n /// RPCの要求用のインスタンスを返す.\n\n pub fn rpc_caller(&mut self) -> RpcCaller<IO> {\n\n RpcCaller::new(self)\n\n }\n\n\n\n /// RPCの応答用のインスタンスを返す.\n", "file_path": "src/node_state/common/mod.rs", "rank": 72, "score": 57204.544138740996 }, { "content": " },\n\n entries: vec![\n\n LogEntry::Command {\n\n term: prev_term,\n\n command: Vec::default(),\n\n },\n\n LogEntry::Command {\n\n term: prev_term,\n\n command: Vec::default(),\n\n },\n\n LogEntry::Command {\n\n term: prev_term,\n\n command: Vec::default(),\n\n },\n\n ],\n\n };\n\n // The prefix of a leader is a bit ahead.\n\n let leader_prefix = LogPrefix {\n\n tail: LogPosition {\n\n prev_term,\n", "file_path": "src/node_state/common/mod.rs", "rank": 73, "score": 57202.245635387844 }, { "content": " // follower/delete.rs にある\n\n // delete_test_scenario1 でプログラムが異常終了する。\n\n // 詳しくは当該テストを参考のこと。\n\n Ok(None)\n\n }\n\n _ => Ok(Some(common.transit_to_candidate())),\n\n }\n\n }\n\n pub fn handle_message(\n\n &mut self,\n\n common: &mut Common<IO>,\n\n message: Message,\n\n ) -> Result<NextState<IO>> {\n\n if let Message::AppendEntriesCall { .. } = message {\n\n common.set_timeout(Role::Follower);\n\n if unsafe { common.io_mut().is_busy() } {\n\n common.rpc_callee(message.header()).reply_busy();\n\n return Ok(None);\n\n }\n\n }\n", "file_path": "src/node_state/follower/mod.rs", "rank": 74, "score": 57156.93820098478 }, { "content": "use self::append::FollowerAppend;\n\nuse self::delete::FollowerDelete;\n\nuse self::idle::FollowerIdle;\n\nuse self::init::FollowerInit;\n\nuse self::snapshot::FollowerSnapshot;\n\nuse super::{Common, NextState};\n\nuse crate::election::Role;\n\nuse crate::message::{Message, MessageHeader};\n\nuse crate::{Io, Result};\n\n\n\nmod append;\n\nmod delete;\n\nmod idle;\n\nmod init;\n\nmod snapshot;\n\n\n\n/// 別の人(ノード)に投票しているフォロワー.\n\n///\n\n/// リーダーから送られてきたメッセージを処理して、ログの同期を行う.\n\n///\n", "file_path": "src/node_state/follower/mod.rs", "rank": 75, "score": 57155.33931529029 }, { "content": "\n\n match *self {\n\n Follower::Init(ref mut t) => track!(t.handle_message(common, message)),\n\n Follower::Idle(ref mut t) => track!(t.handle_message(common, message)),\n\n Follower::Append(ref mut t) => track!(t.handle_message(common, message)),\n\n Follower::Snapshot(ref mut t) => track!(t.handle_message(common, message)),\n\n Follower::Delete(ref mut t) => track!(t.handle_message(common, message)),\n\n }\n\n }\n\n pub fn run_once(&mut self, common: &mut Common<IO>) -> Result<NextState<IO>> {\n\n match *self {\n\n Follower::Init(ref mut t) => track!(t.run_once(common)),\n\n Follower::Idle(_) => Ok(None),\n\n Follower::Append(ref mut t) => track!(t.run_once(common)),\n\n Follower::Snapshot(ref mut t) => track!(t.run_once(common)),\n\n Follower::Delete(ref mut t) => track!(t.run_once(common)),\n\n }\n\n }\n\n}\n", "file_path": "src/node_state/follower/mod.rs", "rank": 76, "score": 57154.15720240159 }, { "content": "/// タイムアウト時間内にリーダからメッセージを受信しなかった場合には、\n\n/// 
その選挙期間は完了したものと判断して、自身が立候補して次の選挙を始める.\n\npub enum Follower<IO: Io> {\n\n /// 初期化状態 (主に投票状況の保存を行う).\n\n Init(FollowerInit<IO>),\n\n\n\n /// リーダからのメッセージ処理が可能な状態.\n\n Idle(FollowerIdle<IO>),\n\n\n\n /// ローカルログへの追記中.\n\n Append(FollowerAppend<IO>),\n\n\n\n /// ローカルログへのスナップショット保存中.\n\n Snapshot(FollowerSnapshot<IO>),\n\n\n\n /// ローカルログの末尾部分を削除中\n\n Delete(FollowerDelete<IO>),\n\n}\n\nimpl<IO: Io> Follower<IO> {\n\n pub fn new(common: &mut Common<IO>, pending_vote: Option<MessageHeader>) -> Self {\n", "file_path": "src/node_state/follower/mod.rs", "rank": 77, "score": 57149.824562775175 }, { "content": " common.set_timeout(Role::Follower);\n\n let follower = FollowerInit::new(common, pending_vote);\n\n Follower::Init(follower)\n\n }\n\n pub fn handle_timeout(&mut self, common: &mut Common<IO>) -> Result<NextState<IO>> {\n\n match self {\n\n Follower::Delete(delete) => {\n\n // Delete中にタイムアウトしたことを記録する。\n\n // これによって削除完了後にはcandidateに遷移するようになる。\n\n //\n\n // * IMPORTANT REMARK *\n\n // 削除後にcandidateに遷移する振る舞いにしているのは\n\n // `Io`トレイではタイマーに周期性を要求していないからである。\n\n // もし非周期的なタイマー(一度だけ発火するタイマー)が使われている場合に、\n\n // かつ、このような遷移処理を行わない場合では、\n\n // 極端な状況で全員がFollowerになりクラスタが硬直する。\n\n delete.set_timeout();\n\n\n\n // Delete中はタイムアウトしても削除処理を続行する。\n\n // もしタイムアウトによってキャンセルした場合は\n", "file_path": "src/node_state/follower/mod.rs", "rank": 78, "score": 57148.50225889294 }, { "content": "//! シミュレータ用のI/O実装.\n\npub use self::deterministic::{DeterministicIo, DeterministicIoBuilder};\n\npub use self::storage::Storage;\n\npub use self::timer::Timer;\n\npub use self::transport::MessageBroker;\n\n\n\npub mod configs;\n\npub mod futures;\n\n\n\nmod deterministic;\n\nmod storage;\n\nmod timer;\n\nmod transport;\n\n\n\n/// I/O関連の構成設定.\n\n#[derive(Debug, Default, Clone, Serialize, Deserialize)]\n\npub struct IoConfig {\n\n #[serde(default)]\n\n pub channel: configs::ChannelConfig,\n\n\n\n #[serde(default)]\n\n pub storage: configs::StorageConfig,\n\n\n\n #[serde(default)]\n\n pub timer: configs::TimerConfig,\n\n}\n", "file_path": "raftlog_simu/src/io/mod.rs", "rank": 79, "score": 56674.47719573117 }, { "content": "struct Down {\n\n machine: MachineState,\n\n io: DeterministicIo,\n\n restart: LogicalDuration,\n\n}\n\nimpl Down {\n\n pub fn new(machine: MachineState, io: DeterministicIo, restart: LogicalDuration) -> Self {\n\n Down {\n\n machine,\n\n io,\n\n restart,\n\n }\n\n }\n\n pub fn propose_command(&mut self, _command: Command) -> Result<()> {\n\n track_panic!(ErrorKind::NotLeader, \"This process is down\");\n\n }\n\n pub fn propose_config(&mut self, _: ClusterMembers) -> Result<()> {\n\n track_panic!(ErrorKind::NotLeader, \"This process is down\");\n\n }\n\n pub fn heartbeat(&mut self) -> Result<()> {\n", "file_path": "raftlog_simu/src/process.rs", "rank": 80, "score": 46864.29891046548 }, { "content": "#[derive(PartialEq, Eq)]\n\nstruct Committed {\n\n entry: LogEntry,\n\n state: MachineState,\n\n}\n", "file_path": "raftlog_simu/src/simulator.rs", "rank": 81, "score": 45291.61011584446 }, { "content": "struct Alive {\n\n logger: Logger,\n\n machine: MachineState,\n\n next_commit: LogIndex,\n\n rlog: ReplicatedLog<DeterministicIo>,\n\n proposals: Vec<ProposalId>,\n\n heartbeats: VecDeque<SequenceNumber>,\n\n}\n\nimpl Alive {\n\n /// 新しい`Alive`インスタンスを生成する.\n\n pub fn new(\n\n logger: Logger,\n\n node_id: NodeId,\n\n members: ClusterMembers,\n\n io: DeterministicIo,\n\n ) -> Self {\n\n let metric_builder = MetricBuilder::new();\n\n let machine = MachineState::new();\n\n let rlog = ReplicatedLog::new(node_id, members, io, 
&metric_builder).expect(\"Never fails\");\n\n Alive {\n", "file_path": "raftlog_simu/src/process.rs", "rank": 82, "score": 45291.61011584446 }, { "content": "fn main() {\n\n let matches = app_from_crate!()\n\n .arg(\n\n Arg::with_name(\"CONFIG_FILE\")\n\n .short(\"c\")\n\n .long(\"config\")\n\n .takes_value(true),\n\n ).arg(Arg::with_name(\"RANDOM_SEED\").long(\"seed\").takes_value(true))\n\n .arg(\n\n Arg::with_name(\"LOOP_COUNT\")\n\n .long(\"loop-count\")\n\n .takes_value(true),\n\n ).get_matches();\n\n\n\n //\n\n // 1. Load Configuration\n\n //\n\n let mut config = if let Some(config_file) = matches.value_of(\"CONFIG_FILE\") {\n\n track_try_unwrap!(serdeconv::from_toml_file(config_file))\n\n } else {\n", "file_path": "raftlog_simu/src/main.rs", "rank": 83, "score": 44936.77971997236 }, { "content": "use trackable::error::TrackableError;\n\nuse trackable::error::{ErrorKind as TrackableErrorKind, ErrorKindExt};\n\n\n\n/// クレート固有の`Error`型.\n\n#[derive(Debug, Clone, TrackableError)]\n\npub struct Error(TrackableError<ErrorKind>);\n\nimpl From<std::io::Error> for Error {\n\n fn from(f: std::io::Error) -> Self {\n\n ErrorKind::Other.cause(f).into()\n\n }\n\n}\n\nimpl From<prometrics::Error> for Error {\n\n fn from(f: prometrics::Error) -> Self {\n\n ErrorKind::Other.cause(f).into()\n\n }\n\n}\n\n\n\n/// 発生し得るエラーの種類.\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\npub enum ErrorKind {\n", "file_path": "src/error.rs", "rank": 84, "score": 34741.64548937994 }, { "content": " InvalidInput,\n\n\n\n /// 不整合な状態に陥った.\n\n ///\n\n /// プログラムのバグやI/O周りの重大な問題(e.g., データ改善)により、\n\n /// 本来発生するはずのない状態が生じてしまった.\n\n ///\n\n /// このエラーを受け取った場合、利用者はそのノードの使用を停止して、\n\n /// どのような問題が発生しているかを詳細に調査すべきである.\n\n ///\n\n /// もし使用を継続した場合には、最悪のケースでは、コミット済みのログ領域が\n\n /// 別のエントリによって上書きされてしまうこともあり得る.\n\n InconsistentState,\n\n\n\n /// その他エラー.\n\n ///\n\n /// 主に`Io`トレイトの実装のために設けられたエラー区分.\n\n ///\n\n /// このエラーを受け取った場合、利用者はそのノードの使用を停止して、\n\n /// どのような問題が発生しているかを詳細に調査すべきである.\n\n Other,\n\n}\n\nimpl TrackableErrorKind for ErrorKind {}\n", "file_path": "src/error.rs", "rank": 85, "score": 34731.860166052276 }, { "content": " /// リーダのみが処理可能な操作が、リーダではないノードに対して行われた.\n\n ///\n\n /// このエラーを受け取った場合、利用者はリーダノードに対して、\n\n /// 同じ要求をリトライすべきである.\n\n NotLeader,\n\n\n\n /// リソースに空きが無くて、要求を受け付けることができない.\n\n ///\n\n /// このエラーを受け取った場合、利用者はある程度時間を空ける、ないし、\n\n /// 現在実行中の処理の完了を確認してから、同様の要求をリトライすべきである.\n\n ///\n\n /// 典型的には、あるスナップショットのインストール中に、\n\n /// 別のスナップショットのインストールが要求された場合に、\n\n /// このエラーが返される.\n\n Busy,\n\n\n\n /// 入力が不正.\n\n ///\n\n /// このエラーを受け取った場合、利用者は可能であれば、\n\n /// 入力値を適切なものに修正して、同様の操作をリトライすることが望ましい.\n", "file_path": "src/error.rs", "rank": 86, "score": 34729.039608883126 }, { "content": "pub struct NodeStateMetrics {\n\n pub(crate) transit_to_candidate_total: Counter,\n\n pub(crate) transit_to_follower_total: Counter,\n\n pub(crate) transit_to_leader_total: Counter,\n\n pub(crate) event_queue_len: Gauge,\n\n pub(crate) poll_timeout_total: Counter,\n\n pub(crate) candidate_to_leader_duration_seconds: Histogram,\n\n pub(crate) candidate_to_follower_duration_seconds: Histogram,\n\n pub(crate) loader_to_candidate_duration_seconds: Histogram,\n\n}\n\nimpl NodeStateMetrics {\n\n pub(crate) fn new(builder: &MetricBuilder) -> Result<Self> {\n\n let mut builder: MetricBuilder = builder.clone();\n\n builder.subsystem(\"node_state\");\n\n let transit_to_candidate_total = track!(builder\n\n .counter(\"transit_to_candidate_total\")\n\n .help(\"Number of transitions to candidate role\")\n\n .finish())?;\n\n let transit_to_follower_total = 
track!(builder\n\n .counter(\"transit_to_follower_total\")\n", "file_path": "src/metrics.rs", "rank": 87, "score": 34721.1440808352 }, { "content": " builder\n\n .histogram(\"candidate_to_follower_duration_seconds\")\n\n .help(\"Elapsed time moving from candidate to follower\")\n\n ))?;\n\n let loader_to_candidate_duration_seconds = track!(make_role_change_histogram(\n\n builder\n\n .histogram(\"loader_to_candidate_duration_seconds\")\n\n .help(\"Elapsed time moving from loader to candidate\")\n\n ))?;\n\n Ok(Self {\n\n transit_to_candidate_total,\n\n transit_to_follower_total,\n\n transit_to_leader_total,\n\n event_queue_len,\n\n poll_timeout_total,\n\n candidate_to_leader_duration_seconds,\n\n candidate_to_follower_duration_seconds,\n\n loader_to_candidate_duration_seconds,\n\n })\n\n }\n\n}\n\n\n", "file_path": "src/metrics.rs", "rank": 88, "score": 34717.30006459939 }, { "content": " .help(\"Number of transitions to follower role\")\n\n .finish())?;\n\n let transit_to_leader_total = track!(builder\n\n .counter(\"transit_to_leader_total\")\n\n .help(\"Number of transitions to leader role\")\n\n .finish())?;\n\n let event_queue_len = track!(builder\n\n .gauge(\"event_queue_len\")\n\n .help(\"Length of a raft event queue\")\n\n .finish())?;\n\n let poll_timeout_total = track!(builder\n\n .counter(\"poll_timeout_total\")\n\n .help(\"Number of timeout\")\n\n .finish())?;\n\n let candidate_to_leader_duration_seconds = track!(make_role_change_histogram(\n\n builder\n\n .histogram(\"candidate_to_leader_duration_seconds\")\n\n .help(\"Elapsed time moving from candidate to leader\")\n\n ))?;\n\n let candidate_to_follower_duration_seconds = track!(make_role_change_histogram(\n", "file_path": "src/metrics.rs", "rank": 89, "score": 34713.74013603272 }, { "content": "//! 
raftlog のメトリクス。\n\n\n\nuse prometrics::metrics::{Counter, Gauge, Histogram, HistogramBuilder, MetricBuilder};\n\n\n\nuse crate::{Error, Result};\n\n\n\n/// `raftlog` 全体に関するメトリクス。\n\n#[derive(Clone)]\n\npub struct RaftlogMetrics {\n\n pub(crate) node_state: NodeStateMetrics,\n\n}\n\nimpl RaftlogMetrics {\n\n pub(crate) fn new(builder: &MetricBuilder) -> Result<Self> {\n\n let node_state = track!(NodeStateMetrics::new(builder))?;\n\n Ok(Self { node_state })\n\n }\n\n}\n\n\n\n/// ノード状態に関するメトリクス。\n\n#[derive(Clone)]\n", "file_path": "src/metrics.rs", "rank": 90, "score": 34712.856467536265 }, { "content": " /// メッセージのヘッダを返す.\n\n pub fn header(&self) -> &MessageHeader {\n\n match self {\n\n Message::RequestVoteCall(m) => &m.header,\n\n Message::RequestVoteReply(m) => &m.header,\n\n Message::AppendEntriesCall(m) => &m.header,\n\n Message::AppendEntriesReply(m) => &m.header,\n\n Message::InstallSnapshotCast(m) => &m.header,\n\n }\n\n }\n\n\n\n pub(crate) fn set_destination(&mut self, dst: &NodeId) {\n\n match self {\n\n Message::RequestVoteCall(m) => {\n\n m.header.destination = dst.clone();\n\n }\n\n Message::RequestVoteReply(m) => {\n\n m.header.destination = dst.clone();\n\n }\n\n Message::AppendEntriesCall(m) => {\n", "file_path": "src/message.rs", "rank": 91, "score": 34611.42131235776 }, { "content": " /// コミット済みログの終端インデックス.\n\n pub committed_log_tail: LogIndex,\n\n\n\n /// 追記対象となるログの末尾部分.\n\n pub suffix: LogSuffix,\n\n}\n\n\n\n/// `AppendEntriesRPC`の応答メッセージ.\n\n#[derive(Debug, Clone)]\n\npub struct AppendEntriesReply {\n\n /// メッセージヘッダ.\n\n pub header: MessageHeader,\n\n\n\n /// 応答者(follower)のログの終端位置.\n\n ///\n\n /// これは「実際のログの終端」というよりは、\n\n /// 「リーダに次に送って貰いたい末尾部分の開始位置」的な意味合いを有する.\n\n ///\n\n /// それを考慮すると`next_head`といった名前の方が適切かもしれない.\n\n pub log_tail: LogPosition,\n", "file_path": "src/message.rs", "rank": 92, "score": 34611.29856958675 }, { "content": "\n\n /// 応答者が忙しいかどうか.\n\n ///\n\n /// この値が`true`の場合には、\n\n /// followerの`log_tail`が遅れていたとしても、\n\n /// リーダはログの同期のための追加のメッセージ送信を行わない.\n\n pub busy: bool,\n\n}\n\n\n\n/// `InstallSnapshotRPC`用のメッセージ.\n\n///\n\n/// 論文中では、これも他のRPC同様に\"要求・応答\"形式となっているが、\n\n/// 他のRPCとは異なり、これに関しては本質的には応答は不要なので、\n\n/// ここでは一方的な送信のみをサポートしている.\n\n#[derive(Debug, Clone)]\n\npub struct InstallSnapshotCast {\n\n /// メッセージヘッダ.\n\n pub header: MessageHeader,\n\n\n\n /// 保存対象となるログの前半部分(i.e., スナップショット).\n", "file_path": "src/message.rs", "rank": 93, "score": 34611.0224034008 }, { "content": "}\n\nimpl From<AppendEntriesCall> for Message {\n\n fn from(f: AppendEntriesCall) -> Self {\n\n Message::AppendEntriesCall(f)\n\n }\n\n}\n\nimpl From<AppendEntriesReply> for Message {\n\n fn from(f: AppendEntriesReply) -> Self {\n\n Message::AppendEntriesReply(f)\n\n }\n\n}\n\nimpl From<InstallSnapshotCast> for Message {\n\n fn from(f: InstallSnapshotCast) -> Self {\n\n Message::InstallSnapshotCast(f)\n\n }\n\n}\n\n\n\n/// メッセージのヘッダ.\n\n#[derive(Debug, Clone)]\n\npub struct MessageHeader {\n", "file_path": "src/message.rs", "rank": 94, "score": 34609.60140332439 }, { "content": " /// 送信者のログの終端位置.\n\n pub log_tail: LogPosition,\n\n}\n\n\n\n/// `RequestVoteRPC`の応答メッセージ.\n\n#[derive(Debug, Clone)]\n\npub struct RequestVoteReply {\n\n /// メッセージヘッダ.\n\n pub header: MessageHeader,\n\n\n\n /// 投票を行ったかどうか.\n\n pub voted: bool,\n\n}\n\n\n\n/// `AppendEntriesRPC`の要求メッセージ.\n\n#[derive(Debug, Clone)]\n\npub struct AppendEntriesCall {\n\n /// メッセージヘッダ.\n\n pub header: MessageHeader,\n\n\n", "file_path": "src/message.rs", "rank": 95, "score": 34609.40464933237 }, { "content": " m.header.destination = 
dst.clone();\n\n }\n\n Message::AppendEntriesReply(m) => {\n\n m.header.destination = dst.clone();\n\n }\n\n Message::InstallSnapshotCast(m) => {\n\n m.header.destination = dst.clone();\n\n }\n\n }\n\n }\n\n}\n\nimpl From<RequestVoteCall> for Message {\n\n fn from(f: RequestVoteCall) -> Self {\n\n Message::RequestVoteCall(f)\n\n }\n\n}\n\nimpl From<RequestVoteReply> for Message {\n\n fn from(f: RequestVoteReply) -> Self {\n\n Message::RequestVoteReply(f)\n\n }\n", "file_path": "src/message.rs", "rank": 96, "score": 34606.77500838982 }, { "content": "//! RPC用のメッセージ群.\n\n//!\n\n//! なおRaftの論文に倣って\"RPC\"という呼称を採用しているが、\n\n//! 実際にここで想定されている通信モデルは、RPCではなく\n\n//! 非同期のメッセージ送受信モデル、となっている.\n\nuse crate::election::Term;\n\nuse crate::log::{LogIndex, LogPosition, LogPrefix, LogSuffix};\n\nuse crate::node::NodeId;\n\n\n\n/// RPC用のメッセージ全般.\n\n#[derive(Debug, Clone)]\n\n#[allow(missing_docs)]\n\npub enum Message {\n\n RequestVoteCall(RequestVoteCall),\n\n RequestVoteReply(RequestVoteReply),\n\n AppendEntriesCall(AppendEntriesCall),\n\n AppendEntriesReply(AppendEntriesReply),\n\n InstallSnapshotCast(InstallSnapshotCast),\n\n}\n\nimpl Message {\n", "file_path": "src/message.rs", "rank": 97, "score": 34606.097092510245 }, { "content": " /// メッセージの送信元.\n\n pub sender: NodeId,\n\n\n\n // FIXME: ヘッダには含めないようにする\n\n /// メッセージの宛先\n\n pub destination: NodeId,\n\n\n\n /// シーケンス番号.\n\n pub seq_no: SequenceNumber,\n\n\n\n /// 送信者の現在の`Term`.\n\n pub term: Term,\n\n}\n\n\n\n/// `RequestVoteRPC`の要求メッセージ.\n\n#[derive(Debug, Clone)]\n\npub struct RequestVoteCall {\n\n /// メッセージヘッダ.\n\n pub header: MessageHeader,\n\n\n", "file_path": "src/message.rs", "rank": 98, "score": 34605.97390187598 }, { "content": " pub prefix: LogPrefix,\n\n}\n\n\n\n/// メッセージのシーケンス番号.\n\n///\n\n/// この番号はノード毎に管理され、要求系のメッセージ送信の度にインクリメントされる.\n\n/// 応答系のメッセージでは、対応する要求メッセージのシーケンス番号が使用される.\n\n///\n\n/// シーケンス番号は、一つの`Term`内では単調増加することが保証されている.\n\n/// 逆に言えば、複数の`Term`を跨いだ場合には、シーケンス番号が増加する保証は無い.\n\n#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]\n\npub struct SequenceNumber(u64);\n\nimpl SequenceNumber {\n\n /// 新しい`SequenceNumber`インスタンスを生成する.\n\n pub fn new(num: u64) -> Self {\n\n SequenceNumber(num)\n\n }\n\n\n\n /// シーケンス番号の値を返す.\n\n pub fn as_u64(self) -> u64 {\n\n self.0\n\n }\n\n}\n", "file_path": "src/message.rs", "rank": 99, "score": 34605.31891592765 } ]
Rust
src/algebra/geometry.rs
hsnavarro/retrogame-rust
bc62342d95001b70a1d11c822f2722cfc0b73fec
use crate::algebra::{Vec2f, cross_product, distance, dot_product}; use crate::shapes; pub fn is_point_inside_rect(rect: &shapes::Rect, point: Vec2f) -> bool { let rect_points = rect.get_points_clockwise(); for i in 0..rect_points.len() { let j = (i + 1) % rect_points.len(); if cross_product(rect_points[j] - rect_points[i], point - rect_points[i]) < 0.0 { return false; } } true } pub fn closest_to_point_in_rect_border(rect: &shapes::Rect, point: Vec2f) -> Vec2f { let mut min_distance = f64::MAX; let mut closest_point = Vec2f::new(); let rect_points = rect.get_points_clockwise(); let mut update_closest_point = |rect_point: Vec2f| { let distance = distance(rect_point, point); if distance < min_distance { min_distance = distance; closest_point = rect_point; } }; for rect_point in rect_points.iter() { update_closest_point(*rect_point); } for i in 0..rect_points.len() { let j = (i + 1) % rect_points.len(); let rect_side = rect_points[j] - rect_points[i]; let projected_vector = (point - rect_points[i]).projection(rect_side); if projected_vector.magnitude() > rect_side.magnitude() || dot_product(projected_vector, rect_side) < 0.0 { continue; } let projected_point = rect_points[i] + projected_vector; update_closest_point(projected_point); } closest_point } #[cfg(test)] mod tests { use super::*; use crate::algebra::Vec2f; use crate::shapes::Rect; use sdl2::pixels::Color; mod is_point_inside_rect_tests { use super::*; #[test] fn point_outside_rect() { let rect = Rect::create_rect(2.0, 10.0, 10.0, 20.0, Color::BLACK); let point = Vec2f { x: 40.0, y: 40.0 }; assert!(is_point_inside_rect(&rect, point) == false); } #[test] fn point_inside_rect() { let rect = Rect::create_rect(2.0, 10.0, 10.0, 20.0, Color::BLACK); let point = Vec2f { x: 10.0, y: 20.0 }; assert!(is_point_inside_rect(&rect, point) == true); } #[test] fn point_in_rect_border() { let rect = Rect::create_rect(2.0, 10.0, 10.0, 20.0, Color::BLACK); let point = Vec2f { x: 2.0, y: 15.0 }; assert!(is_point_inside_rect(&rect, point) == true); } } mod closest_to_point_in_rect_border_tests { use super::*; #[test] fn closest_is_corner() { let rect = Rect::create_rect(2.0, 10.0, 20.0, 10.0, Color::BLACK); let point = Vec2f { x: 40.0, y: 40.0 }; let ans = Vec2f { x: 22.0, y: 20.0 }; assert_eq!(closest_to_point_in_rect_border(&rect, point), ans); } #[test] fn closest_is_side() { let rect = Rect::create_rect(2.0, 10.0, 20.0, 10.0, Color::BLACK); let point = Vec2f { x: 6.0, y: 4.0 }; let ans = Vec2f { x: 6.0, y: 10.0 }; assert_eq!(closest_to_point_in_rect_border(&rect, point), ans); } #[test] fn point_inside_rect() { let rect = Rect::create_rect(2.0, 10.0, 20.0, 10.0, Color::BLACK); let point = Vec2f { x: 10.0, y: 13.0 }; let ans = Vec2f { x: 10.0, y: 10.0 }; assert_eq!(closest_to_point_in_rect_border(&rect, point), ans); } #[test] fn point_in_rect_border() { let rect = Rect::create_rect(2.0, 10.0, 20.0, 10.0, Color::BLACK); let point = Vec2f { x: 12.0, y: 10.0 }; let ans = Vec2f { x: 12.0, y: 10.0 }; assert_eq!(closest_to_point_in_rect_border(&rect, point), ans); } } }
use crate::algebra::{Vec2f, cross_product, distance, dot_product}; use crate::shapes; pub fn is_point_inside_rect(rect: &shapes::Rect, point: Vec2f) -> bool { let rect_points = rect.get_points_clockwise(); for i in 0..rect_points.len() { let j = (i + 1) % rect_points.len(); if cross_product(rect_points[j] - rect_points[i], point - rect_points[i]) < 0.0 { return false; } } true } pub fn closest_to_point_in_rect_border(rect: &shapes::Rect, point: Vec2f) -> Vec2f { let mut min_distance = f64::MAX; let mut closest_point = Vec2f::new(); let rect_points = rect.get_points_clockwise(); let mut update_closest_point = |rect_point: Vec2f| { let distance = distance(rect_point, point); if distance < min_distance { min_distance = distance; closest_point = rect_point; } }; for rect_point in rect_points.iter() { update_closest_point(*rect_point); } for i in 0..rect_points.len() { let j = (i + 1) % rect_points.len(); let rect_side = rect_points[j] - rect_points[i]; let projected_vector = (point - rect_points[i]).projection(rect_side); if projected_vector.magnitude() > rect_side.magnitude() || dot_product(projected_vector, rect_side) < 0.0 { continue; } let projected_point = rect_points[i] + projected_vector; update_closest_point(projected_point); } closest_point } #[cfg(test)] mod tests { use super::*; use crate::algebra::Vec2f; use crate::shapes::Rect; use sdl2::pixels::Color; mod is_point_inside_rect_tests { use super::*; #[test] fn point_outs
t), ans); } #[test] fn point_in_rect_border() { let rect = Rect::create_rect(2.0, 10.0, 20.0, 10.0, Color::BLACK); let point = Vec2f { x: 12.0, y: 10.0 }; let ans = Vec2f { x: 12.0, y: 10.0 }; assert_eq!(closest_to_point_in_rect_border(&rect, point), ans); } } }
ide_rect() { let rect = Rect::create_rect(2.0, 10.0, 10.0, 20.0, Color::BLACK); let point = Vec2f { x: 40.0, y: 40.0 }; assert!(is_point_inside_rect(&rect, point) == false); } #[test] fn point_inside_rect() { let rect = Rect::create_rect(2.0, 10.0, 10.0, 20.0, Color::BLACK); let point = Vec2f { x: 10.0, y: 20.0 }; assert!(is_point_inside_rect(&rect, point) == true); } #[test] fn point_in_rect_border() { let rect = Rect::create_rect(2.0, 10.0, 10.0, 20.0, Color::BLACK); let point = Vec2f { x: 2.0, y: 15.0 }; assert!(is_point_inside_rect(&rect, point) == true); } } mod closest_to_point_in_rect_border_tests { use super::*; #[test] fn closest_is_corner() { let rect = Rect::create_rect(2.0, 10.0, 20.0, 10.0, Color::BLACK); let point = Vec2f { x: 40.0, y: 40.0 }; let ans = Vec2f { x: 22.0, y: 20.0 }; assert_eq!(closest_to_point_in_rect_border(&rect, point), ans); } #[test] fn closest_is_side() { let rect = Rect::create_rect(2.0, 10.0, 20.0, 10.0, Color::BLACK); let point = Vec2f { x: 6.0, y: 4.0 }; let ans = Vec2f { x: 6.0, y: 10.0 }; assert_eq!(closest_to_point_in_rect_border(&rect, point), ans); } #[test] fn point_inside_rect() { let rect = Rect::create_rect(2.0, 10.0, 20.0, 10.0, Color::BLACK); let point = Vec2f { x: 10.0, y: 13.0 }; let ans = Vec2f { x: 10.0, y: 10.0 }; assert_eq!(closest_to_point_in_rect_border(&rect, poin
random
[ { "content": "pub fn distance(x: Vec2f, y: Vec2f) -> f64 {\n\n (x - y).square_magnitude()\n\n}\n\n\n", "file_path": "src/algebra/vec2f.rs", "rank": 1, "score": 116311.21337077796 }, { "content": "pub fn dot_product(lhs: Vec2f, rhs: Vec2f) -> f64 {\n\n lhs.x * rhs.x + lhs.y * rhs.y\n\n}\n\n\n\nimpl Add for Vec2f {\n\n type Output = Self;\n\n\n\n fn add(self, rhs: Vec2f) -> Self::Output {\n\n Vec2f { x: self.x + rhs.x, y: self.y + rhs.y }\n\n }\n\n}\n\n\n\nimpl AddAssign for Vec2f {\n\n fn add_assign(&mut self, rhs: Vec2f) {\n\n *self = Vec2f { x: self.x + rhs.x, y: self.y + rhs.y };\n\n }\n\n}\n\n\n\nimpl Div<f64> for Vec2f {\n\n type Output = Self;\n", "file_path": "src/algebra/vec2f.rs", "rank": 3, "score": 102627.26807571482 }, { "content": "pub fn cross_product(lhs: Vec2f, rhs: Vec2f) -> f64 {\n\n lhs.x * rhs.y - lhs.y * rhs.x\n\n}\n\n\n", "file_path": "src/algebra/vec2f.rs", "rank": 4, "score": 102627.26807571482 }, { "content": "fn block_rect(entity: &mut entities::RectEntity) {\n\n let width_limit = game_settings::SCREEN_WIDTH as f64 - entity.shape.width();\n\n let height_limit = game_settings::SCREEN_HEIGHT as f64 - entity.shape.height();\n\n\n\n let clamp = |x: &mut f64, min_value: f64, max_value: f64| {\n\n if *x < min_value { *x = min_value; }\n\n if *x > max_value { *x = max_value; }\n\n };\n\n\n\n let Vec2f { x: mut new_x, y: mut new_y } = entity.shape.position();\n\n \n\n clamp(&mut new_x, 0.0, width_limit);\n\n clamp(&mut new_y, 0.0, height_limit);\n\n\n\n entity.shape.set_position(Vec2f { x: new_x, y: new_y });\n\n}\n\n\n", "file_path": "src/physics/physics_update.rs", "rank": 5, "score": 51623.23411407185 }, { "content": "pub fn update_game_frame(frame_time: f64, \n\n rect_entities: &mut Vec<entities::RectEntity>, \n\n circle_entities: &mut Vec<entities::CircleEntity>) {\n\n \n\n let mut delta_time_left = frame_time; \n\n \n\n let minimum = |x: f64, y: f64| if x < y { x } else { y };\n\n\n\n while delta_time_left > 0.0 {\n\n let delta_time = minimum(delta_time_left, game_settings::FIXED_DELTA_TIME);\n\n \n\n update_simulation_frame(delta_time, rect_entities, circle_entities);\n\n\n\n delta_time_left -= delta_time;\n\n }\n\n}\n\n\n", "file_path": "src/physics/physics_update.rs", "rank": 6, "score": 51564.6687735482 }, { "content": "fn treat_screen_circle_collision(entity: &mut entities::CircleEntity) {\n\n let collision_detection = detect_screen_circle_collision(entity);\n\n \n\n let entity_direction = &mut entity.physics_properties.direction;\n\n\n\n match collision_detection {\n\n Some(ScreenCollisionType::HORIZONTAL) => {\n\n *entity_direction = Vec2f { x: -entity_direction.x, y: entity_direction.y };\n\n }, \n\n Some(ScreenCollisionType::VERTICAL) => {\n\n *entity_direction = Vec2f { x: entity_direction.x, y: -entity_direction.y };\n\n }, \n\n None => {} \n\n }\n\n}\n\n\n", "file_path": "src/physics/physics_update.rs", "rank": 7, "score": 49387.20295640834 }, { "content": "fn treat_circle_rect_collision(circle_entity: &mut entities::CircleEntity, \n\n rect_entity:&entities::RectEntity) -> bool {\n\n \n\n match detect_circle_rect_collision(circle_entity, rect_entity) {\n\n Some(penetration_vector) => {\n\n circle_entity.shape.move_shape(penetration_vector);\n\n \n\n let collision_normal = penetration_vector.norm(); \n\n let old_direction = circle_entity.physics_properties.direction; \n\n\n\n let perpendicular = old_direction.perpendicular(collision_normal);\n\n let parallel = old_direction.projection(collision_normal);\n\n\n\n let new_direction = perpendicular - 
parallel; \n\n\n\n circle_entity.physics_properties.direction = new_direction;\n\n \n\n true\n\n }\n\n None => false\n\n }\n\n}\n\n\n", "file_path": "src/physics/physics_update.rs", "rank": 8, "score": 48376.101274183166 }, { "content": "fn main() {\n\n let target = env::var(\"TARGET\").unwrap();\n\n if target.contains(\"pc-windows\") {\n\n let manifest_dir = PathBuf::from(env::var(\"CARGO_MANIFEST_DIR\").unwrap());\n\n let mut lib_dir = manifest_dir.clone();\n\n let mut dll_dir = manifest_dir.clone();\n\n if target.contains(\"msvc\") {\n\n lib_dir.push(\"msvc\");\n\n dll_dir.push(\"msvc\");\n\n } else {\n\n lib_dir.push(\"gnu-mingw\");\n\n dll_dir.push(\"gnu-mingw\");\n\n }\n\n lib_dir.push(\"lib\");\n\n dll_dir.push(\"dll\");\n\n if target.contains(\"x86_64\") {\n\n lib_dir.push(\"64\");\n\n dll_dir.push(\"64\");\n\n } else {\n\n lib_dir.push(\"32\");\n", "file_path": "build.rs", "rank": 9, "score": 31220.673140361316 }, { "content": "fn main() {\n\n let mut game = game::Game::new();\n\n game.run();\n\n}", "file_path": "src/main.rs", "rank": 10, "score": 30030.867560944676 }, { "content": "mod geometry;\n\nmod vec2f;\n\n\n\npub use geometry::*;\n\npub use vec2f::*;", "file_path": "src/algebra/mod.rs", "rank": 11, "score": 24066.84720220534 }, { "content": "mod circle;\n\nmod rect;\n\n\n\npub use circle::*;\n\npub use rect::*;", "file_path": "src/shapes/mod.rs", "rank": 12, "score": 24064.333196529293 }, { "content": "mod event_pump;\n\nmod time_system;\n\nmod render_system;\n\n\n\npub use event_pump::*;\n\npub use time_system::*;\n\npub use render_system::*;", "file_path": "src/systems/mod.rs", "rank": 13, "score": 24064.245917487697 }, { "content": "mod circle_entity;\n\nmod rect_entity;\n\n\n\npub use circle_entity::*;\n\npub use rect_entity::*;", "file_path": "src/entities/mod.rs", "rank": 14, "score": 24063.959296765035 }, { "content": "mod physics_update;\n\nmod physics_properties;\n\n\n\npub use physics_update::*;\n\npub use physics_properties::*;", "file_path": "src/physics/mod.rs", "rank": 15, "score": 24063.959296765035 }, { "content": "use std::ops::{Add, AddAssign, Div, Mul, MulAssign, Sub};\n\n\n\n#[derive(Copy, Clone, Debug, PartialEq)]\n\npub struct Vec2f {\n\n pub x: f64, \n\n pub y: f64\n\n}\n\n\n\nimpl Vec2f {\n\n pub fn new() -> Self {\n\n Vec2f::default()\n\n }\n\n\n\n pub fn norm(self) -> Vec2f {\n\n let magnitude = self.magnitude();\n\n let _x = self.x; \n\n let _y = self.y; \n\n assert!(magnitude != 0.0);\n\n\n\n self / magnitude\n", "file_path": "src/algebra/vec2f.rs", "rank": 16, "score": 23579.028987678612 }, { "content": " }\n\n\n\n pub fn magnitude(self) -> f64 {\n\n Vec2f::square_magnitude(self).sqrt()\n\n }\n\n \n\n pub fn square_magnitude(self) -> f64 {\n\n dot_product(self, self)\n\n }\n\n\n\n pub fn projection(self, rhs: Vec2f) -> Self {\n\n assert!(rhs.square_magnitude() != 0.0);\n\n \n\n (dot_product(self, rhs) * rhs) / rhs.square_magnitude() \n\n }\n\n\n\n pub fn perpendicular(self, rhs: Vec2f) -> Self {\n\n self - self.projection(rhs)\n\n }\n\n}\n\n\n", "file_path": "src/algebra/vec2f.rs", "rank": 17, "score": 23577.450601574303 }, { "content": "\n\n fn mul(self, rhs: f64) -> Self::Output {\n\n Vec2f { x: self.x * rhs, y: self.y * rhs }\n\n }\n\n}\n\n\n\nimpl Mul<Vec2f> for f64 {\n\n type Output = Vec2f;\n\n\n\n fn mul(self, rhs: Vec2f) -> Self::Output {\n\n Vec2f { x: self * rhs.x, y: self * rhs.y }\n\n }\n\n}\n\n\n\nimpl MulAssign<f64> for Vec2f {\n\n fn mul_assign(&mut self, rhs: f64) {\n\n *self = Vec2f { x: self.x * rhs, y: self.y * rhs };\n\n 
}\n\n}\n\n\n", "file_path": "src/algebra/vec2f.rs", "rank": 18, "score": 23576.987609362208 }, { "content": "impl Sub for Vec2f {\n\n type Output = Vec2f;\n\n\n\n fn sub(self, rhs: Vec2f) -> Self::Output {\n\n Vec2f { x: self.x - rhs.x, y: self.y - rhs.y }\n\n }\n\n}\n\n\n\nimpl Default for Vec2f {\n\n fn default() -> Self {\n\n Self { x: 0.0, y: 0.0 }\n\n }\n\n}", "file_path": "src/algebra/vec2f.rs", "rank": 19, "score": 23575.573458212897 }, { "content": "\n\n fn div(self, rhs: f64) -> Self::Output {\n\n assert!(rhs != 0.0); \n\n \n\n (1.0 / rhs) * self\n\n }\n\n}\n\n\n\nimpl Div<Vec2f> for f64 {\n\n type Output = Vec2f;\n\n\n\n fn div(self, rhs: Vec2f) -> Self::Output {\n\n assert!(self != 0.0); \n\n \n\n (1.0 / self) * rhs\n\n }\n\n}\n\n\n\nimpl Mul<f64> for Vec2f {\n\n type Output = Self;\n", "file_path": "src/algebra/vec2f.rs", "rank": 20, "score": 23575.33592206703 }, { "content": "fn update_simulation_frame(delta_time: f64, \n\n rect_entities: &mut Vec<entities::RectEntity>, \n\n circle_entities: &mut Vec<entities::CircleEntity>) {\n\n \n\n for rect_entity in rect_entities.iter_mut() {\n\n rect_entity.move_rect(delta_time);\n\n block_rect(rect_entity);\n\n }\n\n \n\n for circle_entity in circle_entities.iter_mut() {\n\n circle_entity.move_circle(delta_time);\n\n treat_screen_circle_collision(circle_entity);\n\n }\n\n\n\n let mut indexes_to_delete = Vec::new();\n\n\n\n for circle_entity in circle_entities.iter_mut() {\n\n for (i, rect_entity) in rect_entities.iter_mut().enumerate() {\n\n if treat_circle_rect_collision(circle_entity, rect_entity) && i != 0 {\n\n indexes_to_delete.push(i);\n\n }\n\n }\n\n }\n\n\n\n delete_rect_entities(&indexes_to_delete, rect_entities);\n\n}\n\n\n", "file_path": "src/physics/physics_update.rs", "rank": 21, "score": 23062.779197863154 }, { "content": "fn delete_rect_entities(indexes_to_delete: &Vec<usize>, \n\n rect_entities: &mut Vec<entities::RectEntity>) {\n\n \n\n let num_of_deletions = indexes_to_delete.len();\n\n \n\n if num_of_deletions == 0 { return; }\n\n\n\n let num_of_rects = rect_entities.len();\n\n\n\n assert!(num_of_deletions <= num_of_rects);\n\n \n\n let mut last_index = num_of_rects - 1;\n\n \n\n for i in indexes_to_delete.iter() {\n\n rect_entities.swap(*i, last_index);\n\n last_index -= 1;\n\n }\n\n\n\n rect_entities.truncate(num_of_rects - num_of_deletions);\n\n}\n\n\n", "file_path": "src/physics/physics_update.rs", "rank": 22, "score": 22036.295694012417 }, { "content": "fn detect_circle_rect_collision(circle_entity: &entities::CircleEntity, \n\n rect_entity: &entities::RectEntity) -> Option<Vec2f> {\n\n \n\n let circle_center = circle_entity.shape.center;\n\n let circle_radius = circle_entity.shape.radius;\n\n \n\n let closest_point = closest_to_point_in_rect_border(&rect_entity.shape, circle_center);\n\n let is_center_inside_rect = is_point_inside_rect(&rect_entity.shape, circle_center);\n\n \n\n let circle_center_penetration = (circle_center - closest_point).magnitude();\n\n let mut collision_normal = (circle_center - closest_point).norm();\n\n let mut circle_penetration = circle_radius - circle_center_penetration;\n\n \n\n if is_center_inside_rect { \n\n collision_normal *= -1.0; \n\n circle_penetration = circle_radius + circle_radius;\n\n } \n\n\n\n if circle_penetration <= 0.0 { return None; }\n\n\n\n Some(collision_normal * circle_penetration)\n\n}\n\n\n", "file_path": "src/physics/physics_update.rs", "rank": 23, "score": 20946.504292250687 }, { "content": "fn detect_screen_circle_collision(entity: &entities::CircleEntity) 
-> Option<ScreenCollisionType> {\n\n let entity_center = &entity.shape.center;\n\n let entity_radius = entity.shape.radius;\n\n\n\n let width_limit = game_settings::SCREEN_WIDTH as f64 - entity_radius;\n\n let height_limit = game_settings::SCREEN_HEIGHT as f64 - entity_radius;\n\n\n\n if entity_center.x < entity_radius || entity_center.x > width_limit {\n\n return Some(ScreenCollisionType::HORIZONTAL);\n\n }\n\n \n\n if entity_center.y < entity_radius || entity_center.y > height_limit {\n\n return Some(ScreenCollisionType::VERTICAL);\n\n }\n\n\n\n None\n\n}\n\n\n", "file_path": "src/physics/physics_update.rs", "rank": 24, "score": 18871.143931640727 }, { "content": "use crate::algebra::Vec2f;\n\nuse crate::algebra::{closest_to_point_in_rect_border, is_point_inside_rect};\n\nuse crate::entities;\n\nuse crate::game_settings;\n\n\n\nuse std::vec::Vec;\n\n\n", "file_path": "src/physics/physics_update.rs", "rank": 31, "score": 6.8146827301267265 }, { "content": "use crate::algebra::*;\n\n\n\nuse sdl2::pixels::Color;\n\n\n\npub struct Rect {\n\n top_left: Vec2f, \n\n top_right: Vec2f, \n\n bottom_left: Vec2f, \n\n bottom_right: Vec2f, \n\n width: f64,\n\n height: f64,\n\n pub color: Color\n\n}\n\n\n\nimpl Rect {\n\n #[allow(dead_code)]\n\n pub fn new() -> Self {\n\n Rect::default()\n\n }\n\n\n", "file_path": "src/shapes/rect.rs", "rank": 32, "score": 6.494930652817418 }, { "content": "use sdl2::event::Event;\n\nuse sdl2::keyboard::Keycode;\n\n\n\npub struct EventPump {\n\n event_pump: sdl2::EventPump,\n\n pub close_game: bool, \n\n pub go_left: bool,\n\n pub go_right: bool\n\n}\n\n\n\nimpl EventPump {\n\n pub fn new(event_pump: sdl2::EventPump) -> Self {\n\n Self {\n\n event_pump,\n\n close_game: false,\n\n go_left: false,\n\n go_right: false\n\n } \n\n }\n\n\n", "file_path": "src/systems/event_pump.rs", "rank": 33, "score": 6.476704385750218 }, { "content": " pub fn position(&self) -> Vec2f {\n\n self.top_left\n\n }\n\n\n\n pub fn width(&self) -> f64 {\n\n self.width\n\n }\n\n \n\n pub fn height(&self) -> f64 {\n\n self.height\n\n }\n\n\n\n pub fn set_position(&mut self, new_position: Vec2f) {\n\n let position_delta = new_position - self.top_left;\n\n self.move_shape(position_delta);\n\n }\n\n\n\n pub fn move_shape(&mut self, position_delta: Vec2f) {\n\n self.top_left += position_delta;\n\n self.top_right += position_delta;\n", "file_path": "src/shapes/rect.rs", "rank": 34, "score": 6.390636594393265 }, { "content": "use crate::algebra::*;\n\n\n\nuse sdl2::pixels::Color;\n\n\n\npub struct Circle {\n\n pub center: Vec2f,\n\n pub radius: f64,\n\n pub color: Color\n\n}\n\n\n\nimpl Circle {\n\n #[allow(dead_code)] \n\n pub fn new() -> Self {\n\n Circle::default()\n\n }\n\n\n\n pub fn create_circle(x: f64, y: f64, radius: f64, color: Color) -> Self {\n\n Self {\n\n center: Vec2f { x: x, y: y },\n\n radius,\n", "file_path": "src/shapes/circle.rs", "rank": 35, "score": 6.237951290569967 }, { "content": " color\n\n }\n\n } \n\n \n\n #[allow(dead_code)] \n\n pub fn set_position(&mut self, new_position: Vec2f) {\n\n self.center = new_position;\n\n }\n\n\n\n pub fn move_shape(&mut self, position_delta: Vec2f) {\n\n self.center += position_delta;\n\n }\n\n}\n\n\n\nimpl Default for Circle {\n\n fn default() -> Self {\n\n Self {\n\n center: Vec2f::new(),\n\n radius: 1.0,\n\n color: Color::BLACK\n\n }\n\n }\n\n}", "file_path": "src/shapes/circle.rs", "rank": 36, "score": 6.102468033202674 }, { "content": "use crate::algebra::Vec2f;\n\n\n\npub struct PhysicsProperties {\n\n pub direction: Vec2f,\n\n pub 
velocity_magnitude: f64\n\n}\n\n\n\nimpl PhysicsProperties {\n\n #[allow(dead_code)]\n\n pub fn new() -> Self {\n\n PhysicsProperties::default() \n\n }\n\n\n\n pub fn velocity(&self) -> Vec2f {\n\n self.direction * self.velocity_magnitude\n\n }\n\n}\n\n\n\nimpl Default for PhysicsProperties {\n\n fn default() -> Self { \n\n Self {\n\n direction: Vec2f::new(), \n\n velocity_magnitude: 0.0 \n\n } \n\n }\n\n}", "file_path": "src/physics/physics_properties.rs", "rank": 37, "score": 5.99369338448307 }, { "content": "use crate::algebra::Vec2f;\n\nuse crate::entities;\n\nuse crate::game_settings;\n\nuse crate::shapes;\n\n\n\nuse sdl2::gfx::primitives::DrawRenderer;\n\nuse sdl2::pixels::Color;\n\nuse sdl2::rect::Rect;\n\nuse sdl2::render::Canvas;\n\nuse sdl2::VideoSubsystem;\n\nuse sdl2::video::Window;\n\n\n\nuse std::vec::Vec;\n\n\n\npub struct RenderSystem {\n\n canvas: Canvas<Window>\n\n}\n\n\n\nimpl RenderSystem {\n\n pub fn new(video_subsystem: VideoSubsystem) -> Self {\n", "file_path": "src/systems/render_system.rs", "rank": 38, "score": 5.412425883976831 }, { "content": " self.bottom_left += position_delta;\n\n self.bottom_right += position_delta;\n\n }\n\n \n\n pub fn get_points_clockwise(&self) -> Vec<Vec2f> {\n\n vec![self.bottom_left, self.top_left, self.top_right, self.bottom_right]\n\n }\n\n \n\n pub fn create_rect(x: f64, y: f64, width: f64, height: f64, color: Color) -> Self {\n\n Self {\n\n top_left: Vec2f { x: x, y: y },\n\n top_right: Vec2f { x: x + width, y: y },\n\n bottom_left: Vec2f { x: x, y: y + height },\n\n bottom_right: Vec2f { x: x + width, y: y + height },\n\n width,\n\n height,\n\n color \n\n }\n\n }\n\n}\n\n\n\nimpl Default for Rect {\n\n fn default() -> Self {\n\n Rect::create_rect(0.0, 0.0, 1.0, 1.0, Color::BLACK)\n\n }\n\n}", "file_path": "src/shapes/rect.rs", "rank": 39, "score": 4.825337858595488 }, { "content": " pub fn handle_input(&mut self) {\n\n for event in self.event_pump.poll_iter() {\n\n match event {\n\n Event::KeyDown { keycode: Some(Keycode::Escape), .. } \n\n | Event::Quit { .. } => { \n\n self.close_game = true; \n\n },\n\n Event::KeyDown { keycode: Some(Keycode::A), .. } => {\n\n self.go_left = true; \n\n }, \n\n Event::KeyUp { keycode: Some(Keycode::A), .. } => {\n\n self.go_left = false; \n\n }, \n\n Event::KeyDown { keycode: Some(Keycode::D), .. } => {\n\n self.go_right = true; \n\n }, \n\n Event::KeyUp { keycode: Some(Keycode::D), .. 
} => {\n\n self.go_right = false; \n\n }, \n\n _ => {}\n\n } \n\n }\n\n }\n\n} ", "file_path": "src/systems/event_pump.rs", "rank": 40, "score": 4.6897712765642465 }, { "content": " \n\n if go_left && go_right {\n\n go_left = false;\n\n go_right = false;\n\n } \n\n\n\n let find_direction = || -> Vec2f { \n\n if go_left {\n\n LEFT\n\n } else if go_right {\n\n RIGHT\n\n } else {\n\n STAY\n\n }\n\n };\n\n\n\n player.physics_properties.direction = find_direction();\n\n }\n\n\n\n pub fn run(&mut self) {\n", "file_path": "src/game.rs", "rank": 41, "score": 4.523336685511574 }, { "content": "use crate::physics::*;\n\nuse crate::shapes::*;\n\n\n\nuse sdl2::pixels::Color;\n\n\n\npub struct RectEntity {\n\n pub shape: Rect, \n\n pub physics_properties: PhysicsProperties\n\n}\n\n\n\nimpl RectEntity {\n\n #[allow(dead_code)] \n\n pub fn new() -> Self {\n\n RectEntity::default()\n\n }\n\n \n\n #[allow(dead_code)] \n\n pub fn create_rect_entity(x: f64, y: f64, height: f64, width: f64, velocity_magnitude: f64, color: Color) -> Self {\n\n Self {\n\n shape: Rect::create_rect(x, y, width, height, color),\n", "file_path": "src/entities/rect_entity.rs", "rank": 42, "score": 4.2202146512728955 }, { "content": "use crate::algebra::Vec2f;\n\n\n\nuse crate::entities;\n\n\n\nuse crate::physics;\n\n\n\nuse crate::game_settings;\n\n\n\nuse crate::systems::EventPump;\n\nuse crate::systems::TimeSystem;\n\nuse crate::systems::RenderSystem;\n\n\n\nuse rand::Rng;\n\n\n\nuse sdl2::pixels::Color;\n\n\n\nuse std::vec::Vec;\n\n\n\nconst RECT_HEIGHT: f64 = 40.0;\n\n\n", "file_path": "src/game.rs", "rank": 43, "score": 4.165795006713671 }, { "content": "use crate::physics::*;\n\nuse crate::shapes::*;\n\n\n\nuse sdl2::pixels::Color;\n\n\n\npub struct CircleEntity {\n\n pub shape: Circle,\n\n pub physics_properties: PhysicsProperties\n\n}\n\n\n\nimpl CircleEntity {\n\n #[allow(dead_code)]\n\n fn new() -> Self {\n\n CircleEntity::default()\n\n } \n\n \n\n pub fn create_circle_entity(x: f64, y: f64, radius: f64, velocity_magnitude: f64, color: Color) -> Self {\n\n Self {\n\n shape: Circle::create_circle(x, y, radius, color),\n\n physics_properties: PhysicsProperties { velocity_magnitude: velocity_magnitude, ..PhysicsProperties::default() }\n", "file_path": "src/entities/circle_entity.rs", "rank": 44, "score": 3.9690269532422136 }, { "content": "const RIGHT: Vec2f = Vec2f { x: 1.0, y: 0.0 };\n\nconst LEFT: Vec2f = Vec2f { x: -1.0, y: 0.0 };\n\nconst STAY: Vec2f = Vec2f { x: 0.0, y: 0.0 };\n\n\n\npub struct Game {\n\n event_pump: EventPump,\n\n time_system: TimeSystem,\n\n render_system: RenderSystem,\n\n rect_entities: Vec<entities::RectEntity>,\n\n circle_entities: Vec<entities::CircleEntity>,\n\n}\n\n\n\nimpl Game {\n\n pub fn new() -> Self {\n\n let sdl_context = sdl2::init().unwrap();\n\n\n\n let event_pump = EventPump::new(sdl_context.event_pump().unwrap());\n\n let time_system = TimeSystem::new(sdl_context.timer().unwrap());\n\n let render_system = RenderSystem::new(sdl_context.video().unwrap());\n\n \n", "file_path": "src/game.rs", "rank": 45, "score": 3.6856350910895213 }, { "content": "mod algebra;\n\nmod entities;\n\nmod game;\n\nmod game_settings;\n\nmod physics;\n\nmod shapes;\n\nmod systems;\n\n\n", "file_path": "src/main.rs", "rank": 46, "score": 3.383545110633694 }, { "content": "# retrogame-rust\n\n\n\nThe idea is to implement a game similar to Atari Breakout, just to learn Rust in a fun way.\n\n\n\n## TODO\n\n\n\n#### UI\n\n - [ ] Add score\n\n - [ ] Add game over screen\n\n \n\n#### Graphics\n\n - [ ] Fix 
precision errors causing visual glitches\n\n - [ ] Change color palette to be similar to Breakout\n\n\n\n#### Physics\n\n - [ ] Fix circle collision with screen boundaries\n\n\n\n#### Gameplay\n\n - [ ] Limit ball initial direction\n\n - [ ] Add \"effect\" to ball\n\n - [ ] Add lifes?\n\n \n\n #### Sound\n\n - [ ] Ball collision sound\n\n - [ ] Background music?\n\n \n\n #### Formatting\n\n - [ ] Change \"use\" syntax? \n\n - [ ] Use cfg_if crate?\n\n \n\n## Learning Opportunities\n\n- Profiling (Windows Performance Analyzer)\n\n- Testing (Rust unit tests, commit only if all tests are ok)\n\n\n\n# Project mini-Cloud\n\n\n\nThe main goal is to stream the game at 60 fps using a Raspberry Pi as a server. \n\nThe client should only send input, decompress, and decode data from the server. \n\n\n\n## TODO\n\n\n\n- [ ] Raspberry Pi Setup (create a guide) \n\n- [ ] Build crate for user side\n", "file_path": "README.md", "rank": 47, "score": 3.2911515430705824 }, { "content": " fn rects_generator(num_of_rect_lines: u8, max_num_of_rect_per_line: u8) -> Vec<entities::RectEntity> {\n\n let mut rect_entities = Vec::new();\n\n let mut random_generator = rand::thread_rng();\n\n\n\n let color_palette = vec![Color::BLUE, Color::GREEN, Color::RED, Color::YELLOW];\n\n\n\n let mut rect_x;\n\n let mut rect_y = 0.0;\n\n\n\n for _ in 0..num_of_rect_lines {\n\n \n\n let num_of_rects = random_generator.gen_range(3..=max_num_of_rect_per_line); \n\n\n\n /* TODO: fix floating point errors causing visual glitches when rendering \n\n let num_of_rects = || -> u8 {\n\n let mut num;\n\n loop {\n\n num = random_generator.gen_range(3..=max_num_of_rect_per_line);\n\n\n\n if game_settings::SCREEN_WIDTH % num as u32 == 0 {\n", "file_path": "src/game.rs", "rank": 48, "score": 3.1314239628447873 }, { "content": " }\n\n }\n\n\n\n pub fn move_circle(&mut self, delta_time: f64) {\n\n self.shape.move_shape(self.physics_properties.velocity() * delta_time);\n\n }\n\n}\n\n\n\nimpl Default for CircleEntity {\n\n fn default() -> Self {\n\n Self {\n\n shape: Circle::default(),\n\n physics_properties: PhysicsProperties::default()\n\n }\n\n }\n\n}\n", "file_path": "src/entities/circle_entity.rs", "rank": 49, "score": 3.0052759941088336 }, { "content": "pub const SCREEN_WIDTH: u32 = 960;\n\npub const SCREEN_HEIGHT: u32 = 540;\n\n\n\npub const FIXED_FPS: u32 = 120;\n\npub const FIXED_DELTA_TIME: f64 = 1.0 / FIXED_FPS as f64;\n\n\n\npub const PLAYER_VELOCITY: f64 = 300.0; \n\npub const PLAYER_WIDTH: f64 = 200.0;\n\npub const PLAYER_HEIGHT: f64 = 20.0;\n\n\n\npub const BALL_VELOCITY: f64 = 300.0;\n\npub const BALL_RADIUS: f64 = 10.0;", "file_path": "src/game_settings.rs", "rank": 50, "score": 2.7694764328918433 }, { "content": "use std::env;\n\nuse std::path::PathBuf;\n\n\n", "file_path": "build.rs", "rank": 51, "score": 2.6854377125039397 }, { "content": " physics_properties: PhysicsProperties { velocity_magnitude: velocity_magnitude, ..PhysicsProperties::default() }\n\n }\n\n }\n\n\n\n pub fn move_rect(&mut self, delta_time: f64) {\n\n self.shape.move_shape(self.physics_properties.velocity() * delta_time);\n\n }\n\n}\n\n\n\nimpl Default for RectEntity {\n\n fn default() -> Self {\n\n Self {\n\n shape: Rect::default(),\n\n physics_properties: PhysicsProperties::default()\n\n }\n\n }\n\n}", "file_path": "src/entities/rect_entity.rs", "rank": 52, "score": 2.598672250760971 }, { "content": " 0.0, \n\n rect_color\n\n );\n\n\n\n rect_entities.push(rect_entity);\n\n\n\n rect_x += rect_width;\n\n }\n\n\n\n rect_y += RECT_HEIGHT;\n\n }\n\n \n\n return 
rect_entities;\n\n }\n\n \n\n fn process_player_input(&mut self) {\n\n let player = self.rect_entities.first_mut().unwrap();\n\n \n\n let mut go_left = self.event_pump.go_left;\n\n let mut go_right = self.event_pump.go_right;\n", "file_path": "src/game.rs", "rank": 53, "score": 2.559323024541457 }, { "content": " for circle_entity in circle_entities.iter() {\n\n self.draw_filled_circle_with_border(circle_entity);\n\n }\n\n }\n\n\n\n pub fn render(&mut self, rect_entities: &Vec<entities::RectEntity>, circle_entities: &Vec<entities::CircleEntity>) {\n\n self.canvas.set_draw_color(Color::GREY);\n\n self.canvas.clear();\n\n \n\n self.draw_rect_entities(rect_entities);\n\n self.draw_circle_entities(circle_entities);\n\n\n\n self.canvas.present();\n\n }\n\n}", "file_path": "src/systems/render_system.rs", "rank": 54, "score": 2.4481168064498506 }, { "content": " timer_subsystem\n\n }\n\n }\n\n\n\n pub fn update_frame(&mut self) {\n\n let current_time = Self::get_current_time(&self.timer_subsystem);\n\n self.frame_duration = current_time - self.last_time;\n\n self.last_time = current_time;\n\n self.game_frame_duration = self.frame_duration * self.scale;\n\n }\n\n\n\n fn get_current_time(timer_subsystem: &sdl2::TimerSubsystem) -> f64 {\n\n let counter = timer_subsystem.performance_counter() as f64;\n\n let frequency = timer_subsystem.performance_frequency() as f64;\n\n\n\n counter / frequency\n\n }\n\n}\n", "file_path": "src/systems/time_system.rs", "rank": 55, "score": 2.2644480234880207 }, { "content": "pub struct TimeSystem {\n\n pub fixed_delta_time: f64,\n\n pub game_frame_duration: f64,\n\n\n\n frame_duration: f64,\n\n last_time: f64,\n\n scale: f64,\n\n timer_subsystem: sdl2::TimerSubsystem,\n\n}\n\n\n\nimpl TimeSystem {\n\n pub fn new(timer_subsystem: sdl2::TimerSubsystem) -> Self {\n\n let last_time = Self::get_current_time(&timer_subsystem);\n\n\n\n Self {\n\n fixed_delta_time: 1.0 / 120.0,\n\n game_frame_duration: 0.0,\n\n frame_duration: 0.0,\n\n last_time: last_time,\n\n scale: 1.0,\n", "file_path": "src/systems/time_system.rs", "rank": 56, "score": 2.1014177659975983 }, { "content": " Self {\n\n canvas: RenderSystem::create_canvas(game_settings::SCREEN_WIDTH, game_settings::SCREEN_HEIGHT, &video_subsystem)\n\n }\n\n }\n\n \n\n fn create_canvas(window_width: u32, window_height: u32, video_subsystem: &VideoSubsystem) -> Canvas<Window> {\n\n video_subsystem\n\n .window(\"block-game-rust\", window_width, window_height)\n\n .position_centered()\n\n .build()\n\n .unwrap() \n\n .into_canvas()\n\n .build()\n\n .unwrap()\n\n }\n\n\n\n fn draw_filled_rect_with_border(&mut self, rect_entity: &entities::RectEntity) {\n\n let rect_color = rect_entity.shape.color;\n\n let Vec2f { x: rect_x, y: rect_y } = rect_entity.shape.position();\n\n \n", "file_path": "src/systems/render_system.rs", "rank": 57, "score": 1.926056280293333 }, { "content": " loop {\n\n self.render_system.render(&self.rect_entities, &self.circle_entities);\n\n \n\n self.event_pump.handle_input();\n\n if self.event_pump.close_game { break; }\n\n\n\n self.process_player_input(); \n\n\n\n self.time_system.update_frame();\n\n\n\n physics::update_game_frame(self.time_system.game_frame_duration, \n\n &mut self.rect_entities, \n\n &mut self.circle_entities);\n\n\n\n //println!(\"{:.10}\", self.time_system.game_frame_duration);\n\n } \n\n }\n\n}", "file_path": "src/game.rs", "rank": 58, "score": 1.895856979722237 }, { "content": " let player = entities::RectEntity::create_rect_entity(\n\n game_settings::SCREEN_WIDTH as f64 * 0.5, \n\n 
game_settings::SCREEN_HEIGHT as f64 * 0.9, \n\n game_settings::PLAYER_HEIGHT, \n\n game_settings::PLAYER_WIDTH, \n\n game_settings::PLAYER_VELOCITY, \n\n Color::BLACK);\n\n\n\n let mut rect_entities = vec![player];\n\n \n\n rect_entities.append(&mut Game::rects_generator(6u8, 7u8));\n\n \n\n let mut game_ball = entities::CircleEntity::create_circle_entity(\n\n game_settings::SCREEN_WIDTH as f64 * 0.5,\n\n game_settings::SCREEN_HEIGHT as f64 * 0.5, \n\n game_settings::BALL_RADIUS, \n\n game_settings::BALL_VELOCITY, \n\n Color::CYAN\n\n );\n\n\n", "file_path": "src/game.rs", "rank": 59, "score": 1.8759671734104153 }, { "content": " let rect_x = rect_x.round() as i32; \n\n let rect_y = rect_y.round() as i32;\n\n let rect_width = rect_entity.shape.width().round() as u32;\n\n let rect_height = rect_entity.shape.height().round() as u32;\n\n\n\n let rect = Rect::new(rect_x, rect_y, rect_width, rect_height);\n\n \n\n self.canvas.set_draw_color(rect_color);\n\n self.canvas.fill_rect(rect).unwrap(); \n\n\n\n self.canvas.set_draw_color(Color::BLACK);\n\n self.canvas.draw_rect(rect).unwrap(); \n\n }\n\n\n\n fn draw_filled_circle_with_border(&mut self, circle_entity: &entities::CircleEntity) {\n\n let entities::CircleEntity { \n\n shape: shapes::Circle { \n\n center: Vec2f { x: x_center, y: y_center }, \n\n radius: circle_radius, \n\n color: circle_color, \n", "file_path": "src/systems/render_system.rs", "rank": 60, "score": 1.7513973816403459 }, { "content": " let ball_initial_direction = || -> Vec2f {\n\n let pi = std::f64::consts::PI;\n\n let random_angle = rand::thread_rng().gen_range(0.0..(2.0 * pi)); \n\n\n\n Vec2f { x: random_angle.sin(), y: random_angle.cos() }\n\n }();\n\n \n\n game_ball.physics_properties.direction = ball_initial_direction;\n\n \n\n let circle_entities = vec![game_ball];\n\n \n\n Self {\n\n event_pump,\n\n time_system,\n\n render_system,\n\n rect_entities,\n\n circle_entities\n\n }\n\n }\n\n\n", "file_path": "src/game.rs", "rank": 61, "score": 1.727748109175545 }, { "content": " .. \n\n }, \n\n .. \n\n } = circle_entity;\n\n\n\n let x_center_i16 = x_center.round() as i16;\n\n let y_center_i16 = y_center.round() as i16;\n\n let circle_radius_i16 = circle_radius.round() as i16;\n\n\n\n self.canvas.filled_circle(x_center_i16, y_center_i16, circle_radius_i16, *circle_color).unwrap();\n\n self.canvas.circle(x_center_i16, y_center_i16, circle_radius_i16, Color::BLACK).unwrap();\n\n }\n\n\n\n fn draw_rect_entities(&mut self, rect_entities: &Vec<entities::RectEntity>) {\n\n for rect_entity in rect_entities.iter() {\n\n self.draw_filled_rect_with_border(rect_entity);\n\n }\n\n }\n\n \n\n fn draw_circle_entities(&mut self, circle_entities: &Vec<entities::CircleEntity>) {\n", "file_path": "src/systems/render_system.rs", "rank": 62, "score": 1.5071094648940035 }, { "content": " dll_dir.push(\"32\");\n\n }\n\n println!(\"cargo:rustc-link-search=all={}\", lib_dir.display());\n\n for entry in std::fs::read_dir(dll_dir).expect(\"Can't read DLL dir\") {\n\n let entry_path = entry.expect(\"Invalid fs entry\").path();\n\n let file_name_result = entry_path.file_name();\n\n let mut new_file_path = manifest_dir.clone();\n\n if let Some(file_name) = file_name_result {\n\n let file_name = file_name.to_str().unwrap();\n\n if file_name.ends_with(\".dll\") {\n\n new_file_path.push(file_name);\n\n std::fs::copy(&entry_path, new_file_path.as_path())\n\n .expect(\"Can't copy from DLL dir\");\n\n }\n\n }\n\n }\n\n }\n\n}\n", "file_path": "build.rs", "rank": 63, "score": 0.9241091034102618 } ]
Rust
src/system2/memory.rs
huhlig/vcpu16-rs
4fda8e36fbaae7dddb79f5cb96646205d36ef677
use std::char; use std::fmt; use std::io::{Read, Write}; use std::mem; use std::slice; use super::Word; use super::SystemError; #[derive(Clone)] pub struct Memory { buffer: [Word; 65536], } impl Memory { pub fn new() -> Memory { Memory { buffer: [0; 65536], } } pub fn load(&mut self, reader: &mut Read) { unsafe { let memory_size = mem::size_of_val(&self.buffer); let memory_slice = slice::from_raw_parts_mut( &mut self.buffer as *mut _ as *mut u8, memory_size, ); reader.read_exact(memory_slice).unwrap(); } } pub fn save(&mut self, writer: &mut Write) { unsafe { let memory_size = mem::size_of_val(&self.buffer); let memory_slice = slice::from_raw_parts_mut( &mut self.buffer as *mut _ as *mut u8, memory_size, ); writer.write(memory_slice).unwrap(); } } pub fn clear(&mut self) { self.buffer = [0; 65536]; } pub fn write(&mut self, address: Word, buffer: &[Word]) -> Result<(), SystemError> { if address as usize + buffer.len() > 65535 { return Err(SystemError::AddressOverflow); } let start = address as usize; let end = start + buffer.len(); Ok(self.buffer[start..end].copy_from_slice(buffer)) } pub fn read(&mut self, address: Word, length: Word) -> Result<&[Word], SystemError> { if address as usize + length as usize > 65535 { return Err(SystemError::AddressOverflow); } let start = address as usize; let end = start + length as usize; Ok(&self.buffer[start..end as usize]) } pub fn set(&mut self, address: Word, value: Word) { self.buffer[address as usize] = value } pub fn get(&self, address: Word) -> Word { self.buffer[address as usize] } } impl fmt::Display for Memory { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { writeln!(f, "Memory 0 1 2 3 4 5 6 7 8 9 A B C D E F")?; for base in (0..4096usize).map(|o| o * 16) { write!(f, "0x{:04X}", base)?; for offset in 0..16usize { write!(f, " {:04X}", self.buffer[base + offset])?; } write!(f, " ")?; for offset in 0..16usize { if let Some(ch) = char::from_u32(self.buffer[base + offset] as u32) { if ch.is_ascii_alphanumeric() { write!(f, "{}", ch)?; } else { write!(f, "{}", '.')?; } } else { write!(f, "{}", '.')?; } } writeln!(f, " ")?; } Ok(()) } } impl fmt::Debug for Memory { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { writeln!(f, "Memory 0 1 2 3 4 5 6 7 8 9 A B C D E F")?; for base in (0..4096usize).map(|o| o * 16) { write!(f, "0x{:04X}", base)?; for offset in 0..16usize { write!(f, " {:04X}", self.buffer[base + offset])?; } write!(f, " ")?; for offset in 0..16usize { if let Some(ch) = char::from_u32(self.buffer[base + offset] as u32) { if ch.is_ascii_alphanumeric() { write!(f, "{}", ch)?; } else { write!(f, "{}", '.')?; } } else { write!(f, "{}", '.')?; } } writeln!(f, " ")?; } Ok(()) } } #[cfg(test)] mod tests { use super::Word; use super::Memory; use rand::{Rng, SeedableRng, XorShiftRng}; use std::io::Cursor; #[test] pub fn test_load_save() { let mut mem = Memory::new(); let mut input: [u8; 131072] = [0; 131072]; let mut output: [u8; 131072] = [0; 131072]; XorShiftRng::from_seed([1; 4]).fill_bytes(&mut input[..]); mem.load(&mut Cursor::new(&mut input[..])); mem.save(&mut Cursor::new(&mut output[..])); assert_eq!(&input[..], &output[..]); } #[test] pub fn test_write_clear_read() { let mut mem = Memory::new(); let read_address: Word = 0x0100; let write_address: Word = 0x0104; let write_buffer: [Word; 8] = [1; 8]; let empty_buffer: [Word; 16] = [0; 16]; let dirty_buffer: [Word; 16] = [0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0]; assert_eq!(&empty_buffer[..], mem.read(read_address, 16).unwrap()); mem.write(write_address, 
&write_buffer).unwrap(); assert_eq!(&dirty_buffer[..], mem.read(read_address, 16).unwrap()); mem.clear(); assert_eq!(&empty_buffer[..], mem.read(read_address, 16).unwrap()); } #[test] pub fn test_set_get() { let mut mem = Memory::new(); let address: u16 = 0xFFFF; let oldvalue: u16 = 0x0000; let newvalue: u16 = 0x2222; assert_eq!(oldvalue, mem.get(address)); mem.set(address, newvalue); assert_eq!(newvalue, mem.get(address)); } #[test] pub fn test_display() { let mut mem = Memory::new(); for addr in 0..65536u32 { let addr = addr as u16; mem.set(addr, addr); } println!("{}", mem); } }
use std::char; use std::fmt; use std::io::{Read, Write}; use std::mem; use std::slice; use super::Word; use super::SystemError; #[derive(Clone)] pub struct Memory { buffer: [Word; 65536], } impl Memory { pub fn new() -> Memory { Memory { buffer: [0; 65536], } } pub fn load(&mut self, reader: &mut Read) { unsafe { let memory_size = mem::size_of_val(&self.buffer); let memory_slice = slice::from_raw_parts_mut( &mut self.buffer as *mut _ as *mut u8, memory_size, ); reader.read_exact(memory_slice).unwrap(); } } pub fn save(&mut self, writer: &mut Write) { unsafe { let memory_size = mem::size_of_val(&self.buffer); let memory_slice = slice::from_raw_parts_mut( &mut self.buffer as *mut _ as *mut u8, memory_
assert_eq!(&dirty_buffer[..], mem.read(read_address, 16).unwrap()); mem.clear(); assert_eq!(&empty_buffer[..], mem.read(read_address, 16).unwrap()); } #[test] pub fn test_set_get() { let mut mem = Memory::new(); let address: u16 = 0xFFFF; let oldvalue: u16 = 0x0000; let newvalue: u16 = 0x2222; assert_eq!(oldvalue, mem.get(address)); mem.set(address, newvalue); assert_eq!(newvalue, mem.get(address)); } #[test] pub fn test_display() { let mut mem = Memory::new(); for addr in 0..65536u32 { let addr = addr as u16; mem.set(addr, addr); } println!("{}", mem); } }
size, ); writer.write(memory_slice).unwrap(); } } pub fn clear(&mut self) { self.buffer = [0; 65536]; } pub fn write(&mut self, address: Word, buffer: &[Word]) -> Result<(), SystemError> { if address as usize + buffer.len() > 65535 { return Err(SystemError::AddressOverflow); } let start = address as usize; let end = start + buffer.len(); Ok(self.buffer[start..end].copy_from_slice(buffer)) } pub fn read(&mut self, address: Word, length: Word) -> Result<&[Word], SystemError> { if address as usize + length as usize > 65535 { return Err(SystemError::AddressOverflow); } let start = address as usize; let end = start + length as usize; Ok(&self.buffer[start..end as usize]) } pub fn set(&mut self, address: Word, value: Word) { self.buffer[address as usize] = value } pub fn get(&self, address: Word) -> Word { self.buffer[address as usize] } } impl fmt::Display for Memory { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { writeln!(f, "Memory 0 1 2 3 4 5 6 7 8 9 A B C D E F")?; for base in (0..4096usize).map(|o| o * 16) { write!(f, "0x{:04X}", base)?; for offset in 0..16usize { write!(f, " {:04X}", self.buffer[base + offset])?; } write!(f, " ")?; for offset in 0..16usize { if let Some(ch) = char::from_u32(self.buffer[base + offset] as u32) { if ch.is_ascii_alphanumeric() { write!(f, "{}", ch)?; } else { write!(f, "{}", '.')?; } } else { write!(f, "{}", '.')?; } } writeln!(f, " ")?; } Ok(()) } } impl fmt::Debug for Memory { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { writeln!(f, "Memory 0 1 2 3 4 5 6 7 8 9 A B C D E F")?; for base in (0..4096usize).map(|o| o * 16) { write!(f, "0x{:04X}", base)?; for offset in 0..16usize { write!(f, " {:04X}", self.buffer[base + offset])?; } write!(f, " ")?; for offset in 0..16usize { if let Some(ch) = char::from_u32(self.buffer[base + offset] as u32) { if ch.is_ascii_alphanumeric() { write!(f, "{}", ch)?; } else { write!(f, "{}", '.')?; } } else { write!(f, "{}", '.')?; } } writeln!(f, " ")?; } Ok(()) } } #[cfg(test)] mod tests { use super::Word; use super::Memory; use rand::{Rng, SeedableRng, XorShiftRng}; use std::io::Cursor; #[test] pub fn test_load_save() { let mut mem = Memory::new(); let mut input: [u8; 131072] = [0; 131072]; let mut output: [u8; 131072] = [0; 131072]; XorShiftRng::from_seed([1; 4]).fill_bytes(&mut input[..]); mem.load(&mut Cursor::new(&mut input[..])); mem.save(&mut Cursor::new(&mut output[..])); assert_eq!(&input[..], &output[..]); } #[test] pub fn test_write_clear_read() { let mut mem = Memory::new(); let read_address: Word = 0x0100; let write_address: Word = 0x0104; let write_buffer: [Word; 8] = [1; 8]; let empty_buffer: [Word; 16] = [0; 16]; let dirty_buffer: [Word; 16] = [0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0]; assert_eq!(&empty_buffer[..], mem.read(read_address, 16).unwrap()); mem.write(write_address, &write_buffer).unwrap();
random
[ { "content": "/// Memory Array\n\nstruct Memory {\n\n /// Memory Buffer\n\n buffer: [u16; 65536],\n\n}\n\n\n", "file_path": "src/system/cpu.rs", "rank": 0, "score": 69478.01755072441 }, { "content": "/// Internal Clock\n\nstruct Clock {\n\n /// Is CPU Halted\n\n halted: bool,\n\n /// Cycles since Startup\n\n cycles: u64,\n\n /// Countdown timer\n\n timer: u16,\n\n /// Cycles remaining till instruction completes\n\n busy: u8,\n\n}\n\n\n", "file_path": "src/system/cpu.rs", "rank": 1, "score": 41741.26330562591 }, { "content": "/// Interrupt Request Queue\n\nstruct IRQ {\n\n /// Queue of interrupts\n\n interrupts: Vec<u16>,\n\n /// Are interrupt's currently enabled\n\n enabled: bool,\n\n /// Interrupt Address\n\n address: u16,\n\n}\n\n\n", "file_path": "src/system/cpu.rs", "rank": 2, "score": 41741.26330562591 }, { "content": "type Word = u16;\n\n\n\n/// Shared System\n\npub struct System {\n\n mem: Memory,\n\n cpu: VCPU16,\n\n clk: Clock,\n\n bus: Bus,\n\n}\n\n\n\nimpl System {\n\n pub fn new() {\n\n System {\n\n mem: Memory::new(),\n\n cpu: VCPU16: new(),\n\n clk: Clock::new(),\n\n bus: Bus::new(),\n\n }\n\n }\n\n pub fn mem(&mut self) -> &mut Memory { &self.mem }\n\n pub fn cpu(&mut self) -> &mut VCPU16 { &self.cpu }\n\n pub fn clk(&mut self) -> &mut Clock { &self.clk }\n\n pub fn bus(&mut self) -> &mut Bus { &self.bus }\n\n}\n", "file_path": "src/system/mod.rs", "rank": 3, "score": 39546.238653262124 }, { "content": "/// Hardware Interface\n\npub trait Hardware {\n\n /// Connect Hardware to the VCPU16\n\n fn connect(id: u16, system: System);\n\n /// Send an Interrupt to Hardware\n\n fn interrupt(message: u16);\n\n /// Time step\n\n fn step();\n\n /// Disconnect hardware from the VCPU16\n\n fn disconnect();\n\n}", "file_path": "src/system/hardware/mod.rs", "rank": 4, "score": 38494.44299482215 }, { "content": "enum BufferError {\n\n BufferFull,\n\n}\n\n\n\nimpl std::error::Error for BufferError {\n\n fn description(&self) -> &str {\n\n match self {\n\n BufferError::BufferFull => &\"Buffer is full\"\n\n }\n\n }\n\n}\n\n\n\nimpl CircularBuffer<T> {\n\n pub fn new() -> CircularBuffer<T> {\n\n CircularBuffer {\n\n buffer: [0; 256],\n\n write: 0,\n\n read: 0,\n\n }\n\n }\n\n pub fn enqueue(&mut self, value: T) -> Result<(), BufferError> {\n\n\n\n }\n\n pub fn dequeue(&mut self) -> Result<T, BufferError> {}\n\n}", "file_path": "src/util.rs", "rank": 17, "score": 28277.194856927035 }, { "content": " pub fn load(&mut self, reader: &mut Read) {\n\n unsafe {\n\n let memory_size = mem::size_of_val(&self.mem.buffer);\n\n let memory_slice = slice::from_raw_parts_mut(\n\n &mut self.mem.buffer as *mut _ as *mut u8,\n\n memory_size,\n\n );\n\n reader.read_exact(memory_slice).unwrap();\n\n }\n\n }\n\n ///\n\n /// Save memory to writer\n\n ///\n\n pub fn save(&mut self, writer: &mut Write) {\n\n unsafe {\n\n let memory_size = mem::size_of_val(&self.mem.buffer);\n\n let memory_slice = slice::from_raw_parts_mut(\n\n &mut self.mem.buffer as *mut _ as *mut u8,\n\n memory_size,\n\n );\n", "file_path": "src/system/mem.rs", "rank": 18, "score": 29.258347621553042 }, { "content": " pub fn load_mem(&mut self, reader: &mut Read) {\n\n unsafe {\n\n let memory_size = mem::size_of_val(&self.mem.buffer);\n\n let memory_slice = slice::from_raw_parts_mut(\n\n &mut self.mem.buffer as *mut _ as *mut u8,\n\n memory_size,\n\n );\n\n reader.read_exact(memory_slice).unwrap();\n\n }\n\n }\n\n ///\n\n /// Save memory to writer\n\n ///\n\n pub fn save_mem(&mut self, writer: &mut Write) {\n\n unsafe {\n\n let memory_size = 
mem::size_of_val(&self.mem.buffer);\n\n let memory_slice = slice::from_raw_parts_mut(\n\n &mut self.mem.buffer as *mut _ as *mut u8,\n\n memory_size,\n\n );\n", "file_path": "src/system/cpu.rs", "rank": 19, "score": 29.018415388897367 }, { "content": " writer.write(memory_slice).unwrap();\n\n }\n\n }\n\n ///\n\n /// Clear Memory\n\n ///\n\n pub fn clear(&mut self) {\n\n self.mem.buffer = [0; 65536];\n\n }\n\n ///\n\n /// Write a slice of memory from buffer\n\n ///\n\n pub fn write(&mut self, address: u16, buffer: &[u16]) {\n\n self.mem.buffer[address as usize..buffer.len()].copy_from_slice(buffer)\n\n }\n\n ///\n\n /// Read a slice length of memory at address\n\n ///\n\n pub fn read(&mut self, address: u16, length: u16) -> &[u16] {\n\n &self.mem.buffer[address as usize..length as usize]\n", "file_path": "src/system/mem.rs", "rank": 20, "score": 21.81045181736348 }, { "content": " writer.write(memory_slice).unwrap();\n\n }\n\n }\n\n ///\n\n /// Clear Memory\n\n ///\n\n pub fn clear_mem(&mut self) {\n\n self.mem.buffer = [0; 65536];\n\n }\n\n ///\n\n /// Write a slice of memory from buffer\n\n ///\n\n pub fn write_mem(&mut self, address: u16, buffer: &[u16]) {\n\n self.mem.buffer[address as usize..buffer.len()].copy_from_slice(buffer)\n\n }\n\n ///\n\n /// Read a slice length of memory at address\n\n ///\n\n pub fn read_mem(&mut self, address: u16, length: u16) -> &[u16] {\n\n &self.mem.buffer[address as usize..length as usize]\n", "file_path": "src/system/cpu.rs", "rank": 21, "score": 21.532032991482204 }, { "content": " mem.load(&mut Cursor::new(&mut input[..]));\n\n\n\n // Save our memory to output\n\n mem.save(&mut Cursor::new(&mut output[..]));\n\n\n\n // Compare buffers\n\n assert_eq!(&input[..], &output[..]);\n\n }\n\n\n\n #[test]\n\n pub fn test_write_clear_read() {\n\n // Create our Memory and external buffers\n\n let mut mem = Memory::new();\n\n\n\n let read_address: u16 = 0x0100;\n\n let write_address: u16 = 0x0104;\n\n let write_buffer: [u16; 8] = [1; 8];\n\n let clear_buffer: [u16; 16] = [0; 16];\n\n let dirty_buffer: [u16; 16] = [0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0];\n\n\n", "file_path": "src/system/mem.rs", "rank": 22, "score": 21.510139175556066 }, { "content": " use rand::{Rng, SeedableRng, XorShiftRng};\n\n use std::io::Cursor;\n\n\n\n #[test]\n\n pub fn test_loadsave() {\n\n // Create our Memory and external buffers\n\n let mut cpu = VCPU16::new();\n\n let mut input: [u8; 131072] = [0; 131072];\n\n let mut output: [u8; 131072] = [0; 131072];\n\n\n\n // Fill our input Buffer\n\n XorShiftRng::from_seed([1; 4]).fill_bytes(&mut input[..]);\n\n\n\n // Load our input into Memory\n\n cpu.load_mem(&mut Cursor::new(&mut input[..]));\n\n\n\n // Save our memory to output\n\n cpu.save_mem(&mut Cursor::new(&mut output[..]));\n\n\n\n // Compare buffers\n", "file_path": "src/system/cpu.rs", "rank": 23, "score": 20.23749271520319 }, { "content": "\n\n\n\n/// Memory Array\n\n#[derive(Clone)]\n\npub struct Memory {\n\n buffer: [u16; 65536],\n\n}\n\n\n\nimpl Memory {\n\n ///\n\n /// Create new Memory Buffer\n\n ///\n\n pub fn new() -> Memory {\n\n Memory {\n\n buffer: [0; 65536],\n\n }\n\n }\n\n ///\n\n /// Load Memory from Reader\n\n ///\n", "file_path": "src/system/mem.rs", "rank": 24, "score": 20.145290327732372 }, { "content": " }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::Memory;\n\n use rand::{Rng, SeedableRng, XorShiftRng};\n\n use std::io::Cursor;\n\n\n\n #[test]\n\n pub fn test_load_save() {\n\n // Create our Memory and external buffers\n\n let mut mem = 
Memory::new();\n\n let mut input: [u8; 131072] = [0; 131072];\n\n let mut output: [u8; 131072] = [0; 131072];\n\n\n\n // Fill our input Buffer\n\n XorShiftRng::from_seed([1; 4]).fill_bytes(&mut input[..]);\n\n\n\n // Load our input into Memory\n", "file_path": "src/system/mem.rs", "rank": 25, "score": 19.986946649896684 }, { "content": "/// Interrupt Request Queue\n\n#[derive(Clone, Copy)]\n\npub struct Queue {\n\n interrupts: [Word; 256],\n\n write: u8,\n\n read: u8,\n\n}\n\n\n\nimpl fmt::Debug for Queue {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"IRQ ( Disabled: {} Queue: \", self.enabled)?;\n\n if self.read < self.write {\n\n write!(f, \"[\")?;\n\n for i in &self.interrupts[self.read as usize..self.write as usize] {\n\n write!(f, \" 0x{:04X}\", i)?;\n\n }\n\n write!(f, \" ]\")?;\n\n } else if self.read > self.write {\n\n write!(f, \"[\")?;\n\n for i in &self.interrupts[self.read as usize..] {\n", "file_path": "src/system2/queue.rs", "rank": 26, "score": 19.279169125588936 }, { "content": " // Assert Buffer written\n\n assert_eq!(&empty_buffer[..], &mem.read(address - 8, 24));\n\n\n\n // Write buffer\n\n mem.write(address, &buffer);\n\n\n\n // Assert Data Written\n\n assert_eq!(&dirty_buffer[..], &mem.read(address - 8, 24));\n\n\n\n // Clear memory1\n\n mem.clear();\n\n\n\n // Assert Data Cleared\n\n assert_eq!(&empty_buffer[..], &mem.read(address - 8, 24));\n\n }\n\n\n\n #[test]\n\n pub fn test_set_get() {\n\n // Create our Memory and external buffers\n\n let mut mem = Memory::new();\n", "file_path": "src/system/mem.rs", "rank": 27, "score": 18.606634655892112 }, { "content": " }\n\n pub fn is_empty(&self) -> bool {\n\n self.read == self.write\n\n }\n\n pub fn is_full(&self) -> bool {\n\n self.write.wrapping_add(1) == self.read\n\n }\n\n pub fn enqueue(&mut self, value: Word) -> Result<(), SystemError> {\n\n if self.write.wrapping_add(1) == self.read {\n\n return Err(SystemError::InterruptOverflow);\n\n }\n\n self.interrupts[self.write as usize] = value;\n\n self.write = self.write.wrapping_add(1);\n\n Ok(())\n\n }\n\n pub fn dequeue(&mut self) -> Result<Word, SystemError> {\n\n if self.read == self.write {\n\n return Err(SystemError::InterruptUnderflow);\n\n }\n\n let value = self.interrupts[self.read as usize];\n", "file_path": "src/system2/queue.rs", "rank": 28, "score": 16.116880241547676 }, { "content": "use super::Word;\n\nuse super::hardware::Hardware;\n\n\n\npub struct Bus {\n\n hardware: HashMap<Word, Hardware>\n\n}\n\n\n\nimpl Bus {\n\n pub fn new() -> Bus {\n\n Bus {\n\n hardware: HashMap<Word, Hardware>,\n\n }\n\n }\n\n}", "file_path": "src/system/bus.rs", "rank": 29, "score": 15.572777593812045 }, { "content": " }\n\n\n\n #[test]\n\n pub fn test_read_write() {\n\n // Create our Memory and external buffers\n\n let mut cpu = VCPU16::new();\n\n\n\n let address: u16 = 0x1111;\n\n let oldvalue: u16 = 0x0000;\n\n let newvalue: u16 = 0x2222;\n\n\n\n // Assert Memory at address equals oldvalue\n\n assert_eq!(oldvalue, cpu.get(address));\n\n\n\n // Set Memory at address to newvalue\n\n ram.set(address, newvalue);\n\n\n\n // Assert Memory at address equals newvalue\n\n assert_eq!(newvalue, ram.get(address));\n\n }\n\n}", "file_path": "src/system/cpu.rs", "rank": 30, "score": 15.5520740351105 }, { "content": "use std::error::Error;\n\nuse std::result::Result;\n\n\n\npub struct CircularBuffer<T> {\n\n buffer: [T; 256],\n\n write: usize,\n\n read: usize,\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 31, "score": 15.226672356235726 }, { "content": " 
}\n\n ///\n\n /// Set a single Cell of Memory at address\n\n ///\n\n pub fn set(&mut self, address: u16, value: u16) {\n\n self.mem.buffer[address as usize] = value\n\n }\n\n ///\n\n /// Get a single Cell of Memory at address\n\n ///\n\n pub fn get(&self, address: u16) -> u16 {\n\n self.mem.buffer[address as usize]\n\n }\n\n}\n\n\n\nimpl fmt::Display for Memory {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result<(), Error> {\n\n write!(f, \" 0 1 2 3 4 5 6 7 8 9 A B C D E F\");\n\n for o in 0..65536.step_by(16) {\n\n write!(f, \"0x{:04X} {:04X} {:04X} {:04X} {:04X} {:04X} {:04X} {:04X} {:04X} {:04X} {:04X} {:04X} {:04X} {:04X} {:04X} {:04X}\", o,\n", "file_path": "src/system/mem.rs", "rank": 32, "score": 15.13929766038924 }, { "content": " write!(f, \" 0x{:04X}\", i)?;\n\n }\n\n for i in &self.interrupts[..self.write as usize] {\n\n write!(f, \" 0x{:04X}\", i)?;\n\n }\n\n write!(f, \" ]\")?;\n\n } else {\n\n write!(f, \"empty\")?;\n\n }\n\n write!(f, \" ] )\")\n\n }\n\n}\n\n\n\nimpl Queue {\n\n pub fn new() -> Queue {\n\n Queue {\n\n interrupts: [0; 256],\n\n write: 0,\n\n read: 0,\n\n }\n", "file_path": "src/system2/queue.rs", "rank": 33, "score": 15.054887178437188 }, { "content": "\n\nimpl IRQ {\n\n pub fn new() -> IRQ {\n\n IRQ {\n\n interrupts: [[0; 256]; 65536\n\n enabled: false,\n\n head: 0,\n\n tail: 0,\n\n }\n\n }\n\n pub fn enqueue(&mut self, id: u16, message: Word) {\n\n if enabled {\n\n if\n\n }\n\n }\n\n}", "file_path": "src/system/pic.rs", "rank": 34, "score": 14.550795712716193 }, { "content": " /// Interrupt Request Queue\n\n irq: Queue,\n\n}\n\n\n\nimpl System {\n\n /// Create a new System\n\n pub fn new() -> System {\n\n System {\n\n registers: Registers::new(),\n\n hardware: Vec::new(),\n\n memory: Memory::new(),\n\n clock: Clock::new(),\n\n state: State::Idle,\n\n irq: Queue::new(),\n\n }\n\n }\n\n /// Step the System forward one clock cycle\n\n pub fn step(&mut self) -> Result<(), SystemError> {\n\n // Advance the clock\n\n self.clock.step()?;\n", "file_path": "src/system2/system.rs", "rank": 35, "score": 13.832212440437104 }, { "content": "mod registers;\n\nmod decoder;\n\nmod system;\n\n\n\npub mod hardware;\n\npub use self::bus::Bus;\n\npub use self::clock::Clock;\n\npub use self::error::SystemError;\n\npub use self::memory::Memory;\n\npub use self::queue::Queue;\n\npub use self::registers::Registers;\n\npub use self::decoder::State;\n\npub use self::system::System;\n\n\n\n/// System Word\n\npub type Word = u16;\n", "file_path": "src/system2/mod.rs", "rank": 36, "score": 13.695068354103263 }, { "content": " /// Register Y\n\n pub y: Word,\n\n /// Register Z\n\n pub z: Word,\n\n /// Register I\n\n pub i: Word,\n\n /// Register J\n\n pub j: Word,\n\n}\n\n\n\nimpl Registers {\n\n pub fn new() -> Registers {\n\n Registers {\n\n pc: 0,\n\n sp: 0,\n\n ps: 0,\n\n ia: 0,\n\n sf: 0,\n\n a: 0,\n\n b: 0,\n", "file_path": "src/system2/registers.rs", "rank": 37, "score": 12.940296522626362 }, { "content": "use std::io::{Read, Write};\n\nuse std::mem;\n\nuse std::slice;\n\n\n\n/// VCPU16 Context\n\npub struct VCPU16<'sys> {\n\n sys: &'sys System,\n\n irq: IRQ,\n\n pc: u16,\n\n sp: u16,\n\n ps: u16,\n\n a: u16,\n\n b: u16,\n\n c: u16,\n\n x: u16,\n\n y: u16,\n\n z: u16,\n\n i: u16,\n\n j: u16,\n\n}\n\n\n\n/// Memory Array\n", "file_path": "src/system/cpu.rs", "rank": 38, "score": 12.536074558079703 }, { "content": " self.read = self.read.wrapping_add(1);\n\n Ok(value)\n\n }\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n pub fn test_pic() 
{\n\n let mut queue = Queue::new();\n\n\n\n assert!(queue.is_empty());\n\n assert!(queue.is_disabled());\n\n for input in 0..512u16 {\n\n queue.enqueue(input).unwrap();\n\n queue.enable();\n\n assert!(queue.is_enabled());\n", "file_path": "src/system2/queue.rs", "rank": 39, "score": 12.282365221208877 }, { "content": "pub struct Clock {\n\n halted: bool,\n\n cycles: u64,\n\n}\n\n\n\nimpl Clock {\n\n pub fn new() {\n\n Clock {\n\n halted: false,\n\n cycles: 0,\n\n }\n\n }\n\n pub fn halted(&self) -> bool { self.halted }\n\n pub fn cycles(&self) -> u64 { self.cycles }\n\n pub fn step(&mut self) { self.cycles += 1 }\n\n}", "file_path": "src/system/clock.rs", "rank": 40, "score": 11.589380934668998 }, { "content": " }\n\n}\n\n\n\nimpl fmt::Debug for Clock {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n writeln!(f, \"Clock Halted: {} Cycles: {}\", self.halted, self.cycles)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::Clock;\n\n\n\n #[test]\n\n pub fn test_clock() {\n\n let mut clk = Clock::new();\n\n\n\n for expected in 0..200u64 {\n\n assert_eq!(expected, clk.cycles());\n\n clk.step().unwrap();\n\n }\n\n clk.halt();\n\n assert!(clk.halted())\n\n }\n\n}", "file_path": "src/system2/clock.rs", "rank": 41, "score": 11.488585553067422 }, { "content": " self.buffer[o + 0x0], self.buffer[o + 0x1], self.buffer[o + 0x2], self.buffer[o + 0x3],\n\n self.buffer[o + 0x4], self.buffer[o + 0x5], self.buffer[o + 0x6], self.buffer[o + 0x7],\n\n self.buffer[o + 0x8], self.buffer[o + 0x9], self.buffer[o + 0xA], self.buffer[o + 0xB],\n\n self.buffer[o + 0xC], self.buffer[o + 0xD], self.buffer[o + 0xE], self.buffer[o + 0xF]\n\n );\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Debug for Memory {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result<(), Error> {\n\n write!(f, \" 0 1 2 3 4 5 6 7 8 9 A B C D E F\");\n\n for o in 0..65536.step_by(16) {\n\n write!(f, \"0x{:04X} {:04X} {:04X} {:04X} {:04X} {:04X} {:04X} {:04X} {:04X} {:04X} {:04X} {:04X} {:04X} {:04X} {:04X} {:04X}\", o,\n\n self.buffer[o + 0x0], self.buffer[o + 0x1], self.buffer[o + 0x2], self.buffer[o + 0x3],\n\n self.buffer[o + 0x4], self.buffer[o + 0x5], self.buffer[o + 0x6], self.buffer[o + 0x7],\n\n self.buffer[o + 0x8], self.buffer[o + 0x9], self.buffer[o + 0xA], self.buffer[o + 0xB],\n\n self.buffer[o + 0xC], self.buffer[o + 0xD], self.buffer[o + 0xE], self.buffer[o + 0xF]\n\n );\n\n }\n", "file_path": "src/system/mem.rs", "rank": 42, "score": 11.04448971046951 }, { "content": " assert_eq!(&input[..], &output[..]);\n\n }\n\n\n\n #[test]\n\n pub fn test_set_get() {\n\n // Create our Memory and external buffers\n\n let mut cpu = VCPU16::new();\n\n\n\n let address: u16 = 0xFFFF;\n\n let oldvalue: u16 = 0x0000;\n\n let newvalue: u16 = 0x2222;\n\n\n\n // Assert Memory at address equals oldvalue\n\n assert_eq!(oldvalue, cpu.get_mem(address));\n\n\n\n // Set Memory at address to newvalue\n\n cpu.set_mem(address, newvalue);\n\n\n\n // Assert Memory at address equals newvalue\n\n assert_eq!(newvalue, cpu.get_mem(address));\n", "file_path": "src/system/cpu.rs", "rank": 43, "score": 11.015057951271835 }, { "content": "use super::system::Word;\n\n\n\n/// Programmable Interrupt Controller\n\npub struct PIC {\n\n /// Map of Queued Hardware\n\n interrupts: HashMap<u16, IRQ>,\n\n /// Is Interrupt Queueing Enabled\n\n enabled: bool,\n\n /// Current Queue Head\n\n head: u8,\n\n /// Current Queue Tail\n\n tail: u8,\n\n}\n\n\n\npub struct IRQ {\n\n interrupts: [u16;256],\n\n enabled: bool,\n\n head: usize,\n\n tail: usize,\n\n}\n", 
"file_path": "src/system/pic.rs", "rank": 44, "score": 10.464678903143323 }, { "content": " }\n\n ///\n\n /// Set a single Cell of Memory at address\n\n ///\n\n pub fn set_mem(&mut self, address: u16, value: u16) {\n\n self.mem.buffer[address as usize] = value\n\n }\n\n ///\n\n /// Get a single Cell of Memory at address\n\n ///\n\n pub fn get_mem(&self, address: u16) -> u16 {\n\n self.mem.buffer[address as usize]\n\n }\n\n /// Get value of the Program Counter (PC) Register\n\n pub fn get_pc(&self) -> u16 { self.pc }\n\n /// Get value of the Stack Pointer (SP) Register\n\n pub fn get_sp(&self) -> u16 { self.sp }\n\n /// Get value of the Program Status (PS) Register\n\n pub fn get_ps(&self) -> u16 { self.ps }\n\n /// Get value of the Interrupt Address (IA) Register\n", "file_path": "src/system/cpu.rs", "rank": 45, "score": 10.058252582959954 }, { "content": " match self.state {\n\n State::Idle => {\n\n /// Fetch\n\n let base_address = self.registers.pc;\n\n let opcode_word = self.memory.get(base_address);\n\n self.registers.pc += 1;\n\n },\n\n };\n\n // Fetch\n\n let base = self.registers.pc;\n\n let word = self.memory.get(base);\n\n // Decode\n\n\n\n\n\n // Iterate through Hardware\n\n for device in &self.hardware {\n\n //registers: &Registers, memory: &Memory, clock: &Clock, pic: &PIC\n\n device.update(&self.clock, &mut self.registers, &mut self.memory, &mut PIC)?;\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/system2/system.rs", "rank": 46, "score": 9.862631551851774 }, { "content": "use std::collections::HashMap;\n\n\n\npub struct Instruction {\n\n address: usize,\n\n line: usize,\n\n opcode: ,\n\n u_argument: ,\n\n m_argument: ,\n\n comment: Option<String>,\n\n}\n\n\n\npub struct Context {\n\n source: String,\n\n output: Vec<u16>,\n\n labels: HashMap<String, usize>,\n\n}\n\n\n\nimpl Context {\n\n pub fn assemble(source: String) -> Vec<u16> {\n\n let lines: Vec<&str> = source.split('\\n').collect();\n\n let labels: HashMap<String, usize> = HashMap::new();\n\n let output: Vec<u16> = Vec::new();\n\n\n\n for line in lines {\n\n\n\n }\n\n }\n\n}", "file_path": "src/assembler.rs", "rank": 47, "score": 9.752960797965759 }, { "content": " interrupts: Vec::new(),\n\n enabled: bool,\n\n address: u16,\n\n },\n\n pc: 0,\n\n sp: 0,\n\n ps: 0,\n\n a: 0,\n\n b: 0,\n\n c: 0,\n\n x: 0,\n\n y: 0,\n\n z: 0,\n\n i: 0,\n\n j: 0,\n\n }\n\n }\n\n ///\n\n /// Load Memory from Reader\n\n ///\n", "file_path": "src/system/cpu.rs", "rank": 48, "score": 9.143469655149325 }, { "content": "#[derive(Clone, Copy)]\n\npub struct Clock {\n\n halted: bool,\n\n cycles: u64,\n\n}\n\n\n\nimpl Clock {\n\n /// Create a new Clock\n\n pub fn new() -> Clock {\n\n Clock {\n\n halted: false,\n\n cycles: 0,\n\n }\n\n }\n\n /// Is Clock Still Active\n\n pub fn halted(&self) -> bool {\n\n self.halted\n\n }\n\n /// Current Clock Cycles Since Startup\n\n pub fn cycles(&self) -> u64 {\n", "file_path": "src/system2/clock.rs", "rank": 49, "score": 9.074111966011596 }, { "content": "//\n\n// Copyright 2017 Hans W. 
Uhlig.\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n//\n\n\n\nuse super::Clock;\n\nuse super::Memory;\n\nuse super::PIC;\n\nuse super::Registers;\n\nuse super::SSystemError;\n\nuse super::Word;\n\n\n\n/// Hardware Trait\n", "file_path": "src/system2/hardware/mod.rs", "rank": 50, "score": 8.942962621846782 }, { "content": "use super::Queue;\n\nuse super::PIC;\n\nuse super::Registers;\n\nuse super::State;\n\nuse super::SystemError;\n\nuse std::fmt;\n\n\n\n/// A System is a container for all Hardware.\n\n/// A Primary CPU always exists in Hardware Slot 0.\n\npub struct System {\n\n /// System Registers\n\n registers: Registers,\n\n /// System Hardware\n\n hardware: Vec<Box<Hardware>>,\n\n /// System Memory\n\n memory: Memory,\n\n /// System Clock\n\n clock: Clock,\n\n /// System State\n\n state: State,\n", "file_path": "src/system2/system.rs", "rank": 51, "score": 8.926917504484468 }, { "content": " let u_arg = upper(self, word);\n\n let m_arg = middle(self, word);\n\n match code {\n\n 0x01 => {\n\n // SET m, u\n\n // Sets m to u\n\n self.clk.busy += 1;\n\n let u_val = read_arg(self, u_arg);\n\n write_arg(self, m_arg, u_val);\n\n }\n\n 0x02 => {\n\n // ADD m, u\n\n // Sets m to m + u, sets PS to 0x0001 if there's an overflow, 0x0000 otherwise\n\n self.clk.busy += 2;\n\n let m_val = read_arg(self, m_arg);\n\n let u_val = read_arg(self, u_arg);\n\n let (result, overflow) = m_val.overflowing_add(u_val);\n\n write_arg(self, m_arg, result);\n\n if overflow {\n\n self.ps = 0x0001;\n", "file_path": "src/system/cpu.rs", "rank": 52, "score": 8.799511622220699 }, { "content": " }\n\n }\n\n /// Was CPU Interrupted\n\n fn interrupted(&mut self) -> bool {\n\n\n\n }\n\n /// Execute Next Instruction\n\n fn execute(&mut self) {\n\n let address = self.pc;\n\n let word = self.mem.buffer[address as usize] as u16;\n\n fn next_pc(cpu: &mut VCPU16) -> u16 {\n\n let pc = cpu.pc;\n\n cpu.clk.busy += 1;\n\n cpu.pc += 1;\n\n cpu.mem.buffer[pc as usize] as u16\n\n }\n\n fn push_sp(cpu: &mut VCPU16) -> u16 {\n\n cpu.sp -= 1;\n\n cpu.sp\n\n }\n", "file_path": "src/system/cpu.rs", "rank": 53, "score": 8.557871401565706 }, { "content": " write!(f, \"Registers ( PC: {:04X}, SP: {:04X}, PS: {:04X}, IA: {:04X}, A: {:04X}, B: {:04X}, \\\n\n C: {:04X}, X: {:04X}, Y: {:04X}, Z: {:04X}, I: {:04X}, J: {:04X} )\", self.pc, self.sp,\n\n self.ps, self.ia, self.a, self.b, self.c, self.x, self.y, self.z, self.i, self.j)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::Registers;\n\n\n\n #[test]\n\n pub fn test_display() {\n\n println!();\n\n let reg = Registers::new();\n\n\n\n println!(\"{}\", reg);\n\n }\n\n}\n", "file_path": "src/system2/registers.rs", "rank": 54, "score": 8.543330332967978 }, { "content": " self.cycles\n\n }\n\n /// Advance Clock if not halted\n\n pub fn step(&mut self) -> Result<u64, SystemError> {\n\n if self.halted {\n\n Err(SystemError::ClockHalted)\n\n } else {\n\n self.cycles += 1;\n\n Ok(self.cycles)\n\n }\n\n }\n\n /// Halt Clock\n\n pub fn halt(&mut 
self) {\n\n self.halted = true;\n\n }\n\n}\n\n\n\nimpl fmt::Display for Clock {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n writeln!(f, \"Clock Halted: {} Cycles: {}\", self.halted, self.cycles)\n", "file_path": "src/system2/clock.rs", "rank": 55, "score": 8.386950043392696 }, { "content": " 0x1D => { Argument::Register(Register::PS) }\n\n // Value of Memory at Next Word\n\n 0x1E => { Argument::Memory(next_pc(cpu)) }\n\n // Next Word as Literal\n\n 0x1F => { Argument::Literal(next_pc(cpu)) }\n\n _ => { Argument::Literal(0x0000) }\n\n }\n\n };\n\n fn skip_next(cpu: &mut VCPU16) {\n\n let word = cpu.pc;\n\n if (word & 0x3FF) == 0 {\n\n cpu.clk.busy += 1;\n\n cpu.pc += 1;\n\n } else if (word & 0x001F) == 0 {\n\n cpu.clk.busy += 1;\n\n cpu.pc += 1;\n\n match (word & 0xFC00) >> 10 {\n\n 0x10 => { cpu.pc += 1 }\n\n 0x11 => { cpu.pc += 1 }\n\n 0x12 => { cpu.pc += 1 }\n", "file_path": "src/system/cpu.rs", "rank": 56, "score": 8.106556157202174 }, { "content": "#[derive(Clone, Copy)]\n\npub struct Registers {\n\n /// Stack Pointer\n\n pub sp: Word,\n\n /// Program Counter\n\n pub pc: Word,\n\n /// Program Status\n\n pub ps: Word,\n\n /// Interrupt Address\n\n pub ia: Word,\n\n /// System Flags\n\n pub sf: Word,\n\n /// Register A\n\n pub a: Word,\n\n /// Register B\n\n pub b: Word,\n\n /// Register C\n\n pub c: Word,\n\n /// Register X\n\n pub x: Word,\n", "file_path": "src/system2/registers.rs", "rank": 57, "score": 7.997168576065869 }, { "content": " c: 0,\n\n x: 0,\n\n y: 0,\n\n z: 0,\n\n i: 0,\n\n j: 0,\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for Registers {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"PC: {:04X}, SP: {:04X}, PS: {:04X}, IA: {:04X}, A: {:04X}, B: {:04X}, C: {:04X}, \\\n\n X: {:04X}, Y: {:04X}, Z: {:04X}, I: {:04X}, J: {:04X}\", self.pc, self.sp, self.ps, self.ia,\n\n self.a, self.b, self.c, self.x, self.y, self.z, self.i, self.j)\n\n }\n\n}\n\n\n\nimpl fmt::Debug for Registers {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n", "file_path": "src/system2/registers.rs", "rank": 58, "score": 7.933482231402563 }, { "content": " self.j += 1;\n\n }\n\n 0x1F => {\n\n // STD m, u\n\n // Sets m to u, then decreases I and J by 1\n\n self.clk.busy += 2;\n\n let u_val = read_arg(self, u_arg);\n\n write_arg(self, m_arg, u_val);\n\n self.i -= 1;\n\n self.j -= 1;\n\n }\n\n _ => { /* Error */ }\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::VCPU16;\n", "file_path": "src/system/cpu.rs", "rank": 59, "score": 7.860447251088447 }, { "content": " Register::SP => { cpu.sp = value }\n\n Register::PS => { cpu.ps = value }\n\n }\n\n }\n\n }\n\n }\n\n fn read_arg(cpu: &mut VCPU16, arg: Argument) -> u16 {\n\n match arg {\n\n Argument::Literal(value) => { value }\n\n Argument::Memory(address) => { cpu.mem.buffer[address as usize] }\n\n Argument::Register(reg) => {\n\n match reg {\n\n Register::A => { cpu.a }\n\n Register::B => { cpu.b }\n\n Register::C => { cpu.c }\n\n Register::X => { cpu.x }\n\n Register::Y => { cpu.y }\n\n Register::Z => { cpu.z }\n\n Register::I => { cpu.i }\n\n Register::J => { cpu.j }\n", "file_path": "src/system/cpu.rs", "rank": 60, "score": 7.699821797137702 }, { "content": " let code = (word & 0x03E0) >> 5;\n\n let upper = upper(self, word);\n\n match code {\n\n 0x01 => {\n\n // JSR u\n\n // Pushes the address of the next instruction to the stack, then sets PC to u\n\n self.clk.busy += 3;\n\n self.mem.buffer[push_sp(self) as usize] = self.pc;\n\n self.pc = read_arg(self, upper);\n\n }\n\n 
0x08 => {\n\n // INT u\n\n // Triggers a software interrupt with message u\n\n self.clk.busy += 4;\n\n }\n\n _ => { /* Error */ }\n\n };\n\n } else {\n\n // Process Binary OpCode\n\n let code = (word & 0x001F) >> 0;\n", "file_path": "src/system/cpu.rs", "rank": 61, "score": 7.645470764388167 }, { "content": " fn pop_sp(cpu: &mut VCPU16) -> u16 {\n\n let sp = cpu.sp;\n\n cpu.sp += 1;\n\n sp\n\n }\n\n fn write_arg(cpu: &mut VCPU16, arg: Argument, value: u16) {\n\n match arg {\n\n Argument::Literal(_) => { /* Do nothing */ }\n\n Argument::Memory(address) => { cpu.mem.buffer[address as usize] = value }\n\n Argument::Register(reg) => {\n\n match reg {\n\n Register::A => { cpu.a = value }\n\n Register::B => { cpu.b = value }\n\n Register::C => { cpu.c = value }\n\n Register::X => { cpu.x = value }\n\n Register::Y => { cpu.y = value }\n\n Register::Z => { cpu.z = value }\n\n Register::I => { cpu.i = value }\n\n Register::J => { cpu.j = value }\n\n Register::PC => { cpu.pc = value }\n", "file_path": "src/system/cpu.rs", "rank": 62, "score": 7.580701809178365 }, { "content": " 0x3F => { Argument::Literal(0x001E) }\n\n _ => { Argument::Literal(0x0000) }\n\n }\n\n };\n\n fn middle(cpu: &mut VCPU16, word: u16) -> Argument {\n\n match (word & 0x03E0) >> 5 {\n\n // register\n\n 0x00 => { Argument::Register(Register::A) }\n\n 0x01 => { Argument::Register(Register::B) }\n\n 0x02 => { Argument::Register(Register::C) }\n\n 0x03 => { Argument::Register(Register::X) }\n\n 0x04 => { Argument::Register(Register::Y) }\n\n 0x05 => { Argument::Register(Register::Z) }\n\n 0x06 => { Argument::Register(Register::I) }\n\n 0x07 => { Argument::Register(Register::J) }\n\n // [register]\n\n 0x08 => { Argument::Memory(cpu.a) }\n\n 0x09 => { Argument::Memory(cpu.b) }\n\n 0x0A => { Argument::Memory(cpu.c) }\n\n 0x0B => { Argument::Memory(cpu.x) }\n", "file_path": "src/system/cpu.rs", "rank": 63, "score": 7.355936817681233 }, { "content": "//\n\n// Copyright 2017 Hans W. Uhlig.\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n//\n\n\n\nuse super::Bus;\n\nuse super::Clock;\n\nuse super::Hardware;\n\nuse super::Memory;\n", "file_path": "src/system2/system.rs", "rank": 64, "score": 7.25373359506956 }, { "content": "//\n\n// Copyright 2017 Hans W. 
Uhlig.\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n//\n\n\n\nuse std::fmt;\n\nuse super::SystemError;\n\nuse super::Word;\n\n\n", "file_path": "src/system2/queue.rs", "rank": 65, "score": 7.2071697212854735 }, { "content": "//\n\n// Copyright 2017 Hans W. Uhlig.\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n//\n\n\n\nuse super::Word;\n\nuse std::fmt;\n\n\n\n/// VCPU16 Internal Registers\n", "file_path": "src/system2/registers.rs", "rank": 66, "score": 6.977037466465246 }, { "content": " let output = queue.dequeue().unwrap();\n\n assert_eq!(input, output);\n\n queue.disable();\n\n }\n\n }\n\n\n\n #[test]\n\n pub fn test_fill() {\n\n let mut irq = Queue::new();\n\n\n\n assert!(irq.is_empty());\n\n assert!(irq.is_disabled());\n\n for input in 0..255u16 {\n\n irq.enqueue(input).unwrap();\n\n }\n\n\n\n assert_eq!(IRQError::QueueFull, irq.enqueue(0).unwrap_err());\n\n }\n\n}", "file_path": "src/system2/queue.rs", "rank": 67, "score": 6.672233935461197 }, { "content": " 0x0B => {\n\n // BOR m, u\n\n // Sets m to m | u\n\n self.clk.busy += 1;\n\n let m_val = read_arg(self, m_arg);\n\n let u_val = read_arg(self, u_arg);\n\n let rv = m_val | u_val;\n\n write_arg(self, m_arg, rv);\n\n }\n\n 0x0C => {\n\n // XOR m, u\n\n // Sets m to m ^ u\n\n self.clk.busy += 1;\n\n let m_val = read_arg(self, m_arg);\n\n let u_val = read_arg(self, u_arg);\n\n let rv = m_val ^ u_val;\n\n write_arg(self, m_arg, rv);\n\n }\n\n 0x0D => {\n\n // LLS m, u\n", "file_path": "src/system/cpu.rs", "rank": 68, "score": 6.4238478669568115 }, { "content": " code: u16,\n\n time: u8,\n\n}\n\n\n\npub enum Instruction {\n\n Label {\n\n address: u16,\n\n name: String,\n\n },\n\n Nullary {\n\n address: u16,\n\n time: u8,\n\n size: u8,\n\n op: &'static OpCode,\n\n },\n\n Unary {\n\n addr: u16,\n\n time: u8,\n\n size: u8,\n\n arg0: Argument,\n", "file_path": "src/system/isa.rs", "rank": 69, "score": 6.418083695848217 }, { "content": " }\n\n 0x09 => {\n\n /* MDI m, u */\n\n // Sets m to m % u. 
If u==0, sets m to 0 instead.\n\n // (treats m, u as signed [MDI -7, 16 == -7])\n\n self.clk.busy += 3;\n\n let m_val = read_arg(self, m_arg) as i16;\n\n let u_val = read_arg(self, u_arg) as i16;\n\n let rv = if u_val != 0 { m_val % u_val } else { 0 };\n\n write_arg(self, m_arg, rv as u16);\n\n }\n\n 0x0A => {\n\n // AND m, u\n\n // Sets m to m & u\n\n self.clk.busy += 1;\n\n let m_val = read_arg(self, m_arg);\n\n let u_val = read_arg(self, u_arg);\n\n let rv = m_val & u_val;\n\n write_arg(self, m_arg, rv);\n\n }\n", "file_path": "src/system/cpu.rs", "rank": 70, "score": 6.337903849367288 }, { "content": " Register::PC => { cpu.pc }\n\n Register::SP => { cpu.sp }\n\n Register::PS => { cpu.ps }\n\n }\n\n }\n\n }\n\n }\n\n fn upper(cpu: &mut VCPU16, word: u16) -> Argument {\n\n match (word & 0xFC00) >> 10 {\n\n // register\n\n 0x00 => { Argument::Register(Register::A) }\n\n 0x01 => { Argument::Register(Register::B) }\n\n 0x02 => { Argument::Register(Register::C) }\n\n 0x03 => { Argument::Register(Register::X) }\n\n 0x04 => { Argument::Register(Register::Y) }\n\n 0x05 => { Argument::Register(Register::Z) }\n\n 0x06 => { Argument::Register(Register::I) }\n\n 0x07 => { Argument::Register(Register::J) }\n\n // [register]\n\n 0x08 => { Argument::Memory(cpu.a) }\n", "file_path": "src/system/cpu.rs", "rank": 71, "score": 6.278773148511564 }, { "content": " let m_val = read_arg(self, m_arg) as i32;\n\n let u_val = read_arg(self, u_arg) as i32;\n\n let (rv, ps) = if u_val != 0 {\n\n let rv = ((m_val / u_val) & 0xFFFF) as u16;\n\n let ps = (((m_val << 16) / u_val) & 0xFFFF) as u16;\n\n (rv, ps)\n\n } else {\n\n (0, 0)\n\n };\n\n write_arg(self, m_arg, rv);\n\n self.ps = ps;\n\n }\n\n 0x08 => {\n\n // MOD m, u\n\n // Sets m to m % u. if u==0, sets m to 0 instead.\n\n self.clk.busy += 3;\n\n let m_val = read_arg(self, m_arg);\n\n let u_val = read_arg(self, u_arg);\n\n let rv = if u_val != 0 { m_val % u_val } else { 0 };\n\n write_arg(self, m_arg, rv);\n", "file_path": "src/system/cpu.rs", "rank": 72, "score": 6.173118056656134 }, { "content": "## Community Adopted Specifications\n\n\n\nThe floppy drive specification was released approximately six months after the release of the initial specifications. Before this time a fan work specification for the [HMD2043 floppy disk drive](https://gist.github.com/DanielKeep/2495578) was released. A number of emulators continue to use this rather than the official M35FD.\n\n\n\nAlthough the specification of the LEM1802 says it must be initialised before use, most emulators start with the device already initialised with video memory mapped to 0x8000.\n\n\n\nAlthough there is a specification for a keyboard device, Notch's alpha releases of 0x10c which included a functional DCPU-16 system did not follow this. Instead the keyboard is interfaced through a 16 letter ring buffer mapped at 0x9000 (non configurable). An address is 0 before a key is pressed, and should be written as 0 again after being read so it can be checked later. Most emulators follow this functionality.\n\n\n\nThe official specifications don't go too in depth into the format of the assembly language, and there has been no official assembler released. The code in the specification is similar to NASM and community assemblers are also based on this. Most support labels, and some add macros. The language has come to be known as DASM (DCPU-16 Assembly) and typically has the extension `dasm` or `dasm16`. 
Object code has the extension `bin`, `dcpu` or `dcpu16`.\n\n\n", "file_path": "docs/dcpu/README.md", "rank": 73, "score": 5.8854081172924175 }, { "content": " // Sets m to m << u, sets PS to ((m<<u)>>16)&0xFFFF (logical left shift)\n\n self.clk.busy += 1;\n\n let m_val = read_arg(self, m_arg);\n\n let u_val = read_arg(self, u_arg);\n\n let rv = m_val << u_val;\n\n let ps = ((((m_val as u32) << (u_val as u32)) >> 16) & 0xFFFF) as u16;\n\n write_arg(self, m_arg, rv);\n\n self.ps = ps;\n\n }\n\n 0x0E => {\n\n // LRS m, u\n\n // Sets m to m >> u, sets PS to ((m<<16)>>u)&0xFFFF (logical right shift)\n\n self.clk.busy += 1;\n\n let m_val = read_arg(self, m_arg);\n\n let u_val = read_arg(self, u_arg);\n\n let rv = m_val >> u_val;\n\n let ps = ((((m_val as u32) << 16) >> u_val) & 0xFFFF) as u16;\n\n write_arg(self, m_arg, rv);\n\n self.ps = ps;\n\n }\n", "file_path": "src/system/cpu.rs", "rank": 74, "score": 5.821626325778911 }, { "content": "//\n\n// Copyright 2017 Hans W. Uhlig.\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n//\n\n\n\nmod clock;\n\nmod error;\n\nmod memory;\n\nmod queue;\n", "file_path": "src/system2/mod.rs", "rank": 75, "score": 5.733032648715348 }, { "content": " 0x1B => {\n\n // SBX m, u\n\n // Sets m to m - u + PS, sets PS to 0xFFFF if there is an underflow, 0x0000 otherwise\n\n // TODO: Figure out Better Logic Here\n\n self.clk.busy += 3;\n\n let m_val = read_arg(self, m_arg) as u32;\n\n let u_val = read_arg(self, u_arg) as u32;\n\n let result = m_val - u_val + self.ps as u32;\n\n let rv = (result % 65_536) as u16;\n\n let ps = if result / 65_536 > 0 { 0xFFFF } else { 0x0000 };\n\n write_arg(self, m_arg, rv);\n\n self.ps = ps;\n\n }\n\n 0x1E => {\n\n // STI m, u\n\n // Sets m to u, then increases I and J by 1\n\n self.clk.busy += 2;\n\n let u_val = read_arg(self, u_arg);\n\n write_arg(self, m_arg, u_val);\n\n self.i += 1;\n", "file_path": "src/system/cpu.rs", "rank": 76, "score": 5.72325011824122 }, { "content": "//\n\n// Copyright 2017 Hans W. Uhlig.\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n//\n\n\n\n/// CPU State\n\npub enum State {\n\n /// Step 1\n\n FetchBase {\n", "file_path": "src/system2/decoder.rs", "rank": 77, "score": 5.692439813059171 }, { "content": "//\n\n// Copyright 2017 Hans W. 
Uhlig.\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n//\n\n\n\n#[cfg(test)]\n\nextern crate rand;\n\n\n\npub mod system2;", "file_path": "src/lib.rs", "rank": 78, "score": 5.649084969916558 }, { "content": " pub fn get_ia(&self) -> u16 { self.irq.address }\n\n /// Get value of Register A\n\n pub fn get_a(&self) -> u16 { self.a }\n\n /// Get value of Register B\n\n pub fn get_b(&self) -> u16 { self.b }\n\n /// Get value of Register C\n\n pub fn get_c(&self) -> u16 { self.c }\n\n /// Get value of Register X\n\n pub fn get_x(&self) -> u16 { self.x }\n\n /// Get value of Register Y\n\n pub fn get_y(&self) -> u16 { self.y }\n\n /// Get value of Register Z\n\n pub fn get_z(&self) -> u16 { self.z }\n\n /// Get value of Register I\n\n pub fn get_i(&self) -> u16 { self.i }\n\n /// Get value of Register J\n\n pub fn get_j(&self) -> u16 { self.j }\n\n /// Enqueue Interrupt\n\n pub fn interrupt(&mut self, message: u16) {\n\n if self.ia != 0 {\n", "file_path": "src/system/cpu.rs", "rank": 79, "score": 5.507357661484521 }, { "content": "//\n\n// Copyright 2017 Hans W. Uhlig.\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n//\n\n\n\n/// Errors thrown by the System\n\n#[derive(Clone, Copy, Eq, PartialEq)]\n\npub enum SystemError {\n\n /// Clock has Stopped\n", "file_path": "src/system2/error.rs", "rank": 80, "score": 5.4418533553756685 }, { "content": " } else {\n\n self.ps = 0x0000;\n\n }\n\n }\n\n 0x03 => {\n\n // SUB m, u\n\n // Sets m to m - u, sets PS to 0xFFFF if there's an underflow, 0x0000 otherwise\n\n self.clk.busy += 2;\n\n let m_val = read_arg(self, m_arg);\n\n let u_val = read_arg(self, u_arg);\n\n let (result, overflow) = m_val.overflowing_sub(u_val);\n\n write_arg(self, m_arg, result);\n\n self.ps = if overflow { 0xFFFF } else { 0x0000 }\n\n }\n\n 0x04 => {\n\n // MUL m, u\n\n // Sets m to m * u, sets PS to ((m * u)>>16) & 0xFFFF) (treats m, u as unsigned)\n\n self.clk.busy += 2;\n\n let m_val = read_arg(self, m_arg) as u32;\n\n let u_val = read_arg(self, u_arg) as u32;\n", "file_path": "src/system/cpu.rs", "rank": 81, "score": 5.387886120674086 }, { "content": " let result = m_val * u_val;\n\n let ps = ((result & 0xFFFF0000) >> 16) as u16;\n\n let rv = ((result & 0x0000FFFF) >> 0) as u16;\n\n write_arg(self, m_arg, rv);\n\n self.ps = ps;\n\n }\n\n 0x05 => {\n\n // MLI m, u\n\n // Sets m to (m * u), sets PS to ((m*u)>>16) & 0xFFFF) (treats m, u as signed)\n\n self.clk.busy += 2;\n\n let m_val = read_arg(self, 
m_arg) as i32;\n\n let u_val = read_arg(self, u_arg) as i32;\n\n let result = m_val * u_val;\n\n let ps = ((result as u32 & 0xFFFF0000) >> 16) as u16;\n\n let rv = ((result as u32 & 0x0000FFFF) >> 0) as u16;\n\n write_arg(self, m_arg, rv);\n\n self.ps = ps;\n\n }\n\n 0x06 => {\n\n // DIV m, u\n", "file_path": "src/system/cpu.rs", "rank": 82, "score": 5.32453869234908 }, { "content": " 0x0F => {\n\n // ARS m, u\n\n // Sets m to m >>> u, sets PS to ((m<<16)>>>u)&0xFFFF (arithmetic shift) (treats m as signed)\n\n self.clk.busy += 1;\n\n let m_val = read_arg(self, m_arg) as i16;\n\n let u_val = read_arg(self, u_arg) as u16;\n\n let rv = m_val >> u_val; // i16 >>> u16\n\n let ps = ((((m_val as i32) << 16) >> u_val) & 0xFFFF) as u16;\n\n write_arg(self, m_arg, rv as u16);\n\n self.ps = ps;\n\n }\n\n 0x10 => {\n\n // IFB m, u\n\n // Performs next instruction only if (m & u) != 0\n\n self.clk.busy += 2;\n\n let m_val = read_arg(self, m_arg);\n\n let u_val = read_arg(self, u_arg);\n\n if m_val & u_val != 0 {\n\n self.execute();\n\n } else {\n", "file_path": "src/system/cpu.rs", "rank": 83, "score": 5.282597738740157 }, { "content": "/// is zero, interrupts will be triggered as normal again\n\npub const IAQ: OpCode = OpCode { name: \"RFI\", code: 0x0C, time: 2 };\n\n\n\n/// Mask: 0x03FF, Value: 0x10, Time: 2, Name: HWN, Type: Unary\n\n/// Description: Sets u to number of connected hardware devices\n\npub const HWN: OpCode = OpCode { name: \"HWN\", code: 0x10, time: 2 };\n\n\n\n/// Mask: 0xFFFF, Value: 0x10, Time: 4, Name: HWQ, Type: Unary\n\n/// Description: Sets X, Y, Z registers to information about hardware at port u\n\n/// * X is a 16 bit word identifying the manufacturer id\n\n/// * Y is a 16 bit word identifying the hardware id\n\n/// * Z is a 16 bit word identifying the hardware version\n\npub const HWQ: OpCode = OpCode { name: \"HWQ\", code: 0x11, time: 4 };\n\n\n\n/// Mask: 0x03FF, Value: 0x12, Time: 4, Name: HWI, Type: Unary\n\n/// Description: Sends an interrupt to hardware at port u\n\npub const HWI: OpCode = OpCode { name: \"HWN\", code: 0x12, time: 4 };\n\n\n\n//--------------------------------------------------------------------------------------------------\n\n// Binary Instructions\n", "file_path": "src/system/isa.rs", "rank": 84, "score": 5.226525147334428 }, { "content": " op: &'static OpCode,\n\n },\n\n Binary {\n\n addr: u16,\n\n time: u8,\n\n size: u8,\n\n arg0: Argument,\n\n arg1: Argument,\n\n op: &'static OpCode,\n\n },\n\n}\n\n\n\n//--------------------------------------------------------------------------------------------------\n\n// Nullary Instructions\n\n//--------------------------------------------------------------------------------------------------\n\n\n\n/// Mask: 0xFFFF, Value: 0x00, Time: 1, Name: NOP, Type: Nullary\n\n/// Description: No Operation\n\npub const NOP: OpCode = OpCode { name: \"NOP\", code: 0x00, time: 1 };\n\n\n", "file_path": "src/system/isa.rs", "rank": 85, "score": 5.201542645773273 }, { "content": " let u_val = read_arg(self, u_arg) as i16;\n\n if m_val < u_val {\n\n self.execute();\n\n } else {\n\n skip_next(self);\n\n }\n\n }\n\n 0x1A => {\n\n // ADX m, u\n\n // Sets m to m + u + PS, sets PS to 0x0001 if there is an overflow, 0x0000 otherwise\n\n // TODO: Figure out Better Logic Here\n\n self.clk.busy += 3;\n\n let m_val = read_arg(self, m_arg) as u32;\n\n let u_val = read_arg(self, u_arg) as u32;\n\n let result = m_val + u_val + self.ps as u32;\n\n let rv = (result % 65_536) as u16;\n\n let ps = if result / 65_536 > 0 { 0x0001 } else { 
0x0000 };\n\n write_arg(self, m_arg, rv);\n\n self.ps = ps;\n\n }\n", "file_path": "src/system/cpu.rs", "rank": 86, "score": 5.157291857534224 }, { "content": "//\n\n// Copyright 2017 Hans W. Uhlig.\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n//\n\n\n\nuse std::fmt;\n\nuse super::SystemError;\n\n\n\n/// System Clock\n", "file_path": "src/system2/clock.rs", "rank": 87, "score": 5.02460557107744 }, { "content": " // Specialty Registers\n\n 0x1B => { Argument::Register(Register::SP) }\n\n 0x1C => { Argument::Register(Register::PC) }\n\n 0x1D => { Argument::Register(Register::PS) }\n\n // Value of Memory at Next Word\n\n 0x1E => { Argument::Memory(next_pc(cpu)) }\n\n // Next Word as Literal\n\n 0x1F => { Argument::Literal(next_pc(cpu)) }\n\n // Literal Values\n\n 0x20 => { Argument::Literal(0xFFFF) }\n\n 0x21 => { Argument::Literal(0x0000) }\n\n 0x22 => { Argument::Literal(0x0001) }\n\n 0x23 => { Argument::Literal(0x0002) }\n\n 0x24 => { Argument::Literal(0x0003) }\n\n 0x25 => { Argument::Literal(0x0004) }\n\n 0x26 => { Argument::Literal(0x0005) }\n\n 0x27 => { Argument::Literal(0x0006) }\n\n 0x28 => { Argument::Literal(0x0007) }\n\n 0x29 => { Argument::Literal(0x0008) }\n\n 0x2A => { Argument::Literal(0x0009) }\n", "file_path": "src/system/cpu.rs", "rank": 88, "score": 4.726972500618995 }, { "content": " // Sets m to m / u, sets PS to ((m<<16)/u)&0xFFFF.\n\n // If u==0, sets m and PS to 0 instead. (treats m, u as unsigned)\n\n self.clk.busy += 3;\n\n let m_val = read_arg(self, m_arg) as u32;\n\n let u_val = read_arg(self, u_arg) as u32;\n\n let (rv, ps) = if u_val != 0 {\n\n let rv = ((m_val / u_val) & 0xFFFF) as u16;\n\n let ps = (((m_val << 16) / u_val) & 0xFFFF) as u16;\n\n (rv, ps)\n\n } else {\n\n (0, 0)\n\n };\n\n write_arg(self, m_arg, rv);\n\n self.ps = ps;\n\n }\n\n 0x07 => {\n\n // DVI m, u\n\n // Sets m to m / u, sets PS to ((m<<16)/u)&0xFFFF.\n\n // If u==0, sets m and PS to 0 instead. (treats m, u as signed)\n\n self.clk.busy += 3;\n", "file_path": "src/system/cpu.rs", "rank": 89, "score": 4.711442739354128 }, { "content": "#### 5. Submission of Contributions\n\n\n\nUnless You explicitly state otherwise, any Contribution intentionally submitted\n\nfor inclusion in the Work by You to the Licensor shall be under the terms and\n\nconditions of this License, without any additional terms or conditions.\n\nNotwithstanding the above, nothing herein shall supersede or modify the terms of\n\nany separate license agreement you may have executed with Licensor regarding\n\nsuch Contributions.\n\n\n\n#### 6. Trademarks\n\n\n\nThis License does not grant permission to use the trade names, trademarks,\n\nservice marks, or product names of the Licensor, except as required for\n\nreasonable and customary use in describing the origin of the Work and\n\nreproducing the content of the NOTICE file.\n\n\n\n#### 7. 
Disclaimer of Warranty\n\n\n\nUnless required by applicable law or agreed to in writing, Licensor provides the\n\nWork (and each Contributor provides its Contributions) on an �AS IS� BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied,\n\nincluding, without limitation, any warranties or conditions of TITLE,\n\nNON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are\n\nsolely responsible for determining the appropriateness of using or\n\nredistributing the Work and assume any risks associated with Your exercise of\n\npermissions under this License.\n\n\n\n#### 8. Limitation of Liability\n\n\n\nIn no event and under no legal theory, whether in tort (including negligence),\n\ncontract, or otherwise, unless required by applicable law (such as deliberate\n\nand grossly negligent acts) or agreed to in writing, shall any Contributor be\n\nliable to You for damages, including any direct, indirect, special, incidental,\n\nor consequential damages of any character arising as a result of this License or\n\nout of the use or inability to use the Work (including but not limited to\n\ndamages for loss of goodwill, work stoppage, computer failure or malfunction, or\n\nany and all other commercial damages or losses), even if such Contributor has\n\nbeen advised of the possibility of such damages.\n\n\n", "file_path": "LICENSE.md", "rank": 90, "score": 4.462224214583484 }, { "content": " self.irq.interrupts.push(message);\n\n }\n\n }\n\n /// Step through next clock cycle\n\n pub fn step(&mut self) {\n\n if self.clk.halted {\n\n // CPU Halted, No further actions taken\n\n return;\n\n }\n\n self.clk.cycles += 1;\n\n if self.clk.busy > 0 {\n\n // CPU is busy\n\n self.clk.busy -= 1;\n\n } else if self.irq.enabled && self.irq.interrupts.len() > 0 {\n\n let message = self.irq.interrupts.pop().unwrap();\n\n\n\n // Interrupt Queued\n\n\n\n } else {\n\n self.execute();\n", "file_path": "src/system/cpu.rs", "rank": 91, "score": 4.432319587872611 }, { "content": " }\n\n}\n\n\n\nimpl fmt::Debug for SystemError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n writeln!(f, \"Clock Halted: {} Cycles: {}\", match f {\n\n &SystemError::ClockHalted => \"SystemError::ClockHalted\",\n\n &SystemError::HardwareFailure => \"SystemError::HardwareFailure\",\n\n &SystemError::AddressOverflow => \"SystemError::AddressOverflow\",\n\n &SystemError::InterruptOverflow => \"SystemError::InterruptOverflow\",\n\n &SystemError::InterruptUnderflow => \"SystemError::InterruptUnderflow\",\n\n })\n\n }\n\n}\n", "file_path": "src/system2/error.rs", "rank": 92, "score": 3.999918873764919 }, { "content": " 0x09 => { Argument::Memory(cpu.b) }\n\n 0x0A => { Argument::Memory(cpu.c) }\n\n 0x0B => { Argument::Memory(cpu.x) }\n\n 0x0C => { Argument::Memory(cpu.y) }\n\n 0x0D => { Argument::Memory(cpu.z) }\n\n 0x0E => { Argument::Memory(cpu.i) }\n\n 0x0F => { Argument::Memory(cpu.j) }\n\n // [register + NEXT_PC]\n\n 0x10 => { Argument::Memory(cpu.a + next_pc(cpu)) }\n\n 0x11 => { Argument::Memory(cpu.b + next_pc(cpu)) }\n\n 0x12 => { Argument::Memory(cpu.c + next_pc(cpu)) }\n\n 0x13 => { Argument::Memory(cpu.x + next_pc(cpu)) }\n\n 0x14 => { Argument::Memory(cpu.y + next_pc(cpu)) }\n\n 0x15 => { Argument::Memory(cpu.z + next_pc(cpu)) }\n\n 0x16 => { Argument::Memory(cpu.i + next_pc(cpu)) }\n\n 0x17 => { Argument::Memory(cpu.j + next_pc(cpu)) }\n\n // Stack Operations\n\n 0x18 => { Argument::Memory(pop_sp(cpu)) }\n\n 0x19 => { Argument::Memory(cpu.sp) }\n\n 0x1A => { 
Argument::Memory(cpu.sp + next_pc(cpu)) }\n", "file_path": "src/system/cpu.rs", "rank": 93, "score": 3.973949580127606 }, { "content": " address: Word,\n\n },\n\n Decode {\n\n address: Word,\n\n\n\n },\n\n\n\n Execute {\n\n address: Word,\n\n },\n\n}\n\n\n\n#[derive(Copy, Clone, Debug)]\n", "file_path": "src/system2/decoder.rs", "rank": 94, "score": 3.963187867982239 }, { "content": " 0x0C => { Argument::Memory(cpu.y) }\n\n 0x0D => { Argument::Memory(cpu.z) }\n\n 0x0E => { Argument::Memory(cpu.i) }\n\n 0x0F => { Argument::Memory(cpu.j) }\n\n // [register + NEXT_PC]\n\n 0x10 => { Argument::Memory(cpu.a + next_pc(cpu)) }\n\n 0x11 => { Argument::Memory(cpu.b + next_pc(cpu)) }\n\n 0x12 => { Argument::Memory(cpu.c + next_pc(cpu)) }\n\n 0x13 => { Argument::Memory(cpu.x + next_pc(cpu)) }\n\n 0x14 => { Argument::Memory(cpu.y + next_pc(cpu)) }\n\n 0x15 => { Argument::Memory(cpu.z + next_pc(cpu)) }\n\n 0x16 => { Argument::Memory(cpu.i + next_pc(cpu)) }\n\n 0x17 => { Argument::Memory(cpu.j + next_pc(cpu)) }\n\n // Stack Operations\n\n 0x18 => { Argument::Memory(push_sp(cpu)) }\n\n 0x19 => { Argument::Memory(cpu.sp) }\n\n 0x1A => { Argument::Memory(cpu.sp + next_pc(cpu)) }\n\n // Specialty Registers\n\n 0x1B => { Argument::Register(Register::SP) }\n\n 0x1C => { Argument::Register(Register::PC) }\n", "file_path": "src/system/cpu.rs", "rank": 95, "score": 3.8808135223445257 }, { "content": "//! VCPU16 System\n\npub mod bus;\n\npub mod cpu;\n\npub mod mem;\n\npub mod hardware;\n\n\n", "file_path": "src/system/mod.rs", "rank": 96, "score": 3.77049588516327 }, { "content": " // IFG m, u\n\n // Performs next instruction only if m > u (unsigned)\n\n self.clk.busy += 2;\n\n let m_val = read_arg(self, m_arg);\n\n let u_val = read_arg(self, u_arg);\n\n if m_val > u_val {\n\n self.execute();\n\n } else {\n\n skip_next(self);\n\n }\n\n }\n\n 0x15 => {\n\n // IFA m, u\n\n // Performs next instruction only if m > u (signed)\n\n self.clk.busy += 2;\n\n let m_val = read_arg(self, m_arg) as i16;\n\n let u_val = read_arg(self, u_arg) as i16;\n\n if m_val > u_val {\n\n self.execute();\n\n } else {\n", "file_path": "src/system/cpu.rs", "rank": 97, "score": 3.624609439792537 }, { "content": " let u_val = read_arg(self, u_arg);\n\n if m_val == u_val {\n\n self.execute();\n\n } else {\n\n skip_next(self);\n\n }\n\n }\n\n 0x13 => {\n\n // IFN m, u\n\n // Performs next instruction only if m != u\n\n self.clk.busy += 2;\n\n let m_val = read_arg(self, m_arg);\n\n let u_val = read_arg(self, u_arg);\n\n if m_val != u_val {\n\n self.execute();\n\n } else {\n\n skip_next(self);\n\n }\n\n }\n\n 0x14 => {\n", "file_path": "src/system/cpu.rs", "rank": 98, "score": 3.56653690128527 }, { "content": " skip_next(self);\n\n }\n\n }\n\n 0x11 => {\n\n // IFC m, u\n\n // Performs next instruction only if (m & u) == 0\n\n self.clk.busy += 2;\n\n let m_val = read_arg(self, m_arg);\n\n let u_val = read_arg(self, u_arg);\n\n if m_val & u_val == 0 {\n\n self.execute();\n\n } else {\n\n skip_next(self);\n\n }\n\n }\n\n 0x12 => {\n\n // IFE m, u\n\n // Performs next instruction only if m == u\n\n self.clk.busy += 2;\n\n let m_val = read_arg(self, m_arg);\n", "file_path": "src/system/cpu.rs", "rank": 99, "score": 3.5171224066940185 } ]
Rust
program/programs/main-program-final/src/lib.rs
SOLBROS/SOLHUNT
d36a2779b88d500af285fd3ff3bfdb9b31d16ac8
use anchor_lang::prelude::*;
use anchor_spl::token::{self, SetAuthority, TokenAccount, Transfer};
use spl_token::instruction::AuthorityType;

#[program]
pub mod main_program_final {
    use super::*;

    pub fn initialize(ctx: Context<Initialize>) -> ProgramResult {
        let my_account = &mut ctx.accounts.my_account;
        my_account.data = 0;
        Ok(())
    }

    pub fn initialize_admin_account(
        ctx: Context<InitializeAdminAccount>,
        total_amount: u64
    ) -> ProgramResult {
        ctx.accounts.escrow_account.admin_key = *ctx.accounts.admin.key;
        ctx.accounts
            .escrow_account
            .admin_deposit_token_account = *ctx
            .accounts
            .admin_deposit_token_account
            .to_account_info()
            .key;
        ctx.accounts.escrow_account.total_amount = total_amount;

        let (pda, _bump_seed) = Pubkey::find_program_address(&[b"dungeon"], ctx.program_id);
        token::set_authority(ctx.accounts.into(), AuthorityType::AccountOwner, Some(pda))?;
        Ok(())
    }

    pub fn update(ctx: Context<Update>, data: u64) -> ProgramResult {
        let my_account = &mut ctx.accounts.my_account;
        let amount = 20;
        if my_account.data & (1 << data) == 0 {
            msg!("Collecting token");
            my_account.data = my_account.data | 1 << data;

            let (_pda, bump_seed) = Pubkey::find_program_address(&[b"dungeon"], ctx.program_id);
            let seeds = &[&b"dungeon"[..], &[bump_seed]];

            token::transfer(
                ctx.accounts
                    .into_transfer_to_game_user_context()
                    .with_signer(&[&seeds[..]]),
                amount,
            )?;

            ctx.accounts.escrow_account.total_amount =
                ctx.accounts.escrow_account.total_amount - amount;
        } else {
            msg!("Token already collected");
        }
        Ok(())
    }
}

#[derive(Accounts)]
#[instruction(total_amount: u64)]
pub struct InitializeAdminAccount<'info> {
    #[account(signer)]
    pub admin: AccountInfo<'info>,
    #[account(
        mut,
        constraint = admin_deposit_token_account.amount >= total_amount
    )]
    pub admin_deposit_token_account: CpiAccount<'info, TokenAccount>,
    #[account(init)]
    pub escrow_account: ProgramAccount<'info, EscrowAccount>,
    pub token_program: AccountInfo<'info>,
}

#[account]
pub struct EscrowAccount {
    pub admin_key: Pubkey,
    pub admin_deposit_token_account: Pubkey,
    pub total_amount: u64,
}

impl<'info> From<&mut InitializeAdminAccount<'info>>
    for CpiContext<'_, '_, '_, 'info, SetAuthority<'info>>
{
    fn from(accounts: &mut InitializeAdminAccount<'info>) -> Self {
        let cpi_accounts = SetAuthority {
            account_or_mint: accounts
                .admin_deposit_token_account
                .to_account_info()
                .clone(),
            current_authority: accounts.admin.clone(),
        };
        let cpi_program = accounts.token_program.clone();
        CpiContext::new(cpi_program, cpi_accounts)
    }
}

impl<'info> Update<'info> {
    fn into_transfer_to_game_user_context(&self) -> CpiContext<'_, '_, '_, 'info, Transfer<'info>> {
        let cpi_accounts = Transfer {
            from: self.pda_deposit_token_account.to_account_info().clone(),
            to: self.game_user_receive_token_account.to_account_info().clone(),
            authority: self.pda_account.clone(),
        };
        CpiContext::new(self.token_program.clone(), cpi_accounts)
    }
}

#[derive(Accounts)]
pub struct Initialize<'info> {
    #[account(init)]
    pub my_account: ProgramAccount<'info, MyAccount>,
}

#[account]
pub struct MyAccount {
    pub data: u64,
}

#[derive(Accounts)]
pub struct Update<'info> {
    #[account(mut)]
    pub my_account: ProgramAccount<'info, MyAccount>,
    #[account(signer)]
    pub game_user: AccountInfo<'info>,
    #[account(mut)]
    pub game_user_receive_token_account: CpiAccount<'info, TokenAccount>,
    #[account(mut)]
    pub admin_main_account: AccountInfo<'info>,
    #[account(mut)]
    pub pda_deposit_token_account: CpiAccount<'info, TokenAccount>,
    #[account(
        mut,
        constraint = escrow_account.admin_deposit_token_account == *pda_deposit_token_account.to_account_info().key,
        constraint = escrow_account.admin_key == *admin_main_account.key
    )]
    pub escrow_account: ProgramAccount<'info, EscrowAccount>,
    pub pda_account: AccountInfo<'info>,
    pub token_program: AccountInfo<'info>,
}
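A minimal, self-contained sketch of the bit-flag bookkeeping that `update` above relies on: bit `data` of `my_account.data` records whether token number `data` has already been collected, so a repeat collection falls into the "Token already collected" branch. The helper names here are made up for illustration and are not part of the SOLBROS/SOLHUNT program; as with the on-chain code, the index must stay below 64 or the shift overflows.

// Hypothetical helpers mirroring the bitmask check/set in `update`.
fn is_collected(flags: u64, token_index: u64) -> bool {
    flags & (1 << token_index) != 0
}

fn mark_collected(flags: u64, token_index: u64) -> u64 {
    flags | (1 << token_index)
}

fn main() {
    let mut flags: u64 = 0;
    assert!(!is_collected(flags, 5));

    // First collection flips bit 5.
    flags = mark_collected(flags, 5);
    assert!(is_collected(flags, 5));

    // A second collection attempt changes nothing, matching the no-op branch in `update`.
    assert_eq!(mark_collected(flags, 5), flags);
}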
random
[]
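Both `initialize_admin_account` and `update` above derive the program's escrow authority from the constant seed `b"dungeon"`. The standalone sketch below shows that derivation; it assumes only the `solana_program` crate, and the program id used is the system program's address purely as a placeholder, not the deployed program's real id.

use std::str::FromStr;

use solana_program::pubkey::Pubkey;

fn main() {
    // Placeholder program id; the real value would be the deployed program's address.
    let program_id = Pubkey::from_str("11111111111111111111111111111111").unwrap();

    // Same derivation as in the program: a single constant seed plus a bump found by the runtime.
    let (pda, bump) = Pubkey::find_program_address(&[b"dungeon"], &program_id);
    println!("pda = {}, bump = {}", pda, bump);

    // `update` then rebuilds the signer seeds as &[&b"dungeon"[..], &[bump]] so the PDA
    // can sign the token::transfer CPI.
}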
Rust
src/config.rs
cmsd2/codelauf
69a590b4f0bb86ea4d50b9e229baddc8cb8d4c6a
use std::env;
use clap::{App, SubCommand, ArgMatches};
use toml::{Table, Parser};
use std::io::{Read,Result,Error,ErrorKind};
use std::fs::File;
use super::result::*;

#[derive(Debug,Clone)]
pub struct Config {
    pub data_dir: String,
    pub zookeeper: Option<String>,
    pub elasticsearch: Option<String>,
    pub index_config: IndexConfig,
    pub sync_config: SyncConfig,
    pub repo_location: Option<RepoLocation>,
}

impl Config {
    pub fn new() -> Config {
        Config {
            data_dir: ".".to_string(),
            zookeeper: None,
            elasticsearch: None,
            index_config: IndexConfig::new(),
            sync_config: SyncConfig::new(),
            repo_location: None,
        }
    }

    pub fn new_from_table(table: &Table) -> Config {
        let mut cfg = Self::new();

        cfg.data_dir = table
            .get("data_dir")
            .map(|m| m.as_str().unwrap().to_string())
            .unwrap_or(cfg.data_dir);

        cfg.zookeeper = table
            .get("zookeeper")
            .map(|m| m.as_str().unwrap().to_string());

        cfg.elasticsearch = table
            .get("elasticsearch")
            .map(|m| m.as_str().unwrap().to_string());

        cfg.index_config = table
            .get("index")
            .map(|m| IndexConfig::new_from_table(m.as_table().unwrap()))
            .unwrap_or(cfg.index_config);

        cfg.sync_config = table
            .get("sync")
            .map(|m| SyncConfig::new_from_table(m.as_table().unwrap()))
            .unwrap_or(cfg.sync_config);

        cfg
    }
}

#[derive(Debug,Clone)]
pub struct IndexConfig;

impl IndexConfig {
    pub fn new() -> IndexConfig {
        IndexConfig
    }

    pub fn new_from_table(_table: &Table) -> IndexConfig {
        let cfg = Self::new();
        cfg
    }
}

#[derive(Debug,Clone)]
pub struct SyncConfig;

impl SyncConfig {
    pub fn new() -> SyncConfig {
        SyncConfig
    }

    pub fn new_from_table(_table: &Table) -> SyncConfig {
        let cfg = Self::new();
        cfg
    }
}

#[derive(Debug,Clone)]
pub struct RepoLocation {
    pub remote: Option<String>,
    pub branches: Vec<String>,
    pub dir: Option<String>,
}

impl RepoLocation {
    pub fn new() -> RepoLocation {
        RepoLocation {
            remote: None,
            branches: vec![],
            dir: None,
        }
    }

    pub fn get_remote<'a>(&'a self) -> RepoResult<&'a str> {
        self.remote.as_ref().map(|s| s as &str).ok_or(RepoError::NoRemote)
    }

    pub fn new_from_args<'a,'b>(args: &ArgMatches<'a,'b>) -> Option<RepoLocation> {
        if args.is_present("REMOTE") || args.is_present("REPO_DIR") {
            let mut repo_loc = RepoLocation::new();

            repo_loc.remote = get_config_str(args, "REMOTE")
                .or(repo_loc.remote);

            match args.values_of("BRANCH") {
                Some(branches) => {
                    for branch in branches {
                        repo_loc.branches.push(branch.to_string());
                    }
                },
                None => {
                    repo_loc.branches.push("master".to_string());
                }
            }

            repo_loc.dir = get_config_str(args, "REPO_DIR")
                .or(repo_loc.dir);

            Some(repo_loc)
        } else {
            None
        }
    }
}

pub fn parse_args<'a,'b>() -> ArgMatches<'a,'b> {
    App::new("codelauf")
        .version("1.0")
        .author("Chris Dawes <[email protected]>")
        .about("Codelauf indexes git repositories for search")
        .args_from_usage(
            "-c --config=[CONFIG] 'Sets a custom config file'
             -z --zookeeper=[ZOOKEEPER] 'Zookeeper host:port[/dir] (env var ZOOKEEPER)'
             -e --elasticsearch=[ELASTICSEARCH] 'Elasticsearch host:port (env var ELASTICSEARCH)'
             -d --data-dir=[DATA_DIR] 'Data directory'")
        .subcommand(SubCommand::with_name("init")
                    .about("creates the local database and exits")
                    .args_from_usage(""))
        .subcommand(SubCommand::with_name("index")
                    .about("indexes a single repository and exits")
                    .args_from_usage(
                        "-r --remote=[REMOTE] 'Repository remote url (required if not already cloned)'
                         -b --branch=[BRANCH] 'Branch (default master)'
                         -R --repo-dir=[REPO_DIR] 'Repo dir to use for repo (clones if it does not exist)'"))
        .subcommand(SubCommand::with_name("fetch")
                    .about("clones or fetches a repository and exits")
                    .args_from_usage(
                        "-r --remote=[REMOTE] 'Repository remote url (required if not already cloned)'
                         -b --branch=[BRANCH] 'Branch (default master)'
                         -R --repo-dir=[REPO_DIR] 'Repo dir to use for repo (clones if it does not exist)'"))
        .subcommand(SubCommand::with_name("sync")
                    .about("starts the worker process to mirror and index repos")
                    .args_from_usage(""))
        .get_matches()
}

pub fn parse_config(path: &str) -> Result<Config> {
    let mut f = try!(File::open(path));
    let mut s = String::new();
    try!(f.read_to_string(&mut s));

    let mut p = Parser::new(&s);

    p.parse()
        .map(|m| Config::new_from_table(&m))
        .ok_or(Error::new(ErrorKind::Other, "config parsing error"))
}

pub fn read_config(config: Option<String>) -> Result<Config> {
    match config {
        Some(path) => parse_config(&path),
        None => Ok(Config::new())
    }
}

pub fn get_env(name: &str) -> Option<String> {
    match env::var(name) {
        Ok(val) => Some(val),
        Err(e) => {
            info!("not using environment variable {}: {:?}", name, e);
            None
        }
    }
}

pub fn apply_config<'a,'b>(cfg: Config, args: &ArgMatches<'a,'b>) -> Config {
    let mut cfg = cfg;

    cfg.zookeeper = get_config_str_env(args, "ZOOKEEPER", "ZOOKEEPER")
        .or(cfg.zookeeper);

    cfg.elasticsearch = get_config_str_env(args, "ELASTICSEARCH", "ELASTICSEARCH")
        .or(cfg.elasticsearch);

    cfg.data_dir = get_config_str(args, "DATA_DIR")
        .unwrap_or(cfg.data_dir);

    match args.subcommand() {
        ("index", Some(indexargs)) => {
            cfg.repo_location = RepoLocation::new_from_args(&indexargs);
        },
        ("fetch", Some(fetchargs)) => {
            cfg.repo_location = RepoLocation::new_from_args(&fetchargs);
        },
        ("sync", Some(_syncargs)) => {
        },
        _ => {}
    }

    cfg
}

pub fn get_config_str<'a,'b>(args: &ArgMatches<'a,'b>, key: &str) -> Option<String> {
    args.value_of(key)
        .map(|s| s.to_string())
}

pub fn get_config_str_env<'a,'b>(args: &ArgMatches<'a,'b>, key: &str, env_key: &str) -> Option<String> {
    args.value_of(key)
        .map(|s| s.to_string())
        .or(get_env(env_key))
}

pub fn get_config<'a,'b>(args: &ArgMatches<'a,'b>) -> Result<Config> {
    let maybe_config = read_config(get_config_str(args, "CONFIG"));

    maybe_config.map_err(|err| {
        error!("error reading config file: {:?}", err);
        err
    }).map(|cfg| {
        apply_config(cfg, args)
    })
}
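A small usage sketch, not taken from the codelauf repository: it reuses the pre-0.4 `toml::Parser` API and the `Config::new_from_table` constructor from the file above to build a `Config` from an in-memory string instead of a file. The `config_from_str` helper and the field values are invented for illustration, and the example assumes the `Config` type above is in scope (i.e. it lives in the same crate).

// Hypothetical helper mirroring parse_config, but reading from a string in memory.
extern crate toml;

use toml::Parser;

fn config_from_str(s: &str) -> Option<Config> {
    let mut p = Parser::new(s);
    // new_from_table picks up the data_dir / zookeeper / elasticsearch / index / sync keys.
    p.parse().map(|table| Config::new_from_table(&table))
}

fn main() {
    let cfg = config_from_str(
        "data_dir = \"/var/lib/codelauf\"\n\
         zookeeper = \"localhost:2181/codelauf\"\n\
         elasticsearch = \"localhost:9200\"\n");
    println!("{:?}", cfg);
}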
if_condition
[ { "content": "/// open db\n\n/// calc repo dir location\n\n/// create basic db entry if it doesn't exist\n\n/// clone project if it isn't already\n\n/// otherwise:\n\n/// check remote url matches\n\n/// fetch branch\n\n/// checkout branch\n\n/// update db as we go\n\npub fn fetch_repo(config: &Config) -> RepoResult<()> { \n\n let db = try!(open_db(config));\n\n \n\n let mut repo = try!(Repo::new_for_config(&config));\n\n\n\n try!(repo.probe_fs());\n\n try!(repo.update_repo_in_db(&db));\n\n\n\n try!(ensure_fetched(&config, &db, &mut repo));\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/commands.rs", "rank": 4, "score": 148523.36565790154 }, { "content": "pub fn index_repo(config: &Config) -> RepoResult<()> {\n\n let db = try!(open_db(config));\n\n \n\n let mut repo = try!(Repo::new_for_config(&config));\n\n\n\n try!(repo.probe_fs());\n\n try!(repo.update_repo_in_db(&db));\n\n \n\n try!(ensure_indexed(&config, &db, &mut repo));\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/commands.rs", "rank": 5, "score": 147931.19107717669 }, { "content": "fn ensure_fetched(config: &Config, db: &Db, repo: &mut Repo) -> RepoResult<()> {\n\n info!(\"ensuring fetched {:?}\", repo);\n\n if repo.is_cloned() {\n\n try!(repo.open_repo());\n\n \n\n try!(repo.pull_repo());\n\n\n\n try!(repo.revwalk(db));\n\n \n\n repo.update_repo_in_db(db)\n\n } else { \n\n ensure_cloned(config, db, repo)\n\n }\n\n}\n\n\n", "file_path": "src/commands.rs", "rank": 6, "score": 145175.32523926586 }, { "content": "fn ensure_indexed(config: &Config, db: &Db, repo: &mut Repo) -> RepoResult<()> {\n\n info!(\"ensuring indexed {:?}\", repo);\n\n try!(ensure_fetched(&config, db, repo));\n\n\n\n try!(repo.treewalks(db));\n\n \n\n let index = try!(Index::new_for_config(config));\n\n\n\n try!(index.index_repo(db, repo));\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/commands.rs", "rank": 7, "score": 144692.32701775566 }, { "content": "fn ensure_cloned(_config: &Config, db: &Db, repo: &mut Repo) -> RepoResult<()> {\n\n info!(\"ensuring cloned {:?}\", repo);\n\n let _git_repo = try!(repo.clone_repo());\n\n\n\n try!(repo.revwalk(db));\n\n \n\n repo.update_repo_in_db(db)\n\n}\n\n\n", "file_path": "src/commands.rs", "rank": 8, "score": 139608.27798786826 }, { "content": "pub fn init(config: &Config) -> RepoResult<()> {\n\n info!(\"initialising\");\n\n let _db = try!(open_db(config));\n\n\n\n Ok(())\n\n}\n\n\n\n/// 1. find repo dir and check consistency against sqlite db:\n\n/// 2. if dir doesn't exist, clone it\n\n/// 3. if sqlite commit id doesn't exist in repo clear it\n\n/// 4. git fetch all to manually sync with remote\n\n/// 5. if local and remote branches have diverged, find latest commit that we have in common,\n\n/// and delete from the search index all local commits since then\n\n/// 6. now we can fast forward through the remote commits and add them to the search index,\n\n/// updating sqlite with the processed commit id as we go\n\n/// 7. any files that were deleted would have been removed from the index when processing commits\n\n/// 8. 
spider the entire repo and add all the files to the index, replacing any existing docs in index\n\n\n\n\n", "file_path": "src/commands.rs", "rank": 11, "score": 125722.95843406762 }, { "content": "pub fn run_sync(_config: &Config) -> RepoResult<()> {\n\n Ok(())\n\n}\n", "file_path": "src/commands.rs", "rank": 14, "score": 116184.54354974833 }, { "content": "pub fn path_to_bytes_vec(path: &Path) -> RepoResult<Vec<u8>> {\n\n let mut result: Vec<u8> = vec![];\n\n \n\n path_to_bytes(path).map(|bytes| {\n\n for b in bytes {\n\n result.push(*b);\n\n }\n\n \n\n result\n\n })\n\n}\n", "file_path": "src/models/types.rs", "rank": 15, "score": 101186.74396556684 }, { "content": "fn open_db(config: &Config) -> RepoResult<Db> {\n\n let dbpath = Path::new(&config.data_dir).join(\"db.sqlite\");\n\n info!(\"opening db\");\n\n let database = try!(Db::open(dbpath.as_path()).map_err(|e| RepoError::SqlError(e)));\n\n database.migrate();\n\n Ok(database)\n\n}\n\n\n", "file_path": "src/commands.rs", "rank": 16, "score": 91550.01383118102 }, { "content": "pub fn path_buf_from_bytes_vec(bytes: Vec<u8>) -> PathBuf {\n\n path_buf_from_bytes(&bytes[..])\n\n}\n\n\n\n\n", "file_path": "src/models/types.rs", "rank": 17, "score": 90003.59126834868 }, { "content": "#[cfg(not(unstable))]\n\npub fn path_to_bytes<'a>(path: &'a Path) -> RepoResult<&'a [u8]> {\n\n path.as_os_str().to_str().map(|s| s.as_bytes()).ok_or(RepoError::PathUnicodeError)\n\n}\n\n\n", "file_path": "src/models/types.rs", "rank": 18, "score": 66423.5820933389 }, { "content": "fn run() -> RepoResult<()> {\n\n env_logger::init().unwrap();\n\n \n\n let args = config::parse_args();\n\n\n\n let config = config::get_config(&args).unwrap();\n\n println!(\"using config:\\n {:?}\", config);\n\n\n\n \n\n match args.subcommand_name() {\n\n Some(\"init\") => {\n\n commands::init(&config)\n\n },\n\n Some(\"index\") => {\n\n commands::index_repo(&config)\n\n },\n\n Some(\"fetch\") => {\n\n commands::fetch_repo(&config)\n\n },\n\n Some(\"sync\") => {\n\n commands::run_sync(&config)\n\n },\n\n _ => {\n\n println!(\"{}\", args.usage());\n\n Err(RepoError::InvalidArgs(\"unrecognised command\".to_string()))\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 19, "score": 58180.34148384632 }, { "content": "pub fn path_buf_from_bytes(bytes: &[u8]) -> PathBuf {\n\n let os_str: &OsStr = OsStr::from_bytes(bytes);\n\n\n\n PathBuf::from(os_str)\n\n}\n\n\n", "file_path": "src/models/types.rs", "rank": 20, "score": 57916.63023561655 }, { "content": "use std::path::PathBuf;\n\nuse rusqlite::{SqliteConnection,SqliteResult,SqliteRow};\n\nuse schemamama_rusqlite::{SqliteMigration};\n\nuse result::*;\n\nuse models::types;\n\n\n\n#[derive(Debug,Clone)]\n\npub struct RepoFile {\n\n pub repo_id: String,\n\n pub branch: String,\n\n pub path: PathBuf,\n\n pub changed_commit_id: String,\n\n pub indexed_commit_id: Option<String>,\n\n}\n\n\n\nimpl RepoFile {\n\n pub fn new(repo_id: String, branch: String, path: PathBuf, changed_commit_id: String, indexed_commit_id: Option<String>) -> RepoFile {\n\n RepoFile {\n\n repo_id: repo_id,\n\n branch: branch,\n", "file_path": "src/models/repo_file.rs", "rank": 21, "score": 45289.02015064182 }, { "content": "use rusqlite::{SqliteConnection,SqliteResult,SqliteRow};\n\nuse schemamama_rusqlite::SqliteMigration;\n\nuse result::*;\n\n\n\n#[derive(Debug,Clone)]\n\npub struct RepoBranch {\n\n pub repo_id: String,\n\n pub name: String,\n\n pub indexed_commit_id: Option<String>,\n\n}\n\n\n\nimpl RepoBranch {\n\n pub fn new(repo_id: String, name: String, 
indexed_commit_id: Option<String>) -> RepoBranch {\n\n RepoBranch {\n\n repo_id: repo_id,\n\n name: name,\n\n indexed_commit_id: indexed_commit_id,\n\n }\n\n }\n\n \n", "file_path": "src/models/repo_branch.rs", "rank": 22, "score": 45287.24868098839 }, { "content": " path: path,\n\n changed_commit_id: changed_commit_id,\n\n indexed_commit_id: indexed_commit_id,\n\n }\n\n }\n\n \n\n pub fn new_from_sql_row(row0: &SqliteRow) -> RepoResult<RepoFile> {\n\n Ok(RepoFile {\n\n repo_id: row0.get(0),\n\n branch: row0.get(1),\n\n path: types::path_buf_from_bytes_vec(row0.get(2)),\n\n changed_commit_id: row0.get(3),\n\n indexed_commit_id: row0.get(4),\n\n })\n\n }\n\n}\n\n\n\npub struct CreateFilesTable;\n\nmigration!(CreateFilesTable, 4, \"create files table\");\n\n\n", "file_path": "src/models/repo_file.rs", "rank": 23, "score": 45282.97884552804 }, { "content": " pub fn new_from_sql_row(row0: &SqliteRow) -> RepoResult<RepoBranch> {\n\n Ok(RepoBranch {\n\n repo_id: row0.get(0),\n\n name: row0.get(1),\n\n indexed_commit_id: row0.get(2),\n\n })\n\n }\n\n}\n\n\n\npub struct CreateBranchesTable;\n\nmigration!(CreateBranchesTable, 2, \"create branches table\");\n\n\n\nimpl SqliteMigration for CreateBranchesTable {\n\n fn up(&self, conn: &SqliteConnection) -> SqliteResult<()> {\n\n info!(\"creating branches table\");\n\n \n\n const CREATE_BRANCHES: &'static str = \"\\\n\n CREATE TABLE branches ( \\\n\n repo_id TEXT, \\\n\n name TEXT, \\\n", "file_path": "src/models/repo_branch.rs", "rank": 24, "score": 45282.93741085285 }, { "content": "impl SqliteMigration for CreateFilesTable {\n\n fn up(&self, conn: &SqliteConnection) -> SqliteResult<()> {\n\n const CREATE_FILES: &'static str = \"\\\n\n CREATE TABLE files ( \\\n\n repo_id TEXT, \\\n\n branch TEXT, \\\n\n path TEXT, \\\n\n changed_commit_id TEXT, \\\n\n indexed_commit_id TEXT \\\n\n );\";\n\n\n\n const CREATE_FILES_NATURAL_KEY: &'static str = \"\\\n\n CREATE UNIQUE INDEX files_repo_id_path_idx ON files(repo_id,branch,path)\";\n\n\n\n Ok(())\n\n .and(conn.execute(CREATE_FILES, &[]))\n\n .and(conn.execute(CREATE_FILES_NATURAL_KEY, &[]))\n\n .map(|_| (()))\n\n }\n\n\n\n fn down(&self, conn: &SqliteConnection) -> SqliteResult<()> {\n\n conn.execute(\"DROP TABLE files;\", &[]).map(|_| ())\n\n }\n\n}\n", "file_path": "src/models/repo_file.rs", "rank": 25, "score": 45280.9872499385 }, { "content": " indexed_commit_id TEXT \\\n\n );\";\n\n\n\n const CREATE_BRANCHES_NATURAL_KEY: &'static str = \"\\\n\n CREATE UNIQUE INDEX branches_repo_id_name_idx ON branches(repo_id,name)\";\n\n\n\n Ok(())\n\n .and(conn.execute(CREATE_BRANCHES, &[]))\n\n .and(conn.execute(CREATE_BRANCHES_NATURAL_KEY, &[]))\n\n .map(|_| (()))\n\n }\n\n\n\n fn down(&self, conn: &SqliteConnection) -> SqliteResult<()> {\n\n conn.execute(\"DROP TABLE branches;\", &[]).map(|_| ())\n\n }\n\n}\n", "file_path": "src/models/repo_branch.rs", "rank": 26, "score": 45276.79084418111 }, { "content": "fn main() {\n\n match run() {\n\n Ok(()) => {}\n\n Err(e) => {\n\n println!(\"error: {:?}\", e);\n\n process::exit(1);\n\n }\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 27, "score": 43396.64222859868 }, { "content": " h.hexdigest()\n\n }\n\n}\n\n\n\npub struct Index {\n\n pub es_client: RefCell<rs_es::Client>,\n\n}\n\n\n\nimpl Index {\n\n pub fn new_for_config(config: &Config) -> RepoResult<Index> {\n\n let es_url_str: &str = try!(config.elasticsearch.as_ref().ok_or(RepoError::NoElasticSearch));\n\n\n\n let mut es_url_parts = es_url_str.split(\":\");\n\n \n\n let es_host = 
try!(es_url_parts.next().ok_or(RepoError::NoElasticSearch));\n\n let es_port = try!(es_url_parts.next().map(|s| s.parse::<u32>()).unwrap_or(Ok(9200)));\n\n\n\n info!(\"es host: {} port: {}\", es_host, es_port);\n\n\n\n Ok(Index {\n", "file_path": "src/index.rs", "rank": 28, "score": 26712.982595510308 }, { "content": " }\n\n}\n\n\n\n#[derive(Debug,Clone,RustcEncodable,RustcDecodable)]\n\npub struct Commit {\n\n pub parents: Vec<CommitId>,\n\n pub repo_id: String,\n\n pub author: Signature,\n\n pub committer: Signature,\n\n pub commit_date: String,\n\n pub message: Option<String>,\n\n}\n\n\n\nimpl Commit {\n\n pub fn new_for_git_commit(repo_id: &str, commit: &git2::Commit) -> RepoResult<Commit> {\n\n let time = Index::datetime_convert_git_to_chrono(&commit.time());\n\n\n\n let mut parents = vec![];\n\n\n\n for parent in commit.parents() {\n", "file_path": "src/index.rs", "rank": 29, "score": 26708.679508291007 }, { "content": " pub changed_commit_id: Option<String>,\n\n pub changed_date: Option<String>,\n\n}\n\n\n\nimpl IndexedFile {\n\n pub fn new(repo_id: String, path: PathBuf) -> IndexedFile {\n\n IndexedFile {\n\n repo_id: repo_id,\n\n path: path,\n\n text: None,\n\n keywords: None,\n\n changed_commit_id: None,\n\n changed_date: None,\n\n }\n\n }\n\n\n\n pub fn id(&self) -> String {\n\n let mut h = Sha1::new();\n\n h.update(self.repo_id.as_bytes());\n\n h.update(path_to_bytes(&self.path).unwrap());\n", "file_path": "src/index.rs", "rank": 30, "score": 26708.46581934932 }, { "content": " let diff = try!(git2::Diff::tree_to_tree(git_repo, old_tree.as_ref(), Some(&new_tree), Some(&mut diff_opts)));\n\n\n\n try!(self.index_diff(db, repo, &branch_commit_id_str, &diff));\n\n\n\n try!(db.mark_branch_as_indexed(&repo.id, &branch.name, &branch_commit_id_str));\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn index_diff(&self, db: &Db, repo: &Repo, commit_id: &str, diff: &git2::Diff) -> RepoResult<()> {\n\n let git_repo = try!(repo.git_repo());\n\n \n\n for delta in diff.deltas() {\n\n let old_file = delta.old_file();\n\n let new_file = delta.new_file();\n\n \n\n info!(\"delta: {:?} {:?} {:?} {:?} {:?}\", delta.status(), old_file.id(), old_file.path(), new_file.id(), new_file.path());\n\n\n\n let path = new_file.path();\n", "file_path": "src/index.rs", "rank": 31, "score": 26707.39347951716 }, { "content": " }\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn index_blob_str(&self, db: &Db, repo: &Repo, path: &Path, commit_id: &str, blob: &str) -> RepoResult<()> {\n\n let mut indexed_file = IndexedFile::new(repo.id.clone(), path.to_owned());\n\n indexed_file.text = Some(blob.to_owned());\n\n indexed_file.changed_commit_id = Some(commit_id.to_owned());\n\n let file_id = indexed_file.id();\n\n \n\n let mut es_client = self.es_client.borrow_mut();\n\n let mut op = es_client.index(\"codelauf\", \"file\");\n\n \n\n try!(op\n\n .with_id(&file_id)\n\n .with_doc(&indexed_file)\n\n .send());\n\n\n\n try!(db.mark_file_as_indexed(&repo.id, path, commit_id));\n", "file_path": "src/index.rs", "rank": 32, "score": 26704.31569360952 }, { "content": " let old_tree = match repo_branch.indexed_commit_id {\n\n Some(commit) => {\n\n let commit = try!(repo.get_commit(&commit));\n\n let tree = try!(commit.tree());\n\n Some(tree)\n\n },\n\n None => None\n\n };\n\n\n\n let branch_commit_id = try!(repo.branch_commit_id(&branch.name));\n\n let branch_commit_id_str = format!(\"{}\", branch_commit_id);\n\n \n\n let new_tree_commit = try!(repo.get_commit(&branch_commit_id_str));\n\n let new_tree = try!(new_tree_commit.tree());\n\n\n\n let mut 
diff_opts = git2::DiffOptions::new();\n\n diff_opts.ignore_whitespace(true)\n\n .ignore_filemode(true)\n\n ;\n\n\n", "file_path": "src/index.rs", "rank": 33, "score": 26703.78704858412 }, { "content": " parents.push(CommitId::new_for_git_commit(&parent));\n\n }\n\n \n\n Ok(Commit {\n\n parents: parents,\n\n repo_id: repo_id.to_owned(),\n\n author: Signature::new_for_git_signature(&commit.author()),\n\n committer: Signature::new_for_git_signature(&commit.committer()),\n\n commit_date: time.to_rfc3339(),\n\n message: commit.message().map(|s| s.to_owned()),\n\n })\n\n }\n\n}\n\n\n\n#[derive(Debug,Clone,RustcEncodable,RustcDecodable)]\n\npub struct IndexedFile {\n\n pub repo_id: String,\n\n pub path: PathBuf,\n\n pub text: Option<String>,\n\n pub keywords: Option<String>,\n", "file_path": "src/index.rs", "rank": 34, "score": 26703.46574655238 }, { "content": " es_client: RefCell::new(rs_es::Client::new(es_host, es_port)),\n\n })\n\n }\n\n\n\n pub fn index_tree(&self, db: &Db, repo: &Repo) -> RepoResult<()> {\n\n let files = try!(db.find_files_not_indexed(&repo.id));\n\n\n\n for file in files {\n\n match self.index_file(db, repo, &file.path, &file.changed_commit_id) {\n\n Err(err) => {\n\n info!(\"error indexing file {:?}: {:?}\", file.path, err);\n\n },\n\n _ => {}\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn index_file(&self, db: &Db, repo: &Repo, path: &Path, commit_id: &str) -> RepoResult<()> {\n", "file_path": "src/index.rs", "rank": 35, "score": 26703.28211190875 }, { "content": " info!(\"indexing file {:?}\", path);\n\n\n\n let mut f = try!(File::open(path));\n\n let mut s = String::new();\n\n try!(f.read_to_string(&mut s));\n\n //todo analyse file instead of sending verbatim\n\n\n\n let mut indexed_file = IndexedFile::new(repo.id.clone(), path.to_owned());\n\n indexed_file.text = Some(s);\n\n indexed_file.changed_commit_id = Some(commit_id.to_owned());\n\n let file_id = indexed_file.id();\n\n \n\n let mut es_client = self.es_client.borrow_mut();\n\n let mut op = es_client.index(\"codelauf\", \"file\");\n\n \n\n try!(op\n\n .with_id(&file_id)\n\n .with_doc(&indexed_file)\n\n .send());\n\n\n", "file_path": "src/index.rs", "rank": 36, "score": 26703.2254837059 }, { "content": "\n\n Ok(())\n\n }\n\n \n\n pub fn index_repo(&self, db: &Db, repo: &Repo) -> RepoResult<()> {\n\n try!(self.index_commits(db, repo));\n\n\n\n try!(self.index_branches(db, repo));\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn index_branches(&self, db: &Db, repo: &Repo) -> RepoResult<()> {\n\n let git_repo = try!(repo.git_repo());\n\n \n\n for branch in repo.branches.iter() {\n\n let maybe_repo_branch = try!(db.find_branch(&repo.id, &branch.name));\n\n\n\n let repo_branch = try!(maybe_repo_branch.ok_or(RepoError::BranchNotFound));\n\n\n", "file_path": "src/index.rs", "rank": 37, "score": 26701.404441558458 }, { "content": " try!(db.mark_file_as_indexed(&repo.id, path, commit_id));\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn index_blob(&self, db: &Db, repo: &Repo, path: &Path, commit_id: &str, blob: &git2::Blob) -> RepoResult<()> {\n\n if blob.is_binary() {\n\n info!(\"not indexing binary file {:?}\", path);\n\n } else {\n\n let blob_data = blob.content();\n\n\n\n let maybe_blob_str = UTF_8.decode(blob_data, DecoderTrap::Replace);\n\n\n\n if maybe_blob_str.is_err() {\n\n info!(\"error decoding blob data: {:?}\", maybe_blob_str);\n\n } else {\n\n let blob_str = maybe_blob_str.unwrap();\n\n \n\n try!(self.index_blob_str(db, repo, path, commit_id, &blob_str));\n\n }\n", "file_path": "src/index.rs", "rank": 38, "score": 26701.293597190877 
}, { "content": " }\n\n\n\n pub fn index_commit(&self, db: &Db, repo: &Repo, commit_id: &str) -> RepoResult<()> {\n\n let commit = try!(repo.get_commit(commit_id));\n\n\n\n let indexed_commit = try!(Commit::new_for_git_commit(&repo.id, &commit));\n\n\n\n info!(\"commit {:?}\", indexed_commit);\n\n\n\n let mut es_client = self.es_client.borrow_mut();\n\n \n\n let mut op = es_client.index(\"codelauf\", \"commit\");\n\n try!(op\n\n .with_id(commit_id)\n\n .with_doc(&indexed_commit)\n\n .send());\n\n\n\n try!(db.mark_commit_as_indexed(&repo.id, commit_id));\n\n\n\n/* let mut diff_opts = git2::DiffOptions::new();\n", "file_path": "src/index.rs", "rank": 39, "score": 26701.005997856028 }, { "content": "use repo::Repo;\n\nuse result::*;\n\nuse config::*;\n\nuse db::*;\n\nuse git2;\n\nuse chrono::*;\n\nuse rs_es;\n\nuse sha1::Sha1;\n\nuse std::fs::File;\n\nuse std::path::{Path,PathBuf};\n\nuse std::cell::RefCell;\n\nuse std::io::Read;\n\nuse encoding::{Encoding, DecoderTrap};\n\nuse encoding::all::UTF_8;\n\n\n\n#[derive(Debug,Clone,RustcEncodable,RustcDecodable)]\n\npub struct CommitId {\n\n pub id: String\n\n}\n\n\n", "file_path": "src/index.rs", "rank": 40, "score": 26700.56635386475 }, { "content": "impl CommitId {\n\n pub fn new_for_git_commit(commit: &git2::Commit) -> CommitId {\n\n CommitId {\n\n id: format!(\"{}\", commit.id())\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug,Clone,RustcEncodable,RustcDecodable)]\n\npub struct Signature {\n\n pub name: Option<String>,\n\n pub email: Option<String>,\n\n}\n\n\n\nimpl Signature {\n\n pub fn new_for_git_signature(sig: &git2::Signature) -> Signature {\n\n Signature {\n\n name: sig.name().map(|s| s.to_owned()),\n\n email: sig.email().map(|s| s.to_owned()),\n\n }\n", "file_path": "src/index.rs", "rank": 41, "score": 26700.29482254799 }, { "content": " \n\n if !new_file.id().is_zero() && path.is_some() {\n\n let blob = try!(git_repo.find_blob(new_file.id()));\n\n try!(self.index_blob(db, repo, path.unwrap(), commit_id, &blob));\n\n }\n\n }\n\n \n\n Ok(())\n\n }\n\n \n\n pub fn index_commits(&self, db: &Db, repo: &Repo) -> RepoResult<()> {\n\n let commits = try!(db.find_commits_not_indexed(&repo.id));\n\n\n\n for commit in commits {\n\n info!(\"indexing {:?}\", commit);\n\n\n\n try!(self.index_commit(db, repo, &commit));\n\n }\n\n \n\n Ok(())\n", "file_path": "src/index.rs", "rank": 42, "score": 26699.938825357145 }, { "content": " diff_opts.ignore_whitespace(true)\n\n .ignore_filemode(true)\n\n ;\n\n \n\n\n\n for parent in commit.parents() {\n\n let commit_tree = try!(commit.tree());\n\n let parent_tree = try!(parent.tree());\n\n \n\n let diff = try!(git2::Diff::tree_to_tree(git_repo, Some(&parent_tree), Some(&commit_tree), Some(&mut diff_opts)));\n\n\n\n for delta in diff.deltas() {\n\n info!(\"delta: {:?} {:?} {:?}\", delta.status(), delta.old_file().path(), delta.new_file().path());\n\n }\n\n }\n\n */\n\n Ok(())\n\n }\n\n\n\n pub fn datetime_convert_git_to_chrono(git_time: &git2::Time) -> DateTime<offset::fixed::FixedOffset> {\n\n let tz = offset::fixed::FixedOffset::east(git_time.offset_minutes() * 60);\n\n \n\n let time = tz.timestamp(git_time.seconds(), 0);\n\n\n\n time\n\n }\n\n}\n", "file_path": "src/index.rs", "rank": 43, "score": 26696.271557666227 }, { "content": "\n\nimpl Repository {\n\n pub fn new_from_remote(id: String, uri: String, path: PathBuf) -> Repository {\n\n Repository {\n\n id: id,\n\n uri: uri,\n\n path: path,\n\n sync_state: SyncState::NotCloned,\n\n added_datetime: Some(time::get_time()),\n\n fetched_datetime: None,\n\n 
indexed_datetime: None,\n\n }\n\n }\n\n \n\n pub fn new_from_sql_row(row0: &SqliteRow) -> RepoResult<Repository> {\n\n let sync_state: String = row0.get(3);\n\n\n\n Ok(Repository {\n\n id: row0.get(0),\n\n uri: row0.get(1),\n", "file_path": "src/models/repository.rs", "rank": 51, "score": 25790.36132720867 }, { "content": "use std::path::PathBuf;\n\nuse rusqlite::{SqliteConnection,SqliteResult,SqliteRow};\n\nuse schemamama_rusqlite::SqliteMigration;\n\nuse std::str::FromStr;\n\nuse time;\n\nuse time::Timespec;\n\nuse result::*;\n\nuse repo::SyncState;\n\nuse models::types;\n\n\n\n#[derive(Debug,Clone)]\n\npub struct Repository {\n\n pub id: String,\n\n pub uri: String,\n\n pub path: PathBuf,\n\n pub sync_state: SyncState,\n\n pub added_datetime: Option<Timespec>,\n\n pub fetched_datetime: Option<Timespec>,\n\n pub indexed_datetime: Option<Timespec>,\n\n}\n", "file_path": "src/models/repository.rs", "rank": 52, "score": 25785.784699584667 }, { "content": " path: types::path_buf_from_bytes_vec(row0.get(2)),\n\n sync_state: try!(SyncState::from_str(&sync_state)),\n\n added_datetime: row0.get(4),\n\n fetched_datetime: row0.get(5),\n\n indexed_datetime: row0.get(6),\n\n })\n\n }\n\n}\n\n\n\npub struct CreateRepositoriesTable;\n\nmigration!(CreateRepositoriesTable, 1, \"create repositories table\");\n\n\n\nimpl SqliteMigration for CreateRepositoriesTable {\n\n fn up(&self, conn: &SqliteConnection) -> SqliteResult<()> {\n\n const CREATE_REPOS: &'static str = \"\\\n\n CREATE TABLE repositories ( \\\n\n id TEXT, \\\n\n uri TEXT, \\\n\n path TEXT,\n\n sync_state TEXT, \\\n", "file_path": "src/models/repository.rs", "rank": 53, "score": 25785.511800951717 }, { "content": " added_datetime DATETIME,\n\n fetched_datetime DATETIME, \\\n\n indexed_datetime DATETIME \\\n\n );\";\n\n\n\n const CREATE_REPOS_PKEY: &'static str = \"\\\n\n CREATE UNIQUE INDEX repositories_id_idx ON repositories(id)\";\n\n\n\n const CREATE_REPOS_NATURAL_KEY: &'static str = \"\\\n\n CREATE UNIQUE INDEX repositories_uri_idx ON repositories(uri)\";\n\n\n\n Ok(())\n\n .and(conn.execute(CREATE_REPOS, &[]))\n\n .and(conn.execute(CREATE_REPOS_PKEY, &[]))\n\n .and(conn.execute(CREATE_REPOS_NATURAL_KEY, &[]))\n\n .map(|_| (()))\n\n }\n\n\n\n fn down(&self, conn: &SqliteConnection) -> SqliteResult<()> {\n\n conn.execute(\"DROP TABLE repositories;\", &[]).map(|_| ())\n\n }\n\n}\n", "file_path": "src/models/repository.rs", "rank": 54, "score": 25781.04001314135 }, { "content": " pub uri: String,\n\n pub branches: Vec<Branch>,\n\n pub sync_state: SyncState,\n\n pub git_repo: Option<Rc<git2::Repository>>,\n\n}\n\n\n\nimpl fmt::Debug for Repo {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"Repo ({:?}, {}, {:?}, {:?})\", self.path, self.uri, self.branches, self.sync_state)\n\n }\n\n}\n\n\n\nimpl Repo {\n\n pub fn new_for_config(config: &Config) -> RepoResult<Repo> {\n\n let repo_loc = try!(config.repo_location.as_ref().ok_or(RepoError::NoRemote));\n\n\n\n let uri = try!(repo_loc.remote.as_ref().ok_or(RepoError::NoRemote));\n\n let branches = repo_loc.branches.iter().map(|b| Branch::new(b.clone(), None) ).collect();\n\n\n\n Ok(Repo::new(try!(Repo::get_repo_path(config, repo_loc)), uri.clone(), branches, SyncState::NotCloned))\n", "file_path": "src/repo.rs", "rank": 55, "score": 23602.993796209415 }, { "content": " fo.prune(git2::FetchPrune::On);\n\n fo.remote_callbacks(grcs);\n\n\n\n let mut remote = try!(self.find_or_create_git_remote(&git_repo));\n\n\n\n info!(\"fetching from remote\");\n\n let branch_names: 
Vec<&str> = self.branches.iter().map(|s| &s.name[..]).collect();\n\n try!(remote.fetch(&branch_names, Some(&mut fo), None));\n\n info!(\"fetched.\");\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn find_branch(&self, git_repo: &git2::Repository, branch_name: &str) -> RepoResult<String> {\n\n info!(\"finding branch {}\", branch_name);\n\n \n\n let branch = try!(git_repo.find_branch(branch_name, git2::BranchType::Local));\n\n \n\n let branch_fullname = try!(branch.get().name().ok_or(RepoError::StringUnicodeError).map(|s| s.to_string()));\n\n\n", "file_path": "src/repo.rs", "rank": 56, "score": 23602.912821330305 }, { "content": " None => Err(RepoError::InvalidState(\"git repo not opened\".to_string())),\n\n }\n\n }\n\n \n\n pub fn set_state(&mut self, new_state: SyncState) {\n\n info!(\"repo {} {:?} --> {:?}\", self.uri, self.sync_state, new_state);\n\n self.sync_state = new_state;\n\n }\n\n\n\n pub fn get_repo_path(config: &Config, repo_loc: &RepoLocation) -> RepoResult<PathBuf> {\n\n let id = Repo::id(try!(repo_loc.get_remote()));\n\n Ok(Path::new(&config.data_dir).join(\"repos\").join(id))\n\n }\n\n\n\n pub fn id(remote: &str) -> String {\n\n let mut h = Sha1::new();\n\n h.update(remote.as_bytes());\n\n h.hexdigest()\n\n }\n\n}\n\n\n\n\n\n\n\n\n", "file_path": "src/repo.rs", "rank": 57, "score": 23600.844766356397 }, { "content": " Ok(())\n\n }\n\n\n\n pub fn open_repo(&mut self) -> RepoResult<()> {\n\n self.git_repo = Some(Rc::new(try!(git2::Repository::open(self.path.clone()))));\n\n\n\n Ok(())\n\n }\n\n\n\n fn find_or_create_git_remote<'a> (&'a self, repo: &'a git2::Repository) -> RepoResult<git2::Remote> {\n\n // TODO: ensure returned remote has correct uri\n\n repo.find_remote(\"origin\").map_err(|e| RepoError::GitError(e))\n\n }\n\n\n\n pub fn fetch_repo(&self) -> RepoResult<()> {\n\n let git_repo = try!(self.git_repo());\n\n\n\n let mut fo = git2::FetchOptions::new();\n\n let grcs = Repo::new_git_callbacks();\n\n \n", "file_path": "src/repo.rs", "rank": 58, "score": 23596.598398393257 }, { "content": " }\n\n \n\n pub fn new(path: PathBuf, uri: String, branches: Vec<Branch>, sync_state: SyncState) -> Repo {\n\n Repo {\n\n id: Repo::id(&uri),\n\n path: path,\n\n uri: uri,\n\n branches: branches,\n\n sync_state: sync_state,\n\n git_repo: None,\n\n }\n\n }\n\n\n\n fn new_git_callbacks<'a>() -> git2::RemoteCallbacks<'a> {\n\n let mut grcs = git2::RemoteCallbacks::<'a>::new();\n\n\n\n grcs\n\n .transfer_progress(|prog| {\n\n info!(\"total: {} received: {} indexed: {}\",\n\n prog.total_objects(),\n", "file_path": "src/repo.rs", "rank": 59, "score": 23594.901795848025 }, { "content": "\n\n if !self.dot_git_exists() {\n\n self.set_state(SyncState::NotCloned);\n\n Ok(())\n\n } else {\n\n match self.sync_state {\n\n SyncState::NotCloned => {\n\n self.set_state(SyncState::Cloned);\n\n }\n\n _ => {}\n\n }\n\n Ok(())\n\n }\n\n }\n\n\n\n pub fn clone_repo(&mut self) -> RepoResult<()> {\n\n self.git_repo = Some(Rc::new(try!(git2::Repository::clone(&self.uri, self.path.clone()))));\n\n\n\n self.sync_state = SyncState::Cloned;\n\n\n", "file_path": "src/repo.rs", "rank": 60, "score": 23594.768390789355 }, { "content": "\n\n Ok(())\n\n }\n\n\n\n pub fn pull_repo(&self) -> RepoResult<()> {\n\n try!(self.fetch_repo());\n\n\n\n for branch in &self.branches {\n\n try!(self.repoint_branch_to_origin(&branch.name));\n\n }\n\n \n\n //try!(self.checkout_head());\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn revwalk_add_branch(&self, git_repo: &git2::Repository, revwalk: &mut git2::Revwalk, branch_name: &str, indexed_commit: 
&Option<String>) -> RepoResult<()> {\n\n\n\n let branch_commit = try!(self.branch_commit_id(branch_name));\n\n \n", "file_path": "src/repo.rs", "rank": 61, "score": 23593.009252547705 }, { "content": " info!(\"found branch {}\", branch_fullname);\n\n\n\n Ok(branch_fullname)\n\n }\n\n\n\n pub fn checkout_branch(&mut self, branch_name: &str) -> RepoResult<()> {\n\n let git_repo = try!(self.git_repo());\n\n\n\n let branch_fullname = try!(self.find_branch(&git_repo, branch_name));\n\n\n\n info!(\"setting head to {}\", branch_fullname);\n\n try!(git_repo.set_head(&branch_fullname));\n\n \n\n let mut cb = git2::build::CheckoutBuilder::new();\n\n cb.force();\n\n\n\n info!(\"checkout {}\", branch_name);\n\n try!(git_repo.checkout_head(Some(&mut cb)).map_err(|e| RepoError::GitError(e)));\n\n\n\n Ok(())\n", "file_path": "src/repo.rs", "rank": 62, "score": 23592.91264633916 }, { "content": "\n\n#[derive(Debug,Clone)]\n\npub struct Branch {\n\n pub name: String,\n\n pub indexed_commit: Option<String>,\n\n}\n\n\n\nimpl Branch {\n\n pub fn new(name: String, indexed_commit: Option<String>) -> Branch {\n\n Branch {\n\n name: name,\n\n indexed_commit: indexed_commit,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct Repo {\n\n pub id: String,\n\n pub path: PathBuf,\n", "file_path": "src/repo.rs", "rank": 63, "score": 23592.069063456656 }, { "content": "\n\n let mut revwalk = try!(git_repo.revwalk());\n\n\n\n if self.branches.is_empty() {\n\n try!(self.revwalk_add_branch(&git_repo, &mut revwalk, \"master\", &None));\n\n } else {\n\n for branch in &self.branches {\n\n try!(self.revwalk_add_branch(&git_repo, &mut revwalk, &branch.name, &branch.indexed_commit));\n\n }\n\n }\n\n\n\n info!(\"commit history:\");\n\n for oid in revwalk {\n\n try!(self.add_commit(db, &oid));\n\n }\n\n \n\n Ok(())\n\n }\n\n\n\n pub fn get_commit<'a>(&'a self, commit_id: &str) -> RepoResult<git2::Commit<'a> > {\n", "file_path": "src/repo.rs", "rank": 64, "score": 23592.047672295543 }, { "content": " type Err = RepoError;\n\n fn from_str(s: &str) -> Result<SyncState, Self::Err> {\n\n match s {\n\n \"NotCloned\" => Ok(SyncState::NotCloned),\n\n \"Cloned\" => Ok(SyncState::Cloned),\n\n \"Corrupted\" => Ok(SyncState::Corrupted),\n\n _ => Err(RepoError::EnumParseError(s.to_string()))\n\n }\n\n }\n\n}\n\n\n\nimpl ToString for SyncState {\n\n fn to_string(&self) -> String {\n\n match *self {\n\n SyncState::NotCloned => \"NotCloned\".to_string(),\n\n SyncState::Cloned => \"Cloned\".to_string(),\n\n SyncState::Corrupted => \"Corrupted\".to_string(),\n\n }\n\n }\n\n}\n", "file_path": "src/repo.rs", "rank": 65, "score": 23591.91896723281 }, { "content": " pub fn create_in_db(&self, db: &db::Db) -> RepoResult<db::Repository> {\n\n info!(\"creating new db repo entry for {:?}\", self);\n\n\n\n let remote_uri = &self.uri;\n\n \n\n let new_repo = db::Repository::new_from_remote(self.id.clone(), remote_uri.clone(), self.path.clone());\n\n try!(db.insert_repo(&new_repo));\n\n \n\n info!(\"created db repo entry {:?}\", new_repo);\n\n\n\n for branch in &self.branches {\n\n let new_branch = db::RepoBranch::new(new_repo.id.clone(), branch.name.clone(), None);\n\n \n\n try!(db.insert_branch(&new_branch));\n\n \n\n info!(\"created db repo branch entry {:?}\", new_branch);\n\n }\n\n\n\n Ok(new_repo)\n\n }\n", "file_path": "src/repo.rs", "rank": 66, "score": 23591.850470470625 }, { "content": " \n\n pub fn find_or_create_in_db(&mut self, db: &db::Db) -> RepoResult<db::Repository> {\n\n let maybe_repo = try!(self.find_in_db(db));\n\n\n\n match maybe_repo 
{\n\n Some(existing_repo) => { \n\n Ok(existing_repo)\n\n }\n\n None => { \n\n let new_repo = try!(self.create_in_db(db));\n\n \n\n Ok(new_repo)\n\n }\n\n }\n\n }\n\n\n\n pub fn update_repo_in_db(&mut self, db: &db::Db) -> RepoResult<()> {\n\n info!(\"updating db repo entry to match cloned repo...\");\n\n\n\n let mut db_repo = try!(self.find_or_create_in_db(db));\n", "file_path": "src/repo.rs", "rank": 67, "score": 23591.6983105029 }, { "content": " }\n\n\n\n /// like git update-ref refs/heads/master refs/remotes/origin/master\n\n pub fn repoint_branch_to_origin(&self, branch_name: &str) -> RepoResult<()> {\n\n let git_repo = try!(self.git_repo());\n\n \n\n let remote = try!(self.find_or_create_git_remote(&git_repo));\n\n\n\n let remote_name = remote.name().unwrap();\n\n let remote_ref = format!(\"refs/remotes/{}/{}\", remote_name, branch_name);\n\n let local_ref = format!(\"refs/heads/{}\", branch_name);\n\n\n\n info!(\"getting commit id for local branch {}\", local_ref);\n\n let local_oid = try!(git_repo.refname_to_id(&local_ref));\n\n\n\n info!(\"getting commit id for remote branch {}\", remote_ref);\n\n let remote_oid = try!(git_repo.refname_to_id(&remote_ref));\n\n\n\n let reflog_msg = format!(\"update-ref: moving {} from {} to {}\", local_ref, local_oid, remote_oid);\n\n try!(git_repo.reference(&local_ref, remote_oid, true, &reflog_msg));\n", "file_path": "src/repo.rs", "rank": 68, "score": 23591.454411459523 }, { "content": " let oid = try!(head.target().ok_or(RepoError::HeadRefHasNoDirectTarget));\n\n \n\n let commit = try!(git_repo.find_commit(oid));\n\n\n\n Ok(format!(\"{}\", commit.id()))\n\n }\n\n\n\n pub fn branch_commit_id(&self, branch: &str) -> RepoResult<git2::Oid> {\n\n let git_repo = try!(self.git_repo());\n\n\n\n let branch_fullname = try!(self.find_branch(git_repo, branch));\n\n \n\n let id = try!(git_repo.refname_to_id(&branch_fullname));\n\n\n\n return Ok(id);\n\n }\n\n\n\n pub fn git_repo<'a>(&'a self) -> RepoResult<&'a git2::Repository> {\n\n match self.git_repo.as_ref() {\n\n Some(gr) => Ok(gr),\n", "file_path": "src/repo.rs", "rank": 69, "score": 23589.348434231124 }, { "content": " \n\n // get commit id for last time we indexed the repo\n\n let repo_branch = try!(db.find_branch(&self.id, &branch.name)).unwrap();\n\n let indexed_commit_id = repo_branch.indexed_commit_id;\n\n \n\n if indexed_commit_id.is_some() {\n\n // tree-to-tree diff it and head, adding changed files to table:\n\n\n\n try!(self.treediff(db, indexed_commit_id.as_ref().unwrap(), &branch_commit_id_str));\n\n } else {\n\n // add all files to files table\n\n try!(self.treewalk(db, &repo_branch.name, &branch_commit_id_str));\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn treewalk(&self, db: &db::Db, branch: &str, commit_id: &str) -> RepoResult<()> {\n\n //let git_repo = try!(self.git_repo());\n", "file_path": "src/repo.rs", "rank": 70, "score": 23588.590999314292 }, { "content": "use std::rc::Rc;\n\nuse std::path::{PathBuf,Path};\n\nuse std::str;\n\nuse std::str::FromStr;\n\nuse std::fs;\n\nuse std::fmt;\n\nuse git2;\n\nuse sha1::Sha1;\n\nuse super::config::{Config,RepoLocation};\n\nuse super::result::*;\n\nuse super::db;\n\n\n\n#[derive(Debug,Copy,Clone)]\n\npub enum SyncState {\n\n NotCloned,\n\n Cloned,\n\n Corrupted,\n\n}\n\n\n\nimpl FromStr for SyncState {\n", "file_path": "src/repo.rs", "rank": 71, "score": 23588.373930533817 }, { "content": " }\n\n \n\n pub fn dot_git_path(&self) -> PathBuf {\n\n self.path.join(\".git\")\n\n }\n\n \n\n pub fn dot_git_exists(&self) -> bool {\n\n match 
fs::metadata(self.dot_git_path().as_path()) {\n\n Ok(_) => true,\n\n Err(_) => {\n\n info!(\"repo doesn't exist at {:?}\", self.path);\n\n false\n\n }\n\n }\n\n }\n\n\n\n pub fn find_in_db(&self, db: &db::Db) -> RepoResult<Option<db::Repository>> {\n\n db.find_repo_by_remote(&self.uri)\n\n }\n\n\n", "file_path": "src/repo.rs", "rank": 72, "score": 23588.31949202752 }, { "content": " })\n\n }\n\n}\n\n\n\npub struct RecursiveTreeIter<'a> {\n\n entries: Vec<RepoTreeEntry>,\n\n repo: &'a git2::Repository,\n\n}\n\n\n\nimpl<'a> Iterator for RecursiveTreeIter<'a> {\n\n type Item = RepoTreeEntry;\n\n \n\n fn next(&mut self) -> Option<RepoTreeEntry> {\n\n if self.entries.is_empty() {\n\n None\n\n } else {\n\n let repo_entry = self.entries.remove(0);\n\n let entry = &repo_entry.entry;\n\n \n\n match entry.kind() {\n", "file_path": "src/repo.rs", "rank": 73, "score": 23587.96327959247 }, { "content": " prog.received_objects(),\n\n prog.indexed_objects());\n\n true\n\n })\n\n .sideband_progress(|data| {\n\n match str::from_utf8(data) {\n\n Ok(v) => println!(\"{}\", v),\n\n Err(e) => println!(\"not utf8 data: {:?}\", e)\n\n };\n\n true\n\n });\n\n\n\n grcs\n\n }\n\n\n\n pub fn is_cloned(&self) -> bool {\n\n match self.sync_state {\n\n SyncState::NotCloned => false,\n\n _ => true\n\n }\n", "file_path": "src/repo.rs", "rank": 74, "score": 23586.51075031196 }, { "content": " info!(\"getting commit {:?}\", commit_id);\n\n let git_repo = try!(self.git_repo());\n\n\n\n let oid = try!(git2::Oid::from_str(commit_id));\n\n \n\n let commit = try!(git_repo.find_commit(oid));\n\n\n\n Ok(commit)\n\n }\n\n\n\n //todo what's this for?\n\n pub fn treediff(&self, _db: &db::Db, _indexed_commit_id: &str, _branch_commit_id: &str) -> RepoResult<()> {\n\n Ok(())\n\n }\n\n\n\n //todo what's this for?\n\n pub fn treewalks(&self, db: &db::Db) -> RepoResult<()> {\n\n for branch in self.branches.iter() {\n\n let branch_commit_id = try!(self.branch_commit_id(&branch.name));\n\n let branch_commit_id_str = format!(\"{}\", branch_commit_id);\n", "file_path": "src/repo.rs", "rank": 75, "score": 23586.08555125374 }, { "content": " if indexed_commit.is_some() {\n\n let indexed_commit_id = try!(git_repo.revparse_single(indexed_commit.as_ref().unwrap())).id();\n\n \n\n let bases = try!(git_repo.merge_bases(branch_commit, indexed_commit_id));\n\n \n\n for base in bases.iter() {\n\n try!(revwalk.hide(*base));\n\n }\n\n }\n\n \n\n try!(revwalk.push(branch_commit));\n\n\n\n Ok(())\n\n }\n\n\n\n /// walks commits from current head to merge-base of self.commit if any\n\n pub fn revwalk(&self, db: &db::Db) -> RepoResult<()> {\n\n info!(\"walking revision tree\");\n\n \n\n let git_repo = try!(self.git_repo());\n", "file_path": "src/repo.rs", "rank": 76, "score": 23585.362896704635 }, { "content": " }\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn tree_iter<'tree,'repo>(&'repo self, tree: &'tree git2::Tree<'tree>) -> RecursiveTreeIter<'repo> {\n\n let repo = self.git_repo().unwrap();\n\n \n\n let mut initial = vec![];\n\n \n\n for entry in tree.iter() {\n\n let repo_entry = RepoTreeEntry::from_ref(&entry, Path::new(\"\"));\n\n\n\n if repo_entry.is_some() {\n\n initial.push(repo_entry.unwrap());\n\n }\n\n }\n\n \n\n RecursiveTreeIter {\n", "file_path": "src/repo.rs", "rank": 77, "score": 23583.277145635617 }, { "content": " entries: initial,\n\n repo: repo,\n\n }\n\n }\n\n\n\n pub fn add_commit(&self, db: &db::Db, oid: &git2::Oid) -> RepoResult<()> {\n\n info!(\"adding commit {:?}\", oid);\n\n\n\n let commit_id = format!(\"{}\", oid);\n\n \n\n 
try!(db.create_commit_unless_exists(&commit_id, &self.id));\n\n \n\n Ok(())\n\n }\n\n\n\n pub fn head_commit_id(&self) -> RepoResult<String> {\n\n let git_repo = try!(self.git_repo());\n\n\n\n let head = try!(git_repo.head());\n\n\n", "file_path": "src/repo.rs", "rank": 78, "score": 23582.880447819512 }, { "content": "\n\n match db_repo.sync_state {\n\n SyncState::NotCloned => {\n\n db_repo.sync_state = self.sync_state;\n\n },\n\n SyncState::Cloned => {\n\n db_repo.sync_state = self.sync_state;\n\n },\n\n other_state => {\n\n self.sync_state = other_state;\n\n }\n\n }\n\n \n\n try!(db.update_repo(&db_repo));\n\n \n\n Ok(())\n\n }\n\n \n\n pub fn probe_fs(&mut self) -> RepoResult<()> {\n\n info!(\"probing cloned repo {}\", self.uri);\n", "file_path": "src/repo.rs", "rank": 79, "score": 23582.415000953242 }, { "content": "\n\n#[derive(Clone)]\n\npub struct RepoTreeEntry {\n\n pub entry: git2::TreeEntry<'static>,\n\n pub path: PathBuf,\n\n}\n\n\n\nimpl RepoTreeEntry {\n\n pub fn new(entry: git2::TreeEntry<'static>, path: PathBuf) -> RepoTreeEntry {\n\n RepoTreeEntry {\n\n entry: entry,\n\n path: path\n\n }\n\n }\n\n\n\n pub fn from_ref(entry: &git2::TreeEntry, path: &Path) -> Option<RepoTreeEntry> {\n\n let maybe_name = entry.name();\n\n \n\n maybe_name.map(|name| {\n\n RepoTreeEntry::new(entry.to_owned(), path.join(name).to_owned())\n", "file_path": "src/repo.rs", "rank": 80, "score": 23581.573616410667 }, { "content": "\n\n let commit = try!(self.get_commit(commit_id));\n\n\n\n let tree = try!(commit.tree());\n\n \n\n let iter = self.tree_iter(&tree);\n\n\n\n for repo_entry in iter {\n\n let entry = repo_entry.entry;\n\n \n\n match entry.kind() {\n\n Some(git2::ObjectType::Blob) => {\n\n //let obj: git2::Object = entry.to_object(git_repo).unwrap();\n\n\n\n //todo get contents of file from blob\n\n //let blob: &git2::Blob = obj.as_blob().unwrap();\n\n\n\n try!(db.upsert_file(&self.id, branch, &repo_entry.path, Some(commit_id)));\n\n },\n\n _ => {}\n", "file_path": "src/repo.rs", "rank": 81, "score": 23580.65420588585 }, { "content": " Some(git2::ObjectType::Tree) => {\n\n let obj: git2::Object<'a> = entry.to_object(self.repo).unwrap();\n\n \n\n let tree: &git2::Tree<'a> = obj.as_tree().unwrap();\n\n \n\n for entry in tree.iter() {\n\n let child_repo_entry = RepoTreeEntry::from_ref(&entry, &repo_entry.path);\n\n\n\n if child_repo_entry.is_some() {\n\n self.entries.push(child_repo_entry.unwrap());\n\n }\n\n }\n\n }\n\n _ => {}\n\n }\n\n \n\n Some(repo_entry.clone())\n\n }\n\n }\n\n}\n", "file_path": "src/repo.rs", "rank": 82, "score": 23575.661105571322 }, { "content": "use rusqlite::{SqliteConnection,SqliteResult,SqliteRow};\n\nuse schemamama_rusqlite::{SqliteMigration};\n\nuse std::str::FromStr;\n\nuse result::*;\n\n\n\n#[derive(Debug,Copy,Clone)]\n\npub enum CommitState {\n\n Indexed,\n\n NotIndexed,\n\n}\n\n\n\nimpl FromStr for CommitState {\n\n type Err = RepoError;\n\n fn from_str(s: &str) -> Result<CommitState, Self::Err> {\n\n match s {\n\n \"Indexed\" => Ok(CommitState::Indexed),\n\n \"NotIndexed\" => Ok(CommitState::NotIndexed),\n\n _ => Err(RepoError::EnumParseError(s.to_string()))\n\n }\n\n }\n", "file_path": "src/models/repo_commit.rs", "rank": 83, "score": 20933.66235251812 }, { "content": "}\n\n\n\nimpl ToString for CommitState {\n\n fn to_string(&self) -> String {\n\n match *self {\n\n CommitState::Indexed => \"Indexed\".to_string(),\n\n CommitState::NotIndexed => \"NotIndexed\".to_string(),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug,Clone)]\n\npub struct RepoCommit {\n\n pub id: 
String,\n\n pub repo_id: String,\n\n pub state: CommitState,\n\n}\n\n\n\nimpl RepoCommit {\n\n pub fn new(id: String, repo_id: String, state: CommitState) -> RepoCommit {\n", "file_path": "src/models/repo_commit.rs", "rank": 84, "score": 20929.388350694604 }, { "content": " RepoCommit {\n\n id: id,\n\n repo_id: repo_id,\n\n state: state,\n\n }\n\n }\n\n \n\n pub fn new_from_sql_row(row0: &SqliteRow) -> RepoResult<RepoCommit> {\n\n let commit_state: String = row0.get(2);\n\n \n\n Ok(RepoCommit {\n\n id: row0.get(0),\n\n repo_id: row0.get(1),\n\n state: try!(CommitState::from_str(&commit_state)),\n\n })\n\n }\n\n}\n\n\n\npub struct CreateCommitsTable;\n\nmigration!(CreateCommitsTable, 3, \"create commits table\");\n", "file_path": "src/models/repo_commit.rs", "rank": 85, "score": 20922.216443896472 }, { "content": "\n\nimpl SqliteMigration for CreateCommitsTable {\n\n fn up(&self, conn: &SqliteConnection) -> SqliteResult<()> {\n\n const CREATE_COMMITS: &'static str = \"\\\n\n CREATE TABLE commits ( \\\n\n id TEXT, \\\n\n repo_id TEXT, \\\n\n state TEXT \\\n\n );\";\n\n\n\n const CREATE_COMMITS_PKEY: &'static str = \"\\\n\n CREATE UNIQUE INDEX commits_repo_id_id_idx ON commits(repo_id,id)\";\n\n\n\n Ok(())\n\n .and(conn.execute(CREATE_COMMITS, &[]))\n\n .and(conn.execute(CREATE_COMMITS_PKEY, &[]))\n\n .map(|_| (()))\n\n }\n\n\n\n fn down(&self, conn: &SqliteConnection) -> SqliteResult<()> {\n\n conn.execute(\"DROP TABLE commits;\", &[]).map(|_| ())\n\n }\n\n}\n", "file_path": "src/models/repo_commit.rs", "rank": 86, "score": 20919.601140930004 }, { "content": "### sync thread states\n\n\n\n 1. started\n\n 2. start_fail couldn't open sqlite db or find data dir? or zk?\n\n 3. cloning\n\n 4. clone_fail couldn't access remote repo\n\n 5. cloned\n\n 6. fetching\n\n 7. fetch_fail couldn't access remote repo\n\n 8. fetched\n\n 11. indexing_commits\n\n 12. index_commits_fail error twiddling git or poking elasticsearch or sqlite\n\n 13. indexed_commits\n\n 14. indexing_files\n\n 15. index_files_fail error poking elasticsearch or sqlite or git\n\n 16. indexed_files\n\n\n\n\n\n## SQLite db schema\n\n\n\n### repositories table\n\n\n\n 1. id uuid string (hyphen formatted, 36 chars)\n\n 2. repo uri (e.g. https://github.com/me/foo.git)\n\n 3. indexed_datetime for information only\n\n 4. sync state (see above)\n\n 5. local filesystem path\n\n\n\nunique indexes on id and repo\n\n\n\n### branches table\n\n\n\n 1. repo_id\n\n 2. name\n\n 3. indexed_commit_id\n\n\n\nunique index on (repo_id,name)\n\n\n\n### commits work table\n\n\n\n 1. id git oid of commit 20 char ascii\n\n 2. repo_id uuid string of repo\n\n 3. state enum indexed or not_indexed\n\n\n\nunique index on (repo_id, id)\n\n\n\n### repo_files table\n\n\n\n 1. repo_id uuid string of repo\n\n 2. path relative path in repo of file\n\n 3. commit_id id of commit when last changed\n\n 4. 
indexed_commit_id id of commit when last indexed\n\n \n\nunique index on (repo_id, path)\n\n\n\n## a note on paths, strings and unicode:\n\n\n\nthe rust code uses Paths where appropriate.\n\nthe sqlite db uses c strings.\n\nconverting between the two is done in modules::types.rs\n\n\n\nthere are no paths created from things that aren't already paths,\n\nor are otherwise known to be something safe like ascii e.g.\n\na hash of the remote url is used instead of the url itself as\n\nthe dir to clone the repo into, and the branch names aren't used\n\nin paths anywhere.\n\n\n\nhopefully that's enough to be reasonably cross platform and\n", "file_path": "README.md", "rank": 87, "score": 14435.33321623767 }, { "content": "Codelauf is a source code search system\n\n\n\n[![Build Status](https://travis-ci.org/cmsd2/codelauf.svg)](https://travis-ci.org/cmsd2/codelauf)\n\n\n\n[Documentation](https://cmsd2.github.io/rust-docs/codelauf/codelauf/)\n\n\n\nIt is a work-in-progress.\n\nThis design document describes how it will be architected.\n\n\n\n# Codelauf\n\n\n\nCodelauf mirrors git repositories and uses elasticsearch to index files and commits on tracked branches.\n\n\n\nCode is passed through some language specific syntax analysers before being loaded into the index.\n\n\n\nYou can search the indexes given a commit id or a string that appears in the codebase on one of the\n\ntracked remotes and branches.\n\n\n\n## Design\n\n\n\n```\n\nELB -> ASG[ Web Frontends ] -> ElasticSearch <- codelauf worker -> sqlite\n\n -> ZooKeeper <-\n\n```\n\n\n\nthere can be any number of web frontends, each of which is stateless.\n\n\n\na separate project provides the web front-end and API.\n\n\n\nthe web frontends provide an api that can be used to query the cluster state as it\n\nis in zookeeper, and also to perform searches.\n\n\n\nthere is a single codelauf worker at any one time and this is enforced via zookeeper.\n\nin future we could use leader election to allow failover, or partition the repositories\n\ninto buckets spread across a cluster of workers.\n\n\n\nzookeeper is used for two things:\n\n 1. long lived configuration data:\n\n 1. list of repositories that need to be indexed\n\n 2. ephemeral state of worker process:\n\n 1. when it started\n\n 2. what it's doing\n\n\n\ncodelauf stores mirrored git repositories on its local filesystem,\n\nand also uses sqlite to track program state that should persist across application restarts,\n\nbut does not need to outlive the mirrored git repositories themselves.\n\n\n\nif the worker machine is lost, it can be recovered by starting a new one and re-mirroring\n\nthe git repositories named in zookeeper. this process is automatic.\n", "file_path": "README.md", "rank": 88, "score": 14432.573226509649 }, { "content": "### sync thread\n\n\n\n 1. find repo dir and check consistency against sqlite db:\n\n 2. if dir doesn't exist, clone it\n\n 3. if sqlite commit id doesn't exist in repo clear it\n\n 4. git fetch all to manually sync with remote\n\n 5. use revwalk to find all the commits back to the merge base(s):\n\n include in the revwalk all the repo's tracked branches in the branches table\n\n for each tracked branch:\n\n hide merge bases of (branch tip commit id, indexed commit id)\n\n 6. add all commits found by revwalk to commits work table in sqlite\n\n crash recovery: ignore duplicate row errors\n\n 7. 
scroll through commits work table and add each commit to elastic search\n\n mark row in work table as done\n\n periodically commit elasticsearch batch as we go\n\n all updates to search index are idempotent\n\n remove from search index any files deleted or renamed by a commit\n\n add to repo_files table any files that are added or updated\n\n if they're already in there then update the change commit id if newer\n\n crash recovery: no special logic needed. elasticsearch will eventually converge\n\n 8. when all rows done, save each branch tip commit id as indexed commit id in branches table\n\n and clear work table.\n\n update each branch commit id in zookeeper\n\n crash recovery: update branches table and delete work table rows in same transaction.\n\n zookeeper branch commit id is eventually consistent.\n\n 9. for each file in repo_files table, add to search index\n\n update repo_files indexed commit id as we go if change commit id is newer than indexed commit id\n\n crash recovery: it's monotonic. no special logic needed.\n\n\n", "file_path": "README.md", "rank": 89, "score": 14431.276811096248 }, { "content": "## Zookeeper file structure\n\n\n\n```\n\n/codelauf (root)\n\n /repositories\n\n /{43223-21998392-3232-123294}\n\n - type: git\n\n url: https://github.com/...\n\n branches:\n\n\t - name: master\n\n\t indexed_commit_id: blah\n\n last_indexed: Monday\n\n wanted_indexed: Tuesday\n\n /{09238-24234233-3242-432981}\n\n - type: hg?\n\n url: blah\n\n blah: blah\n\n /workers\n\n /0\n\n - start_time: Tuesday\n\n /repositories\n\n /{43223-21998392-3232-123294}\n\n\t - status: cloning\n\n\t - progress: 80%\n\n\t/{09238-24234233-3242-432981}\n\n\t - status: indexing_files\n\n\t - progress: 20%\n\n```\n\n\n\n## Frontend web API calls\n\n\n\n```\n\n/repositories index,get,patch,delete\n\n/workers index,get\n\n/search get\n\n```\n\n\n\n## Worker management API calls\n\n\n\nnote that there's no way to directly add or remove repos to a worker.\n\nthis is done via the worker watching zk /repositories at the moment.\n\nthis API is a bit redundant at the moment.\n\nin future it will be used to coordinate ownership of repos among workers,\n\n\n\n```\n\n/repositories index,get\n\n/repositories/{id}/sync post // trigger immediate fetch and sync\n\n/repositories/{id}/recreate post // clone fresh copy and sync\n\n/status get\n\n```\n\n\n\n\n\n## Worker design\n\n\n\n### start\n\n\n\n 1. open sqlite db\n\n 2. create top-level nodes in zookeeper under /workers\n\n 3. start watch on zk repositories node\n\n 4. create nodes per project as per rows in sqlite db\n\n 5. begin sync tasks:\n\n 1. loop over projects defined in sqlite db\n\n 2. for each watched remote start sync thread\n\n\n\n### adding new project to sync\n\n\n\n 1. create entry in sqlite\n\n 2. start new sync thread\n\n\n", "file_path": "README.md", "rank": 90, "score": 14428.39334677631 }, { "content": "zookeeper also holds the indexed commit id of each branch as a backup, so no re-indexing is needed\n\n\n\nif zookeeper is lost, its configuration will need to be recreated, and the codelauf worker\n\nrestarted.\n\n\n\nif the elasticsearch cluster is lost, the worker will need to re-index everything.\n\n\n\nit is recommended that if your repository setup is anything other than trivial, that you\n\ncreate a script to drive the web api to add the repos automatically.\n\n\n", "file_path": "README.md", "rank": 91, "score": 14416.802559492762 }, { "content": " the conditions stated in this License.\n\n\n\n 5. Submission of Contributions. 
Unless You explicitly state otherwise,\n\n any Contribution intentionally submitted for inclusion in the Work\n\n by You to the Licensor shall be under the terms and conditions of\n\n this License, without any additional terms or conditions.\n\n Notwithstanding the above, nothing herein shall supersede or modify\n\n the terms of any separate license agreement you may have executed\n\n with Licensor regarding such Contributions.\n\n\n\n 6. Trademarks. This License does not grant permission to use the trade\n\n names, trademarks, service marks, or product names of the Licensor,\n\n except as required for reasonable and customary use in describing the\n\n origin of the Work and reproducing the content of the NOTICE file.\n\n\n\n 7. Disclaimer of Warranty. Unless required by applicable law or\n\n agreed to in writing, Licensor provides the Work (and each\n\n Contributor provides its Contributions) on an \"AS IS\" BASIS,\n\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n\n implied, including, without limitation, any warranties or conditions\n\n of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n\n PARTICULAR PURPOSE. You are solely responsible for determining the\n\n appropriateness of using or redistributing the Work and assume any\n", "file_path": "LICENSE.md", "rank": 92, "score": 14408.72157812261 }, { "content": " APPENDIX: How to apply the Apache License to your work.\n\n\n\n To apply the Apache License to your work, attach the following\n\n boilerplate notice, with the fields enclosed by brackets \"{}\"\n\n replaced with your own identifying information. (Don't include\n\n the brackets!) The text should be enclosed in the appropriate\n\n comment syntax for the file format. We also recommend that a\n\n file or class name and description of purpose be included on the\n\n same \"printed page\" as the copyright notice for easier\n\n identification within third-party archives.\n\n\n\n Copyright {yyyy} {name of copyright owner}\n\n\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n\n you may not use this file except in compliance with the License.\n\n You may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\n Unless required by applicable law or agreed to in writing, software\n\n distributed under the License is distributed on an \"AS IS\" BASIS,\n\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n See the License for the specific language governing permissions and\n\n limitations under the License.\n\n\n", "file_path": "LICENSE.md", "rank": 93, "score": 14408.293924288757 }, { "content": " stating that You changed the files; and\n\n\n\n (c) You must retain, in the Source form of any Derivative Works\n\n that You distribute, all copyright, patent, trademark, and\n\n attribution notices from the Source form of the Work,\n\n excluding those notices that do not pertain to any part of\n\n the Derivative Works; and\n\n\n\n (d) If the Work includes a \"NOTICE\" text file as part of its\n\n distribution, then any Derivative Works that You distribute must\n\n include a readable copy of the attribution notices contained\n\n within such NOTICE file, excluding those notices that do not\n\n pertain to any part of the Derivative Works, in at least one\n\n of the following places: within a NOTICE text file distributed\n\n as part of the Derivative Works; within the Source form or\n\n documentation, if provided along with the Derivative Works; or,\n\n within a display generated by the 
Derivative Works, if and\n\n wherever such third-party notices normally appear. The contents\n\n of the NOTICE file are for informational purposes only and\n\n do not modify the License. You may add Your own attribution\n\n notices within Derivative Works that You distribute, alongside\n\n or as an addendum to the NOTICE text from the Work, provided\n\n that such additional attribution notices cannot be construed\n\n as modifying the License.\n\n\n\n You may add Your own copyright statement to Your modifications and\n\n may provide additional or different license terms and conditions\n\n for use, reproduction, or distribution of Your modifications, or\n\n for any such Derivative Works as a whole, provided Your use,\n\n reproduction, and distribution of the Work otherwise complies with\n", "file_path": "LICENSE.md", "rank": 94, "score": 14407.167036321993 }, { "content": " risks associated with Your exercise of permissions under this License.\n\n\n\n 8. Limitation of Liability. In no event and under no legal theory,\n\n whether in tort (including negligence), contract, or otherwise,\n\n unless required by applicable law (such as deliberate and grossly\n\n negligent acts) or agreed to in writing, shall any Contributor be\n\n liable to You for damages, including any direct, indirect, special,\n\n incidental, or consequential damages of any character arising as a\n\n result of this License or out of the use or inability to use the\n\n Work (including but not limited to damages for loss of goodwill,\n\n work stoppage, computer failure or malfunction, or any and all\n\n other commercial damages or losses), even if such Contributor\n\n has been advised of the possibility of such damages.\n\n\n\n 9. Accepting Warranty or Additional Liability. While redistributing\n\n the Work or Derivative Works thereof, You may choose to offer,\n\n and charge a fee for, acceptance of support, warranty, indemnity,\n\n or other liability obligations and/or rights consistent with this\n\n License. However, in accepting such obligations, You may act only\n\n on Your own behalf and on Your sole responsibility, not on behalf\n\n of any other Contributor, and only if You agree to indemnify,\n\n defend, and hold each Contributor harmless for any liability\n\n incurred by, or claims asserted against, such Contributor by reason\n\n of your accepting any such warranty or additional liability.\n\n\n\n END OF TERMS AND CONDITIONS\n\n\n", "file_path": "LICENSE.md", "rank": 95, "score": 14406.123675523782 }, { "content": " subsequently incorporated within the Work.\n\n\n\n 2. Grant of Copyright License. Subject to the terms and conditions of\n\n this License, each Contributor hereby grants to You a perpetual,\n\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n\n copyright license to reproduce, prepare Derivative Works of,\n\n publicly display, publicly perform, sublicense, and distribute the\n\n Work and such Derivative Works in Source or Object form.\n\n\n\n 3. Grant of Patent License. 
Subject to the terms and conditions of\n\n this License, each Contributor hereby grants to You a perpetual,\n\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n\n (except as stated in this section) patent license to make, have made,\n\n use, offer to sell, sell, import, and otherwise transfer the Work,\n\n where such license applies only to those patent claims licensable\n\n by such Contributor that are necessarily infringed by their\n\n Contribution(s) alone or by combination of their Contribution(s)\n\n with the Work to which such Contribution(s) was submitted. If You\n\n institute patent litigation against any entity (including a\n\n cross-claim or counterclaim in a lawsuit) alleging that the Work\n\n or a Contribution incorporated within the Work constitutes direct\n\n or contributory patent infringement, then any patent licenses\n\n granted to You under this License for that Work shall terminate\n\n as of the date such litigation is filed.\n\n\n\n 4. Redistribution. You may reproduce and distribute copies of the\n\n Work or Derivative Works thereof in any medium, with or without\n\n modifications, and in Source or Object form, provided that You\n\n meet the following conditions:\n\n\n\n (a) You must give any other recipients of the Work or\n\n Derivative Works a copy of this License; and\n\n\n\n (b) You must cause any modified files to carry prominent notices\n", "file_path": "LICENSE.md", "rank": 96, "score": 14405.993573790996 }, { "content": "\n\n\n\n Apache License\n\n Version 2.0, January 2004\n\n http://www.apache.org/licenses/\n\n\n\n TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n\n\n 1. Definitions.\n\n\n\n \"License\" shall mean the terms and conditions for use, reproduction,\n\n and distribution as defined by Sections 1 through 9 of this document.\n\n\n\n \"Licensor\" shall mean the copyright owner or entity authorized by\n\n the copyright owner that is granting the License.\n\n\n\n \"Legal Entity\" shall mean the union of the acting entity and all\n\n other entities that control, are controlled by, or are under common\n\n control with that entity. 
For the purposes of this definition,\n\n \"control\" means (i) the power, direct or indirect, to cause the\n\n direction or management of such entity, whether by contract or\n\n otherwise, or (ii) ownership of fifty percent (50%) or more of the\n\n outstanding shares, or (iii) beneficial ownership of such entity.\n\n\n\n \"You\" (or \"Your\") shall mean an individual or Legal Entity\n\n exercising permissions granted by this License.\n\n\n\n \"Source\" form shall mean the preferred form for making modifications,\n\n including but not limited to software source code, documentation\n\n source, and configuration files.\n\n\n\n \"Object\" form shall mean any form resulting from mechanical\n\n transformation or translation of a Source form, including but\n\n not limited to compiled object code, generated documentation,\n\n and conversions to other media types.\n\n\n\n \"Work\" shall mean the work of authorship, whether in Source or\n\n Object form, made available under the License, as indicated by a\n\n copyright notice that is included in or attached to the work\n", "file_path": "LICENSE.md", "rank": 97, "score": 14405.67861029717 }, { "content": " (an example is provided in the Appendix below).\n\n\n\n \"Derivative Works\" shall mean any work, whether in Source or Object\n\n form, that is based on (or derived from) the Work and for which the\n\n editorial revisions, annotations, elaborations, or other modifications\n\n represent, as a whole, an original work of authorship. For the purposes\n\n of this License, Derivative Works shall not include works that remain\n\n separable from, or merely link (or bind by name) to the interfaces of,\n\n the Work and Derivative Works thereof.\n\n\n\n \"Contribution\" shall mean any work of authorship, including\n\n the original version of the Work and any modifications or additions\n\n to that Work or Derivative Works thereof, that is intentionally\n\n submitted to Licensor for inclusion in the Work by the copyright owner\n\n or by an individual or Legal Entity authorized to submit on behalf of\n\n the copyright owner. 
For the purposes of this definition, \"submitted\"\n\n means any form of electronic, verbal, or written communication sent\n\n to the Licensor or its representatives, including but not limited to\n\n communication on electronic mailing lists, source code control systems,\n\n and issue tracking systems that are managed by, or on behalf of, the\n\n Licensor for the purpose of discussing and improving the Work, but\n\n excluding communication that is conspicuously marked or otherwise\n\n designated in writing by the copyright owner as \"Not a Contribution.\"\n\n\n\n \"Contributor\" shall mean Licensor and any individual or Legal Entity\n\n on behalf of whom a Contribution has been received by Licensor and\n", "file_path": "LICENSE.md", "rank": 98, "score": 14402.862120801092 }, { "content": " migrator.register(Box::new(CreateBranchesTable));\n\n migrator.register(Box::new(CreateCommitsTable));\n\n migrator.register(Box::new(CreateFilesTable));\n\n\n\n migrator.up(4);\n\n assert_eq!(migrator.current_version(), Some(4));\n\n }\n\n\n\n pub fn find_repo_by_remote(&self, remote: &String) -> RepoResult<Option<Repository>> {\n\n let mut stmt = try!(self.conn.prepare(\"SELECT * FROM repositories WHERE uri = ?\").map_err(|e| RepoError::SqlError(e)));\n\n let mut rows = try!(stmt.query(&[remote]));\n\n\n\n match rows.next() {\n\n None => Ok(None),\n\n Some(row_result) => {\n\n let row = try!(row_result);\n\n Repository::new_from_sql_row(&row).map(|r| Some(r))\n\n }\n\n }\n\n }\n", "file_path": "src/db.rs", "rank": 99, "score": 28.069335251287118 } ]
Rust
src/lib.rs
mihail-milev/pam_blox
05086179dca40a5c46343dab814906c0fdc0cda4
#![allow(non_camel_case_types)] include!("pam_appl.rs"); use std::ffi::{CString, CStr}; use std::os::raw::{c_int, c_char}; use std::ptr; use std::process::Command; use regex::Regex; use std::fs; use std::os::linux::fs::MetadataExt; use std::fs::File; use std::io::{BufRead, BufReader}; #[no_mangle] pub extern fn pam_sm_authenticate(pamh: *mut pam_handle_t, _flags: c_int, _argc: c_int, _argv: *const *const c_char) -> u32 { let mut username : *const c_char = ptr::null(); let prompt = match CString::new("Username:") { Ok(s) => s, Err(e) => { eprintln!("Unable to convert prompt to C-type string: {}", e); return PAM_AUTH_ERR; }, }; let get_user_result = unsafe { pam_get_user(pamh, &mut username, prompt.as_ptr()) }; if get_user_result != (PAM_SUCCESS as i32) || username == ptr::null() { return PAM_AUTH_ERR; } let username_cstr = unsafe { CStr::from_ptr(username) }; let username_str = match username_cstr.to_str() { Ok(s) => s, Err(e) => { eprintln!("Unable to convert username to Rust-type string: {}", e); return PAM_AUTH_ERR; }, }; let bdaddr = match get_device_id_from_users_file(username_str, "/etc/blox_users.conf") { Some(a) => a, None => { return PAM_AUTH_ERR; }, }; println!("Authenticating {:?} using device ID \"{}\"", username_str, bdaddr); let check_result = read_bluetooth_signal_strength_and_decide(&bdaddr, -5); if check_result { return PAM_SUCCESS; } return PAM_AUTH_ERR; } #[no_mangle] pub extern fn pam_sm_setcred(_pamh: *mut pam_handle_t, _flags: c_int, _argc: c_int, _argv: *const *const c_char) -> u32 { return PAM_SUCCESS; } fn get_device_id_from_users_file(username: &str, filename: &str) -> Option<String> { let meta = match fs::metadata(filename) { Ok(m) => m, Err(e) => { eprintln!("Unable to fetch information for file {}: {}", filename, e); return None; }, }; if meta.st_uid() != 0 || meta.st_gid() != 0 { eprintln!("The file {} is not owned by root:root", filename); return None; } if (meta.st_mode() & 3967) != 256 { eprintln!("The file {} must be readable only by root", filename); return None; } let uname_re = match Regex::new("^\"?([a-zA-Z0-9].*?)\"?$") { Ok(r) => r, Err(e) => { eprintln!("Unable to compile username regular expression: {}", e); return None; }, }; let mat = match uname_re.captures(username) { Some(m) => m, None => { eprintln!("Invalid username format supplied: {}", username); return None; }, }; let uname = match mat.get(1) { Some(u) => u.as_str(), None => { eprintln!("Empty username supplied: {}", username); return None; }, }; let f = match File::open(filename) { Ok(f) => f, Err(e) => { eprintln!("Unable to open file {}: {}", filename, e); return None; }, }; let reader = BufReader::new(f); let bdre = match Regex::new(r"^(?:[0-9A-F]{2})(?::[0-9A-F]{2}){5}$") { Ok(r) => r, Err(e) => { eprintln!("Unable to create BT address regular expression: {}", e); return None; }, }; let mut user_found = false; for line in reader.lines() { let ln_text = match line { Ok(l) => l, Err(_e) => continue, }; let items : Vec<&str> = ln_text.split('\t').collect(); if items.len() < 2 { continue; } if items[0] == uname { user_found = true; if !bdre.is_match(items[1]) { eprintln!("User {} found, but the supplied BT address ({}) is not valid, skipping ...", items[0], items[1]); continue; } return Some(String::from(items[1])); } } if !user_found { eprintln!("User {} not found in {}", uname, filename); } return None; } fn read_bluetooth_signal_strength_and_decide(bdaddr: &str, threshold: i32) -> bool { let cmd = format!("hcitool rssi {}", bdaddr); let error_text = format!("Unable to read RSSI value 
for {}", bdaddr); let rssi_output = match Command::new("sh").arg("-c").arg(cmd).output() { Ok(o) => o.stdout, Err(e) => { eprintln!("{}: {}", &error_text, e); return false; }, }; let rssi_output_str = match std::str::from_utf8(&rssi_output) { Ok(s) => s, Err(e) => { eprintln!("Unable to convert output command to UTF-8: {}", e); return false; }, }; let re = match Regex::new(r"RSSI return value: (-?\d+)") { Ok(r) => r, Err(e) => { eprintln!("Unable to create regular expression for parsing command output: {}", e); return false; }, }; let mut val_found = false; for mat in re.captures_iter(rssi_output_str) { let val = match mat.get(1) { Some(v) => v, None => continue, }; let val_i32 = match val.as_str().parse::<i32>() { Ok(v) => v, Err(_e) => continue, }; if val_i32 >= threshold { println!("Success: signal strength {} is above or equal to threshold {}", val_i32, threshold); return true; } val_found = true; } if val_found { println!("Device not close enough!"); } else { println!("Device not connected!"); } return false; } #[cfg(test)] mod tests { #[test] fn it_works() { assert_eq!(2 + 2, 4); } }
#![allow(non_camel_case_types)] include!("pam_appl.rs"); use std::ffi::{CString, CStr}; use std::os::raw::{c_int, c_char}; use std::ptr; use std::process::Command; use regex::Regex; use std::fs; use std::os::linux::fs::MetadataExt; use std::fs::File; use std::io::{BufRead, BufReader}; #[no_mangle] pub extern fn pam_sm_authenticate(pamh: *mut pam_handle_t, _flags: c_int, _argc: c_int, _argv: *const *const c_char) -> u32 { let mut username : *const c_char = ptr::null(); let prompt = match CString::new("Username:") { Ok(s) => s, Err(e) => { eprintln!("Unable to convert prompt to C-type string: {}", e); return PAM_AUTH_ERR; }, }; let get_user_result = unsafe { pam_get_user(pamh, &mut username, prompt.as_ptr()) }; if get_user_result != (PAM_SUCCESS as i32) || username == ptr::null() { return PAM_AUTH_ERR; } let username_cstr = unsafe { CStr::from_ptr(username) }; let username_str = match username_cstr.to_str() { Ok(s) => s, Err(e) => { eprintln!("Unable to convert username to Rust-type string: {}", e); return PAM_AUTH_ERR; }, }; let bdaddr = match get_device_id_from_users_file(username_str, "/etc/blox_users.conf") { Some(a) => a, None => { return PAM_AUTH_ERR; }, }; println!("Authenticating {:?} using device ID \"{}\"", username_str, bdaddr); let check_result = read_bluetooth_signal_strength_and_decide(&bdaddr, -5); if check_result { return PAM_SUCCESS; } return PAM_AUTH_ERR; } #[no_mangle] pub extern fn pam_sm_setcred(_pamh: *mut pam_handle_t, _flags: c_int, _argc: c_int, _argv: *const *const c_char) -> u32 { return PAM_SUCCESS; }
fn read_bluetooth_signal_strength_and_decide(bdaddr: &str, threshold: i32) -> bool { let cmd = format!("hcitool rssi {}", bdaddr); let error_text = format!("Unable to read RSSI value for {}", bdaddr); let rssi_output = match Command::new("sh").arg("-c").arg(cmd).output() { Ok(o) => o.stdout, Err(e) => { eprintln!("{}: {}", &error_text, e); return false; }, }; let rssi_output_str = match std::str::from_utf8(&rssi_output) { Ok(s) => s, Err(e) => { eprintln!("Unable to convert output command to UTF-8: {}", e); return false; }, }; let re = match Regex::new(r"RSSI return value: (-?\d+)") { Ok(r) => r, Err(e) => { eprintln!("Unable to create regular expression for parsing command output: {}", e); return false; }, }; let mut val_found = false; for mat in re.captures_iter(rssi_output_str) { let val = match mat.get(1) { Some(v) => v, None => continue, }; let val_i32 = match val.as_str().parse::<i32>() { Ok(v) => v, Err(_e) => continue, }; if val_i32 >= threshold { println!("Success: signal strength {} is above or equal to threshold {}", val_i32, threshold); return true; } val_found = true; } if val_found { println!("Device not close enough!"); } else { println!("Device not connected!"); } return false; } #[cfg(test)] mod tests { #[test] fn it_works() { assert_eq!(2 + 2, 4); } }
fn get_device_id_from_users_file(username: &str, filename: &str) -> Option<String> { let meta = match fs::metadata(filename) { Ok(m) => m, Err(e) => { eprintln!("Unable to fetch information for file {}: {}", filename, e); return None; }, }; if meta.st_uid() != 0 || meta.st_gid() != 0 { eprintln!("The file {} is not owned by root:root", filename); return None; } if (meta.st_mode() & 3967) != 256 { eprintln!("The file {} must be readable only by root", filename); return None; } let uname_re = match Regex::new("^\"?([a-zA-Z0-9].*?)\"?$") { Ok(r) => r, Err(e) => { eprintln!("Unable to compile username regular expression: {}", e); return None; }, }; let mat = match uname_re.captures(username) { Some(m) => m, None => { eprintln!("Invalid username format supplied: {}", username); return None; }, }; let uname = match mat.get(1) { Some(u) => u.as_str(), None => { eprintln!("Empty username supplied: {}", username); return None; }, }; let f = match File::open(filename) { Ok(f) => f, Err(e) => { eprintln!("Unable to open file {}: {}", filename, e); return None; }, }; let reader = BufReader::new(f); let bdre = match Regex::new(r"^(?:[0-9A-F]{2})(?::[0-9A-F]{2}){5}$") { Ok(r) => r, Err(e) => { eprintln!("Unable to create BT address regular expression: {}", e); return None; }, }; let mut user_found = false; for line in reader.lines() { let ln_text = match line { Ok(l) => l, Err(_e) => continue, }; let items : Vec<&str> = ln_text.split('\t').collect(); if items.len() < 2 { continue; } if items[0] == uname { user_found = true; if !bdre.is_match(items[1]) { eprintln!("User {} found, but the supplied BT address ({}) is not valid, skipping ...", items[0], items[1]); continue; } return Some(String::from(items[1])); } } if !user_found { eprintln!("User {} not found in {}", uname, filename); } return None; }
function_block-full_function
[ { "content": "#[test]\n\nfn bindgen_test_layout_pam_response() {\n\n assert_eq!(\n\n ::std::mem::size_of::<pam_response>(),\n\n 16usize,\n\n concat!(\"Size of: \", stringify!(pam_response))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<pam_response>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(pam_response))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<pam_response>())).resp as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(pam_response),\n\n \"::\",\n\n stringify!(resp)\n\n )\n", "file_path": "src/pam_appl.rs", "rank": 2, "score": 16541.488982077124 }, { "content": "#[test]\n\nfn bindgen_test_layout_pam_conv() {\n\n assert_eq!(\n\n ::std::mem::size_of::<pam_conv>(),\n\n 16usize,\n\n concat!(\"Size of: \", stringify!(pam_conv))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<pam_conv>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(pam_conv))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<pam_conv>())).conv as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(pam_conv),\n\n \"::\",\n\n stringify!(conv)\n\n )\n", "file_path": "src/pam_appl.rs", "rank": 3, "score": 16541.488982077124 }, { "content": "#[test]\n\nfn bindgen_test_layout_pam_message() {\n\n assert_eq!(\n\n ::std::mem::size_of::<pam_message>(),\n\n 16usize,\n\n concat!(\"Size of: \", stringify!(pam_message))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<pam_message>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(pam_message))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<pam_message>())).msg_style as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(pam_message),\n\n \"::\",\n\n stringify!(msg_style)\n\n )\n", "file_path": "src/pam_appl.rs", "rank": 4, "score": 16541.488982077124 }, { "content": "#[test]\n\nfn bindgen_test_layout_pam_xauth_data() {\n\n assert_eq!(\n\n ::std::mem::size_of::<pam_xauth_data>(),\n\n 32usize,\n\n concat!(\"Size of: \", stringify!(pam_xauth_data))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<pam_xauth_data>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(pam_xauth_data))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<pam_xauth_data>())).namelen as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(pam_xauth_data),\n\n \"::\",\n\n stringify!(namelen)\n\n )\n", "file_path": "src/pam_appl.rs", "rank": 5, "score": 15951.203378705055 }, { "content": "pub const PAM_ERROR_MSG: u32 = 3;\n\npub const PAM_TEXT_INFO: u32 = 4;\n\npub const PAM_RADIO_TYPE: u32 = 5;\n\npub const PAM_BINARY_PROMPT: u32 = 7;\n\npub const PAM_MAX_NUM_MSG: u32 = 32;\n\npub const PAM_MAX_MSG_SIZE: u32 = 512;\n\npub const PAM_MAX_RESP_SIZE: u32 = 512;\n\npub const PAM_AUTHTOK_RECOVER_ERR: u32 = 21;\n\n#[repr(C)]\n\n#[derive(Debug, Copy, Clone)]\n\npub struct pam_handle {\n\n _unused: [u8; 0],\n\n}\n\npub type pam_handle_t = pam_handle;\n\nextern \"C\" {\n\n pub fn pam_set_item(\n\n pamh: *mut pam_handle_t,\n\n item_type: ::std::os::raw::c_int,\n\n item: *const ::std::os::raw::c_void,\n\n ) -> ::std::os::raw::c_int;\n", "file_path": "src/pam_appl.rs", "rank": 12, "score": 7.444207485294496 }, { "content": "pub const PAM_CRED_EXPIRED: u32 = 16;\n\npub const PAM_CRED_ERR: u32 = 17;\n\npub const PAM_NO_MODULE_DATA: u32 = 18;\n\npub const PAM_CONV_ERR: u32 = 19;\n\npub const PAM_AUTHTOK_ERR: u32 = 20;\n\npub const PAM_AUTHTOK_RECOVERY_ERR: u32 = 21;\n\npub const PAM_AUTHTOK_LOCK_BUSY: 
u32 = 22;\n\npub const PAM_AUTHTOK_DISABLE_AGING: u32 = 23;\n\npub const PAM_TRY_AGAIN: u32 = 24;\n\npub const PAM_IGNORE: u32 = 25;\n\npub const PAM_ABORT: u32 = 26;\n\npub const PAM_AUTHTOK_EXPIRED: u32 = 27;\n\npub const PAM_MODULE_UNKNOWN: u32 = 28;\n\npub const PAM_BAD_ITEM: u32 = 29;\n\npub const PAM_CONV_AGAIN: u32 = 30;\n\npub const PAM_INCOMPLETE: u32 = 31;\n\npub const _PAM_RETURN_VALUES: u32 = 32;\n\npub const PAM_SILENT: u32 = 32768;\n\npub const PAM_DISALLOW_NULL_AUTHTOK: u32 = 1;\n\npub const PAM_ESTABLISH_CRED: u32 = 2;\n", "file_path": "src/pam_appl.rs", "rank": 13, "score": 7.404103701522591 }, { "content": "/* automatically generated by rust-bindgen 0.56.0 */\n\n\n\npub const __LINUX_PAM__: u32 = 1;\n\npub const __LINUX_PAM_MINOR__: u32 = 0;\n\npub const PAM_SUCCESS: u32 = 0;\n\npub const PAM_OPEN_ERR: u32 = 1;\n\npub const PAM_SYMBOL_ERR: u32 = 2;\n\npub const PAM_SERVICE_ERR: u32 = 3;\n\npub const PAM_SYSTEM_ERR: u32 = 4;\n\npub const PAM_BUF_ERR: u32 = 5;\n\npub const PAM_PERM_DENIED: u32 = 6;\n\npub const PAM_AUTH_ERR: u32 = 7;\n\npub const PAM_CRED_INSUFFICIENT: u32 = 8;\n\npub const PAM_AUTHINFO_UNAVAIL: u32 = 9;\n\npub const PAM_USER_UNKNOWN: u32 = 10;\n\npub const PAM_MAXTRIES: u32 = 11;\n\npub const PAM_NEW_AUTHTOK_REQD: u32 = 12;\n\npub const PAM_ACCT_EXPIRED: u32 = 13;\n\npub const PAM_SESSION_ERR: u32 = 14;\n\npub const PAM_CRED_UNAVAIL: u32 = 15;\n", "file_path": "src/pam_appl.rs", "rank": 14, "score": 7.016435515493441 }, { "content": "pub const PAM_DELETE_CRED: u32 = 4;\n\npub const PAM_REINITIALIZE_CRED: u32 = 8;\n\npub const PAM_REFRESH_CRED: u32 = 16;\n\npub const PAM_CHANGE_EXPIRED_AUTHTOK: u32 = 32;\n\npub const PAM_SERVICE: u32 = 1;\n\npub const PAM_USER: u32 = 2;\n\npub const PAM_TTY: u32 = 3;\n\npub const PAM_RHOST: u32 = 4;\n\npub const PAM_CONV: u32 = 5;\n\npub const PAM_AUTHTOK: u32 = 6;\n\npub const PAM_OLDAUTHTOK: u32 = 7;\n\npub const PAM_RUSER: u32 = 8;\n\npub const PAM_USER_PROMPT: u32 = 9;\n\npub const PAM_FAIL_DELAY: u32 = 10;\n\npub const PAM_XDISPLAY: u32 = 11;\n\npub const PAM_XAUTHDATA: u32 = 12;\n\npub const PAM_AUTHTOK_TYPE: u32 = 13;\n\npub const PAM_DATA_SILENT: u32 = 1073741824;\n\npub const PAM_PROMPT_ECHO_OFF: u32 = 1;\n\npub const PAM_PROMPT_ECHO_ON: u32 = 2;\n", "file_path": "src/pam_appl.rs", "rank": 15, "score": 6.591713599238669 }, { "content": "}\n\nextern \"C\" {\n\n pub fn pam_get_item(\n\n pamh: *const pam_handle_t,\n\n item_type: ::std::os::raw::c_int,\n\n item: *mut *const ::std::os::raw::c_void,\n\n ) -> ::std::os::raw::c_int;\n\n}\n\nextern \"C\" {\n\n pub fn pam_get_user(\n\n pamh: *mut pam_handle_t,\n\n user: *mut *const ::std::os::raw::c_char,\n\n prompt: *const ::std::os::raw::c_char,\n\n ) -> ::std::os::raw::c_int;\n\n}\n\nextern \"C\" {\n\n pub fn pam_strerror(\n\n pamh: *mut pam_handle_t,\n\n errnum: ::std::os::raw::c_int,\n\n ) -> *const ::std::os::raw::c_char;\n", "file_path": "src/pam_appl.rs", "rank": 16, "score": 5.739207616884048 }, { "content": " );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<pam_response>())).resp_retcode as *const _ as usize },\n\n 8usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(pam_response),\n\n \"::\",\n\n stringify!(resp_retcode)\n\n )\n\n );\n\n}\n\n#[repr(C)]\n\n#[derive(Debug, Copy, Clone)]\n\npub struct pam_conv {\n\n pub conv: ::std::option::Option<\n\n unsafe extern \"C\" fn(\n\n num_msg: ::std::os::raw::c_int,\n\n msg: *mut *const pam_message,\n\n resp: *mut *mut pam_response,\n\n appdata_ptr: *mut ::std::os::raw::c_void,\n\n ) -> 
::std::os::raw::c_int,\n\n >,\n\n pub appdata_ptr: *mut ::std::os::raw::c_void,\n\n}\n", "file_path": "src/pam_appl.rs", "rank": 17, "score": 5.461653860698852 }, { "content": "}\n\nextern \"C\" {\n\n pub fn pam_putenv(\n\n pamh: *mut pam_handle_t,\n\n name_value: *const ::std::os::raw::c_char,\n\n ) -> ::std::os::raw::c_int;\n\n}\n\nextern \"C\" {\n\n pub fn pam_getenv(\n\n pamh: *mut pam_handle_t,\n\n name: *const ::std::os::raw::c_char,\n\n ) -> *const ::std::os::raw::c_char;\n\n}\n\nextern \"C\" {\n\n pub fn pam_getenvlist(pamh: *mut pam_handle_t) -> *mut *mut ::std::os::raw::c_char;\n\n}\n\nextern \"C\" {\n\n pub fn pam_fail_delay(\n\n pamh: *mut pam_handle_t,\n\n musec_delay: ::std::os::raw::c_uint,\n\n ) -> ::std::os::raw::c_int;\n\n}\n\n#[repr(C)]\n\n#[derive(Debug, Copy, Clone)]\n\npub struct pam_message {\n\n pub msg_style: ::std::os::raw::c_int,\n\n pub msg: *const ::std::os::raw::c_char,\n\n}\n\n#[test]\n", "file_path": "src/pam_appl.rs", "rank": 18, "score": 5.187990662209821 }, { "content": "}\n\nextern \"C\" {\n\n pub fn pam_setcred(\n\n pamh: *mut pam_handle_t,\n\n flags: ::std::os::raw::c_int,\n\n ) -> ::std::os::raw::c_int;\n\n}\n\nextern \"C\" {\n\n pub fn pam_acct_mgmt(\n\n pamh: *mut pam_handle_t,\n\n flags: ::std::os::raw::c_int,\n\n ) -> ::std::os::raw::c_int;\n\n}\n\nextern \"C\" {\n\n pub fn pam_open_session(\n\n pamh: *mut pam_handle_t,\n\n flags: ::std::os::raw::c_int,\n\n ) -> ::std::os::raw::c_int;\n\n}\n\nextern \"C\" {\n", "file_path": "src/pam_appl.rs", "rank": 19, "score": 5.082282712805714 }, { "content": "extern \"C\" {\n\n pub fn pam_start_confdir(\n\n service_name: *const ::std::os::raw::c_char,\n\n user: *const ::std::os::raw::c_char,\n\n pam_conversation: *const pam_conv,\n\n confdir: *const ::std::os::raw::c_char,\n\n pamh: *mut *mut pam_handle_t,\n\n ) -> ::std::os::raw::c_int;\n\n}\n\nextern \"C\" {\n\n pub fn pam_end(\n\n pamh: *mut pam_handle_t,\n\n pam_status: ::std::os::raw::c_int,\n\n ) -> ::std::os::raw::c_int;\n\n}\n\nextern \"C\" {\n\n pub fn pam_authenticate(\n\n pamh: *mut pam_handle_t,\n\n flags: ::std::os::raw::c_int,\n\n ) -> ::std::os::raw::c_int;\n", "file_path": "src/pam_appl.rs", "rank": 20, "score": 4.691254983031262 }, { "content": " );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<pam_xauth_data>())).data as *const _ as usize },\n\n 24usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(pam_xauth_data),\n\n \"::\",\n\n stringify!(data)\n\n )\n\n );\n\n}\n\nextern \"C\" {\n\n pub fn pam_start(\n\n service_name: *const ::std::os::raw::c_char,\n\n user: *const ::std::os::raw::c_char,\n\n pam_conversation: *const pam_conv,\n\n pamh: *mut *mut pam_handle_t,\n\n ) -> ::std::os::raw::c_int;\n\n}\n", "file_path": "src/pam_appl.rs", "rank": 21, "score": 4.488457794036958 }, { "content": " pub fn pam_close_session(\n\n pamh: *mut pam_handle_t,\n\n flags: ::std::os::raw::c_int,\n\n ) -> ::std::os::raw::c_int;\n\n}\n\nextern \"C\" {\n\n pub fn pam_chauthtok(\n\n pamh: *mut pam_handle_t,\n\n flags: ::std::os::raw::c_int,\n\n ) -> ::std::os::raw::c_int;\n\n}\n", "file_path": "src/pam_appl.rs", "rank": 22, "score": 4.3949032645890185 }, { "content": " );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<pam_conv>())).appdata_ptr as *const _ as usize },\n\n 8usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(pam_conv),\n\n \"::\",\n\n stringify!(appdata_ptr)\n\n )\n\n );\n\n}\n\n#[repr(C)]\n\n#[derive(Debug, Copy, Clone)]\n\npub struct pam_xauth_data {\n\n pub namelen: ::std::os::raw::c_int,\n\n pub name: *mut 
::std::os::raw::c_char,\n\n pub datalen: ::std::os::raw::c_int,\n\n pub data: *mut ::std::os::raw::c_char,\n\n}\n", "file_path": "src/pam_appl.rs", "rank": 24, "score": 4.230668369678655 }, { "content": " );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<pam_message>())).msg as *const _ as usize },\n\n 8usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(pam_message),\n\n \"::\",\n\n stringify!(msg)\n\n )\n\n );\n\n}\n\n#[repr(C)]\n\n#[derive(Debug, Copy, Clone)]\n\npub struct pam_response {\n\n pub resp: *mut ::std::os::raw::c_char,\n\n pub resp_retcode: ::std::os::raw::c_int,\n\n}\n", "file_path": "src/pam_appl.rs", "rank": 25, "score": 3.982297681752663 }, { "content": "This is a small Linux PAM module written in Rust, which authenticates a given username by searching for a connected Bluetooth device. Depending on the pam.d configuration this module can be used as a 2FA or as a stand-alone authentication mechanism.\n\n\n\n# Important about security!\n\n\n\nThis module is not quite secure, as it checks for the signal strength of a BT device with a givem BT hardware address. BT hardware addresses can be spoofed, so be careful when using this! My intention to write it, is to stop writing 32 character passwords everytime I need sudo. So it is a good idea to configure PAM to use this module, only if the user is already logged in.\n\n\n\n# How to use?\n\n\n\nObtain the source code:\n\n\n\n $ git clone https://github.com/mihail-milev/pam_blox.git\n\n\n\nCompile the source code:\n\n\n\n $ cargo build --release\n\n\n\nCopy the library to your PAM modules folder, e.g.:\n\n\n\n $ sudo cp target/release/libpam_blox.so /usr/lib64/security/pam_blox.so\n\n\n\nCreate a blox configuration file:\n\n\n\n $ echo -e \"username\\t00:11:22:33:44:55\" | sudo tee /etc/blox_users.conf\n\n\n\nThe format is \"{username}TAB{BT_address}\"\n\n\n\nChange the configuration file's permissions:\n\n\n\n $ sudo chmod a-rwx,u+rw /etc/blox_users.conf\n\n $ sudo chown root:root /etc/blox_users.conf\n\n\n\nModify your PAM configuration accordingly, for example I modified /etc/pam.d/system-auth and added the following line in the beginning:\n\n\n\n auth [success=9 default=ignore] pam_blox.so\n\n\n\nThat's it.\n\n\n\n# TODO\n\n\n\n1. Make it possible to supply arguments, which can set another path for the configuration file, another BT threshold, etc.\n\n2. Do not execute the hcitool, but use a BT library directly.\n", "file_path": "README.md", "rank": 26, "score": 3.4651610620625566 }, { "content": " );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<pam_xauth_data>())).name as *const _ as usize },\n\n 8usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(pam_xauth_data),\n\n \"::\",\n\n stringify!(name)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<pam_xauth_data>())).datalen as *const _ as usize },\n\n 16usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(pam_xauth_data),\n\n \"::\",\n\n stringify!(datalen)\n\n )\n", "file_path": "src/pam_appl.rs", "rank": 28, "score": 1.7581602303891035 } ]
Rust
driver/src/command.rs
rise-lang/2021-CGO-artifact
f6c2c4e916810f734bb1983f93103269e620c931
use std::io::prelude::*; use std::process; use crate::*; pub fn setup(env: &Env) { if env.benchmark { let r = &env.target.remote; if !remote_output(&"mkdir", &vec!["-p", r.dir.to_str().unwrap()], &[], &r.dst, Path::new(".")) .expect("could not create remote directory").status.success() { panic!("could not create remote directory"); } } } pub type HostCommand = std::process::Command; pub struct TargetCommand { program: String, args: Vec<String>, env: Vec<(String, String)> } pub struct UploadCommand<'a> { host_path: &'a Path, remote_path: Option<&'a str>, } pub fn host_run<S: AsRef<ffi::OsStr>>(program: S) -> HostCommand { process::Command::new(program) } pub fn target_run<S: AsRef<str>>(program: S) -> TargetCommand { TargetCommand { program: program.as_ref().to_owned(), args: Vec::new(), env: Vec::new() } } pub fn upload_file<'a>(path: &'a Path) -> UploadCommand<'a> { UploadCommand { host_path: path, remote_path: None } } pub fn upload_file_to<'a>(host: &'a Path, remote: &'a str) -> UploadCommand<'a> { UploadCommand { host_path: host, remote_path: Some(remote) } } pub trait CommandExt { fn prompt(&self, env: &Env) -> ColoredString; fn output(&mut self, env: &Env) -> io::Result<process::Output>; #[must_use] fn log<W: Write>(&mut self, w: &mut W, env: &Env) -> Option<String> { let prompt = self.prompt(env); writeln!(w, "{}", prompt).unwrap(); println!("{}", prompt); self.log_no_prompt(w, env) } #[must_use] fn log_no_println<W: Write>(&mut self, w: &mut W, env: &Env) -> Option<String> { let prompt = self.prompt(env); writeln!(w, "{}", prompt).unwrap(); self.log_no_prompt(w, env) } #[must_use] fn log_no_prompt<W: Write>(&mut self, w: &mut W, env: &Env) -> Option<String> { match self.output(env) { Ok(output) => { if !output.status.success() { let s = format!("{} ({})", "failure".red(), output.status); println!("{}", s); writeln!(w, "{}", s).unwrap(); } let out = String::from_utf8_lossy(&output.stdout).into(); let err = String::from_utf8_lossy(&output.stderr); write!(w, "{}", out).unwrap(); if !err.is_empty() { write!(w, "!: {}", err).unwrap(); } if output.status.success() { Some(out) } else { None } } Err(error) => { let s = format!("{}: {}", "could not run command".red(), error); println!("{}", s); writeln!(w, "{}", s).unwrap(); None } } } } impl CommandExt for HostCommand { fn prompt(&self, _: &Env) -> ColoredString { format!("h> {:?}", self).blue() } fn output(&mut self, _: &Env) -> io::Result<process::Output> { self.output() } } impl CommandExt for TargetCommand { fn prompt(&self, _: &Env) -> ColoredString { format!("t>{:?} {:?}{:?}", FlatDbg(&self.env), self.program, FlatDbg(&self.args)).purple() } fn output(&mut self, env: &Env) -> io::Result<process::Output> { let r = &env.target.remote; remote_output(&self.program, &self.args, &self.env, &r.dst, &r.dir) } } struct FlatDbg<I: IntoIterator + Clone>(I); impl<I: IntoIterator<Item = E> + Clone, E: fmt::Debug> fmt::Debug for FlatDbg<I> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { for e in self.0.clone().into_iter() { write!(f, " {:?}", e)?; } Ok(()) } } impl<'a> CommandExt for UploadCommand<'a> { fn prompt(&self, env: &Env) -> ColoredString { let r = &env.target.remote; let remote_path = &match self.remote_path { Some(rp) => r.dir.join(rp), None => r.dir.join(self.host_path) }; format!("u> {:?} --> {:?}", self.host_path, remote_path).purple() } fn output(&mut self, env: &Env) -> io::Result<process::Output> { let r = &env.target.remote; let remote_path = &match self.remote_path { Some(rp) => r.dir.join(rp), None => 
r.dir.join(self.host_path) }; assert!(remote_output(&"mkdir", &vec!["-p", remote_path.parent().unwrap().to_str().unwrap()], &[], &r.dst, Path::new(".") ).expect("could not create upload directory").status.success()); assert!(remote_output(&"rm", &vec!["-rf", remote_path.to_str().unwrap()], &[], &r.dst, Path::new(".") ).expect("could not clear upload directory").status.success()); let mut cmd = process::Command::new("scp"); cmd.arg("-r") .args(&["-o", "ControlMaster=auto", "-o", "ControlPersist=1m"]) .arg(self.host_path) .arg(format!("scp://{}/{:?}", r.dst, remote_path)); cmd.output() } } fn remote_output<S: AsRef<str>>(program: &S, args: &[S], env: &[(S, S)], dst: &str, dir: &Path) -> io::Result<process::Output> { let mut r = process::Command::new("ssh"); r.args(&["-o", "ControlMaster=auto", "-o", "ControlPersist=1m"]) .arg(format!("ssh://{}", dst)); for (k, v) in env { r.arg("export").arg(format!("{}=\"{}\";", k.as_ref(), v.as_ref())); } let r = r.arg("cd").arg(dir).arg(";") .arg(program.as_ref()) .args(args.iter().map(|a| format!("\"{}\"", a.as_ref()))); r.output() } impl TargetCommand { pub fn arg<S: AsRef<str>>(&mut self, s: S) -> &mut TargetCommand { self.args.push(s.as_ref().to_owned()); self } pub fn args<I, S>(&mut self, i: I) -> &mut TargetCommand where I: IntoIterator<Item = S>, S: AsRef<str> { self.args.extend(i.into_iter().map(|a| a.as_ref().to_owned())); self } pub fn env<K, V>(&mut self, k: K, v: V) -> &mut TargetCommand where K: AsRef<str> , V: AsRef<str> { self.env.push((k.as_ref().to_owned(), v.as_ref().to_owned())); self } pub fn envs<I, K, V>(&mut self, i: I) -> &mut TargetCommand where I: IntoIterator<Item = (K, V)>, K: AsRef<str>, V: AsRef<str> { self.env.extend(i.into_iter().map(|(k, v)| (k.as_ref().to_owned(), v.as_ref().to_owned()))); self } }
use std::io::prelude::*; use std::process; use crate::*; pub fn setup(env: &Env) { if env.benchmark { let r = &env.target.remote; if !remote_output(&"mkdir", &vec!["-p", r.dir.to_str().unwrap()], &[], &r.dst, Path::new(".")) .expect("could not create remote directory").status.success() { panic!("could not create remote directory"); } } } pub type HostCommand = std::process::Command; pub struct TargetCommand { program: String, args: Vec<String>, env: Vec<(String, String)> } pub struct UploadCommand<'a> { host_path: &'a Path, remote_path: Option<&'a str>, } pub fn host_run<S: AsRef<ffi::OsStr>>(program: S) -> HostCommand { process::Command::new(program) } pub fn target_run<S: AsRef<str>>(program: S) -> TargetCommand { TargetCommand { program: program.as_ref().to_owned(), args: Vec::new(), env: Vec::new() } } pub fn upload_file<'a>(path: &'a Path) -> UploadCommand<'a> { UploadCommand { host_path: path, remote_path: None } } pub fn upload_file_to<'a>(host: &'a Path, remote: &'a str) -> UploadCommand<'a> { UploadCommand { host_path: host, remote_path: Some(remote) } } pub trait CommandExt { fn prompt(&self, env: &Env) -> ColoredString; fn output(&mut self, env: &Env) -> io::Result<process::Output>; #[must_use] fn log<W: Write>(&mut self, w: &mut W, env: &Env) -> Option<String> { let prompt = self.prompt(env); writeln!(w, "{}", prompt).unwrap(); println!("{}", prompt); self.log_no_prompt(w, env) } #[must_use] fn log_no_println<W: Write>(&mut self, w: &mut W, env: &Env) -> Option<String> { let prompt = self.prompt(env); writeln!(w, "{}", prompt).unwrap(); self.log_no_prompt(w, env) } #[must_use] fn log_no_prompt<W: Write>(&mut self, w: &mut W, env: &Env) -> Option<String> { match self.output(env) { Ok(output) => { if !output.status.success() { let s = format!("{} ({})", "failure".red(), output.status); println!("{}", s); writeln!(w, "{}", s).unwrap(); } let out = String::from_utf8_lossy(&output.stdout).into(); let err = String::from_utf8_lossy(&output.stderr); write!(w, "{}", out).unwrap(); if !err.is_empty() { write!(w, "!: {}", err).unwrap(); } if output.status.success() { Some(out) } else { None } } Err(error) => { let s = format!("{}: {}", "could not run command".red(), error); println!("{}", s); writeln!(w, "{}", s).unwrap(); None } } } } impl CommandExt for HostCommand { fn prompt(&self, _: &Env) -> ColoredString { format!("h> {:?}", self).blue() } fn output(&mut self, _: &Env) -> io::Result<process::Output> { self.output() } } impl CommandExt for TargetCommand { fn prompt(&self, _: &Env) -> ColoredString { format!("t>{:?} {:?}{:?}", FlatDbg(&self.env), self.program, FlatDbg(&self.args)).purple() } fn output(&mut self, env: &Env) -> io::Result<process::Output> { let r = &env.target.remote; remote_output(&self.program, &self.args, &self.env, &r.dst, &r.dir) } } struct FlatDbg<I: IntoIterator + Clone>(I); impl<I: IntoIterator<Item = E> + Clone, E: fmt::Debug> fmt::Debug for FlatDbg<I> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { for e in self.0.clone().into_iter() { write!(f, " {:?}", e)?; } Ok(()) } } impl<'a> CommandExt for UploadCommand<'a> { fn prompt(&self, env: &Env) -> ColoredString { let r = &env.target.remote; let remote_path = &
; format!("u> {:?} --> {:?}", self.host_path, remote_path).purple() } fn output(&mut self, env: &Env) -> io::Result<process::Output> { let r = &env.target.remote; let remote_path = &match self.remote_path { Some(rp) => r.dir.join(rp), None => r.dir.join(self.host_path) }; assert!(remote_output(&"mkdir", &vec!["-p", remote_path.parent().unwrap().to_str().unwrap()], &[], &r.dst, Path::new(".") ).expect("could not create upload directory").status.success()); assert!(remote_output(&"rm", &vec!["-rf", remote_path.to_str().unwrap()], &[], &r.dst, Path::new(".") ).expect("could not clear upload directory").status.success()); let mut cmd = process::Command::new("scp"); cmd.arg("-r") .args(&["-o", "ControlMaster=auto", "-o", "ControlPersist=1m"]) .arg(self.host_path) .arg(format!("scp://{}/{:?}", r.dst, remote_path)); cmd.output() } } fn remote_output<S: AsRef<str>>(program: &S, args: &[S], env: &[(S, S)], dst: &str, dir: &Path) -> io::Result<process::Output> { let mut r = process::Command::new("ssh"); r.args(&["-o", "ControlMaster=auto", "-o", "ControlPersist=1m"]) .arg(format!("ssh://{}", dst)); for (k, v) in env { r.arg("export").arg(format!("{}=\"{}\";", k.as_ref(), v.as_ref())); } let r = r.arg("cd").arg(dir).arg(";") .arg(program.as_ref()) .args(args.iter().map(|a| format!("\"{}\"", a.as_ref()))); r.output() } impl TargetCommand { pub fn arg<S: AsRef<str>>(&mut self, s: S) -> &mut TargetCommand { self.args.push(s.as_ref().to_owned()); self } pub fn args<I, S>(&mut self, i: I) -> &mut TargetCommand where I: IntoIterator<Item = S>, S: AsRef<str> { self.args.extend(i.into_iter().map(|a| a.as_ref().to_owned())); self } pub fn env<K, V>(&mut self, k: K, v: V) -> &mut TargetCommand where K: AsRef<str> , V: AsRef<str> { self.env.push((k.as_ref().to_owned(), v.as_ref().to_owned())); self } pub fn envs<I, K, V>(&mut self, i: I) -> &mut TargetCommand where I: IntoIterator<Item = (K, V)>, K: AsRef<str>, V: AsRef<str> { self.env.extend(i.into_iter().map(|(k, v)| (k.as_ref().to_owned(), v.as_ref().to_owned()))); self } }
match self.remote_path { Some(rp) => r.dir.join(rp), None => r.dir.join(self.host_path) }
if_condition
[ { "content": "pub fn setup<F>(use_env: F) where F: FnOnce(&Env) {\n\n println!(\"{}\", \"-- setting environment up\".yellow());\n\n\n\n let opt = Opt::from_args();\n\n\n\n let target = &Target::load(&opt.target);\n\n let target_name = opt.target.file_stem().unwrap();\n\n let target_name_str = target_name.to_str().unwrap();\n\n let env = Env {\n\n lib: &Path::new(\"lib\"),\n\n results: &Path::new(\"results\").join(target_name),\n\n remote_bin: \"bin\",\n\n target_name: target_name_str,\n\n target,\n\n codegen: opt.codegen,\n\n benchmark: opt.benchmark,\n\n };\n\n\n\n fs::create_dir_all(&env.results)\n\n .expect(\"could not create results directory\");\n\n\n\n command::setup(&env);\n\n\n\n use_env(&env);\n\n}", "file_path": "driver/src/env.rs", "rank": 0, "score": 171915.57149310282 }, { "content": "pub fn halide(env: &Env) {\n\n let ref mut log = codegen_result(\"halide\", env);\n\n\n\n let halide_path = env.lib.join(\"halide\");\n\n host_run(\"make\").arg(\"-j2\")\n\n .current_dir(&halide_path)\n\n .log(log, env).expect(\"could not build Halide\");\n\n\n\n let halide_harris = env.lib.join(\"halide\")\n\n .join(\"apps\").join(\"harris\");\n\n host_run(\"make\").arg(format!(\"bin/{}/harris.a\", env.target.halide))\n\n .current_dir(&halide_harris)\n\n .log(log, env).expect(\"could not build harris\");\n\n host_run(\"make\").arg(format!(\"bin/{}/harris_auto_schedule.a\", env.target.halide))\n\n .current_dir(&halide_harris)\n\n .log(log, env).expect(\"could not build harris\");\n\n host_run(\"make\").arg(format!(\"bin/{}/runtime.a\", env.target.halide))\n\n .current_dir(&halide_harris)\n\n .log(log, env).expect(\"could not build runtime\");\n\n}\n\n\n", "file_path": "driver/src/codegen.rs", "rank": 4, "score": 119725.34777157233 }, { "content": "pub fn harris(env: &Env) {\n\n let (ref mut log, ref mut res) = benchmark_result(&env);\n\n\n\n if let Some(ref cmd) = env.target.before_measuring {\n\n target_run(cmd).log(log, &env)\n\n .expect(\"could not run required command before measuring\");\n\n }\n\n\n\n target_run(\"mkdir\").arg(\"-p\").arg(env.remote_bin)\n\n .log(log, &env).expect(\"could not create remote binary directory\");\n\n upload_file_to(&Path::new(\"driver\").join(\"cpp\"), \"src\")\n\n .log(log, &env).expect(\"could not upload C++ driver sources\");\n\n\n\n let halide_path = env.lib.join(\"halide\");\n\n let polymage_path = env.lib.join(\"polymage\");\n\n\n\n upload_file(&halide_path.join(\"include\").join(\"HalideRuntime.h\"))\n\n .log(log, env).expect(\"could not upload Halide headers\");\n\n upload_file(&halide_path.join(\"include\").join(\"HalideBuffer.h\"))\n\n .log(log, env).expect(\"could not upload Halide headers\");\n", "file_path": "driver/src/benchmark.rs", "rank": 5, "score": 119725.34777157233 }, { "content": "pub fn rise(env: &Env) {\n\n let ref mut log = codegen_result(\"rise\", env);\n\n\n\n host_run(\"./setup.sh\")\n\n .current_dir(env.lib.join(\"shine\"))\n\n .log(log, env).expect(\"could not setup shine repository\");\n\n\n\n let rise_n_shine_path = env.lib.join(\"harris-rise-and-shine\");\n\n fs::create_dir_all(rise_n_shine_path.join(\"gen\")\n\n .join(format!(\"vec{}\", env.target.vector_width)))\n\n .expect(\"could not create Rise codegen directory\");\n\n\n\n let rise_n_shine_path = env.lib.join(\"harris-rise-and-shine\");\n\n host_run(\"sbt\").arg(format!(\"run {}\", env.target.vector_width))\n\n .current_dir(&rise_n_shine_path)\n\n .log(log, env).expect(\"could not rise & shine\");\n\n}\n\n\n", "file_path": "driver/src/codegen.rs", "rank": 7, "score": 
119725.34777157233 }, { "content": "pub fn info(env: &Env) {\n\n let f = &mut collection_result(\"info\", \"general information\", env);\n\n\n\n writeln!(f, \"{:?}\", env).unwrap();\n\n let _ = host_run(\"date\").log(f, env);\n\n let _ = host_run(\"hostname\").log(f, env);\n\n let _ = target_run(\"hostname\").log(f, env);\n\n let _ = host_run(\"clang\").arg(\"--version\").log(f, env);\n\n let _ = target_run(&env.target.remote_cc).arg(\"--version\").log(f, env);\n\n let _ = host_run(\"which\").arg(\"sbt\").log(f, env);\n\n let _ = host_run(\"java\").arg(\"-version\").log(f, env);\n\n let _ = host_run(\"R\").arg(\"--version\").log(f, env);\n\n let _ = host_run(\"git\").arg(\"--version\").log(f, env);\n\n let _ = host_run(\"git\").args(&[\"show\", \"-s\", \"--color\"]).log(f, env);\n\n let _ = host_run(\"git\").args(&[\"diff\", \"--color\"]).log(f, env);\n\n}\n\n\n", "file_path": "driver/src/collect.rs", "rank": 8, "score": 119725.34777157233 }, { "content": "pub fn hardware_info(env: &Env) {\n\n let f = &mut collection_result(\"hwinfo\", \"hardware information\", env);\n\n\n\n let _ = target_run(\"lscpu\").log(f, env);\n\n let _ = target_run(\"clinfo\").log(f, env);\n\n}\n\n\n", "file_path": "driver/src/collect.rs", "rank": 9, "score": 117204.60733351519 }, { "content": "fn collection_result(name: &str, desc: &str, env: &Env) -> fs::File {\n\n let path = env.results.join(name);\n\n println!(\"{} -> {}\", format!(\"-- collecting {}\", desc).yellow(), path.to_str().unwrap());\n\n fs::File::create(path).expect(\"could not create collection file\")\n\n}", "file_path": "driver/src/collect.rs", "rank": 12, "score": 108849.92984744179 }, { "content": "fn codegen_result(name: &str, env: &Env) -> fs::File {\n\n let path = env.results.join(format!(\"codegen-{}\", name));\n\n println!(\"{} -> {}.log\", format!(\"-- generating code with {}\", name).yellow(), path.to_str().unwrap());\n\n fs::File::create(path.with_extension(\"log\")).unwrap()\n\n}", "file_path": "driver/src/codegen.rs", "rank": 13, "score": 103414.45774733828 }, { "content": "fn record_result(out: &str, res: &mut fs::File) {\n\n let mut sp = out.split_whitespace();\n\n let err = \"could not record result\";\n\n while let Some(size) = sp.next() {\n\n let generator = sp.next().expect(err);\n\n let variant = sp.next().expect(err);\n\n let med = sp.next().expect(err);\n\n let min = sp.next().expect(err);\n\n let max = sp.next().expect(err);\n\n println!(\"[{}] {:8} {:12}: {:6} median ms [{:6} - {:6}]\", size, generator, variant, med, min, max);\n\n }\n\n write!(res, \"{}\", out).expect(err);\n\n}\n", "file_path": "driver/src/benchmark.rs", "rank": 15, "score": 93951.1577694668 }, { "content": " def genKernel(e: rise.core.Expr, name: String, path: String): Unit = {\n\n val lowered = rewrite.unrollDots(rise.core.types.infer(e))\n\n val kernel = util.gen.OpenCLKernel(lowered, name)\n\n util.writeToPath(path, kernel.code)\n\n }\n\n\n", "file_path": "lib/harris-rise-and-shine/Main.scala", "rank": 16, "score": 83523.3404804614 }, { "content": "fn benchmark_result(env: &Env) -> (fs::File, fs::File) {\n\n let path = env.results.join(\"benchmark\");\n\n println!(\"{} -> {}[.log/.data]\", \"-- benchmarking\".yellow(), path.to_str().unwrap());\n\n (fs::File::create(path.with_extension(\"log\"))\n\n .expect(\"could not create log file\"),\n\n fs::File::create(path.with_extension(\"data\"))\n\n .expect(\"could not create data file\"))\n\n}\n\n\n", "file_path": "driver/src/benchmark.rs", "rank": 17, "score": 72340.1841891171 }, { "content": 
"#[derive(StructOpt, Debug)]\n\n#[structopt(name = \"harris corner detection experiment\")]\n\nstruct Opt {\n\n /// Target\n\n #[structopt(long, short)]\n\n target: PathBuf,\n\n\n\n /// Codegen?\n\n #[structopt(long)]\n\n codegen: bool,\n\n\n\n /// Benchmark?\n\n #[structopt(long)]\n\n benchmark: bool,\n\n}\n\n\n", "file_path": "driver/src/env.rs", "rank": 19, "score": 67960.40404545657 }, { "content": "struct OCLData : OCLKernelArg {\n\n T value;\n\n OCLData(T t) { value = t; }\n\n cl_int setFor(cl_kernel k, cl_uint index) override {\n\n return clSetKernelArg(k, index, sizeof(T), &value);\n\n }\n\n};\n\n\n\ntemplate <typename T>\n\nstd::shared_ptr<OCLKernelArg> ocl_data(T t) { return std::make_shared<OCLData<T>>(t); }\n\n\n", "file_path": "driver/cpp/ocl.cpp", "rank": 20, "score": 58609.80373248135 }, { "content": "struct OCLLocalMem : OCLKernelArg {\n\n size_t size;\n\n OCLLocalMem(size_t s) { size = s; }\n\n cl_int setFor(cl_kernel k, cl_uint index) override {\n\n return clSetKernelArg(k, index, size, NULL);\n\n }\n\n};\n\n\n\nstd::shared_ptr<OCLKernelArg> ocl_local_mem(size_t size) { return std::make_shared<OCLLocalMem>(size); }\n\n\n\n// shared pointer is to allow initializer lists ...\n\nvoid ocl_set_kernel_args(OCLKernel& k, std::vector<std::shared_ptr<OCLKernelArg>> args) {\n\n for (size_t i = 0; i < args.size(); i++) {\n\n ocl_unwrap(args[i]->setFor(k.inner, i));\n\n }\n\n}\n\n\n\ncl_event ocl_enqueue_kernel(OCLExecutor* ocl, OCLKernel* k,\n\n std::vector<size_t> global_work_size, std::vector<size_t> local_work_size)\n\n{\n\n cl_uint work_dim = global_work_size.size();\n\n if (global_work_size.size() != local_work_size.size()) {\n\n fprintf(stderr, \"the number of dimensions used to specify global and local work sizes differs\\n\");\n\n exit(EXIT_FAILURE);\n\n }\n\n\n\n cl_event e;\n\n ocl_unwrap(clEnqueueNDRangeKernel(ocl->queue, k->inner, work_dim, NULL, global_work_size.data(), local_work_size.data(), 0, NULL, &e));\n\n return e;\n\n}", "file_path": "driver/cpp/ocl.cpp", "rank": 21, "score": 57001.94605926455 }, { "content": " def main(args: Array[String]): Unit = {\n\n val strip = 32\n\n val vWidth = args(0).toInt\n\n val highLevel = rise.core.types.infer(harris(strip, vWidth))\n\n // genKernel(rewrite.harrisBufferedSplitPar(strip)(highLevel),\n\n // \"harris\", \"gen/harrisB3SPRW.cl\")\n\n // genKernel(harrisBufferedVecUnaligned(3, vWidth), \"harris\", \"gen/harrisBVU.cl\")\n\n // genKernel(harrisBufferedVecAligned(3, vWidth), \"harris\", \"gen/harrisBVA.cl\")\n\n // genKernel(harrisSplitPar(strip, vWidth, harrisBufferedVecUnaligned(3, vWidth)),\n\n // \"harris\", \"gen/harrisB3VUSP.cl\")\n\n // genKernel(harrisSplitPar(strip, vWidth, harrisBufferedVecUnaligned(4, vWidth)),\n\n // \"harris\", \"gen/harrisB4VUSP.cl\")\n\n // genKernel(rewrite.harrisBufferedVecUnalignedSplitPar(vWidth, strip)(highLevel),\n\n // \"harris\", \"gen/harrisB3VUSPRW.cl\")\n\n //genKernel(harrisSplitPar(strip, vWidth, harrisBufferedVecAligned(3, vWidth)),\n\n // \"harris\", \"gen/harrisB3VASP.cl\")\n\n //genKernel(harrisSplitPar(strip, vWidth, harrisBufferedVecAligned(4, vWidth)),\n\n // \"harris\", \"gen/harrisB4VASP.cl\")\n\n genKernel(rewrite.harrisBufferedVecAlignedSplitPar(vWidth, strip)(highLevel),\n\n \"harris\", s\"gen/vec${vWidth}/cbuf.cl\")\n\n //genKernel(harrisSplitPar(strip, vWidth, harrisBufferedRegRotVecAligned(3, vWidth)),\n\n // \"harris\", \"gen/harrisB3VASPRR.cl\")\n\n //genKernel(harrisSplitPar(strip, vWidth, harrisBufferedRegRotVecAligned(4, vWidth)),\n\n // \"harris\", 
\"gen/harrisB4VASPRR.cl\")\n\n genKernel(rewrite.harrisBufferedRegRotVecAlignedSplitPar(vWidth, strip)(highLevel),\n\n \"harris\", s\"gen/vec${vWidth}/cbuf+rrot.cl\")\n\n }\n\n}", "file_path": "lib/harris-rise-and-shine/Main.scala", "rank": 22, "score": 53404.74312137578 }, { "content": "struct OCLKernel {\n\n cl_program program;\n\n cl_kernel inner;\n\n};\n\n\n\nvoid ocl_load_kernel(const char* name, const char* path, OCLExecutor* ocl, OCLKernel* k) {\n\n FILE* f = fopen(path, \"rb\");\n\n if (!f) {\n\n fprintf(stderr, \"could not open source\\n\");\n\n exit(EXIT_FAILURE);\n\n }\n\n fseek(f, 0, SEEK_END);\n\n size_t length = ftell(f);\n\n rewind(f);\n\n char* source = (char*) malloc(length * sizeof(char));\n\n if (fread(source, sizeof(char), length, f) != length) {\n\n fprintf(stderr, \"could not read source\\n\");\n\n exit(EXIT_FAILURE);\n\n }\n\n fclose(f);\n", "file_path": "driver/cpp/ocl.cpp", "rank": 23, "score": 42737.037366185294 }, { "content": "struct ClockT {\n\n using type = std::chrono::high_resolution_clock;\n\n};\n\n\n\n// ...otherwise use steady_clock.\n\ntemplate <>\n", "file_path": "driver/cpp/time.hpp", "rank": 24, "score": 42737.037366185294 }, { "content": "struct TimeStats {\n\n double min_ms;\n\n double max_ms;\n\n double median_ms;\n\n};\n\n\n\nTimeStats time_stats(std::vector<double>& samples) {\n\n auto to_ms = [&](double ms) { return ms; };\n\n std::sort(samples.begin(), samples.end());\n\n return {\n\n .min_ms = to_ms(samples.front()),\n\n .max_ms = to_ms(samples.back()),\n\n .median_ms = to_ms(samples[samples.size() / 2])\n\n };\n\n}\n\n\n\n#ifdef CL_TARGET_OPENCL_VERSION\n\nTimeStats ocl_time_stats(std::vector<cl_ulong>& samples) {\n\n auto to_ms = [&](cl_ulong nanoseconds) {\n\n double ms = (double)(nanoseconds) * 1e-6;\n\n return ms;\n\n };\n\n std::sort(samples.begin(), samples.end());\n\n return {\n\n .min_ms = to_ms(samples.front()),\n\n .max_ms = to_ms(samples.back()),\n\n .median_ms = to_ms(samples[samples.size() / 2])\n\n };\n\n}\n\n#endif", "file_path": "driver/cpp/stats.hpp", "rank": 25, "score": 42737.037366185294 }, { "content": "struct OCLExecutor {\n\n cl_platform_id platform;\n\n cl_device_id device;\n\n cl_context context;\n\n cl_command_queue queue;\n\n};\n\n\n\ncl_platform_id find_platform(cl_uint platform_count, const cl_platform_id* platform_ids, const char* subname) {\n\n for (cl_uint i = 0; i < platform_count; i++) {\n\n char name_buf[512];\n\n size_t name_size;\n\n ocl_unwrap(clGetPlatformInfo(platform_ids[i], CL_PLATFORM_NAME, sizeof(name_buf), name_buf, &name_size));\n\n if (name_size > sizeof(name_buf)) {\n\n fprintf(stderr, \"did not expect such a long OpenCL platform name (%zu)\\n\", name_size);\n\n name_buf[511] = '\\0';\n\n }\n\n\n\n if (strstr(name_buf, subname) != NULL) {\n\n fprintf(stderr, \"using OpenCL platform '%s'\\n\", name_buf);\n\n return platform_ids[i];\n", "file_path": "driver/cpp/ocl.cpp", "rank": 26, "score": 42737.037366185294 }, { "content": "fn main() {\n\n env::setup(|env| {\n\n if env.codegen {\n\n codegen::halide(env);\n\n codegen::rise(env);\n\n }\n\n\n\n if env.benchmark {\n\n collect::info(&env);\n\n collect::hardware_info(&env);\n\n\n\n benchmark::harris(&env);\n\n }\n\n });\n\n}", "file_path": "driver/src/main.rs", "rank": 27, "score": 41795.08617042219 }, { "content": "struct LiftContext {\n\n OCLKernel kernels[HARRIS_KERNELS];\n\n\n\n cl_mem input;\n\n cl_mem output;\n\n cl_mem bufs[HARRIS_BUFS];\n\n};\n\n\n\nvoid init_lift_context(OCLExecutor* ocl, LiftContext* ctx, size_t h, size_t w) {\n\n for (int 
i = 0; i < HARRIS_KERNELS; i++) {\n\n fprintf(stderr, \"loading kernel %s\\n\", HARRIS_KERNEL_SOURCES[i]);\n\n ocl_load_kernel(HARRIS_KERNEL_NAMES[i], HARRIS_KERNEL_SOURCES[i], ocl, &ctx->kernels[i]);\n\n }\n\n\n\n // rounding up output lines to 32\n\n size_t ho = (((h - 4) + 31) / 32) * 32;\n\n size_t hi = ho + 4;\n\n\n\n cl_int ocl_err;\n\n ctx->input = clCreateBuffer(ocl->context, CL_MEM_READ_ONLY | CL_MEM_HOST_WRITE_ONLY,\n", "file_path": "driver/cpp/lift_harris.cpp", "rank": 28, "score": 41705.908559174815 }, { "content": "struct ShineContext {\n\n OCLKernel harris[SHINE_VERSIONS];\n\n\n\n cl_mem input;\n\n cl_mem output;\n\n cl_mem cbuf1;\n\n cl_mem cbuf2;\n\n cl_mem cbuf3;\n\n};\n\n\n\nvoid init_context(OCLExecutor* ocl, ShineContext* ctx, size_t h, size_t w) {\n\n for (int i = 0; i < SHINE_VERSIONS; i++) {\n\n fprintf(stderr, \"loading kernel %s\\n\", SHINE_SOURCES[i]);\n\n ocl_load_kernel(\"harris\", SHINE_SOURCES[i], ocl, &ctx->harris[i]);\n\n }\n\n\n\n // rounding up output lines to 32\n\n size_t ho = (((h - 4) + 31) / 32) * 32;\n\n size_t hi = ho + 4;\n\n\n", "file_path": "driver/cpp/shine_harris.cpp", "rank": 29, "score": 41705.908559174815 }, { "content": "struct ClockT<false> {\n\n using type = std::chrono::steady_clock;\n\n};\n\n\n\nusing Clock = ClockT<>::type;", "file_path": "driver/cpp/time.hpp", "rank": 30, "score": 39880.16397597371 }, { "content": "use crate::*;\n\n\n\nuse structopt::StructOpt;\n\n\n\n#[derive(Debug)]\n\npub struct Env<'a> {\n\n pub lib: &'a Path,\n\n pub results: &'a Path,\n\n pub remote_bin: &'a str,\n\n pub target_name: &'a str,\n\n pub target: &'a Target,\n\n pub codegen: bool,\n\n pub benchmark: bool,\n\n}\n\n\n\n#[derive(StructOpt, Debug)]\n\n#[structopt(name = \"harris corner detection experiment\")]\n", "file_path": "driver/src/env.rs", "rank": 31, "score": 25353.881271366135 }, { "content": "class OCLKernelArg {\n\n public:\n\n virtual cl_int setFor(cl_kernel k, cl_uint index) = 0;\n\n};\n\n\n\ntemplate <typename T>\n", "file_path": "driver/cpp/ocl.cpp", "rank": 32, "score": 22377.49602879957 }, { "content": " .log(log, env).is_none()\n\n {\n\n return;\n\n }\n\n\n\n let (envs, device_type_str) = match env.target.kind {\n\n TargetKind::GPU => (vec![\n\n (\"HL_OCL_DEVICE_TYPE\", \"gpu\"),\n\n (\"HL_OCL_PLATFORM_NAME\", &env.target.ocl_platform_name)\n\n ], \"gpu\"),\n\n TargetKind::CPU => (vec![], \"cpu\")\n\n };\n\n let output1 = if let Some(ref cpu_a) = env.target.cpu_affinity {\n\n target_run(\"taskset\").arg(\"-c\").arg(cpu_a).arg(bin)\n\n .arg(\"lib/halide/apps/images/rgb.png\")\n\n .arg(&env.target.ocl_platform_name).arg(device_type_str).arg(\"30\")\n\n .arg(\"harris.png\")\n\n .envs(envs.iter().cloned())\n\n .log(log, env)\n\n .expect(\"benchmark run failed\")\n", "file_path": "driver/src/benchmark.rs", "rank": 36, "score": 17.067460277116247 }, { "content": "use serde::{Serialize, Deserialize};\n\nuse serde_yaml;\n\nuse crate::*;\n\n\n\nimpl Target {\n\n pub fn load<P: AsRef<Path>>(path: P) -> Target {\n\n let f = fs::File::open(path.as_ref())\n\n .expect(\"could not open target file\");\n\n let r = io::BufReader::new(f);\n\n serde_yaml::from_reader(r)\n\n .expect(\"could not read target file\")\n\n }\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct Target {\n\n /// Run the experiment on this remote target (ssh destination)\n\n pub remote: Remote,\n\n\n\n /// C/C++ compiler on the remote target\n", "file_path": "driver/src/target.rs", "rank": 37, "score": 15.853515223731467 }, { "content": " } else {\n\n 
target_run(bin)\n\n .arg(\"lib/halide/apps/images/rgb.png\")\n\n .arg(&env.target.ocl_platform_name).arg(device_type_str).arg(\"30\")\n\n .arg(\"harris.png\")\n\n .log(log, env)\n\n .expect(\"benchmark run failed\")\n\n };\n\n record_result(&output1, res);\n\n let output2 = if let Some(ref cpu_a) = env.target.cpu_affinity {\n\n target_run(\"taskset\").arg(\"-c\").arg(cpu_a).arg(bin)\n\n .arg(\"lib/polymage/images/venice_wikimedia.jpg\")\n\n .arg(&env.target.ocl_platform_name).arg(device_type_str).arg(\"30\")\n\n .arg(\"venice_harris.jpg\")\n\n .envs(envs.iter().cloned())\n\n .log(log, env)\n\n .expect(\"benchmark run failed\")\n\n } else {\n\n target_run(bin)\n\n .arg(\"lib/polymage/images/venice_wikimedia.jpg\")\n", "file_path": "driver/src/benchmark.rs", "rank": 39, "score": 14.405956216842238 }, { "content": "\n\n /// OpenCL platform name substring\n\n #[serde(rename = \"ocl-platform-name\")]\n\n pub ocl_platform_name: String,\n\n\n\n /// Halide target string\n\n pub halide: String,\n\n\n\n /// OpenCV headers directory\n\n #[serde(rename = \"opencv-headers\")]\n\n pub opencv_headers: String,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\n#[serde(from = \"String\", into = \"String\")]\n\npub struct Remote {\n\n pub dst: String,\n\n pub dir: PathBuf,\n\n}\n\n\n", "file_path": "driver/src/target.rs", "rank": 40, "score": 13.777931942246061 }, { "content": "impl From<String> for Remote {\n\n fn from(dst: String) -> Remote {\n\n Remote { dst, dir: PathBuf::from(\"2021-CGO-experiment\") }\n\n }\n\n}\n\n\n\nimpl From<Remote> for String {\n\n fn from(r: Remote) -> String {\n\n r.dst\n\n }\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub enum TargetKind {\n\n #[serde(rename = \"cpu\")]\n\n CPU,\n\n #[serde(rename = \"gpu\")]\n\n GPU,\n\n}\n", "file_path": "driver/src/target.rs", "rank": 41, "score": 13.459371261886727 }, { "content": " .arg(&env.target.ocl_platform_name).arg(device_type_str).arg(\"30\")\n\n .arg(\"venice_harris.jpg\")\n\n .log(log, env)\n\n .expect(\"benchmark run failed\")\n\n };\n\n record_result(&output2, res);\n\n\n\n if let Some(ref cmd) = env.target.after_measuring {\n\n target_run(cmd).log(log, &env)\n\n .expect(\"could not run required command after measuring\");\n\n }\n\n}\n\n\n", "file_path": "driver/src/benchmark.rs", "rank": 42, "score": 12.836125615975742 }, { "content": "pub use std::{fs, io, fmt, ffi};\n\npub use io::Write;\n\npub use std::path::{Path, PathBuf};\n\npub use colored::*;\n\npub use command::*;\n\npub use target::*;\n\npub use env::Env;\n\n\n\nmod command;\n\nmod target;\n\nmod env;\n\nmod collect;\n\nmod benchmark;\n\nmod codegen;\n\n\n", "file_path": "driver/src/main.rs", "rank": 43, "score": 12.663710460331258 }, { "content": " #[serde(rename = \"remote-cc\")]\n\n pub remote_cc: String,\n\n\n\n /// Target processor kind, CPU or GPU?\n\n pub kind: TargetKind,\n\n\n\n /// The size of vectors that should be used\n\n #[serde(rename = \"vector-width\")]\n\n pub vector_width: u16,\n\n\n\n /// Command to run on the target before measuring\n\n #[serde(rename = \"before-measuring\")]\n\n pub before_measuring: Option<String>,\n\n /// Command to run on the target after measuring\n\n #[serde(rename = \"after-measuring\")]\n\n pub after_measuring: Option<String>,\n\n\n\n /// CPU affinity for the benchmark\n\n #[serde(rename = \"cpu-affinity\")]\n\n pub cpu_affinity: Option<String>,\n", "file_path": "driver/src/target.rs", "rank": 44, "score": 12.175287302623454 }, { "content": " cl_kernel kernel = clCreateKernel(program, name, 
&err);\n\n ocl_unwrap(err);\n\n\n\n k->program = program;\n\n k->inner = kernel;\n\n}\n\n\n\nvoid ocl_release_kernel(OCLKernel* k) {\n\n ocl_error(clReleaseKernel(k->inner));\n\n ocl_error(clReleaseProgram(k->program));\n\n}\n\n\n\nvoid ocl_create_compute_buffer(OCLExecutor* ocl, size_t byte_size, cl_mem* buffer) {\n\n cl_int ocl_err;\n\n *buffer = clCreateBuffer(ocl->context, CL_MEM_READ_WRITE | CL_MEM_HOST_NO_ACCESS,\n\n byte_size, NULL, &ocl_err);\n\n ocl_unwrap(ocl_err);\n\n}\n\n\n\nvoid ocl_release_mem(cl_mem m) {\n\n ocl_error(clReleaseMemObject(m));\n\n}\n\n\n", "file_path": "driver/cpp/ocl.cpp", "rank": 45, "score": 11.799261907320997 }, { "content": "\n\n cl_int err;\n\n const char* sources[] = { source };\n\n const size_t lengths[] = { length };\n\n cl_program program = clCreateProgramWithSource(ocl->context, 1, sources, lengths, &err);\n\n free(source);\n\n ocl_unwrap(err);\n\n\n\n // 2.0: -cl-uniform-work-group-size\n\n const char* options = \"-cl-fast-relaxed-math -Werror -cl-std=CL1.2\";\n\n if (ocl_error(clBuildProgram(program, 1, &ocl->device, options, NULL, NULL))) {\n\n size_t log_size;\n\n ocl_unwrap(clGetProgramBuildInfo(program, ocl->device, CL_PROGRAM_BUILD_LOG, 0, NULL, &log_size));\n\n char* log_string = (char*) malloc(log_size * sizeof(char));\n\n ocl_unwrap(clGetProgramBuildInfo(program, ocl->device, CL_PROGRAM_BUILD_LOG, log_size, log_string, NULL));\n\n fprintf(stderr, \"%s\\n\", log_string);\n\n free(log_string);\n\n exit(EXIT_FAILURE);\n\n }\n\n\n", "file_path": "driver/cpp/ocl.cpp", "rank": 46, "score": 11.413505347915013 }, { "content": " variant(CL_DEVICE_NOT_FOUND)\n\n variant(CL_DEVICE_NOT_AVAILABLE)\n\n variant(CL_COMPILER_NOT_AVAILABLE)\n\n variant(CL_MEM_OBJECT_ALLOCATION_FAILURE)\n\n variant(CL_OUT_OF_RESOURCES)\n\n variant(CL_OUT_OF_HOST_MEMORY)\n\n variant(CL_PROFILING_INFO_NOT_AVAILABLE)\n\n variant(CL_MEM_COPY_OVERLAP)\n\n variant(CL_IMAGE_FORMAT_MISMATCH)\n\n variant(CL_IMAGE_FORMAT_NOT_SUPPORTED)\n\n variant(CL_BUILD_PROGRAM_FAILURE)\n\n variant(CL_MAP_FAILURE)\n\n variant(CL_MISALIGNED_SUB_BUFFER_OFFSET)\n\n variant(CL_EXEC_STATUS_ERROR_FOR_EVENTS_IN_WAIT_LIST)\n\n variant(CL_COMPILE_PROGRAM_FAILURE)\n\n variant(CL_LINKER_NOT_AVAILABLE)\n\n variant(CL_LINK_PROGRAM_FAILURE)\n\n variant(CL_DEVICE_PARTITION_FAILED)\n\n variant(CL_KERNEL_ARG_INFO_NOT_AVAILABLE)\n\n variant(CL_INVALID_VALUE)\n", "file_path": "driver/cpp/ocl.cpp", "rank": 47, "score": 10.642531544236485 }, { "content": " }\n\n }\n\n\n\n fprintf(stderr, \"did not find any OpenCL platform with subname '%s'\\n\", subname);\n\n exit(EXIT_FAILURE);\n\n}\n\n\n\nvoid ocl_init(OCLExecutor* ocl, const char* platform_subname, const char* device_type_str) {\n\n cl_device_type device_type;\n\n if (strcmp(device_type_str, \"cpu\") == 0) {\n\n device_type = CL_DEVICE_TYPE_CPU;\n\n } else if (strcmp(device_type_str, \"gpu\") == 0) {\n\n device_type = CL_DEVICE_TYPE_GPU;\n\n } else {\n\n fprintf(stderr, \"unexpected device type string: %s\\n\", device_type_str);\n\n exit(EXIT_FAILURE);\n\n }\n\n\n\n const cl_uint platform_entries = 256;\n\n cl_platform_id platform_ids[platform_entries];\n", "file_path": "driver/cpp/ocl.cpp", "rank": 48, "score": 9.835220333980258 }, { "content": "### Original Odroid N2 configuration\n\n\n\n- [`cortex-a53.yaml`](cortex-a53.yaml)\n\n- [`cortex-a73.yaml`](cortex-a73.yaml)\n\n\n\nSimilarly to the XU4, we set the CPU frequency using the scripts in [`scripts/odroid-n2/`](scripts/odroid-n2/).\n\nYou can allow password-less `sudo` by adding the line `odroid ALL =(ALL) 
NOPASSWD: /home/odroid/perf_on_a53, /home/odroid/perf_on_a73, /home/odroid/perf_off` to `/etc/sudoers`.\n\n\n\nThe following software was used:\n\n- clang 10 from LLVM 10. Built from source.\n\n- [POCL](portablecl.org) 1.5 OpenCL implementation for the CPUs. Built from source along with LLVM 10.\n\n- [OpenCV](https://opencv.org/) 4.3.0 built from source with the following flags:\n\n```\n\ncmake -D CMAKE_BUILD_TYPE=RELEASE \\\n\n -D CMAKE_INSTALL_PREFIX=/usr/local \\\n\n -D ENABLE_NEON=ON \\\n\n -D WITH_OPENCL=ON \\\n\n -D WITH_JASPER=OFF \\\n\n -D BUILD_TESTS=OFF \\\n\n -D INSTALL_PYTHON_EXAMPLES=OFF \\\n\n -D BUILD_EXAMPLES=OFF ..\n\n```\n\n\n\n## 5. Running benchmarks\n\n\n\nRunning `./benchmark -t $TARGET.yaml` on the host will:\n\n- create a `2021-CGO-experiment` folder in the home directory of the remote user, where the necessary files will be automatically uploaded.\n\n- benchmark the performance of the Harris operator using OpenCV, Halide, Rise and Lift implementations; checking output correctness\n\n - for the small image `lib/halide/apps/images/rgb.png`\n\n - for the big image `lib/polymage/images/venice_wikimedia.jpg`\n\n- record the benchmark results on the host in `results/$TARGET/benchmark.data`.\n\n\n\nAt this point SSH access to a properly configured target is required (see target configuration section).\n\nBenchmarking takes roughly between 2 and 10mn depending on the target.\n\n\n\n## 6. Plotting the figures\n\n\n\nIf you could not run the benchmarks on all the processors used in the paper,\n\nyou will still be able to plot the figures using our own benchmark data, which is included in this artifact.\n\n\n\nFirst, either create or symlink `lib/Rlibs`, where R libraries will be fetched and stored:\n\n```sh\n\n# use a fresh directory\n\nmkdir lib/Rlibs\n", "file_path": "README.md", "rank": 49, "score": 9.398125746557007 }, { "content": " .log(log, env).expect(\"could not upload Lift OpenCL kernels\");\n\n\n\n let bin = &format!(\"{}/harris\", env.remote_bin);\n\n if target_run(&env.target.remote_cc)\n\n .arg(\"src/harris.cpp\")\n\n .arg(\"halide-gen/harris.a\")\n\n .arg(\"halide-gen/harris_auto_schedule.a\")\n\n .arg(\"halide-gen/runtime.a\")\n\n .arg(\"-I\").arg(\"src\")\n\n .arg(\"-I\").arg(\"halide-gen\")\n\n .arg(\"-I\").arg(\"lib/halide/include\")\n\n .arg(\"-I\").arg(\"lib/halide/tools\")\n\n .arg(\"-I\").arg(&env.target.opencv_headers)\n\n .arg(\"-no-pie\")\n\n .arg(\"-fdiagnostics-color\")\n\n .arg(\"-O2\").arg(\"-lstdc++\").arg(\"-std=c++14\")\n\n .arg(\"-lm\").arg(\"-lpthread\").arg(\"-ldl\").arg(\"-lpng\").arg(\"-ljpeg\")\n\n .arg(\"-lOpenCL\").arg(\"-fopenmp\")\n\n .arg(\"-lopencv_core\").arg(\"-lopencv_imgproc\")\n\n .arg(\"-o\").arg(bin)\n", "file_path": "driver/src/benchmark.rs", "rank": 50, "score": 9.32777813557458 }, { "content": "# or use an existing directory to avoid duplication\n\nln -s ~/.rlibs lib/Rlibs\n\n# alternatively do neither to use system libraries (requires sudo)\n\n```\n\n\n\nRunning `./plot-figures` on the host will generate:\n\n- `results/figure1.pdf`, some visual details are different from the paper figure because it was edited using Inkscape.\n\n- `results/figure8.pdf`\n\n\n\n\n\n## Looking at the logs\n\n\n\nYou can use `cat` or `less -R` on the logs in a `results/$TARGET` directory:\n\n\n\n- `info`: general system information\n\n- `hwinfo`: target hardware information\n\n- ..\n\n\n\nYou can also use `tail -f` to watch a log.\n\n\n\n## Artifact directories\n\n\n\n- [`driver`](driver) contains Rust and C/C++ code to run the 
benchmarks\n\n- [`lib`](lib) contains various library dependencies, in particular:\n\n - [`halide`](lib/halide) contains the Halide language and compiler\n\n - [`shine`](lib/shine) contains the Rise language and its Shine compiler\n\n- [`lift-gen`](lift-gen) contains the Lift-generated OpenCL kernels\n\n- [`plot`](plot) contains the R plotting scripts\n\n- [`results`](results) contains the benchmark logs and results\n\n- [`scripts`](scripts) contains various useful scripts\n\n\n\n## Authors\n\n\n\n- Thomas Koehler, University of Glasgow ([[email protected]](mailto:[email protected]))\n\n- Michel Steuwer, University of Edinburgh ([[email protected]](mailto:[email protected]))\n", "file_path": "README.md", "rank": 51, "score": 8.351104324303503 }, { "content": "## 1. Installing host dependencies\n\n\n\nWe provide a docker image for convenience, which you can download, build and run:\n\n```sh\n\nwget https://raw.githubusercontent.com/rise-lang/2021-CGO-artifact/main/Dockerfile\n\nsudo systemctl start docker.service\n\ndocker build . -t cgo21-rise\n\ndocker run --net=host -it cgo21-rise\n\n```\n\n\n\nAlternatively, install the following required software:\n\n- git, ssh, scp, POSIX shell\n\n- zlib\n\n- [rust](https://rust-lang.org) 1.4+\n\n- sbt 1.x, java 1.8 to 1.11 SDK\n\n- llvm 8 to 10\n\n- make\n\n- to plot figures:\n\n - R 3.6 to 4.0\n\n - DejaVu Sans font\n\n\n\n## 2. Cloning the repository\n\n\n\nTo install the artifact on the host (potentially from the provided docker container):\n\n\n\n```sh\n\ngit clone --recursive https://github.com/rise-lang/2021-CGO-artifact.git\n\ncd 2021-CGO-artifact\n\n```\n\n\n\n## 3. Generating code\n\n\n\nRunning `./codegen -t $TARGET.yaml` on the host will generate Halide binaries in `lib/halide/apps/harris/bin/` and Rise kernels in `lib/harris-rise-and-shine/gen/`.\n\nThe generated code is affected by the `halide` target string and `vector-width` specified in the `$TARGET.yaml` configuration file.\n\nSSH access to a properly configured target is not required at this point (everything happens on the host).\n\nBuilding Halide and Rise can take some time on the first run, after that code generation should take within a minute.\n\n\n", "file_path": "README.md", "rank": 52, "score": 7.933425011690848 }, { "content": "bool ocl_error(cl_int error) {\n\n if (error != CL_SUCCESS) {\n\n fprintf(stderr, \"%s\\n\", ocl_error_to_string(error));\n\n return true;\n\n }\n\n return false;\n\n}\n\n\n\nvoid ocl_unwrap(cl_int error) {\n\n if (ocl_error(error)) { exit(EXIT_FAILURE); }\n\n}\n\n\n", "file_path": "driver/cpp/ocl.cpp", "rank": 53, "score": 7.8002657366202275 }, { "content": "## 4. Configuring targets\n\n\n\nThis artifact includes configuration files used for the paper (`.yaml` files at the root).\n\nYou will need to tweak them according to your setup (e.g. 
change the ssh destination in the `remote` field).\n\nYou can create custom configuration files to generate code and run benchmarks on any other OpenCL-enabled target, but expect different performance behaviour.\n\nSee [`intel-i7-7700.yaml`](intel-i7-7700.yaml) for an example of Intel CPU target configuration.\n\n\n\n**You need ssh access to the remote target without password prompt ([setup ssh keys](https://www.digitalocean.com/community/tutorials/how-to-set-up-ssh-keys-2)).**\n\n\n\nThe following software is required to run benchmarks on a target:\n\n- POSIX shell\n\n- libpng and libjpeg\n\n- OpenCL 1.2+ (recommended: check your setup with `clinfo`)\n\n- a C/C++ compiler with C++14 support\n\n- OpenCV 4.3\n\n\n\n### Original Odroid XU4 configuration\n\n\n\n- [`cortex-a7.yaml`](cortex-a7.yaml)\n\n- [`cortex-a15.yaml`](cortex-a15.yaml)\n\n\n\nWhen benchmarking, we set the CPU frequency using the scripts in [`scripts/odroid-xu4/`](scripts/odroid-xu4/).\n\nThe above configuration files expect to find these scripts in the `~` directory\n\nof the target, and will run them with password-less `sudo`.\n\nYou can allow password-less `sudo` by adding the line `odroid ALL =(ALL) NOPASSWD: /home/odroid/perf_on_a15, /home/odroid/perf_on_a7, /home/odroid/perf_off` to `/etc/sudoers`.\n\nAlternatively, you can run these scripts manually before and after running the benchmarks.\n\n\n\nThe following software was used:\n\n- clang 8 from LLVM 8. Built from source.\n\n- [POCL](portablecl.org) 1.3 OpenCL implementation for the CPUs. Built from source along with LLVM 8.\n\n- [OpenCV](https://opencv.org/) 4.3.0 built from source with the following flags:\n\n```\n\ncmake -D CMAKE_BUILD_TYPE=RELEASE \\\n\n -D CMAKE_INSTALL_PREFIX=/usr/local \\\n\n -D ENABLE_NEON=ON \\\n\n -D ENABLE_VFPV3=ON \\\n\n -D WITH_OPENCL=ON \\\n\n -D WITH_JASPER=OFF \\\n\n -D BUILD_TESTS=OFF \\\n\n -D INSTALL_PYTHON_EXAMPLES=OFF \\\n\n -D BUILD_EXAMPLES=OFF ..\n\n```\n\n\n", "file_path": "README.md", "rank": 54, "score": 7.508067688028053 }, { "content": " CL_CONTEXT_PLATFORM, (cl_context_properties)(platform_id),\n\n 0\n\n };\n\n\n\n cl_int err;\n\n cl_context ctx = clCreateContext(ctx_props, 1, &device_id, NULL, NULL, &err);\n\n ocl_unwrap(err);\n\n\n\n // 2.0: clCreateCommandQueueWithProperties\n\n cl_command_queue queue = clCreateCommandQueue(ctx, device_id, CL_QUEUE_PROFILING_ENABLE, &err);\n\n ocl_unwrap(err);\n\n\n\n ocl->platform = platform_id;\n\n ocl->device = device_id;\n\n ocl->context = ctx;\n\n ocl->queue = queue;\n\n}\n\n\n\nvoid ocl_release(OCLExecutor* ocl) {\n\n ocl_error(clReleaseCommandQueue(ocl->queue));\n\n ocl_error(clReleaseContext(ocl->context));\n\n}\n\n\n", "file_path": "driver/cpp/ocl.cpp", "rank": 55, "score": 7.254633984204666 }, { "content": " cl_int ocl_err;\n\n ctx->input = clCreateBuffer(ocl->context, CL_MEM_READ_ONLY | CL_MEM_HOST_WRITE_ONLY,\n\n 3 * hi * w * sizeof(float), NULL, &ocl_err);\n\n ocl_unwrap(ocl_err);\n\n ctx->output = clCreateBuffer(ocl->context, CL_MEM_WRITE_ONLY | CL_MEM_HOST_READ_ONLY,\n\n ho * w * sizeof(float), NULL, &ocl_err);\n\n ocl_unwrap(ocl_err);\n\n\n\n size_t max_threads = ho / 32;\n\n size_t max_cbuf_size = 4 * (w + 8) * sizeof(float);\n\n ocl_create_compute_buffer(ocl, max_threads * max_cbuf_size, &ctx->cbuf1);\n\n ocl_create_compute_buffer(ocl, max_threads * max_cbuf_size, &ctx->cbuf2);\n\n ocl_create_compute_buffer(ocl, max_threads * max_cbuf_size, &ctx->cbuf3);\n\n}\n\n\n\nvoid destroy_context(OCLExecutor* ocl, ShineContext* ctx) {\n\n for (int i = 0; i < SHINE_VERSIONS; 
i++) {\n\n ocl_release_kernel(&ctx->harris[i]);\n\n }\n\n\n", "file_path": "driver/cpp/shine_harris.cpp", "rank": 56, "score": 7.0191392934431605 }, { "content": " variant(CL_INVALID_DEVICE_TYPE)\n\n variant(CL_INVALID_PLATFORM)\n\n variant(CL_INVALID_DEVICE)\n\n variant(CL_INVALID_CONTEXT)\n\n variant(CL_INVALID_QUEUE_PROPERTIES)\n\n variant(CL_INVALID_COMMAND_QUEUE)\n\n variant(CL_INVALID_HOST_PTR)\n\n variant(CL_INVALID_MEM_OBJECT)\n\n variant(CL_INVALID_IMAGE_DESCRIPTOR)\n\n variant(CL_INVALID_IMAGE_SIZE)\n\n variant(CL_INVALID_SAMPLER)\n\n variant(CL_INVALID_BINARY)\n\n variant(CL_INVALID_BUILD_OPTIONS)\n\n variant(CL_INVALID_PROGRAM)\n\n variant(CL_INVALID_PROGRAM_EXECUTABLE)\n\n variant(CL_INVALID_KERNEL_NAME)\n\n variant(CL_INVALID_KERNEL_DEFINITION)\n\n variant(CL_INVALID_KERNEL)\n\n variant(CL_INVALID_ARG_INDEX)\n\n variant(CL_INVALID_ARG_VALUE)\n", "file_path": "driver/cpp/ocl.cpp", "rank": 57, "score": 6.316943967082604 }, { "content": "#include <cstdlib>\n\n#include <cstdio>\n\n#include <cstdint>\n\n#include <cstring>\n\n#include <memory>\n\n\n\n#include <vector>\n\n\n\n#define CL_TARGET_OPENCL_VERSION 120\n\n#define CL_USE_DEPRECATED_OPENCL_1_2_APIS\n\n#ifdef __APPLE__\n\n #include \"OpenCL/opencl.h\"\n\n#else\n\n #include \"CL/cl.h\"\n\n#endif\n\n\n\nconst char* ocl_error_to_string(cl_int error) {\n\n switch (error) {\n\n#define variant(x) case x: return #x;\n\n variant(CL_SUCCESS)\n", "file_path": "driver/cpp/ocl.cpp", "rank": 58, "score": 6.283153554278487 }, { "content": " upload_file(&halide_path.join(\"tools\").join(\"halide_image_io.h\"))\n\n .log(log, env).expect(\"could not upload Halide headers\");\n\n upload_file(&halide_path.join(\"apps\").join(\"images\").join(\"rgb.png\"))\n\n .log(log, env).expect(\"could not upload Halide images\");\n\n\n\n upload_file(&polymage_path.join(\"images\").join(\"venice_wikimedia.jpg\"))\n\n .log(log, env).expect(\"could not upload Polymage images\");\n\n\n\n let halide_harris = halide_path.join(\"apps\").join(\"harris\");\n\n upload_file_to(&halide_harris.join(\"bin\").join(&env.target.halide),\n\n \"halide-gen\")\n\n .log(log, env).expect(\"could not upload harris files\");\n\n\n\n let rise_n_shine_path = env.lib.join(\"harris-rise-and-shine\");\n\n let gen_path = rise_n_shine_path.join(\"gen\")\n\n .join(format!(\"vec{}\", env.target.vector_width));\n\n upload_file_to(&gen_path, \"shine-gen\")\n\n .log(log, env).expect(\"could not upload Rise OpenCL kernels\");\n\n\n\n upload_file(Path::new(\"lift-gen\"))\n", "file_path": "driver/src/benchmark.rs", "rank": 60, "score": 6.063452234787749 }, { "content": " 3 * hi * w * sizeof(float), NULL, &ocl_err);\n\n ocl_unwrap(ocl_err);\n\n ctx->output = clCreateBuffer(ocl->context, CL_MEM_WRITE_ONLY | CL_MEM_HOST_READ_ONLY,\n\n ho * w * sizeof(float), NULL, &ocl_err);\n\n ocl_unwrap(ocl_err);\n\n\n\n size_t max_buf_size = hi * w * sizeof(float);\n\n for (int i = 0; i < HARRIS_BUFS; i++) {\n\n ocl_create_compute_buffer(ocl, max_buf_size, &ctx->bufs[i]);\n\n }\n\n}\n\n\n\nvoid destroy_lift_context(OCLExecutor* ocl, LiftContext* ctx) {\n\n for (int i = 0; i < HARRIS_KERNELS; i++) {\n\n ocl_release_kernel(&ctx->kernels[i]);\n\n }\n\n\n\n ocl_release_mem(ctx->input);\n\n ocl_release_mem(ctx->output);\n\n for (int i = 0; i < HARRIS_BUFS; i++) {\n", "file_path": "driver/cpp/lift_harris.cpp", "rank": 61, "score": 6.017214612810452 }, { "content": " const char* output_path = argv[5];\n\n\n\n Buffer<float> output1(input.width() - 4, input.height() - 4);\n\n output1.set_min(2, 2);\n\n 
std::vector<double> sample_vec;\n\n\n\n for (int i = 0; i < timing_iterations; i++) {\n\n auto start = Clock::now();\n\n int h_error = harris(input, output1);\n\n output1.device_sync();\n\n auto stop = Clock::now();\n\n\n\n if (h_error) {\n\n fprintf(stderr, \"halide returned an error: %d\\n\", h_error);\n\n exit(EXIT_FAILURE);\n\n }\n\n\n\n sample_vec.push_back(std::chrono::duration<double, std::milli>(stop - start).count());\n\n }\n\n\n", "file_path": "driver/cpp/harris.cpp", "rank": 62, "score": 6.003798742954915 }, { "content": "use crate::*;\n\n\n", "file_path": "driver/src/codegen.rs", "rank": 63, "score": 5.99825063639801 }, { "content": "use crate::*;\n\n\n", "file_path": "driver/src/benchmark.rs", "rank": 64, "score": 5.99825063639801 }, { "content": "use crate::*;\n\n\n", "file_path": "driver/src/collect.rs", "rank": 65, "score": 5.99825063639801 }, { "content": "# Artifact benchmarking the Harris operator on mobile CPUs for CGO 2021\n\n\n\nThis repository presents the artifact to supplement the paper\n\n*Towards a Domain Extensible Compiler: optimizing an image processing pipeline on mobile CPUs*\n\nto be presented at the [International Symposium on Code Generation and Optimization](https://conf.researchr.org/home/cgo-2021)\n\nin 2021.\n\n\n\nThis artifact contains the source code used to produce the performance results presented in the paper.\n\nThe host computer drives benchmarks on multiple target processors over ssh.\n\nWe recommend using an X86 Linux machine for the host, and Linux targets.\n\nTo fully reproduce the results reported in Figures 1 and 8, you will need access to ARM Cortex A7, A15, A53 and A73 processors\n\n(we used Odroid XU4 and Odroid N2 boards for the paper).\n\nOther OpenCL-enabled processors can be used, but expect different performance behavior.\n\n\n\nIf you are an artifact evaluator, we are working on providing you access to our own Odroid XU4 and Odroid N2 boards for convenience and will get back to you with access instructions.\n\n\n\n## Reproducing the paper results\n\n\n\nFollow these steps to reproduce the paper results:\n\n1. Install host dependencies\n\n2. Clone this repository on the host\n\n3. Use the Halide and Rise compilers to generate binaries and OpenCL kernels for each target (e.g. cortex-a7, cortex-a15, cortex-a53, cortex-a73)\n\n4. Configure each target\n\n5. Reproduce the performance results by running benchmarks for each target\n\n6. 
Plot figure 1 and 8\n\n\n\nExcluding dependency installation and target configuration, these steps should be feasible in one or two hours.\n\nThe following sections provide more details for every step.\n\n\n", "file_path": "README.md", "rank": 66, "score": 5.973693907166645 }, { "content": " // output2.device_sync();\n\n // auto stop = Clock::now();\n\n\n\n if (h_error) {\n\n fprintf(stderr, \"halide returned an error: %d\\n\", h_error);\n\n exit(EXIT_FAILURE);\n\n }\n\n\n\n // sample_vec.push_back(std::chrono::duration<double, std::milli>(stop - start).count());\n\n }\n\n\n\n output2.copy_to_host();\n\n\n\n // TimeStats t_stats2 = time_stats(sample_vec);\n\n // printf(\"%dx%d Halide auto %.2lf %.2lf %.2lf\\n\", input.dim(0).extent(), input.dim(1).extent(), t_stats2.median_ms, t_stats2.min_ms, t_stats2.max_ms);\n\n\n\n error_stats(output1.data(), output2.data(), output1.height() * output1.width(), 0.01, 100);\n\n\n\n ////\n\n\n", "file_path": "driver/cpp/harris.cpp", "rank": 67, "score": 5.5813423965210776 }, { "content": " output1.copy_to_host();\n\n fprintf(stderr, \"output: %s\\n\", output_path);\n\n convert_and_save_image(output1, output_path);\n\n for (int i = 0; i < output1.dimensions(); i++) {\n\n fprintf(stderr, \" %d from %d by %d\\n\", output1.dim(i).extent(), output1.dim(i).min(), output1.dim(i).stride());\n\n }\n\n\n\n TimeStats t_stats1 = time_stats(sample_vec);\n\n printf(\"%dx%d Halide halide %.2lf %.2lf %.2lf\\n\", input.dim(0).extent(), input.dim(1).extent(), t_stats1.median_ms, t_stats1.min_ms, t_stats1.max_ms);\n\n\n\n ////\n\n\n\n Buffer<float> output2(output1.width(), output1.height());\n\n output2.set_min(2, 2);\n\n sample_vec.clear();\n\n\n\n {\n\n // for (int i = 0; i < timing_iterations; i++) {\n\n // auto start = Clock::now();\n\n int h_error = harris_auto_schedule(input, output2);\n", "file_path": "driver/cpp/harris.cpp", "rank": 68, "score": 4.606976866367976 }, { "content": " sample_vec.clear();\n\n\n\n // rounding up output lines to 32\n\n size_t h = input.height();\n\n size_t w = input.width();\n\n size_t ho = (((h - 4) + 31) / 32) * 32;\n\n size_t hi = ho + 4;\n\n\n\n output2.fill(0);\n\n sample_vec.clear();\n\n cv::ocl::setUseOpenCL(false);\n\n\n\n cv::Mat cv_in(h, w, CV_32FC3);\n\n cv::Mat cv_gray(h, w, CV_32F);\n\n cv::Mat cv_out(h, w, CV_32F);\n\n\n\n for (int c = 0; c < 3; c++) {\n\n for (int y = 0; y < input.height(); y++) {\n\n for (int x = 0; x < input.width(); x++) {\n\n int i = ((c * input.height() + y) * input.width()) + x;\n", "file_path": "driver/cpp/harris.cpp", "rank": 69, "score": 4.006045041461002 }, { "content": "\n\n double mse = square_sum / n;\n\n double range = (double)(max_g) - (double)(min_g);\n\n double psnr = 10.0 * log10((range * range) / mse);\n\n min /= range;\n\n max /= range;\n\n\n\n fprintf(stderr, \"error stats: [%.3lf - %.3lf]*(%.4lf) with %.2lf PSNR\\n\", min, max, range, psnr);\n\n if (max > tolerated_per_pixel || psnr < required_psnr) {\n\n fprintf(stderr, \"maximum tolerated error of %.4f per pixel, and minimum PSNR of %.2f\\n\",\n\n tolerated_per_pixel, required_psnr);\n\n exit(EXIT_FAILURE);\n\n }\n\n}\n\n\n", "file_path": "driver/cpp/stats.hpp", "rank": 70, "score": 3.9000774612802207 }, { "content": " variant(CL_INVALID_ARG_SIZE)\n\n variant(CL_INVALID_KERNEL_ARGS)\n\n variant(CL_INVALID_WORK_DIMENSION)\n\n variant(CL_INVALID_WORK_GROUP_SIZE)\n\n variant(CL_INVALID_WORK_ITEM_SIZE)\n\n variant(CL_INVALID_GLOBAL_OFFSET)\n\n variant(CL_INVALID_EVENT_WAIT_LIST)\n\n variant(CL_INVALID_EVENT)\n\n 
variant(CL_INVALID_OPERATION)\n\n variant(CL_INVALID_BUFFER_SIZE)\n\n variant(CL_INVALID_GLOBAL_WORK_SIZE)\n\n variant(CL_INVALID_PROPERTY)\n\n variant(CL_INVALID_COMPILER_OPTIONS)\n\n variant(CL_INVALID_LINKER_OPTIONS)\n\n variant(CL_INVALID_DEVICE_PARTITION_COUNT)\n\n#undef variant\n\n default: return \"UNKNOWN CL ERROR\";\n\n }\n\n}\n\n\n", "file_path": "driver/cpp/ocl.cpp", "rank": 71, "score": 3.2795638265937628 }, { "content": "\n\n std::vector<size_t> local_work_size = { 1 };\n\n std::vector<size_t> global_work_size = { ho / 32 };\n\n\n\n size_t origin[3] = { 0 };\n\n {\n\n size_t region[3] = { w * sizeof(float), h, 3 };\n\n size_t buffer_slice_pitch = hi * w * sizeof(float);\n\n size_t host_slice_pitch = h * w * sizeof(float);\n\n ocl_unwrap(clEnqueueWriteBufferRect(ocl->queue, ctx->input, false,\n\n origin, origin, region,\n\n 0, buffer_slice_pitch, 0, host_slice_pitch,\n\n input, 0, NULL, NULL));\n\n }\n\n\n\n // gray\n\n ocl_set_kernel_args(ctx->kernels[0], {\n\n ocl_data<cl_mem>(ctx->bufs[0]), ocl_data<cl_int>(ho), ocl_data<cl_int>(w), ocl_data<cl_mem>(ctx->input)\n\n });\n\n cl_event first_event = ocl_enqueue_kernel(ocl, &ctx->kernels[0], global_work_size, local_work_size);\n", "file_path": "driver/cpp/lift_harris.cpp", "rank": 72, "score": 2.972824100678607 }, { "content": "\n\n#include <opencv2/imgproc.hpp>\n\n#include <opencv2/core/ocl.hpp>\n\n\n\nint main(int argc, char **argv) {\n\n if (argc != 6) {\n\n fprintf(stderr, \"usage: %s rgba.png platform_subname device_type timing_iterations output.png\\n\", argv[0]);\n\n return EXIT_FAILURE;\n\n }\n\n\n\n fprintf(stderr, \"input: %s\\n\", argv[1]);\n\n Buffer<float> input = load_and_convert_image(argv[1]);\n\n for (int i = 0; i < input.dimensions(); i++) {\n\n fprintf(stderr, \" %d from %d by %d\\n\", input.dim(i).extent(), input.dim(i).min(), input.dim(i).stride());\n\n }\n\n\n\n OCLExecutor ocl;\n\n ocl_init(&ocl, argv[2], argv[3]);\n\n\n\n int timing_iterations = atoi(argv[4]);\n", "file_path": "driver/cpp/harris.cpp", "rank": 73, "score": 2.9578462454095757 }, { "content": " ocl_set_kernel_args(ctx->kernels[4], {\n\n ocl_data<cl_mem>(ctx->bufs[8]), ocl_data<cl_int>(ho), ocl_data<cl_int>(w), ocl_data<cl_mem>(ctx->bufs[5])\n\n });\n\n ocl_enqueue_kernel(ocl, &ctx->kernels[4], global_work_size, local_work_size);\n\n\n\n // coarsity\n\n ocl_set_kernel_args(ctx->kernels[5], {\n\n ocl_data<cl_mem>(ctx->output), ocl_data<cl_int>(ho), ocl_data<cl_int>(w), ocl_data<cl_mem>(ctx->bufs[6]), ocl_data<cl_mem>(ctx->bufs[7]), ocl_data<cl_mem>(ctx->bufs[8])\n\n });\n\n cl_event last_event = ocl_enqueue_kernel(ocl, &ctx->kernels[5], global_work_size, local_work_size);\n\n\n\n\n\n {\n\n size_t region[3] = { (w-4) * sizeof(float), (h-4), 1 };\n\n size_t buffer_row_pitch = w * sizeof(float);\n\n size_t host_row_pitch = (w-4) * sizeof(float);\n\n ocl_unwrap(clEnqueueReadBufferRect(ocl->queue, ctx->output, false,\n\n origin, origin, region,\n\n buffer_row_pitch, 0, host_row_pitch, 0,\n\n output, 0, NULL, NULL));\n\n }\n\n ocl_unwrap(clFinish(ocl->queue));\n\n\n\n return std::make_pair(first_event, last_event);\n\n}", "file_path": "driver/cpp/lift_harris.cpp", "rank": 74, "score": 2.954051309595844 }, { "content": "This artifact consists of software components which have their own licenses:\n\n- Rise & Shine in the folder `lib/shine`\n\n- Halide in the folder `lib/halide`\n\n\n\nThe remaining files are licensed under the MIT license shown below.\n\n\n\n----\n\n\n\nMIT License\n\n\n\nCopyright (c) 2020 Thomas Koehler, Michel 
Steuwer\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\nSOFTWARE.\n", "file_path": "LICENSE.md", "rank": 75, "score": 2.9007274641663527 }, { "content": " // rounding up output lines to 32\n\n size_t ho = (((h - 4) + 31) / 32) * 32;\n\n size_t hi = ho + 4;\n\n\n\n std::vector<size_t> local_work_size = { 1 };\n\n std::vector<size_t> global_work_size = { 1 };\n\n //if (version >= 2) {\n\n global_work_size = { ho / 32 };\n\n //}\n\n\n\n ocl_set_kernel_args(ctx->harris[version], {\n\n ocl_data<cl_mem>(ctx->output), ocl_data<cl_int>(ho), ocl_data<cl_int>(w), ocl_data<cl_mem>(ctx->input),\n\n ocl_data<cl_mem>(ctx->cbuf3), ocl_data<cl_mem>(ctx->cbuf2), ocl_data<cl_mem>(ctx->cbuf1)\n\n });\n\n\n\n size_t origin[3] = { 0 };\n\n {\n\n size_t region[3] = { w * sizeof(float), h, 3 };\n\n size_t buffer_slice_pitch = hi * w * sizeof(float);\n\n size_t host_slice_pitch = h * w * sizeof(float);\n", "file_path": "driver/cpp/shine_harris.cpp", "rank": 76, "score": 2.495267736697935 }, { "content": " cl_uint platform_count = 0;\n\n ocl_unwrap(clGetPlatformIDs(platform_entries, platform_ids, &platform_count));\n\n if (platform_count > platform_entries) {\n\n fprintf(stderr, \"did not expected that many OpenCL platforms (%u)\\n\", platform_count);\n\n platform_count = platform_entries;\n\n }\n\n\n\n cl_platform_id platform_id = find_platform(platform_count, platform_ids, platform_subname);\n\n\n\n const cl_uint device_entries = 1;\n\n cl_uint device_count = 0;\n\n cl_device_id device_id;\n\n ocl_unwrap(clGetDeviceIDs(platform_id, device_type, device_entries, &device_id, &device_count));\n\n if (device_count == 0) {\n\n fprintf(stderr, \"did not find any OpenCL device\\n\");\n\n exit(EXIT_FAILURE);\n\n }\n\n // fprintf(stderr, \"OpenCL device: %u\\n\", device_index);\n\n\n\n const cl_context_properties ctx_props[] = {\n", "file_path": "driver/cpp/ocl.cpp", "rank": 77, "score": 2.444023837989168 }, { "content": "#include <cstdlib>\n\n#include <cstdio>\n\n#include <vector>\n\n\n\n#include \"ocl.cpp\"\n\n#include \"shine_harris.cpp\"\n\n#include \"lift_harris.cpp\"\n\n// #include \"harrisNeon.hpp\"\n\n\n\n#include \"time.hpp\"\n\n#include \"stats.hpp\"\n\n\n\n#include \"harris.h\"\n\n#include \"harris_auto_schedule.h\"\n\n\n\n#include \"HalideBuffer.h\"\n\n#include \"halide_image_io.h\"\n\n\n\nusing namespace Halide::Runtime;\n\nusing namespace Halide::Tools;\n", "file_path": "driver/cpp/harris.cpp", "rank": 78, "score": 2.3922740786700305 }, { "content": " ocl_unwrap(clEnqueueWriteBufferRect(ocl->queue, 
ctx->input, false,\n\n origin, origin, region,\n\n 0, buffer_slice_pitch, 0, host_slice_pitch,\n\n input, 0, NULL, NULL));\n\n }\n\n cl_event ev = ocl_enqueue_kernel(ocl, &ctx->harris[version], global_work_size, local_work_size);\n\n {\n\n size_t region[3] = { (w-4) * sizeof(float), (h-4), 1 };\n\n size_t buffer_row_pitch = w * sizeof(float);\n\n size_t host_row_pitch = (w-4) * sizeof(float);\n\n ocl_unwrap(clEnqueueReadBufferRect(ocl->queue, ctx->output, false,\n\n origin, origin, region,\n\n buffer_row_pitch, 0, host_row_pitch, 0,\n\n output, 0, NULL, NULL));\n\n }\n\n ocl_unwrap(clFinish(ocl->queue));\n\n return ev;\n\n}", "file_path": "driver/cpp/shine_harris.cpp", "rank": 79, "score": 2.3060367188186346 }, { "content": "\n\n // sobelX\n\n ocl_set_kernel_args(ctx->kernels[1], {\n\n ocl_data<cl_mem>(ctx->bufs[1]), ocl_data<cl_int>(ho), ocl_data<cl_int>(w), ocl_data<cl_mem>(ctx->bufs[0])\n\n });\n\n ocl_enqueue_kernel(ocl, &ctx->kernels[1], global_work_size, local_work_size);\n\n\n\n // sobelY\n\n ocl_set_kernel_args(ctx->kernels[2], {\n\n ocl_data<cl_mem>(ctx->bufs[2]), ocl_data<cl_int>(ho), ocl_data<cl_int>(w), ocl_data<cl_mem>(ctx->bufs[0])\n\n });\n\n ocl_enqueue_kernel(ocl, &ctx->kernels[2], global_work_size, local_work_size);\n\n\n\n // mul\n\n ocl_set_kernel_args(ctx->kernels[3], {\n\n ocl_data<cl_mem>(ctx->bufs[3]), ocl_data<cl_int>(ho), ocl_data<cl_int>(w), ocl_data<cl_mem>(ctx->bufs[1]), ocl_data<cl_mem>(ctx->bufs[1])\n\n });\n\n ocl_enqueue_kernel(ocl, &ctx->kernels[3], global_work_size, local_work_size);\n\n\n\n ocl_set_kernel_args(ctx->kernels[3], {\n", "file_path": "driver/cpp/lift_harris.cpp", "rank": 80, "score": 2.2399526847256728 }, { "content": " ocl_data<cl_mem>(ctx->bufs[4]), ocl_data<cl_int>(ho), ocl_data<cl_int>(w), ocl_data<cl_mem>(ctx->bufs[1]), ocl_data<cl_mem>(ctx->bufs[2])\n\n });\n\n ocl_enqueue_kernel(ocl, &ctx->kernels[3], global_work_size, local_work_size);\n\n\n\n ocl_set_kernel_args(ctx->kernels[3], {\n\n ocl_data<cl_mem>(ctx->bufs[5]), ocl_data<cl_int>(ho), ocl_data<cl_int>(w), ocl_data<cl_mem>(ctx->bufs[2]), ocl_data<cl_mem>(ctx->bufs[2])\n\n });\n\n ocl_enqueue_kernel(ocl, &ctx->kernels[3], global_work_size, local_work_size);\n\n\n\n // sum3x3\n\n ocl_set_kernel_args(ctx->kernels[4], {\n\n ocl_data<cl_mem>(ctx->bufs[6]), ocl_data<cl_int>(ho), ocl_data<cl_int>(w), ocl_data<cl_mem>(ctx->bufs[3])\n\n });\n\n ocl_enqueue_kernel(ocl, &ctx->kernels[4], global_work_size, local_work_size);\n\n\n\n ocl_set_kernel_args(ctx->kernels[4], {\n\n ocl_data<cl_mem>(ctx->bufs[7]), ocl_data<cl_int>(ho), ocl_data<cl_int>(w), ocl_data<cl_mem>(ctx->bufs[4])\n\n });\n\n ocl_enqueue_kernel(ocl, &ctx->kernels[4], global_work_size, local_work_size);\n\n\n", "file_path": "driver/cpp/lift_harris.cpp", "rank": 81, "score": 1.7817980358262187 }, { "content": "#include <cmath>\n\n#include <algorithm>\n\n#include <vector>\n\n\n\nvoid error_stats(float* gold, float* other, size_t n, double tolerated_per_pixel, double required_psnr) {\n\n double square_sum = 0.f;\n\n double min = 1.f / 0.f;\n\n double max = 0.f;\n\n float min_g = 1.f / 0.f;\n\n float max_g = -min_g;\n\n\n\n for (int i = 0; i < n; i++) {\n\n min_g = std::min(min_g, gold[i]);\n\n max_g = std::max(max_g, gold[i]);\n\n double delta = (double)(gold[i]) - (double)(other[i]);\n\n double d_abs = std::abs(delta);\n\n min = std::min(min, d_abs);\n\n max = std::max(max, d_abs);\n\n square_sum += d_abs * d_abs;\n\n }\n", "file_path": "driver/cpp/stats.hpp", "rank": 82, "score": 1.4681290953510886 }, { "content": " TimeStats 
t_stats3 = ocl_time_stats(ocl_sample_vec);\n\n printf(\"%dx%d Rise %s %.2lf %.2lf %.2lf\\n\", input.dim(0).extent(), input.dim(1).extent(), SHINE_NAMES[version],\n\n t_stats3.median_ms, t_stats3.min_ms, t_stats3.max_ms);\n\n error_stats(output1.data(), output2.data(), output1.height() * output1.width(), 0.01, 100);\n\n }\n\n\n\n destroy_context(&ocl, &ctx);\n\n\n\n ////\n\n\n\n ocl_sample_vec.clear();\n\n // wipe out any previous results for correctness check\n\n output2.fill(0);\n\n\n\n LiftContext lift_ctx;\n\n init_lift_context(&ocl, &lift_ctx, input.height(), input.width());\n\n\n\n for (int i = 0; i < timing_iterations; i++) {\n\n auto events = lift_harris(&ocl, &lift_ctx,\n\n output2.data(), input.height(), input.width(), input.data());\n", "file_path": "driver/cpp/harris.cpp", "rank": 83, "score": 1.247694330366787 }, { "content": "\n\n cl_ulong start, stop;\n\n ocl_unwrap(clGetEventProfilingInfo(events.first, CL_PROFILING_COMMAND_START, sizeof(cl_ulong), &start, NULL));\n\n ocl_unwrap(clGetEventProfilingInfo(events.second, CL_PROFILING_COMMAND_END, sizeof(cl_ulong), &stop, NULL));\n\n ocl_sample_vec.push_back(stop - start);\n\n }\n\n\n\n TimeStats t_stats3 = ocl_time_stats(ocl_sample_vec);\n\n printf(\"%dx%d Lift lift %.2lf %.2lf %.2lf\\n\", input.dim(0).extent(), input.dim(1).extent(),\n\n t_stats3.median_ms, t_stats3.min_ms, t_stats3.max_ms);\n\n error_stats(output1.data(), output2.data(), output1.height() * output1.width(), 0.01, 100);\n\n\n\n destroy_lift_context(&ocl, &lift_ctx);\n\n\n\n ////\n\n\n\n ocl_release(&ocl);\n\n\n\n ////\n\n output2.fill(0);\n", "file_path": "driver/cpp/harris.cpp", "rank": 84, "score": 1.1784564063070708 }, { "content": " cv_in.at<cv::Vec3f>(y, x)[c] = input.data()[i];\n\n }\n\n }\n\n }\n\n\n\n for (int i = 0; i < timing_iterations; i++) {\n\n auto start = Clock::now();\n\n cv::cvtColor(cv_in, cv_gray, cv::COLOR_RGB2GRAY);\n\n cv::cornerHarris(cv_gray, cv_out, 3, 3, 0.04, cv::BORDER_ISOLATED);\n\n auto stop = Clock::now();\n\n\n\n sample_vec.push_back(std::chrono::duration<double, std::milli>(stop - start).count());\n\n }\n\n\n\n for (int y = 0; y < output2.height(); y++) {\n\n for (int x = 0; x < output2.width(); x++) {\n\n int i = (y * output2.width()) + x;\n\n output2.data()[i] = cv_out.at<float>(y + 2, x + 2);\n\n }\n\n }\n\n\n\n TimeStats t_stats5 = time_stats(sample_vec);\n\n printf(\"%dx%d OpenCV opencv %.2lf %.2lf %.2lf\\n\", input.dim(0).extent(), input.dim(1).extent(), t_stats5.median_ms, t_stats5.min_ms, t_stats5.max_ms);\n\n\n\n error_stats(output1.data(), output2.data(), output1.height() * output1.width(), 0.01, 100);\n\n\n\n return EXIT_SUCCESS;\n\n}\n", "file_path": "driver/cpp/harris.cpp", "rank": 85, "score": 1.157053728782441 } ]
Rust
mm0-rs/src/parser/ast.rs
mattsse/mm0
247fd3e2ac65eec7d5317285bb2fee639b17a63f
use std::sync::Arc; use std::fmt::{self, Display}; use num::BigUint; use crate::lined_string::LinedString; use crate::util::{Span, ArcString}; use crate::elab::lisp::print::{EnvDisplay, FormatEnv}; use super::ParseError; bitflags! { pub struct Modifiers: u8 { const PURE = 1; const STRICT = 2; const PROVABLE = 4; const FREE = 8; const PUB = 16; const ABSTRACT = 32; const LOCAL = 64; } } impl Modifiers { pub const NONE: Modifiers = Self::empty(); pub fn sort_data() -> Modifiers { Modifiers::PURE | Modifiers::STRICT | Modifiers::PROVABLE | Modifiers::FREE } pub fn allowed_visibility(self, k: DeclKind) -> bool { match k { DeclKind::Term => self.is_empty(), DeclKind::Axiom => self.is_empty(), DeclKind::Def => self == Modifiers::ABSTRACT || self == Modifiers::LOCAL || self.is_empty(), DeclKind::Thm => self == Modifiers::PUB || self.is_empty(), } } pub fn from_name(s: &str) -> Option<Modifiers> { match s { "pure" => Some(Modifiers::PURE), "strict" => Some(Modifiers::STRICT), "provable" => Some(Modifiers::PROVABLE), "free" => Some(Modifiers::FREE), "pub" => Some(Modifiers::PUB), "abstract" => Some(Modifiers::ABSTRACT), "local" => Some(Modifiers::LOCAL), _ => None } } } impl Display for Modifiers { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if self.contains(Modifiers::PURE) {write!(f, "pure ")?} if self.contains(Modifiers::STRICT) {write!(f, "strict ")?} if self.contains(Modifiers::PROVABLE) {write!(f, "provable ")?} if self.contains(Modifiers::FREE) {write!(f, "free ")?} if self.contains(Modifiers::PUB) {write!(f, "pub ")?} if self.contains(Modifiers::ABSTRACT) {write!(f, "abstract ")?} if self.contains(Modifiers::LOCAL) {write!(f, "local ")?} Ok(()) } } #[derive(Clone)] pub enum Delimiter { Both(Box<[u8]>), LeftRight(Box<[u8]>, Box<[u8]>), } #[derive(Copy, Clone, Debug)] pub struct Formula(pub Span); impl Formula { pub fn inner(&self) -> Span { (self.0.start + 1 .. 
self.0.end - 1).into() } } #[derive(Clone)] pub struct Const { pub fmla: Formula, pub trim: Span } #[derive(Clone, Copy, PartialEq, Eq)] pub enum DeclKind { Term, Axiom, Thm, Def } #[derive(Clone, Copy, PartialEq, Eq, Debug)] pub enum LocalKind { Bound, Reg, Dummy, Anon } impl LocalKind { pub fn is_bound(self) -> bool { match self { LocalKind::Bound | LocalKind::Dummy => true, LocalKind::Reg | LocalKind::Anon => false, } } } #[derive(Clone, Debug)] pub struct DepType { pub sort: Span, pub deps: Vec<Span>, } impl DepType { pub fn span(&self) -> Span { (self.sort.start..self.deps.last().unwrap_or(&self.sort).end).into() } } #[derive(Clone, Debug)] pub enum Type { DepType(DepType), Formula(Formula) } impl Type { pub fn span(&self) -> Span { match self { Type::DepType(d) => d.span(), Type::Formula(f) => f.0 } } } #[derive(Clone, Debug)] pub struct Binder { pub span: Span, pub local: Option<Span>, pub kind: LocalKind, pub ty: Option<Type>, } #[derive(Clone, Debug)] pub struct SExpr { pub span: Span, pub k: SExprKind, } #[derive(Copy, Clone, Debug)] pub enum Atom { Ident, Quote, Unquote, Nfx } #[derive(Clone, Debug)] pub enum SExprKind { Atom(Atom), List(Vec<SExpr>), DottedList(Vec<SExpr>, Box<SExpr>), Number(BigUint), String(ArcString), Bool(bool), Formula(Formula), } pub fn curly_transform<T>(es: &mut Vec<T>, no_dot: bool, eq: impl Fn(&T, &T) -> bool, nfx: impl FnOnce() -> T) { let n = es.len(); if n > 2 { let valid_curly = no_dot && n % 2 != 0 && { let e = &es[1]; (3..n).step_by(2).all(|i| eq(&es[i], e)) }; if valid_curly { es.swap(0, 1); let mut from = 4; let mut to = 3; while from < n { es.swap(from, to); to += 1; from += 2; } es.truncate(to); } else { es.insert(0, nfx()); } } } impl SExpr { pub fn atom(span: impl Into<Span>, a: Atom) -> SExpr { SExpr {span: span.into(), k: SExprKind::Atom(a)} } pub fn list(span: impl Into<Span>, es: Vec<SExpr>) -> SExpr { SExpr {span: span.into(), k: SExprKind::List(es)} } pub fn dotted_list(span: impl Into<Span>, mut es: Vec<SExpr>, dot: Option<SExpr>) -> SExpr { match dot { None => SExpr {span: span.into(), k: SExprKind::List(es)}, Some(e) => match e.k { SExprKind::DottedList(es2, e2) => { es.extend(es2); SExpr {span: span.into(), k: SExprKind::DottedList(es, e2)} } SExprKind::List(es2) => { es.extend(es2); SExpr::list(span, es) } _ => SExpr {span: span.into(), k: SExprKind::DottedList(es, Box::new(e))} } } } pub fn curly_list(span: Span, curly: bool, mut es: Vec<SExpr>, dot: Option<SExpr>, eq: impl Fn(&SExpr, &SExpr) -> bool) -> SExpr { if curly { curly_transform(&mut es, dot.is_none(), eq, || SExpr::atom(span.start..span.start+1, Atom::Nfx)) } Self::dotted_list(span, es, dot) } } impl EnvDisplay for SExpr { fn fmt(&self, fe: FormatEnv, f: &mut fmt::Formatter) -> fmt::Result { match &self.k { &SExprKind::Atom(a) => fe.source.span_atom(self.span, a).fmt(f), SExprKind::List(es) => { let mut it = es.iter(); match it.next() { None => "()".fmt(f), Some(e) => { write!(f, "({}", fe.to(e))?; for e in it {write!(f, " {}", fe.to(e))?} ")".fmt(f) } } } SExprKind::DottedList(es, r) => { "(".fmt(f)?; for e in es {write!(f, "{} ", fe.to(e))?} write!(f, ". 
{})", fe.to(r)) } SExprKind::Number(n) => n.fmt(f), SExprKind::String(s) => write!(f, "{:?}", s), SExprKind::Bool(true) => "#t".fmt(f), SExprKind::Bool(false) => "#f".fmt(f), SExprKind::Formula(s) => fe.source[s.0].fmt(f), } } } #[derive(Clone)] pub struct Decl { pub mods: Modifiers, pub k: DeclKind, pub id: Span, pub bis: Vec<Binder>, pub ty: Option<Type>, pub val: Option<SExpr>, } #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)] pub enum Prec { Prec(u32), Max } impl fmt::Display for Prec { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { &Prec::Prec(p) => p.fmt(f), &Prec::Max => "max".fmt(f) } } } impl fmt::Debug for Prec { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(self, f) } } #[derive(Clone)] pub enum SimpleNotaKind { Prefix, Infix {right: bool} } #[derive(Clone)] pub struct SimpleNota { pub k: SimpleNotaKind, pub id: Span, pub c: Const, pub prec: Prec, } #[derive(Clone)] pub enum Literal { Const(Const, Prec), Var(Span), } #[derive(Clone)] pub struct GenNota { pub id: Span, pub bis: Vec<Binder>, pub ty: Option<Type>, pub lits: Vec<Literal>, pub prec: Option<(Prec, bool)> } #[derive(Clone)] pub enum StmtKind { Sort(Span, Modifiers), Decl(Decl), Delimiter(Delimiter), SimpleNota(SimpleNota), Coercion { id: Span, from: Span, to: Span }, Notation(GenNota), Inout { out: bool, k: Span, hs: Vec<SExpr> }, Annot(SExpr, Box<Stmt>), Do(Vec<SExpr>), Import(Span, String), } #[derive(Clone)] pub struct Stmt { pub span: Span, pub k: StmtKind, } pub struct AST { pub source: Arc<LinedString>, pub imports: Vec<(Span, String)>, pub stmts: Vec<Stmt>, pub errors: Vec<ParseError>, } impl LinedString { pub fn span_atom(&self, sp: Span, a: Atom) -> &str { match a { Atom::Ident => &self[sp], Atom::Quote => "quote", Atom::Unquote => "unquote", Atom::Nfx => ":nfx", } } } impl AST { pub fn span(&self, s: Span) -> &str { &self.source[s] } pub fn span_atom(&self, sp: Span, a: Atom) -> &str { self.source.span_atom(sp, a) } pub fn last_checkpoint(&self, pos: usize) -> (usize, usize) { match self.stmts.binary_search_by_key(&pos, |stmt| stmt.span.end) { Ok(i) => (i+1, pos), Err(0) => (0, 0), Err(i) => (i, self.stmts[i-1].span.end) } } }
use std::sync::Arc; use std::fmt::{self, Display}; use num::BigUint; use crate::lined_string::LinedString; use crate::util::{Span, ArcString}; use crate::elab::lisp::print::{EnvDisplay, FormatEnv}; use super::ParseError; bitflags! { pub struct Modifiers: u8 { const PURE = 1; const STRICT = 2; const PROVABLE = 4; const FREE = 8; const PUB = 16; const ABSTRACT = 32; const LOCAL = 64; } } impl Modifiers { pub const NONE: Modifiers = Self::empty(); pub fn sort_data() -> Modifiers { Modifiers::PURE | Modifiers::STRICT | Modifiers::PROVABLE | Modifiers::FREE } pub fn allowed_visibility(self, k: DeclKind) -> bool { match k { DeclKind::Term => self.is_empty(), DeclKind::Axiom => self.is_empty(), DeclKind::Def => self == Modifiers::ABSTRACT || self == Modifiers::LOCAL || self.is_empty(), DeclKind::Thm => self == Modifiers::PUB || self.is_empty(), } } pub fn from_name(s: &str) -> Option<Modifiers> { match s { "pure" => Some(Modifiers::PURE), "strict" => Some(Modifiers::STRICT), "provable" => Some(Modifiers::PROVABLE), "free" => Some(Modifiers::FREE), "pub" => Some(Modifiers::PUB), "abstract" => Some(Modifiers::ABSTRACT), "local" => Some(Modifiers::LOCAL), _ => None } } } impl Display for Modifiers { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if self.contains(Modifiers::PURE) {write!(f, "pure ")?} if self.contains(Modifiers::STRICT) {write!(f, "strict ")?} if self.contains(Modifiers::PROVABLE) {write!(f, "provable ")?} if self.contains(Modifiers::FREE) {write!(f, "free ")?} if self.contains(Modifiers::PUB) {write!(f, "pub ")?} if self.contains(Modifiers::ABSTRACT) {write!(f, "abstract ")?} if self.contains(Modifiers::LOCAL) {write!(f, "local ")?} Ok(()) } } #[derive(Clone)] pub enum Delimiter { Both(Box<[u8]>), LeftRight(Box<[u8]>, Box<[u8]>), } #[derive(Copy, Clone, Debug)] pub struct Formula(pub Span); impl Formula { pub fn inner(&self) -> Span { (self.0.start + 1 .. self.0.end - 1).into() } } #[derive(Clone)] pub struct Const { pub fmla: Formula, pub trim: Span } #[derive(Clone, Copy, PartialEq, Eq)] pub enum DeclKind { Term, Axiom, Thm, Def } #[derive(Clone, Copy, PartialEq, Eq, Debug)] pub enum LocalKind { Bound, Reg, Dummy, Anon } impl LocalKind {
} #[derive(Clone, Debug)] pub struct DepType { pub sort: Span, pub deps: Vec<Span>, } impl DepType { pub fn span(&self) -> Span { (self.sort.start..self.deps.last().unwrap_or(&self.sort).end).into() } } #[derive(Clone, Debug)] pub enum Type { DepType(DepType), Formula(Formula) } impl Type { pub fn span(&self) -> Span { match self { Type::DepType(d) => d.span(), Type::Formula(f) => f.0 } } } #[derive(Clone, Debug)] pub struct Binder { pub span: Span, pub local: Option<Span>, pub kind: LocalKind, pub ty: Option<Type>, } #[derive(Clone, Debug)] pub struct SExpr { pub span: Span, pub k: SExprKind, } #[derive(Copy, Clone, Debug)] pub enum Atom { Ident, Quote, Unquote, Nfx } #[derive(Clone, Debug)] pub enum SExprKind { Atom(Atom), List(Vec<SExpr>), DottedList(Vec<SExpr>, Box<SExpr>), Number(BigUint), String(ArcString), Bool(bool), Formula(Formula), } pub fn curly_transform<T>(es: &mut Vec<T>, no_dot: bool, eq: impl Fn(&T, &T) -> bool, nfx: impl FnOnce() -> T) { let n = es.len(); if n > 2 { let valid_curly = no_dot && n % 2 != 0 && { let e = &es[1]; (3..n).step_by(2).all(|i| eq(&es[i], e)) }; if valid_curly { es.swap(0, 1); let mut from = 4; let mut to = 3; while from < n { es.swap(from, to); to += 1; from += 2; } es.truncate(to); } else { es.insert(0, nfx()); } } } impl SExpr { pub fn atom(span: impl Into<Span>, a: Atom) -> SExpr { SExpr {span: span.into(), k: SExprKind::Atom(a)} } pub fn list(span: impl Into<Span>, es: Vec<SExpr>) -> SExpr { SExpr {span: span.into(), k: SExprKind::List(es)} } pub fn dotted_list(span: impl Into<Span>, mut es: Vec<SExpr>, dot: Option<SExpr>) -> SExpr { match dot { None => SExpr {span: span.into(), k: SExprKind::List(es)}, Some(e) => match e.k { SExprKind::DottedList(es2, e2) => { es.extend(es2); SExpr {span: span.into(), k: SExprKind::DottedList(es, e2)} } SExprKind::List(es2) => { es.extend(es2); SExpr::list(span, es) } _ => SExpr {span: span.into(), k: SExprKind::DottedList(es, Box::new(e))} } } } pub fn curly_list(span: Span, curly: bool, mut es: Vec<SExpr>, dot: Option<SExpr>, eq: impl Fn(&SExpr, &SExpr) -> bool) -> SExpr { if curly { curly_transform(&mut es, dot.is_none(), eq, || SExpr::atom(span.start..span.start+1, Atom::Nfx)) } Self::dotted_list(span, es, dot) } } impl EnvDisplay for SExpr { fn fmt(&self, fe: FormatEnv, f: &mut fmt::Formatter) -> fmt::Result { match &self.k { &SExprKind::Atom(a) => fe.source.span_atom(self.span, a).fmt(f), SExprKind::List(es) => { let mut it = es.iter(); match it.next() { None => "()".fmt(f), Some(e) => { write!(f, "({}", fe.to(e))?; for e in it {write!(f, " {}", fe.to(e))?} ")".fmt(f) } } } SExprKind::DottedList(es, r) => { "(".fmt(f)?; for e in es {write!(f, "{} ", fe.to(e))?} write!(f, ". 
{})", fe.to(r)) } SExprKind::Number(n) => n.fmt(f), SExprKind::String(s) => write!(f, "{:?}", s), SExprKind::Bool(true) => "#t".fmt(f), SExprKind::Bool(false) => "#f".fmt(f), SExprKind::Formula(s) => fe.source[s.0].fmt(f), } } } #[derive(Clone)] pub struct Decl { pub mods: Modifiers, pub k: DeclKind, pub id: Span, pub bis: Vec<Binder>, pub ty: Option<Type>, pub val: Option<SExpr>, } #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)] pub enum Prec { Prec(u32), Max } impl fmt::Display for Prec { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { &Prec::Prec(p) => p.fmt(f), &Prec::Max => "max".fmt(f) } } } impl fmt::Debug for Prec { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(self, f) } } #[derive(Clone)] pub enum SimpleNotaKind { Prefix, Infix {right: bool} } #[derive(Clone)] pub struct SimpleNota { pub k: SimpleNotaKind, pub id: Span, pub c: Const, pub prec: Prec, } #[derive(Clone)] pub enum Literal { Const(Const, Prec), Var(Span), } #[derive(Clone)] pub struct GenNota { pub id: Span, pub bis: Vec<Binder>, pub ty: Option<Type>, pub lits: Vec<Literal>, pub prec: Option<(Prec, bool)> } #[derive(Clone)] pub enum StmtKind { Sort(Span, Modifiers), Decl(Decl), Delimiter(Delimiter), SimpleNota(SimpleNota), Coercion { id: Span, from: Span, to: Span }, Notation(GenNota), Inout { out: bool, k: Span, hs: Vec<SExpr> }, Annot(SExpr, Box<Stmt>), Do(Vec<SExpr>), Import(Span, String), } #[derive(Clone)] pub struct Stmt { pub span: Span, pub k: StmtKind, } pub struct AST { pub source: Arc<LinedString>, pub imports: Vec<(Span, String)>, pub stmts: Vec<Stmt>, pub errors: Vec<ParseError>, } impl LinedString { pub fn span_atom(&self, sp: Span, a: Atom) -> &str { match a { Atom::Ident => &self[sp], Atom::Quote => "quote", Atom::Unquote => "unquote", Atom::Nfx => ":nfx", } } } impl AST { pub fn span(&self, s: Span) -> &str { &self.source[s] } pub fn span_atom(&self, sp: Span, a: Atom) -> &str { self.source.span_atom(sp, a) } pub fn last_checkpoint(&self, pos: usize) -> (usize, usize) { match self.stmts.binary_search_by_key(&pos, |stmt| stmt.span.end) { Ok(i) => (i+1, pos), Err(0) => (0, 0), Err(i) => (i, self.stmts[i-1].span.end) } } }
pub fn is_bound(self) -> bool { match self { LocalKind::Bound | LocalKind::Dummy => true, LocalKind::Reg | LocalKind::Anon => false, } }
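This short fragment is exactly the span elided between the two long listings above: the first of them breaks off at "impl LocalKind {" and the second resumes at the matching closing brace, so splicing the fragment in between reproduces the method already present in the full file at the start of the row. For reference, the reassembled target, together with the enum it matches on and a descriptive comment added for this sketch, is:

// Reference reassembly of the elided span (comment added here; code as shown above).
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum LocalKind { Bound, Reg, Dummy, Anon }

impl LocalKind {
    /// Bound and Dummy binders count as bound variables; Reg and Anon do not.
    pub fn is_bound(self) -> bool {
        match self {
            LocalKind::Bound | LocalKind::Dummy => true,
            LocalKind::Reg | LocalKind::Anon => false,
        }
    }
}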
function_block-full_function
[ { "content": "pub fn whitespace(c: u8) -> bool { c == b' ' || c == b'\\n' }\n\n\n\nimpl<'a> Parser<'a> {\n\n pub fn cur(&self) -> u8 { self.source[self.idx] }\n\n pub fn cur_opt(&self) -> Option<u8> { self.source.get(self.idx).cloned() }\n\n\n\n pub fn err(&self, msg: BoxError) -> ParseError {\n\n ParseError::new(self.idx..self.idx, msg)\n\n }\n\n\n\n pub fn err_str<T>(&self, msg: &'static str) -> Result<T> {\n\n Err(self.err(msg.into()))\n\n }\n\n\n\n fn push_err(&mut self, r: Result<()>) {\n\n r.unwrap_or_else(|e| self.errors.push(e))\n\n }\n\n\n\n fn ws(&mut self) {\n\n while self.idx < self.source.len() {\n", "file_path": "mm0-rs/src/parser.rs", "rank": 1, "score": 165364.73611430908 }, { "content": "pub fn main(mut args: impl Iterator<Item=String>) {\n\n if args.next().map_or(false, |s| s == \"--debug\") {\n\n std::env::set_var(\"RUST_BACKTRACE\", \"1\");\n\n use {simplelog::*, std::fs::File};\n\n let _ = WriteLogger::init(LevelFilter::Debug, Config::default(), File::create(\"lsp.log\").unwrap());\n\n }\n\n log_message(\"started\".into()).unwrap();\n\n SERVER.run()\n\n}", "file_path": "mm0-rs/src/server.rs", "rank": 2, "score": 155050.020972824 }, { "content": "pub fn ident_start(c: u8) -> bool { b'a' <= c && c <= b'z' || b'A' <= c && c <= b'Z' || c == b'_' }\n", "file_path": "mm0-rs/src/parser.rs", "rank": 3, "score": 151568.07922457808 }, { "content": "pub fn try_get_span(fsp: &FileSpan, e: &LispKind) -> Span {\n\n try_get_span_from(fsp, e.fspan().as_ref())\n\n}\n\n\n", "file_path": "mm0-rs/src/elab/local_context.rs", "rank": 4, "score": 151083.11891304428 }, { "content": "pub fn lisp_ident(c: u8) -> bool { ident_rest(c) || b\"!%&*/:<=>?^~+-.@\".contains(&c) }\n", "file_path": "mm0-rs/src/parser.rs", "rank": 5, "score": 148504.43056154702 }, { "content": "fn write_cmd_bytes(w: &mut impl Write, cmd: u8, vec: &[u8]) -> io::Result<()> {\n\n if let Ok(data) = (vec.len() + 2).try_into() {\n\n w.write_u8(cmd | DATA_8)?;\n\n w.write_u8(data)?;\n\n w.write_all(vec)\n\n } else if let Ok(data) = (vec.len() + 3).try_into() {\n\n w.write_u8(cmd | DATA_16)?;\n\n w.write_u16::<LE>(data)?;\n\n w.write_all(vec)\n\n } else {\n\n w.write_u8(cmd | DATA_32)?;\n\n w.write_u32::<LE>((vec.len() + 5).try_into().unwrap())?;\n\n w.write_all(vec)\n\n }\n\n}\n\n\n\nimpl UnifyCmd {\n\n fn write_to(self, w: &mut impl Write) -> io::Result<()> {\n\n match self {\n\n UnifyCmd::Term(tid) => write_cmd(w, UNIFY_TERM, tid.0),\n", "file_path": "mm0-rs/src/export_mmb.rs", "rank": 6, "score": 148356.1640656718 }, { "content": "pub fn try_get_span_from(fsp: &FileSpan, fsp2: Option<&FileSpan>) -> Span {\n\n match fsp2 {\n\n Some(fsp2) if fsp.file == fsp2.file && fsp2.span.start >= fsp.span.start => fsp2.span,\n\n _ => fsp.span,\n\n }\n\n}\n\n\n\nimpl Environment {\n\n pub fn apply_coe(&self, fsp: &Option<FileSpan>, c: &Coe, res: LispVal) -> LispVal {\n\n fn apply(c: &Coe, f: impl FnOnce(TermID, LispVal) -> LispVal + Clone, e: LispVal) -> LispVal {\n\n match c {\n\n &Coe::One(_, tid) => f(tid, e),\n\n Coe::Trans(c1, _, c2) => apply(c2, f.clone(), apply(c1, f, e)),\n\n }\n\n }\n\n apply(c, |tid, e| LispKind::List(\n\n vec![LispVal::atom(self.terms[tid].atom), e]).decorate_span(fsp), res)\n\n }\n\n}\n\n\n", "file_path": "mm0-rs/src/elab/local_context.rs", "rank": 7, "score": 146999.4918610813 }, { "content": "fn dummies(fe: FormatEnv, fsp: &FileSpan, lc: &mut LocalContext, e: &LispVal) -> Result<()> {\n\n macro_rules! 
sp {($e:expr) => {$e.fspan().unwrap_or(fsp.clone()).span}}\n\n let mut dummy = |x: AtomID, es: &LispKind| -> Result<()> {\n\n let s = es.as_atom().ok_or_else(|| ElabError::new_e(sp!(es), \"expected an atom\"))?;\n\n let sort = fe.data[s].sort.ok_or_else(|| ElabError::new_e(sp!(es),\n\n format!(\"unknown sort '{}'\", fe.to(&s))))?;\n\n if x != AtomID::UNDER {lc.vars.insert(x, (true, InferSort::Bound {sort}));}\n\n Ok(())\n\n };\n\n e.unwrapped(|r| {\n\n if let LispKind::AtomMap(m) = r {\n\n for (&a, e) in m {dummy(a, e)?}\n\n } else {\n\n for e in Uncons::from(e.clone()) {\n\n let mut u = Uncons::from(e.clone());\n\n if let (Some(ex), Some(es)) = (u.next(), u.next()) {\n\n let x = ex.as_atom().ok_or_else(|| ElabError::new_e(sp!(ex), \"expected an atom\"))?;\n\n dummy(x, &es)?;\n\n } else {Err(ElabError::new_e(sp!(e), \"invalid dummy arguments\"))?}\n\n }\n", "file_path": "mm0-rs/src/elab/local_context.rs", "rank": 8, "score": 145285.74739130132 }, { "content": "pub fn ident_rest(c: u8) -> bool { ident_start(c) || b'0' <= c && c <= b'9' }\n", "file_path": "mm0-rs/src/parser.rs", "rank": 9, "score": 145231.39004059293 }, { "content": "struct ElabTermMut<'a> {\n\n lc: &'a mut LocalContext,\n\n fe: FormatEnv<'a>,\n\n fsp: FileSpan,\n\n spans: &'a mut Spans<ObjectKind>,\n\n}\n\nimpl<'a> Deref for ElabTermMut<'a> {\n\n type Target = ElabTerm<'a>;\n\n fn deref(&self) -> &ElabTerm<'a> { unsafe { mem::transmute(self) } }\n\n}\n\n\n", "file_path": "mm0-rs/src/elab/local_context.rs", "rank": 10, "score": 142581.11266843468 }, { "content": "pub fn main(mut args: impl Iterator<Item=String>) -> io::Result<()> {\n\n let path = args.next().expect(\"expected a .mm1 file\");\n\n let (path, file) = VFS_.get_or_insert(FileRef::new(fs::canonicalize(path)?))?;\n\n let env = block_on(elaborate(path.clone()))?;\n\n if let Some(out) = args.next() {\n\n use {fs::File, io::BufWriter};\n\n let mut w = BufWriter::new(File::create(out)?);\n\n let mut ex = Exporter::new(path, &file.text, &env, &mut w);\n\n ex.run(true)?;\n\n ex.finish()?;\n\n }\n\n Ok(())\n\n}", "file_path": "mm0-rs/src/compiler.rs", "rank": 11, "score": 141632.9380408281 }, { "content": "fn write_cmd(w: &mut impl Write, cmd: u8, data: u32) -> io::Result<()> {\n\n if data == 0 {w.write_u8(cmd)}\n\n else if let Ok(data) = data.try_into() {\n\n w.write_u8(cmd | DATA_8)?;\n\n w.write_u8(data)\n\n } else if let Ok(data) = data.try_into() {\n\n w.write_u8(cmd | DATA_16)?;\n\n w.write_u16::<LE>(data)\n\n } else {\n\n w.write_u8(cmd | DATA_32)?;\n\n w.write_u32::<LE>(data)\n\n }\n\n}\n\n\n", "file_path": "mm0-rs/src/export_mmb.rs", "rank": 12, "score": 133072.85550145316 }, { "content": "fn write_expr_proof(w: &mut impl Write,\n\n heap: &[ExprNode],\n\n reorder: &mut Reorder,\n\n head: &ExprNode,\n\n save: bool\n\n) -> io::Result<u32> {\n\n Ok(match head {\n\n &ExprNode::Ref(i) => match reorder.map[i] {\n\n None => {\n\n let n = write_expr_proof(w, heap, reorder, &heap[i], true)?;\n\n reorder.map[i] = Some(n);\n\n n\n\n }\n\n Some(n) => {\n\n ProofCmd::Ref(n.try_into().unwrap()).write_to(w)?;\n\n n\n\n }\n\n }\n\n &ExprNode::Dummy(_, s) => {\n\n ProofCmd::Dummy(s).write_to(w)?;\n", "file_path": "mm0-rs/src/export_mmb.rs", "rank": 13, "score": 130473.12926459985 }, { "content": "// Patch for https://github.com/rust-lang/annotate-snippets-rs/issues/17\n\nfn patch(dl: &mut DisplayList) {\n\n use annotate_snippets::display_list::{DisplayLine, DisplayRawLine};\n\n for l in &mut dl.body {\n\n if let DisplayLine::Raw(DisplayRawLine::Origin {pos: Some(p), ..}) = l 
{\n\n p.1 += 1;\n\n }\n\n }\n\n}\n\n\n\nasync fn elaborate(path: FileRef) -> io::Result<Arc<Environment>> {\n\n let (path, file) = VFS_.get_or_insert(path)?;\n\n {\n\n let mut g = file.parsed.lock().await;\n\n match &mut *g {\n\n None => *g = Some(FileCache::InProgress(vec![])),\n\n Some(FileCache::InProgress(senders)) => {\n\n let (send, recv) = channel();\n\n senders.push(send);\n\n drop(g);\n\n return Ok(recv.await.unwrap().1)\n", "file_path": "mm0-rs/src/compiler.rs", "rank": 14, "score": 124794.78582594715 }, { "content": "struct ElabTerm<'a> {\n\n lc: &'a LocalContext,\n\n fe: FormatEnv<'a>,\n\n fsp: FileSpan,\n\n}\n\n\n", "file_path": "mm0-rs/src/elab/local_context.rs", "rank": 15, "score": 116089.75012438561 }, { "content": "pub trait NodeHash: Hash + Eq + Sized + std::fmt::Debug {\n\n const VAR: fn(usize) -> Self;\n\n fn from<'a>(nh: &NodeHasher<'a>, fsp: Option<&FileSpan>, r: &LispVal,\n\n de: &mut Dedup<Self>) -> Result<std::result::Result<Self, usize>>;\n\n fn vars(&self, bv: &mut u64, deps: impl Fn(usize) -> u64) -> u64;\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Dedup<H: NodeHash> {\n\n map: HashMap<Rc<H>, usize>,\n\n prev: HashMap<*const LispKind, usize>,\n\n pub vec: Vec<(Rc<H>, bool, u64)>,\n\n bv: u64,\n\n}\n\n\n\nimpl<H: NodeHash> Dedup<H> {\n\n pub fn new(args: &[(Option<AtomID>, Type)]) -> Dedup<H> {\n\n let mut bv = 1;\n\n let vec: Vec<_> = args.iter().enumerate()\n\n .map(|(i, (_, t))| (Rc::new(H::VAR(i)), true, match t {\n", "file_path": "mm0-rs/src/elab/proof.rs", "rank": 16, "score": 111496.22525017126 }, { "content": "fn list(init: &[LispVal], e: Option<&LispKind>, mut start: bool, fe: FormatEnv, f: &mut fmt::Formatter) -> fmt::Result {\n\n for e in init {\n\n if start {\n\n write!(f, \"({}\", fe.to(e))?;\n\n start = false\n\n } else {\n\n write!(f, \" {}\", fe.to(e))?\n\n }\n\n }\n\n match e {\n\n None => if start {write!(f, \"()\")} else {write!(f, \")\")},\n\n Some(LispKind::List(es)) => list(es, None, start, fe, f),\n\n Some(LispKind::DottedList(es, r)) => list(es, Some(&r), start, fe, f),\n\n Some(e) if e.exactly(0) => if start {write!(f, \"()\")} else {write!(f, \")\")},\n\n Some(e) => if start {write!(f, \"{}\", fe.to(e))} else {write!(f, \" . {})\", fe.to(e))}\n\n }\n\n}\n\n\n", "file_path": "mm0-rs/src/elab/lisp/print.rs", "rank": 17, "score": 107818.3973789356 }, { "content": "#[allow(unused)]\n\npub fn log(s: String) {\n\n LOGGER.0.lock().unwrap().push((Instant::now(), thread::current().id(), s));\n\n LOGGER.1.notify_one();\n\n}\n\n\n\n#[allow(unused)]\n\nmacro_rules! 
log {\n\n ($($es:tt)*) => {crate::server::log(format!($($es)*))}\n\n}\n\n\n\nasync fn elaborate(path: FileRef, start: Option<Position>,\n\n cancel: Arc<AtomicBool>) -> Result<(u64, Arc<Environment>)> {\n\n let Server {vfs, pool, ..} = &*SERVER;\n\n let (path, file) = vfs.get_or_insert(path)?;\n\n let v = file.text.lock().unwrap().0;\n\n let (old_ast, old_env, old_deps) = {\n\n let mut g = file.parsed.lock().await;\n\n let (res, senders) = match &mut *g {\n\n None => ((None, None, vec![]), vec![]),\n\n &mut Some(FileCache::InProgress {version, ref cancel, ref mut senders}) => {\n", "file_path": "mm0-rs/src/server.rs", "rank": 18, "score": 99522.51122623964 }, { "content": "fn new_mvar(mvars: &mut Vec<LispVal>, tgt: InferTarget, sp: Option<FileSpan>) -> LispVal {\n\n let n = mvars.len();\n\n let e = LispVal::new(LispKind::MVar(n, tgt));\n\n let e = LispVal::new_ref(if let Some(sp) = sp {e.span(sp)} else {e});\n\n mvars.push(e.clone());\n\n e\n\n}\n\n\n\nimpl LocalContext {\n\n pub fn new() -> LocalContext { Self::default() }\n\n\n\n pub fn clear(&mut self) {\n\n self.vars.clear();\n\n self.var_order.clear();\n\n self.mvars.clear();\n\n self.goals.clear();\n\n self.proofs.clear();\n\n self.proof_order.clear();\n\n }\n\n\n", "file_path": "mm0-rs/src/elab/local_context.rs", "rank": 19, "score": 99290.0361936162 }, { "content": "pub trait HashMapExt<K, V> {\n\n fn try_insert(&mut self, k: K, v: V) -> Option<(V, OccupiedEntry<K, V>)>;\n\n}\n\nimpl<K: Hash + Eq, V, S: BuildHasher> HashMapExt<K, V> for HashMap<K, V, S> {\n\n fn try_insert(&mut self, k: K, v: V) -> Option<(V, OccupiedEntry<K, V>)> {\n\n match self.entry(k) {\n\n Entry::Vacant(e) => { e.insert(v); None }\n\n Entry::Occupied(e) => Some((v, e))\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone, Hash, PartialEq, Eq)] pub struct ArcString(pub Arc<String>);\n\n\n\nimpl Borrow<str> for ArcString {\n\n fn borrow(&self) -> &str { &*self.0 }\n\n}\n\nimpl Deref for ArcString {\n\n type Target = str;\n\n fn deref(&self) -> &str { &*self.0 }\n", "file_path": "mm0-rs/src/util.rs", "rank": 20, "score": 93028.67316167866 }, { "content": "enum InferBinder {\n\n Var(Option<AtomID>, (bool, InferSort)),\n\n Hyp(Option<AtomID>, LispVal),\n\n}\n\n\n\nimpl Elaborator {\n\n fn elab_dep_type(&mut self, error: &mut bool, lk: LocalKind, d: &DepType) -> Result<(bool, InferSort)> {\n\n let a = self.env.get_atom(self.ast.span(d.sort));\n\n let sort = self.data[a].sort.ok_or_else(|| ElabError::new_e(d.sort, \"sort not found\"))?;\n\n self.spans.insert(d.sort, ObjectKind::Sort(sort));\n\n Ok(if lk.is_bound() {\n\n if !d.deps.is_empty() {\n\n self.report(ElabError::new_e(\n\n d.deps[0].start..d.deps.last().unwrap().end, \"dependencies not allowed in curly binders\"));\n\n *error = true;\n\n }\n\n (lk == LocalKind::Dummy, InferSort::Bound {sort})\n\n } else {\n\n (false, InferSort::Reg {\n\n sort,\n", "file_path": "mm0-rs/src/elab/local_context.rs", "rank": 21, "score": 89326.50689924754 }, { "content": "#[derive(Default)]\n\nstruct BuildArgs {\n\n map: HashMap<AtomID, u64>,\n\n size: usize,\n\n}\n\nconst MAX_BOUND_VARS: usize = 55;\n\n\n\nimpl BuildArgs {\n\n fn push_bound(&mut self, a: Option<AtomID>) -> Option<()> {\n\n if self.size >= MAX_BOUND_VARS {return None}\n\n if let Some(a) = a {self.map.insert(a, 1 << self.size);}\n\n self.size += 1;\n\n Some(())\n\n }\n\n\n\n fn deps(&self, v: &[AtomID]) -> u64 {\n\n let mut ret = 0;\n\n for &a in v { ret |= self.map[&a] }\n\n ret\n\n }\n\n\n", "file_path": "mm0-rs/src/elab/local_context.rs", "rank": 22, "score": 
89210.56254388916 }, { "content": "struct LocalCtx {\n\n names: HashMap<AtomID, Vec<usize>>,\n\n ctx: Vec<AtomID>,\n\n}\n\n\n\nimpl LocalCtx {\n\n fn new() -> Self { Self {names: HashMap::new(), ctx: vec![]} }\n\n fn len(&self) -> usize { self.ctx.len() }\n\n fn get(&self, x: AtomID) -> Option<usize> {\n\n self.names.get(&x).and_then(|v| v.last().cloned())\n\n }\n\n fn push(&mut self, x: AtomID) -> usize {\n\n let old = self.ctx.len();\n\n if x != AtomID::UNDER { self.names.entry(x).or_insert(vec![]).push(old) }\n\n self.ctx.push(x);\n\n old\n\n }\n\n fn push_list(&mut self, xs: &Vec<AtomID>) -> usize {\n\n let old = self.ctx.len();\n\n for &x in xs { self.push(x); }\n", "file_path": "mm0-rs/src/elab/lisp/parser.rs", "rank": 23, "score": 89210.56254388916 }, { "content": "pub trait Node: Sized + std::fmt::Debug {\n\n type Hash: NodeHash;\n\n const REF: fn(usize) -> Self;\n\n fn from(e: &Self::Hash, ids: &mut [Val<Self>]) -> Self;\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum Val<T: Node> {Built(T), Ref(usize), Done}\n\n\n\nimpl<T: Node> Val<T> {\n\n pub fn take(&mut self) -> T {\n\n match mem::replace(self, Val::Done) {\n\n Val::Built(x) => x,\n\n Val::Ref(n) => {*self = Val::Ref(n); T::REF(n)}\n\n Val::Done => panic!(\"taking a value twice\")\n\n }\n\n }\n\n}\n\n\n\npub struct Builder<T: Node> {\n", "file_path": "mm0-rs/src/elab/proof.rs", "rank": 24, "score": 84148.48073524467 }, { "content": "pub trait EnvDisplay {\n\n fn fmt(&self, fe: FormatEnv, f: &mut fmt::Formatter) -> fmt::Result;\n\n}\n\n\n\nimpl Elaborator {\n\n pub fn format_env(&self) -> FormatEnv {\n\n FormatEnv {source: &self.ast.source, env: self}\n\n }\n\n pub fn print<'a, D: ?Sized>(&'a self, e: &'a D) -> Print<'a, D> {\n\n self.format_env().to(e)\n\n }\n\n}\n\n\n\nimpl<'a, D: EnvDisplay + ?Sized> fmt::Display for Print<'a, D> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.e.fmt(self.fe, f) }\n\n}\n\n\n", "file_path": "mm0-rs/src/elab/lisp/print.rs", "rank": 25, "score": 83027.50015601853 }, { "content": "struct TestPending<'a>(Span, LispVal, &'a IR);\n\n\n\npub type SResult<T> = std::result::Result<T, String>;\n\n\n\nimpl Elaborator {\n\n fn pattern_match<'b>(&mut self, stack: &mut Vec<PatternStack<'b>>, ctx: &mut [LispVal],\n\n mut active: PatternState<'b>) -> std::result::Result<bool, TestPending<'b>> {\n\n loop {\n\n // crate::server::log(format!(\"{}\\n\", self.print(&active)));\n\n active = match active {\n\n PatternState::Eval(p, e) => match p {\n\n Pattern::Skip => PatternState::Ret(true),\n\n &Pattern::Atom(i) => {ctx[i] = e; PatternState::Ret(true)}\n\n &Pattern::QuoteAtom(a) => PatternState::Ret(e.unwrapped(|e|\n\n match e {&LispKind::Atom(a2) => a == a2, _ => false})),\n\n Pattern::String(s) => PatternState::Ret(e.unwrapped(|e|\n\n match e {LispKind::String(s2) => s == s2, _ => false})),\n\n &Pattern::Bool(b) => PatternState::Ret(e.unwrapped(|e|\n\n match e {&LispKind::Bool(b2) => b == b2, _ => false})),\n\n Pattern::Number(i) => PatternState::Ret(e.unwrapped(|e|\n", "file_path": "mm0-rs/src/elab/lisp/eval.rs", "rank": 26, "score": 82189.16369793589 }, { "content": "fn make_snippet(path: &FileRef, file: &LinedString, pos: Span,\n\n msg: String, level: ErrorLevel, footer: Vec<Annotation>) -> Snippet {\n\n let annotation_type = level.to_annotation_type();\n\n let Range {start, end} = file.to_range(pos);\n\n let start2 = pos.start - start.character as usize;\n\n let end2 = file.to_idx(Position {line: end.line + 1, character: 0})\n\n .unwrap_or_else(|| file.s.len());\n\n Snippet {\n\n title: 
Some(Annotation {\n\n id: None,\n\n label: Some(msg),\n\n annotation_type,\n\n }),\n\n slices: vec![Slice {\n\n source: file[(start2..end2).into()].to_owned(),\n\n line_start: start.line as usize + 1,\n\n origin: Some(path.rel().to_owned()),\n\n fold: end.line - start.line >= 5,\n\n annotations: vec![SourceAnnotation {\n\n range: (pos.start - start2, pos.end - start2),\n", "file_path": "mm0-rs/src/compiler.rs", "rank": 27, "score": 76110.83858695748 }, { "content": "fn response_err(code: ErrorCode, message: impl Into<String>) -> ResponseError {\n\n ResponseError {code: code as i32, message: message.into(), data: None}\n\n}\n\n\n", "file_path": "mm0-rs/src/server.rs", "rank": 28, "score": 74019.60649860141 }, { "content": "pub fn parse(file: Arc<LinedString>, old: Option<(Position, Arc<AST>)>) ->\n\n (usize, AST) {\n\n let (errors, imports, idx, mut stmts) =\n\n if let Some((pos, ast)) = old {\n\n let (ix, start) = ast.last_checkpoint(file.to_idx(pos).unwrap());\n\n match Arc::try_unwrap(ast) {\n\n Ok(mut ast) => {\n\n ast.errors.retain(|e| e.pos.start < start);\n\n ast.imports.retain(|e| e.0.start < start);\n\n ast.stmts.truncate(ix);\n\n (ast.errors, ast.imports, start, ast.stmts)\n\n }\n\n Err(ast) => (\n\n ast.errors.iter().filter(|e| e.pos.start < start).cloned().collect(),\n\n ast.imports.iter().filter(|e| e.0.start < start).cloned().collect(),\n\n start, ast.stmts[..ix].into())\n\n }\n\n } else {Default::default()};\n\n let mut p = Parser {source: file.as_bytes(), errors, imports, idx};\n\n p.ws();\n\n while let Some(d) = p.stmt_recover() { stmts.push(d) }\n\n (0, AST { errors: p.errors, imports: p.imports, source: file, stmts })\n\n}\n", "file_path": "mm0-rs/src/parser.rs", "rank": 29, "score": 69084.90258723035 }, { "content": "fn make_completion_item(path: &FileRef, fe: FormatEnv, ad: &AtomData, detail: bool, tk: TraceKind) -> Option<CompletionItem> {\n\n use CompletionItemKind::*;\n\n macro_rules! 
done {($desc:expr, $kind:expr) => {\n\n Some(CompletionItem {\n\n label: (*ad.name.0).clone(),\n\n detail: if detail {Some($desc)} else {None},\n\n kind: Some($kind),\n\n data: Some(to_value((path.url(), tk)).unwrap()),\n\n ..Default::default()\n\n })\n\n }}\n\n match tk {\n\n TraceKind::Sort => ad.sort.and_then(|s| {\n\n let sd = &fe.sorts[s];\n\n done!(format!(\"{}\", sd), Class)\n\n }),\n\n TraceKind::Decl => ad.decl.and_then(|dk| match dk {\n\n DeclKey::Term(t) => {let td = &fe.terms[t]; done!(format!(\"{}\", fe.to(td)), Constructor)}\n\n DeclKey::Thm(t) => {let td = &fe.thms[t]; done!(format!(\"{}\", fe.to(td)), Method)}\n\n }),\n", "file_path": "mm0-rs/src/server.rs", "rank": 30, "score": 63591.9651693756 }, { "content": "fn elaborate_and_send(path: FileRef, send: FSender<((), Arc<Environment>)>) ->\n\n BoxFuture<'static, ()> {\n\n async {\n\n if let Ok(env) = elaborate(path).await {\n\n let _ = send.send(((), env));\n\n }\n\n }.boxed()\n\n}\n\n\n", "file_path": "mm0-rs/src/compiler.rs", "rank": 31, "score": 63167.52328277267 }, { "content": "struct Capabilities {\n\n definition_location_links: bool,\n\n}\n\n\n\nimpl Capabilities {\n\n fn new(params: &InitializeParams) -> Capabilities {\n\n Capabilities {\n\n definition_location_links: params.capabilities.text_document.as_ref()\n\n .and_then(|d| d.definition.as_ref())\n\n .and_then(|g| g.link_support).unwrap_or(false)\n\n }\n\n }\n\n}\n\n\n\nimpl Server {\n\n fn new() -> Result<Server> {\n\n let (conn, _iot) = Connection::stdio();\n\n let params = from_value(conn.initialize(\n\n to_value(ServerCapabilities {\n\n text_document_sync: Some(TextDocumentSyncCapability::Kind(TextDocumentSyncKind::Incremental)),\n", "file_path": "mm0-rs/src/server.rs", "rank": 32, "score": 62454.277305214666 }, { "content": "struct Server {\n\n conn: Connection,\n\n #[allow(unused)]\n\n params: InitializeParams,\n\n caps: Capabilities,\n\n reqs: OpenRequests,\n\n vfs: VFS,\n\n pool: ThreadPool,\n\n}\n\n\n", "file_path": "mm0-rs/src/server.rs", "rank": 33, "score": 62454.277305214666 }, { "content": "#define INDEX_KIND_LOCAL_DEF (u8)0x0D // This is a def that does not appear in the spec\n", "file_path": "mm0-c/types.c", "rank": 34, "score": 62110.62669098392 }, { "content": "#define CMD_STMT_LOCAL_DEF 0x0D\n", "file_path": "mm0-c/types.c", "rank": 35, "score": 62110.62669098392 }, { "content": "#define CMD_STMT_LOCAL_THM 0x0E\n\n\n", "file_path": "mm0-c/types.c", "rank": 36, "score": 62092.45878688687 }, { "content": "#define INDEX_KIND_LOCAL_THM (u8)0x0E // This is a theorem that does not appear in the spec\n\n\n", "file_path": "mm0-c/types.c", "rank": 37, "score": 62092.45878688687 }, { "content": "enum Value {\n\n U32(u32),\n\n U64(u64),\n\n Box(Box<[u8]>),\n\n}\n\n\n\nconst DATA_8: u8 = 0x40;\n\nconst DATA_16: u8 = 0x80;\n\nconst DATA_32: u8 = 0xC0;\n\n\n\nconst STMT_SORT: u8 = 0x04;\n\nconst STMT_AXIOM: u8 = 0x02;\n\nconst STMT_TERM: u8 = 0x05;\n\nconst STMT_DEF: u8 = 0x05;\n\nconst STMT_THM: u8 = 0x06;\n\nconst STMT_LOCAL: u8 = 0x08;\n\n\n\nconst PROOF_TERM: u8 = 0x10;\n\nconst PROOF_TERM_SAVE: u8 = 0x11;\n\nconst PROOF_REF: u8 = 0x12;\n", "file_path": "mm0-rs/src/export_mmb.rs", "rank": 38, "score": 61076.170844318374 }, { "content": "enum FileCache {\n\n InProgress(Vec<FSender<((), Arc<Environment>)>>),\n\n Ready(Arc<Environment>),\n\n}\n\n\n", "file_path": "mm0-rs/src/compiler.rs", "rank": 39, "score": 61076.170844318374 }, { "content": "enum FileCache {\n\n InProgress {\n\n version: Option<i64>,\n\n cancel: Arc<AtomicBool>,\n\n senders: 
Vec<FSender<(u64, Arc<Environment>)>>,\n\n },\n\n Ready {\n\n hash: u64,\n\n source: Arc<LinedString>,\n\n ast: Arc<AST>,\n\n errors: Vec<ElabError>,\n\n env: Arc<Environment>,\n\n deps: Vec<FileRef>,\n\n complete: bool,\n\n }\n\n}\n\n\n", "file_path": "mm0-rs/src/server.rs", "rank": 40, "score": 61076.170844318374 }, { "content": "enum RequestType {\n\n Completion(CompletionParams),\n\n CompletionResolve(CompletionItem),\n\n Hover(TextDocumentPositionParams),\n\n Definition(TextDocumentPositionParams),\n\n DocumentSymbol(DocumentSymbolParams),\n\n}\n\n\n", "file_path": "mm0-rs/src/server.rs", "rank": 41, "score": 61076.170844318374 }, { "content": "enum ElabStmt {\n\n Ok,\n\n Import(Span),\n\n}\n\n\n\nimpl Elaborator {\n\n fn elab_stmt(&mut self, stmt: &Stmt) -> Result<ElabStmt> {\n\n self.cur_timeout = self.timeout.and_then(|d| Instant::now().checked_add(d));\n\n self.spans.set_stmt(stmt.span);\n\n match &stmt.k {\n\n &StmtKind::Sort(sp, sd) => {\n\n let a = self.env.get_atom(self.ast.span(sp));\n\n let fsp = self.fspan(sp);\n\n let id = self.add_sort(a, fsp, stmt.span, sd).map_err(|e| e.to_elab_error(sp))?;\n\n self.spans.insert(sp, ObjectKind::Sort(id));\n\n }\n\n StmtKind::Decl(d) => self.elab_decl(stmt.span, d)?,\n\n StmtKind::Delimiter(Delimiter::Both(f)) => self.pe.add_delimiters(f, f),\n\n StmtKind::Delimiter(Delimiter::LeftRight(ls, rs)) => self.pe.add_delimiters(ls, rs),\n\n StmtKind::SimpleNota(n) => self.elab_simple_nota(n)?,\n", "file_path": "mm0-rs/src/elab.rs", "rank": 42, "score": 61076.170844318374 }, { "content": "#[derive(Default)]\n\nstruct Remapper {\n\n sort: HashMap<SortID, SortID>,\n\n term: HashMap<TermID, TermID>,\n\n thm: HashMap<ThmID, ThmID>,\n\n atom: HashMap<AtomID, AtomID>,\n\n}\n\n\n", "file_path": "mm0-rs/src/elab/environment.rs", "rank": 43, "score": 60957.64637794887 }, { "content": "struct VirtualFile {\n\n text: Arc<LinedString>,\n\n /// File parse\n\n parsed: FMutex<Option<FileCache>>,\n\n}\n\n\n\nimpl VirtualFile {\n\n fn new(text: String) -> VirtualFile {\n\n VirtualFile {\n\n text: Arc::new(text.into()),\n\n parsed: FMutex::new(None),\n\n }\n\n }\n\n}\n\n\n", "file_path": "mm0-rs/src/compiler.rs", "rank": 44, "score": 60957.64637794887 }, { "content": "struct RequestHandler {\n\n id: RequestId,\n\n #[allow(unused)]\n\n cancel: Arc<AtomicBool>,\n\n}\n\n\n\nimpl RequestHandler {\n\n async fn handle(self, req: RequestType) -> Result<()> {\n\n match req {\n\n RequestType::Hover(TextDocumentPositionParams {text_document: doc, position}) =>\n\n self.finish(hover(FileRef::from_url(doc.uri), position).await),\n\n RequestType::Definition(TextDocumentPositionParams {text_document: doc, position}) =>\n\n if SERVER.caps.definition_location_links {\n\n self.finish(definition(FileRef::from_url(doc.uri), position,\n\n |text, text2, src, &FileSpan {ref file, span}, full| LocationLink {\n\n origin_selection_range: Some(text.to_range(src)),\n\n target_uri: file.url().clone(),\n\n target_range: text2.to_range(full),\n\n target_selection_range: text2.to_range(span),\n\n }).await)\n", "file_path": "mm0-rs/src/server.rs", "rank": 45, "score": 60957.64637794887 }, { "content": "struct VirtualFile {\n\n /// File data, saved (true) or unsaved (false)\n\n text: Mutex<(Option<i64>, Arc<LinedString>)>,\n\n /// File parse\n\n parsed: FMutex<Option<FileCache>>,\n\n /// Files that depend on this one\n\n downstream: Mutex<HashSet<FileRef>>,\n\n}\n\n\n\nimpl VirtualFile {\n\n fn new(version: Option<i64>, text: String) -> VirtualFile {\n\n VirtualFile {\n\n text: 
Mutex::new((version, Arc::new(text.into()))),\n\n parsed: FMutex::new(None),\n\n downstream: Mutex::new(HashSet::new())\n\n }\n\n }\n\n}\n\n\n", "file_path": "mm0-rs/src/server.rs", "rank": 46, "score": 60957.64637794887 }, { "content": "struct ReportMode {\n\n error: bool,\n\n warn: bool,\n\n info: bool,\n\n}\n\n\n\nimpl ReportMode {\n\n fn new() -> ReportMode {\n\n ReportMode {error: true, warn: true, info: true}\n\n }\n\n\n\n fn active(&self, lvl: ErrorLevel) -> bool {\n\n match lvl {\n\n ErrorLevel::Error => self.error,\n\n ErrorLevel::Warning => self.warn,\n\n ErrorLevel::Info => self.info,\n\n }\n\n }\n\n}\n\n\n", "file_path": "mm0-rs/src/elab.rs", "rank": 47, "score": 60957.64637794887 }, { "content": "enum RefineExpr {\n\n App(Span, Span, InferMode, AtomID, Uncons),\n\n Typed(LispVal, LispVal),\n\n Exact(LispVal),\n\n Proc,\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum RStack {\n\n Goals {g: LispVal, gs: std::vec::IntoIter<LispVal>, es: std::vec::IntoIter<LispVal>},\n\n Coerce(LispVal),\n\n Typed(LispVal),\n\n RefineApp {sp2: Span, tgt: InferTarget, t: TermID, u: Uncons, args: Vec<LispVal>},\n\n RefineExtraArgs {sp: Span, tgt: LispVal, u: Uncons, head: LispVal, args: Vec<LispVal>},\n\n RefineBis {sp: Span, sp2: Span, tgt: LispVal, im: InferMode, t: ThmID, u: Uncons, args: Vec<LispVal>},\n\n RefineHyps {sp: Span, sp2: Span, tgt: LispVal, t: ThmID, u: Uncons, args: Vec<LispVal>,\n\n hyps: std::vec::IntoIter<LispVal>, res: RefineHypsResult},\n\n}\n\n\n\n#[derive(Debug)]\n", "file_path": "mm0-rs/src/elab/tactic.rs", "rank": 48, "score": 59700.10325283551 }, { "content": "enum UnifyCmd {\n\n Term(TermID),\n\n TermSave(TermID),\n\n Ref(u32),\n\n Dummy(SortID),\n\n Hyp,\n\n}\n\n\n", "file_path": "mm0-rs/src/export_mmb.rs", "rank": 49, "score": 59700.10325283551 }, { "content": "enum ProofCmd {\n\n Term(TermID),\n\n TermSave(TermID),\n\n Ref(u32),\n\n Dummy(SortID),\n\n Thm(ThmID),\n\n ThmSave(ThmID),\n\n Hyp,\n\n Conv,\n\n Refl,\n\n Sym,\n\n Cong,\n\n Unfold,\n\n ConvCut,\n\n ConvRef(u32),\n\n ConvSave,\n\n Save,\n\n}\n\n\n", "file_path": "mm0-rs/src/export_mmb.rs", "rank": 50, "score": 59700.10325283551 }, { "content": "#[derive(Debug)]\n\nenum Stack<'a> {\n\n List(Span, Vec<LispVal>, std::slice::Iter<'a, IR>),\n\n DottedList(Vec<LispVal>, std::slice::Iter<'a, IR>, &'a IR),\n\n DottedList2(Vec<LispVal>),\n\n App(Span, Span, &'a [IR]),\n\n App2(Span, Span, LispVal, Vec<LispVal>, std::slice::Iter<'a, IR>),\n\n AppHead(Span, Span, LispVal),\n\n If(&'a IR, &'a IR),\n\n Def(Option<&'a Option<(Span, Span, AtomID)>>),\n\n Eval(std::slice::Iter<'a, IR>),\n\n Match(Span, std::slice::Iter<'a, Branch>),\n\n TestPattern(Span, LispVal, std::slice::Iter<'a, Branch>,\n\n &'a Branch, Vec<PatternStack<'a>>, Box<[LispVal]>),\n\n Drop(usize),\n\n Ret(FileSpan, ProcPos, Vec<LispVal>, Arc<IR>),\n\n MatchCont(Span, LispVal, std::slice::Iter<'a, Branch>, Arc<AtomicBool>),\n\n MapProc(Span, Span, LispVal, Box<[Uncons]>, Vec<LispVal>),\n\n AddThmProc(FileSpan, AwaitingProof),\n\n Refines(Span, Option<Span>, std::slice::Iter<'a, IR>),\n\n Refine {sp: Span, stack: Vec<RStack>, gv: Arc<Mutex<Vec<LispVal>>>},\n", "file_path": "mm0-rs/src/elab/lisp/eval.rs", "rank": 51, "score": 55421.197491654006 }, { "content": "#[derive(Debug)]\n\nenum State<'a> {\n\n Eval(&'a IR),\n\n Refines(Span, std::slice::Iter<'a, IR>),\n\n Ret(LispVal),\n\n List(Span, Vec<LispVal>, std::slice::Iter<'a, IR>),\n\n DottedList(Vec<LispVal>, std::slice::Iter<'a, IR>, &'a IR),\n\n App(Span, Span, LispVal, Vec<LispVal>, std::slice::Iter<'a, IR>),\n\n 
Match(Span, LispVal, std::slice::Iter<'a, Branch>),\n\n Pattern(Span, LispVal, std::slice::Iter<'a, Branch>,\n\n &'a Branch, Vec<PatternStack<'a>>, Box<[LispVal]>, PatternState<'a>),\n\n MapProc(Span, Span, LispVal, Box<[Uncons]>, Vec<LispVal>),\n\n Refine {sp: Span, stack: Vec<RStack>, state: RState, gv: Arc<Mutex<Vec<LispVal>>>},\n\n}\n\n\n\nimpl<'a> EnvDisplay for State<'a> {\n\n fn fmt(&self, fe: FormatEnv, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n match self {\n\n &State::Eval(ir) => write!(f, \"-> {}\", fe.to(ir)),\n\n State::Refines(_, irs) => write!(f, \"(refine {})\", fe.to(irs.as_slice())),\n\n State::Ret(e) => write!(f, \"<- {}\", fe.to(e)),\n", "file_path": "mm0-rs/src/elab/lisp/eval.rs", "rank": 52, "score": 55421.197491654006 }, { "content": "enum Item<'a> {\n\n List(&'a [SExpr]),\n\n DottedList(&'a [SExpr], &'a SExpr),\n\n}\n\n\n\nimpl<'a> LispParser<'a> {\n\n fn def_var<'c>(&mut self, mut e: &'c SExpr) -> Result<(Span, AtomID, Vec<Item<'c>>), ElabError> {\n\n let mut stack = vec![];\n\n loop {\n\n match &e.k {\n\n &SExprKind::Atom(a) => break Ok((e.span, self.parse_ident(e.span, a)?, stack)),\n\n SExprKind::List(xs) if !xs.is_empty() =>\n\n {stack.push(Item::List(&xs[1..])); e = &xs[0]}\n\n SExprKind::DottedList(xs, y) if !xs.is_empty() =>\n\n {stack.push(Item::DottedList(&xs[1..], y)); e = &xs[0]}\n\n _ => Err(ElabError::new_e(e.span, \"def: invalid spec\"))?\n\n }\n\n }\n\n }\n\n\n", "file_path": "mm0-rs/src/elab/lisp/parser.rs", "rank": 53, "score": 55416.46031098996 }, { "content": "#[derive(Copy, Clone, Debug)]\n\nstruct PP<'a> {\n\n left: bool,\n\n right: bool,\n\n small: bool,\n\n doc: RefDoc<'a, ()>,\n\n}\n\n\n\nimpl<'a> PP<'a> {\n\n fn token(alloc: &'a Arena<'a, ()>, env: &Environment, tk: &'a str) -> PP<'a> {\n\n PP {\n\n // A right delimiter like ')' has a token boundary on its left side,\n\n // and vice versa. 
This ensures that `x ( y ) z` gets notated as `x (y) z`\n\n left: env.pe.delims_r.get(*tk.as_bytes().first().unwrap()),\n\n right: env.pe.delims_l.get(*tk.as_bytes().last().unwrap()),\n\n small: true,\n\n doc: alloc.alloc(Doc::text(tk)),\n\n }\n\n }\n\n\n\n fn word(alloc: &'a Arena<'a, ()>, data: impl Into<Cow<'a, str>>) -> PP<'a> {\n", "file_path": "mm0-rs/src/elab/lisp/pretty.rs", "rank": 54, "score": 55326.380448294396 }, { "content": "struct Evaluator<'a> {\n\n elab: &'a mut Elaborator,\n\n ctx: Vec<LispVal>,\n\n file: FileRef,\n\n orig_span: Span,\n\n stack: Vec<Stack<'a>>,\n\n}\n\nimpl<'a> Deref for Evaluator<'a> {\n\n type Target = Elaborator;\n\n fn deref(&self) -> &Elaborator { self.elab }\n\n}\n\nimpl<'a> DerefMut for Evaluator<'a> {\n\n fn deref_mut(&mut self) -> &mut Elaborator { self.elab }\n\n}\n\n\n\nimpl<'a> Evaluator<'a> {\n\n fn new(elab: &'a mut Elaborator, orig_span: Span) -> Evaluator<'a> {\n\n let file = elab.path.clone();\n\n Evaluator {elab, ctx: vec![], file, orig_span, stack: vec![]}\n\n }\n", "file_path": "mm0-rs/src/elab/lisp/eval.rs", "rank": 55, "score": 55312.70914432831 }, { "content": "struct IndexHeader<'a> {\n\n sorts: &'a mut [[u8; 8]],\n\n terms: &'a mut [[u8; 8]],\n\n thms: &'a mut [[u8; 8]]\n\n}\n\n\n\nimpl<'a> IndexHeader<'a> {\n\n fn sort(&mut self, i: SortID) -> &mut [u8; 8] { &mut self.sorts[i.0 as usize] }\n\n fn term(&mut self, i: TermID) -> &mut [u8; 8] { &mut self.terms[i.0 as usize] }\n\n fn thm(&mut self, i: ThmID) -> &mut [u8; 8] { &mut self.thms[i.0 as usize] }\n\n}\n\n\n\npub struct Exporter<'a, W: Write + Seek + ?Sized> {\n\n file: FileRef,\n\n source: &'a LinedString,\n\n env: &'a Environment,\n\n w: &'a mut W,\n\n pos: u64,\n\n term_reord: TermVec<Option<Reorder>>,\n\n fixups: Vec<(u64, Value)>,\n", "file_path": "mm0-rs/src/export_mmb.rs", "rank": 56, "score": 55312.70914432831 }, { "content": "#[derive(Debug)]\n\nenum PatternState<'a> {\n\n Eval(&'a Pattern, LispVal),\n\n Ret(bool),\n\n List(Uncons, std::slice::Iter<'a, Pattern>, Dot<'a>),\n\n Binary(bool, bool, LispVal, std::slice::Iter<'a, Pattern>),\n\n}\n\n\n\nimpl<'a> EnvDisplay for PatternState<'a> {\n\n fn fmt(&self, fe: FormatEnv, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n match self {\n\n &PatternState::Eval(p, ref e) => write!(f, \"{} := {}\", fe.to(p), fe.to(e)),\n\n &PatternState::Ret(e) => write!(f, \"<- {}\", e),\n\n PatternState::List(u, ps, Dot::List(None)) => write!(f, \"({}) := {}\",\n\n fe.to(ps.as_slice()), fe.to(u)),\n\n PatternState::List(u, ps, Dot::List(Some(0))) => write!(f, \"({} ...) := {}\",\n\n fe.to(ps.as_slice()), fe.to(u)),\n\n PatternState::List(u, ps, Dot::List(Some(n))) => write!(f, \"({} __ {}) := {}\",\n\n fe.to(ps.as_slice()), n, fe.to(u)),\n\n &PatternState::List(ref u, ref ps, Dot::DottedList(r)) => write!(f, \"({} . 
{}) := {}\",\n\n fe.to(ps.as_slice()), fe.to(r), fe.to(u)),\n\n PatternState::Binary(false, false, e, ps) => write!(f, \"(and {}) := {}\", fe.to(ps.as_slice()), fe.to(e)),\n\n PatternState::Binary(true, true, e, ps) => write!(f, \"(or {}) := {}\", fe.to(ps.as_slice()), fe.to(e)),\n\n PatternState::Binary(true, false, e, ps) => write!(f, \"(not {}) := {}\", fe.to(ps.as_slice()), fe.to(e)),\n\n PatternState::Binary(false, true, e, ps) => write!(f, \"(nor {}) := {}\", fe.to(ps.as_slice()), fe.to(e)),\n\n }\n\n }\n\n}\n\n\n", "file_path": "mm0-rs/src/elab/lisp/eval.rs", "rank": 57, "score": 54152.82926825763 }, { "content": "#[derive(Debug)]\n\nenum PatternStack<'a> {\n\n Bool(&'a Pattern, bool),\n\n List(Uncons, std::slice::Iter<'a, Pattern>, Dot<'a>),\n\n Binary(bool, bool, LispVal, std::slice::Iter<'a, Pattern>),\n\n}\n\n\n", "file_path": "mm0-rs/src/elab/lisp/eval.rs", "rank": 58, "score": 54152.82926825763 }, { "content": "#[derive(Debug)]\n\nstruct ServerError(BoxError);\n\n\n", "file_path": "mm0-rs/src/server.rs", "rank": 59, "score": 54050.31561728871 }, { "content": "struct MathParser<'a> {\n\n p: Parser<'a>,\n\n pe: &'a ParserEnv,\n\n spans: &'a mut Spans<ObjectKind>,\n\n}\n\nimpl<'a> Deref for MathParser<'a> {\n\n type Target = Parser<'a>;\n\n fn deref(&self) -> &Parser<'a> { &self.p }\n\n}\n\nimpl<'a> DerefMut for MathParser<'a> {\n\n fn deref_mut(&mut self) -> &mut Parser<'a> { &mut self.p }\n\n}\n\n\n\nimpl<'a> MathParser<'a> {\n\n fn ws(&mut self) {\n\n loop {\n\n match self.cur() {\n\n b' ' | b'\\n' => self.idx += 1,\n\n _ => return\n\n }\n", "file_path": "mm0-rs/src/elab/math_parser.rs", "rank": 60, "score": 54045.57843662467 }, { "content": "struct LispParser<'a> {\n\n elab: &'a mut Elaborator,\n\n ctx: LocalCtx,\n\n}\n\nimpl<'a> Deref for LispParser<'a> {\n\n type Target = Elaborator;\n\n fn deref(&self) -> &Elaborator { self.elab }\n\n}\n\nimpl<'a> DerefMut for LispParser<'a> {\n\n fn deref_mut(&mut self) -> &mut Elaborator { self.elab }\n\n}\n\n\n", "file_path": "mm0-rs/src/elab/lisp/parser.rs", "rank": 61, "score": 54045.57843662467 }, { "content": "fn main() -> io::Result<()> {\n\n let mut args = env::args().skip(1);\n\n match args.next().expect(\"expected a subcommand\").as_str() {\n\n \"server\" => Ok(server::main(args)),\n\n \"compile\" => Ok(compiler::main(args)?),\n\n _ => panic!(\"incorrect subcommand, expected {server}\")\n\n }\n\n}\n", "file_path": "mm0-rs/src/main.rs", "rank": 62, "score": 51950.78475438735 }, { "content": "pub trait Remap<R> {\n\n fn remap(&self, r: &mut R) -> Self;\n\n}\n\nimpl Remap<Remapper> for SortID {\n\n fn remap(&self, r: &mut Remapper) -> Self { *r.sort.get(self).unwrap_or(self) }\n\n}\n\nimpl Remap<Remapper> for TermID {\n\n fn remap(&self, r: &mut Remapper) -> Self { *r.term.get(self).unwrap_or(self) }\n\n}\n\nimpl Remap<Remapper> for ThmID {\n\n fn remap(&self, r: &mut Remapper) -> Self { *r.thm.get(self).unwrap_or(self) }\n\n}\n\nimpl Remap<Remapper> for AtomID {\n\n fn remap(&self, r: &mut Remapper) -> Self { *r.atom.get(self).unwrap_or(self) }\n\n}\n\nimpl<R> Remap<R> for String {\n\n fn remap(&self, _: &mut R) -> Self { self.clone() }\n\n}\n\nimpl<R, A: Remap<R>, B: Remap<R>> Remap<R> for (A, B) {\n\n fn remap(&self, r: &mut R) -> Self { (self.0.remap(r), self.1.remap(r)) }\n", "file_path": "mm0-rs/src/elab/environment.rs", "rank": 63, "score": 51026.84471790713 }, { "content": "struct Reorder<T=u32> {\n\n map: Box<[Option<T>]>,\n\n idx: u32,\n\n}\n\n\n\nimpl<T: Clone> Reorder<T> {\n\n fn new(nargs: u32, len: usize, mut f: impl 
FnMut(u32) -> T) -> Reorder<T> {\n\n let mut map: Box<[Option<T>]> = vec![None; len].into();\n\n for i in 0..nargs {map[i as usize] = Some(f(i))}\n\n Reorder {map, idx: nargs}\n\n }\n\n}\n\n\n", "file_path": "mm0-rs/src/export_mmb.rs", "rank": 64, "score": 50934.117258578786 }, { "content": "fn elaborate_and_send(path: FileRef,\n\n cancel: Arc<AtomicBool>, send: FSender<(u64, Arc<Environment>)>) ->\n\n BoxFuture<'static, ()> {\n\n async {\n\n if let Ok(env) = elaborate(path, Some(Position::default()), cancel).await {\n\n let _ = send.send(env);\n\n }\n\n }.boxed()\n\n}\n\n\n", "file_path": "mm0-rs/src/server.rs", "rank": 65, "score": 49518.2232772391 }, { "content": "#[derive(Serialize_repr, Deserialize_repr)]\n\n#[repr(u8)]\n\nenum TraceKind {Sort, Decl, Global}\n\n\n", "file_path": "mm0-rs/src/server.rs", "rank": 66, "score": 47482.99613030489 }, { "content": "#[allow(unused)]\n\nfn log_message(message: String) -> Result<()> {\n\n send_message(Notification {\n\n method: \"window/logMessage\".to_owned(),\n\n params: to_value(LogMessageParams {typ: MessageType::Log, message})?\n\n })\n\n}\n\n\n", "file_path": "mm0-rs/src/server.rs", "rank": 67, "score": 47170.199407726235 }, { "content": "fn alphanumber(n: usize) -> String {\n\n let mut out = Vec::with_capacity(2);\n\n let mut n = n + 1;\n\n while n != 0 {\n\n out.push(b'a' + ((n - 1) % 26) as u8);\n\n n = (n - 1) / 26;\n\n }\n\n out.reverse();\n\n unsafe { String::from_utf8_unchecked(out) }\n\n}\n\n\n\nimpl EnvDisplay for AtomID {\n\n fn fmt(&self, fe: FormatEnv, f: &mut fmt::Formatter) -> fmt::Result {\n\n fe.data[*self].name.fmt(f)\n\n }\n\n}\n\nimpl EnvDisplay for Option<AtomID> {\n\n fn fmt(&self, fe: FormatEnv, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n None => \"_\".fmt(f),\n", "file_path": "mm0-rs/src/elab/lisp/print.rs", "rank": 68, "score": 47170.199407726235 }, { "content": "fn make_relative(buf: &PathBuf) -> String {\n\n pathdiff::diff_paths(buf, &CURRENT_DIR).as_ref().unwrap_or(buf)\n\n .to_str().unwrap().to_owned()\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct FileRef(Arc<(PathBuf, String, Url)>);\n\nimpl FileRef {\n\n pub fn new(buf: PathBuf) -> FileRef {\n\n let u = Url::from_file_path(&buf).expect(\"bad file path\");\n\n let rel = make_relative(&buf);\n\n FileRef(Arc::new((buf, rel, u)))\n\n }\n\n pub fn from_url(url: Url) -> FileRef {\n\n let buf = url.to_file_path().expect(\"bad URL\");\n\n let rel = make_relative(&buf);\n\n FileRef(Arc::new((buf, rel, url)))\n\n }\n\n pub fn path(&self) -> &PathBuf { &self.0 .0 }\n\n pub fn rel(&self) -> &str { &self.0 .1 }\n", "file_path": "mm0-rs/src/util.rs", "rank": 69, "score": 46086.34445149824 }, { "content": "fn nos_id(nos: NumberOrString) -> RequestId {\n\n match nos {\n\n NumberOrString::Number(n) => n.into(),\n\n NumberOrString::String(s) => s.into(),\n\n }\n\n}\n\n\n\nlazy_static! 
{\n\n static ref LOGGER: (Mutex<Vec<(Instant, ThreadId, String)>>, Condvar) = Default::default();\n\n static ref SERVER: Server = Server::new().expect(\"Initialization failed\");\n\n}\n", "file_path": "mm0-rs/src/server.rs", "rank": 70, "score": 45078.4060082503 }, { "content": "u32 debug_cmd_unpack(u8* cmd, u32* data_out) {\n\n if (!cmd) {\n\n fprintf(stderr, \"null\");\n\n return 0;\n\n }\n\n if (cmd < g_file || cmd + CMD_MAX_SIZE > g_end) {\n\n fprintf(stderr, \"%lX: command out of range\", cmd - g_file);\n\n return 0;\n\n }\n\n switch (CMD_DATA(*cmd)) {\n\n case CMD_DATA_0: {\n\n *data_out = 0;\n\n return sizeof(cmd0);\n\n } break;\n\n\n\n case CMD_DATA_8: {\n\n cmd8* p = (cmd8*)cmd;\n\n *data_out = p->data;\n\n return sizeof(cmd8);\n\n } break;\n\n\n\n case CMD_DATA_16: {\n\n cmd16* p = (cmd16*)cmd;\n\n *data_out = p->data;\n\n return sizeof(cmd16);\n\n } break;\n\n\n\n case CMD_DATA_32: {\n\n cmd32* p = (cmd32*)cmd;\n\n *data_out = p->data;\n\n return sizeof(cmd32);\n\n } break;\n\n }\n\n UNREACHABLE();\n", "file_path": "mm0-c/verifier_debug.c", "rank": 71, "score": 42456.57977353649 }, { "content": "void debug_print_cmd(u8* cmd, u32 data) {\n\n if (!cmd) return;\n\n u64 pos = cmd - g_file;\n\n if (cmd < g_file || cmd + CMD_MAX_SIZE > g_end) {\n\n fprintf(stderr, \"%lX: command out of range\", pos);\n\n return;\n\n }\n\n switch (*cmd & 0x3F) {\n\n case CMD_END: fprintf(stderr, \"%lX: End\", pos); break;\n\n\n\n case CMD_PROOF_REF: {\n\n fprintf(stderr, \"%lX: Ref %d\", pos, data);\n\n if (data < g_heap_size) {\n\n fprintf(stderr, \" // = \");\n\n debug_print_stackel(&g_heap[data]);\n\n }\n\n } break;\n\n\n\n case CMD_PROOF_TERM:\n\n case CMD_PROOF_TERM_SAVE: {\n\n fprintf(stderr, \"%lX: Term %d\", pos, data);\n\n index* ix;\n\n if (data < g_num_terms && (ix = lookup_term(data))) {\n\n fprintf(stderr, \" // = %s\", ix->value);\n\n }\n\n if (*cmd & 0x01) fprintf(stderr, \"\\n %lX: Save\", pos);\n\n } break;\n\n\n\n case CMD_PROOF_DUMMY: {\n\n fprintf(stderr, \"%lX: Dummy %d\", pos, data);\n\n index* ix;\n\n if (data < g_num_sorts && (ix = lookup_sort(data))) {\n\n fprintf(stderr, \" // = %s\", ix->value);\n\n }\n\n } break;\n\n\n\n case CMD_PROOF_THM:\n\n case CMD_PROOF_THM_SAVE: {\n\n fprintf(stderr, \"%lX: Thm %d\", pos, data);\n\n index* ix;\n\n if (data < g_num_thms && (ix = lookup_thm(data))) {\n\n fprintf(stderr, \" // = %s\", ix->value);\n\n }\n\n if (*cmd & 0x01) fprintf(stderr, \"\\n%lX: Save\", pos);\n\n } break;\n\n\n\n case CMD_PROOF_HYP: fprintf(stderr, \"%lX: Hyp\", pos); break;\n\n case CMD_PROOF_CONV: fprintf(stderr, \"%lX: Conv\", pos); break;\n\n case CMD_PROOF_REFL: fprintf(stderr, \"%lX: Refl\", pos); break;\n\n case CMD_PROOF_SYMM: fprintf(stderr, \"%lX: Symm\", pos); break;\n\n case CMD_PROOF_CONG: fprintf(stderr, \"%lX: Cong\", pos); break;\n\n case CMD_PROOF_UNFOLD: fprintf(stderr, \"%lX: Unfold\", pos); break;\n\n case CMD_PROOF_CONV_CUT: fprintf(stderr, \"%lX: ConvCut\", pos); break;\n\n\n\n case CMD_PROOF_CONV_REF: {\n\n fprintf(stderr, \"%lX: ConvRef %d\", pos, data);\n\n if (data < g_heap_size) {\n\n fprintf(stderr, \" // = \");\n\n debug_print_stackel(&g_heap[data]);\n\n }\n\n } break;\n\n\n\n case CMD_PROOF_CONV_SAVE: fprintf(stderr, \"%lX: ConvSave\", pos); break;\n\n case CMD_PROOF_SAVE: fprintf(stderr, \"%lX: Save\", pos); break;\n\n\n\n case CMD_UNIFY_REF: {\n\n fprintf(stderr, \"%lX: URef %d\", pos, data);\n\n if (data < g_uheap_size) {\n\n fprintf(stderr, \" // = \");\n\n debug_print_expr(g_uheap[data], true);\n\n }\n\n } break;\n\n\n\n case 
CMD_UNIFY_TERM:\n\n case CMD_UNIFY_TERM_SAVE: {\n\n fprintf(stderr, \"%lX: UTerm %d\", pos, data);\n\n index* ix;\n\n if (data < g_num_terms && (ix = lookup_term(data))) {\n\n fprintf(stderr, \" // = %s\", ix->value);\n\n }\n\n if (*cmd & 0x01) fprintf(stderr, \"\\n%lX: Save\", pos);\n\n } break;\n\n\n\n case CMD_UNIFY_DUMMY: {\n\n fprintf(stderr, \"%lX: UDummy\", pos);\n\n } break;\n\n\n\n case CMD_UNIFY_HYP: {\n\n fprintf(stderr, \"%lX: UHyp\", pos);\n\n } break;\n\n\n\n default: {\n\n fprintf(stderr, \"%lX: ?%02X\", pos, *cmd);\n\n } break;\n\n }\n\n fprintf(stderr, \"\\n\");\n", "file_path": "mm0-c/verifier_debug.c", "rank": 72, "score": 42456.57977353649 }, { "content": "void debug_print_cmds(u8* cmd, u8* stop) {\n\n if (!cmd) return;\n\n if (cmd < g_file || cmd + CMD_MAX_SIZE > g_end) {\n\n fprintf(stderr, \"%lX: command out of range\", cmd - g_file);\n\n return;\n\n }\n\n while (*cmd != CMD_END) {\n\n u32 data;\n\n u32 sz = debug_cmd_unpack(cmd, &data);\n\n if (!sz) return;\n\n if (cmd == stop) fprintf(stderr, \"> \");\n\n else fprintf(stderr, \" \");\n\n debug_print_cmd(cmd, data);\n\n // if (cmd == stop) return;\n\n cmd += sz;\n\n }\n", "file_path": "mm0-c/verifier_debug.c", "rank": 73, "score": 42456.57977353649 }, { "content": "void debug_print_stack() {\n\n fprintf(stderr, \"stack:\\n\");\n\n for (u32* p = g_stack_top - 1; p >= g_stack; p -= debug_stackel_size(p)) {\n\n debug_print_stackel(p);\n\n fprintf(stderr, \"\\n\");\n\n }\n", "file_path": "mm0-c/verifier_debug.c", "rank": 74, "score": 42456.57977353649 }, { "content": "void debug_print_expr(u32 n, bool type) {\n\n if (n % 4 != 0) {fprintf(stderr, \"unaligned expr\"); return;}\n\n if (n >= g_store_size) {fprintf(stderr, \"expr out of range\"); return;}\n\n store_expr* p = (store_expr*)&g_store[n];\n\n bool bound = (p->type & TYPE_BOUND_MASK) != 0;\n\n if (type && bound) fprintf(stderr, \"{\");\n\n switch (((store_expr*)&g_store[n])->tag) {\n\n case EXPR_VAR: {\n\n fprintf(stderr, \"v%d\", ((store_var*)p)->var);\n\n } break;\n\n\n\n case EXPR_TERM: {\n\n store_term* t = (store_term*)p;\n\n index* ix = lookup_term(t->termid);\n\n if (ix) fprintf(stderr, \"(%s\", ix->value);\n\n else fprintf(stderr, \"(t%d\", t->termid);\n\n for (int i = 0; i < t->num_args; i++) {\n\n fprintf(stderr, \" \");\n\n debug_print_expr(t->args[i], false);\n\n }\n\n fprintf(stderr, \")\");\n\n } break;\n\n\n\n case EXPR_CONV: {\n\n store_conv* c = (store_conv*)p;\n\n debug_print_expr(c->e1, false);\n\n fprintf(stderr, \" = \");\n\n debug_print_expr(c->e2, false);\n\n } break;\n\n\n\n default: fprintf(stderr, \"?\"); break;\n\n }\n\n index* ix;\n\n if (type && (ix = lookup_sort((p->type >> 56) & 0x7F))) {\n\n fprintf(stderr, \":%s\", ix->value);\n\n }\n\n if (type && bound) fprintf(stderr, \"}\");\n", "file_path": "mm0-c/verifier_debug.c", "rank": 75, "score": 42456.57977353649 }, { "content": "void debug_print_heap() {\n\n fprintf(stderr, \"heap:\\n\");\n\n for (int i = 0; i < g_heap_size; i++) {\n\n fprintf(stderr, \"%d: \", i); debug_print_stackel(&g_heap[i]); fprintf(stderr, \"\\n\");\n\n }\n", "file_path": "mm0-c/verifier_debug.c", "rank": 76, "score": 42456.57977353649 }, { "content": "u32 debug_stackel_size(u32* p) {\n\n switch (*p & STACK_TYPE_MASK) {\n\n case STACK_TYPE_EXPR: return 1;\n\n case STACK_TYPE_PROOF: return 1;\n\n case STACK_TYPE_CONV: return 2;\n\n case STACK_TYPE_CO_CONV: return 2;\n\n default: UNREACHABLE();\n\n }\n", "file_path": "mm0-c/verifier_debug.c", "rank": 77, "score": 42456.57977353649 }, { "content": "void 
debug_print_stackel(u32* p) {\n\n switch (*p & STACK_TYPE_MASK) {\n\n case STACK_TYPE_EXPR: {\n\n fprintf(stderr, \"expr \");\n\n debug_print_expr(*p & STACK_DATA_MASK, true);\n\n } break;\n\n case STACK_TYPE_PROOF: {\n\n fprintf(stderr, \"proof \");\n\n debug_print_expr(*p & STACK_DATA_MASK, true);\n\n } break;\n\n case STACK_TYPE_CONV: {\n\n debug_print_expr(*p & STACK_DATA_MASK, false);\n\n fprintf(stderr, \" = \");\n\n debug_print_expr(*(p-1) & STACK_DATA_MASK, true);\n\n } break;\n\n case STACK_TYPE_CO_CONV: {\n\n debug_print_expr(*p & STACK_DATA_MASK, false);\n\n fprintf(stderr, \" =?= \");\n\n debug_print_expr(*(p-1) & STACK_DATA_MASK, true);\n\n } break;\n\n default: UNREACHABLE();\n\n }\n", "file_path": "mm0-c/verifier_debug.c", "rank": 78, "score": 42456.57977353649 }, { "content": "void debug_print_uheap() {\n\n fprintf(stderr, \"uheap:\\n\");\n\n for (int i = 0; i < g_uheap_size; i++) {\n\n fprintf(stderr, \"%d: \", i);\n\n debug_print_expr(g_uheap[i], true);\n\n fprintf(stderr, \"\\n\");\n\n }\n", "file_path": "mm0-c/verifier_debug.c", "rank": 79, "score": 42456.57977353649 }, { "content": "void debug_print_ustack() {\n\n fprintf(stderr, \"ustack:\\n\");\n\n for (u32* p = g_ustack_top - 1; p >= g_ustack; p--) {\n\n debug_print_expr(*p, true);\n\n fprintf(stderr, \"\\n\");\n\n }\n", "file_path": "mm0-c/verifier_debug.c", "rank": 80, "score": 42456.57977353649 }, { "content": "fn dep_change(path: FileRef) -> BoxFuture<'static, ()> {\n\n elaborate_and_report(path, None, Arc::new(AtomicBool::new(false))).boxed()\n\n}\n\n\n", "file_path": "mm0-rs/src/server.rs", "rank": 81, "score": 42291.81320125629 }, { "content": "fn send_message<T: Into<Message>>(t: T) -> Result<()> {\n\n Ok(SERVER.conn.sender.send(t.into())?)\n\n}\n\n\n", "file_path": "mm0-rs/src/server.rs", "rank": 82, "score": 41520.317578211005 }, { "content": "#[allow(unused)]\n\nfn show_message(typ: MessageType, message: String) -> Result<()> {\n\n send_message(Notification {\n\n method: \"window/showMessage\".to_owned(),\n\n params: to_value(ShowMessageParams {typ, message})?\n\n })\n\n}\n\n\n", "file_path": "mm0-rs/src/server.rs", "rank": 83, "score": 40801.12961137103 }, { "content": "struct VFS(Mutex<HashMap<FileRef, Arc<VirtualFile>>>);\n\n\n\nimpl VFS {\n\n fn get_or_insert(&self, path: FileRef) -> io::Result<(FileRef, Arc<VirtualFile>)> {\n\n match self.0.lock().unwrap().entry(path) {\n\n Entry::Occupied(e) => Ok((e.key().clone(), e.get().clone())),\n\n Entry::Vacant(e) => {\n\n let path = e.key().clone();\n\n let s = fs::read_to_string(path.path())?;\n\n let val = e.insert(Arc::new(VirtualFile::new(s))).clone();\n\n Ok((path, val))\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl ElabErrorKind {\n\n pub fn to_footer(&self, mut to_range: impl FnMut(&FileSpan) -> Range) -> Vec<Annotation> {\n\n match self {\n\n ElabErrorKind::Boxed(_, Some(info)) =>\n", "file_path": "mm0-rs/src/compiler.rs", "rank": 84, "score": 40108.646040384556 }, { "content": "struct VFS(Mutex<HashMap<FileRef, Arc<VirtualFile>>>);\n\n\n\nimpl VFS {\n\n fn get(&self, path: &FileRef) -> Option<Arc<VirtualFile>> {\n\n self.0.lock().unwrap().get(path).cloned()\n\n }\n\n\n\n fn get_or_insert(&self, path: FileRef) -> io::Result<(FileRef, Arc<VirtualFile>)> {\n\n match self.0.lock().unwrap().entry(path) {\n\n Entry::Occupied(e) => Ok((e.key().clone(), e.get().clone())),\n\n Entry::Vacant(e) => {\n\n let path = e.key().clone();\n\n let s = fs::read_to_string(path.path())?;\n\n let val = e.insert(Arc::new(VirtualFile::new(None, s))).clone();\n\n Ok((path, val))\n\n 
}\n\n }\n\n }\n\n\n\n fn source(&self, file: &FileRef) -> Arc<LinedString> {\n", "file_path": "mm0-rs/src/server.rs", "rank": 85, "score": 40108.646040384556 }, { "content": "fn covariant<'a>(from: RefDoc<'static, ()>) -> RefDoc<'a, ()> {\n\n unsafe {mem::transmute(from)}\n\n}\n\n\n\nimpl<'a> Pretty<'a> {\n\n pub fn nil() -> RefDoc<'a, ()> {covariant(NIL)}\n\n // fn hardline() -> RefDoc<'a, ()> {covariant(HARDLINE)}\n\n // fn space() -> RefDoc<'a, ()> {covariant(SPACE)}\n\n fn line() -> RefDoc<'a, ()> {covariant(LINE)}\n\n // fn line_() -> RefDoc<'a, ()> {covariant(LINE_)}\n\n fn softline() -> RefDoc<'a, ()> {covariant(SOFTLINE)}\n\n fn softline_() -> RefDoc<'a, ()> {covariant(SOFTLINE_)}\n\n\n\n fn new(fe: FormatEnv<'a>, alloc: &'a Arena<'a, ()>) -> Pretty<'a> {\n\n Pretty {\n\n lparen: PP::token(&alloc, fe.env, \"(\"),\n\n rparen: PP::token(&alloc, fe.env, \")\"),\n\n fe, alloc, hash: RefCell::new(HashMap::new())\n\n }\n\n }\n", "file_path": "mm0-rs/src/elab/lisp/pretty.rs", "rank": 86, "score": 39572.65095260566 }, { "content": "fn send_diagnostics(uri: Url, diagnostics: Vec<Diagnostic>) -> Result<()> {\n\n send_message(Notification {\n\n method: \"textDocument/publishDiagnostics\".to_owned(),\n\n params: to_value(PublishDiagnosticsParams {uri, diagnostics})?\n\n })\n\n}\n\n\n", "file_path": "mm0-rs/src/server.rs", "rank": 87, "score": 39572.65095260566 }, { "content": "#[derive(Debug)]\n\nenum Dot<'a> { List(Option<usize>), DottedList(&'a Pattern) }\n", "file_path": "mm0-rs/src/elab/lisp/eval.rs", "rank": 88, "score": 37898.37191686414 }, { "content": "fn parse_request(req: Request) -> Result<Option<(RequestId, RequestType)>> {\n\n let Request {id, method, params} = req;\n\n match method.as_str() {\n\n \"textDocument/completion\" => Ok(Some((id, RequestType::Completion(from_value(params)?)))),\n\n \"textDocument/hover\" => Ok(Some((id, RequestType::Hover(from_value(params)?)))),\n\n \"textDocument/definition\" => Ok(Some((id, RequestType::Definition(from_value(params)?)))),\n\n \"textDocument/documentSymbol\" => Ok(Some((id, RequestType::DocumentSymbol(from_value(params)?)))),\n\n \"completionItem/resolve\" => Ok(Some((id, RequestType::CompletionResolve(from_value(params)?)))),\n\n _ => Ok(None)\n\n }\n\n}\n\n\n", "file_path": "mm0-rs/src/server.rs", "rank": 89, "score": 37871.88486806757 }, { "content": "use std::mem::MaybeUninit;\n\nuse std::collections::BTreeMap;\n\nuse super::environment::AtomID;\n\nuse super::local_context::LocalContext;\n\nuse crate::util::*;\n\n\n\npub struct Spans<T> {\n\n stmt: MaybeUninit<Span>,\n\n decl: MaybeUninit<AtomID>,\n\n pub lc: Option<LocalContext>,\n\n data: BTreeMap<usize, Vec<(Span, T)>>,\n\n}\n\n\n\nuse std::fmt;\n\nimpl<T: fmt::Debug> fmt::Debug for Spans<T> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"{{ stmt: {:?},\\n data: {:?} }}\", self.stmt(), self.data)\n\n }\n\n}\n\n\n", "file_path": "mm0-rs/src/elab/spans.rs", "rank": 90, "score": 35358.23563299323 }, { "content": " v.push((sp, val));\n\n &mut v.last_mut().unwrap().1\n\n }\n\n pub fn insert_if(&mut self, sp: Span, val: impl FnOnce() -> T) {\n\n if sp.start >= self.stmt().start {\n\n self.insert(sp, val());\n\n }\n\n }\n\n\n\n pub fn _get(&self, sp: Span) -> Option<&T> {\n\n self.data.get(&sp.start).and_then(|v|\n\n v.iter().find(|x| x.0 == sp).map(|x| &x.1))\n\n }\n\n pub fn _get_mut(&mut self, sp: Span) -> Option<&mut T> {\n\n self.data.get_mut(&sp.start).and_then(|v|\n\n v.iter_mut().find(|x| x.0 == sp).map(|x| &mut x.1))\n\n }\n\n\n\n pub fn 
find_pos(&self, pos: usize) -> Vec<&(Span, T)> {\n\n if let Some((_, v)) = self.data.range(..=pos).rev().next() {\n\n v.iter().filter(|x| pos < x.0.end).collect()\n\n } else {vec![]}\n\n }\n\n}\n", "file_path": "mm0-rs/src/elab/spans.rs", "rank": 91, "score": 35351.667004059134 }, { "content": "impl<T> Spans<T> {\n\n pub fn new() -> Spans<T> {\n\n Spans {\n\n stmt: MaybeUninit::uninit(),\n\n decl: MaybeUninit::uninit(),\n\n lc: None,\n\n data: BTreeMap::new()\n\n }\n\n }\n\n pub fn set_stmt(&mut self, sp: Span) { self.stmt = MaybeUninit::new(sp) }\n\n pub fn set_decl(&mut self, a: AtomID) { self.decl = MaybeUninit::new(a) }\n\n pub fn stmt(&self) -> Span { unsafe { self.stmt.assume_init() } }\n\n pub fn _decl(&self) -> AtomID { unsafe { self.decl.assume_init() } }\n\n pub fn insert(&mut self, sp: Span, val: T) -> &mut T {\n\n let v = self.data.entry(sp.start).or_default();\n\n for (sp1, k) in &mut *v {\n\n if sp == *sp1 {return unsafe {&mut *(k as *mut T)}}\n\n }\n\n // the unsafe above is needed because NLL support is not all there,\n\n // and this looks like a double borrow of `*v`\n", "file_path": "mm0-rs/src/elab/spans.rs", "rank": 92, "score": 35351.40702507772 }, { "content": " /* term* */ u32 p_terms; // pointer to start of term table\n", "file_path": "mm0-c/types.c", "rank": 93, "score": 35297.39685258107 }, { "content": "u32 g_num_terms; term* g_terms;\n", "file_path": "mm0-c/index.c", "rank": 94, "score": 35292.73386510587 }, { "content": "#define SORT_FREE 8\n\n\n", "file_path": "mm0-c/types.c", "rank": 95, "score": 33887.22632223946 }, { "content": "#define SORT_PURE 1\n", "file_path": "mm0-c/types.c", "rank": 96, "score": 33887.22632223946 }, { "content": "#define SORT_PROVABLE 4\n", "file_path": "mm0-c/types.c", "rank": 97, "score": 33887.22632223946 }, { "content": "#define SORT_STRICT 2\n", "file_path": "mm0-c/types.c", "rank": 98, "score": 33887.22632223946 }, { "content": "index* lookup_thm(u32 thm) {\n\n if (!init_index()) return 0;\n\n if (thm < gi_header->num_thms) {\n\n u8* p = &g_file[gi_thms[thm]];\n\n if (p < g_end) return (index*)p;\n\n }\n\n return 0;\n", "file_path": "mm0-c/index.c", "rank": 99, "score": 33846.73717295179 } ]
Rust
farms/farm-client/tests/vault_actions/mod.rs
biw/solana-program-library
5611ad8bd595d9e3666f8b115cd28f8116038645
use { crate::{utils, utils::Swap}, solana_farm_client::client::FarmClient, solana_sdk::{commitment_config::CommitmentConfig, signature::Keypair, signer::Signer}, std::{thread, time}, }; const MAX_SOL_BALANCE_TO_USE: f64 = 0.1; const INITIAL_CRANK_DELAY: u64 = 400; const CRANK_INTERVAL: u64 = 100; pub fn do_swap(client: &FarmClient, keypair: &Keypair, swap: &Swap) { let amount = if swap.amount == 0.0 { utils::get_token_or_native_balance(client, &keypair.pubkey(), swap.from_token) } else if swap.amount < 0.0 { -1.0 * swap.amount * utils::get_token_or_native_balance(client, &keypair.pubkey(), swap.from_token) } else { swap.amount }; if amount < 0.0001 { return; } println!( ">> Swap {} {} to {}", amount, swap.from_token, swap.to_token ); println!( " Done: {}", client .swap( keypair, swap.protocol, swap.from_token, swap.to_token, amount, 0.0, ) .unwrap() ); let _ = utils::get_balances( client, &keypair.pubkey(), swap.from_token, swap.to_token, "After swap", ); } pub fn do_add_liquidity( client: &FarmClient, keypair: &Keypair, vault_name: &str, max_token_a_ui_amount: f64, max_token_b_ui_amount: f64, ) -> f64 { println!( ">> Add liquidity to {}: {}, {}", vault_name, max_token_a_ui_amount, max_token_b_ui_amount ); let (token_a_str, token_b_str, vt_token_name) = client.get_vault_token_names(vault_name).unwrap(); let vt_balance = utils::get_token_or_native_balance(client, &keypair.pubkey(), &vt_token_name); println!( " Done: {}", client .add_liquidity_vault( keypair, vault_name, max_token_a_ui_amount, max_token_b_ui_amount, ) .unwrap() ); let _ = utils::get_balances( client, &keypair.pubkey(), &token_a_str, &token_b_str, "After add liquidity", ); let _ = utils::get_balance(client, &keypair.pubkey(), &vt_token_name, "VT"); let _ = utils::get_vault_stake_balance(client, vault_name); utils::get_token_or_native_balance(client, &keypair.pubkey(), &vt_token_name) - vt_balance } pub fn do_crank(client: &FarmClient, keypair: &Keypair, vault_name: &str, step: u64) { println!(">> Crank {} with step {}", vault_name, step); let initial_info = client.get_vault_info(vault_name).unwrap(); println!( " Done: {}", client.crank_vault(keypair, vault_name, step).unwrap() ); let after_crank_info = client.get_vault_info(vault_name).unwrap(); println!( " Rewards received: {}, {}", after_crank_info.tokens_a_rewards - initial_info.tokens_a_rewards, after_crank_info.tokens_b_rewards - initial_info.tokens_b_rewards ); let _ = utils::get_vault_stake_balance(client, vault_name); } pub fn do_remove_liquidity(client: &FarmClient, keypair: &Keypair, vault_name: &str, amount: f64) { println!(">> Remove liquidity from {}: {}", vault_name, amount); let (token_a_str, token_b_str, vt_token_name) = client.get_vault_token_names(vault_name).unwrap(); println!( " Done: {}", client .remove_liquidity_vault(keypair, vault_name, amount) .unwrap() ); let _ = utils::get_balances( client, &keypair.pubkey(), &token_a_str, &token_b_str, "After remove liquidity", ); let _ = utils::get_balance(client, &keypair.pubkey(), &vt_token_name, "VT"); let _ = utils::get_vault_stake_balance(client, vault_name); } pub fn cleanup(client: &FarmClient, keypair: &Keypair, vault_name: &str, cleanup_swaps: Vec<Swap>) { println!("\n>>> Clean-up {}...", vault_name); let wallet = keypair.pubkey(); let (token_a_str, token_b_str, vt_token_name) = client.get_vault_token_names(vault_name).unwrap(); let vt_token_balance = utils::get_token_or_native_balance(client, &wallet, &vt_token_name); if vt_token_balance > 0.0 { do_remove_liquidity(client, keypair, vault_name, 
vt_token_balance); } for swap in cleanup_swaps { do_swap(client, keypair, &swap); } if token_a_str != "SOL" { let token_a_balance = utils::get_token_or_native_balance(client, &wallet, &token_a_str); if token_a_balance > 0.0 { do_swap( client, keypair, &Swap { protocol: "RDM", from_token: token_a_str.as_str(), to_token: "SOL", amount: token_a_balance, }, ); } } if token_b_str != "SOL" { let token_b_balance = utils::get_token_or_native_balance(client, &wallet, &token_b_str); if token_b_balance > 0.0 { do_swap( client, keypair, &Swap { protocol: "RDM", from_token: token_b_str.as_str(), to_token: "SOL", amount: token_b_balance, }, ); } } let _ = utils::get_vault_stake_balance(client, vault_name); } pub fn run_test(vault_name: &str, swaps: Vec<Swap>, cleanup_swaps: Vec<Swap>) { let (endpoint, keypair) = utils::get_endpoint_and_keypair(); let client = FarmClient::new_with_commitment(&endpoint, CommitmentConfig::confirmed()); let wallet = keypair.pubkey(); cleanup(&client, &keypair, vault_name, cleanup_swaps.clone()); println!("\n>>> Testing {}...", vault_name); let (token_a_str, token_b_str, _) = client.get_vault_token_names(vault_name).unwrap(); let (_, _) = utils::get_balances(&client, &wallet, &token_a_str, &token_b_str, "Initial"); let _ = utils::get_vault_stake_balance(&client, vault_name); for swap in swaps { do_swap(&client, &keypair, &swap); } let token_a_balance = if token_a_str == "SOL" { MAX_SOL_BALANCE_TO_USE.min(utils::get_token_or_native_balance( &client, &wallet, &token_a_str, )) } else { utils::get_token_or_native_balance(&client, &wallet, &token_a_str) }; let token_b_balance = if token_b_str == "SOL" { MAX_SOL_BALANCE_TO_USE.min(utils::get_token_or_native_balance( &client, &wallet, &token_b_str, )) } else { utils::get_token_or_native_balance(&client, &wallet, &token_b_str) }; let mut vt_received; if vault_name.starts_with("SBR.") { if token_a_str == "USDC" { assert!(token_a_balance > 0.0); vt_received = do_add_liquidity( &client, &keypair, vault_name, token_a_balance * 2.0 / 3.0, 0.0, ); } else { assert!(token_b_balance > 0.0); vt_received = do_add_liquidity( &client, &keypair, vault_name, 0.0, token_b_balance * 2.0 / 3.0, ); } } else { assert!(token_a_balance > 0.0 && token_b_balance > 0.0); vt_received = do_add_liquidity(&client, &keypair, vault_name, token_a_balance / 3.0, 0.0); assert!(vt_received > 0.0); vt_received += do_add_liquidity(&client, &keypair, vault_name, 0.0, token_b_balance / 3.0); } println!("Waiting {} secs for rewards...", INITIAL_CRANK_DELAY); thread::sleep(time::Duration::from_secs(INITIAL_CRANK_DELAY)); do_crank(&client, &keypair, vault_name, 1); let cranks = if vault_name.starts_with("SBR.") { 6 } else { 4 }; for step in 2..cranks { println!("Waiting {} secs before next crank...", CRANK_INTERVAL); thread::sleep(time::Duration::from_secs(CRANK_INTERVAL)); do_crank(&client, &keypair, vault_name, step); } do_remove_liquidity(&client, &keypair, vault_name, vt_received / 2.0); do_remove_liquidity(&client, &keypair, vault_name, 0.0); cleanup(&client, &keypair, vault_name, cleanup_swaps); let (_, _) = utils::get_balances(&client, &wallet, &token_a_str, &token_b_str, "Final"); let _ = utils::get_vault_stake_balance(&client, vault_name); }
use { crate::{utils, utils::Swap}, solana_farm_client::client::FarmClient, solana_sdk::{commitment_config::CommitmentConfig, signature::Keypair, signer::Signer}, std::{thread, time}, }; const MAX_SOL_BALANCE_TO_USE: f64 = 0.1; const INITIAL_CRANK_DELAY: u64 = 400; const CRANK_INTERVAL: u64 = 100; pub fn do_swap(client: &FarmClient, keypair: &Keypair, swap: &Swap) { let amount = if swap.amount == 0.0 { utils::get_token_or_native_balance(client, &keypair.pubkey(), swap.from_token) } else if swap.amount < 0.0 { -1.0 * swap.amount * utils::get_token_or_native_balance(client, &keypair.pubkey(), swap.from_token) } else { swap.amount }; if amount < 0.0001 { return; } println!( ">> Swap {} {} to {}", amount, swap.from_token, swap.to_token ); println!( " Done: {}", client .swap( keypair, swap.protocol, swap.from_token, swap.to_token, amount, 0.0, ) .unwrap() ); let _ = utils::get_balances( client, &keypair.pubkey(), swap.from_token, swap.to_token, "After swap", ); } pub fn do_add_liquidity( client: &FarmClient, keypair: &Keypair, vault_name: &str, max_token_a_ui_amount: f64, max_token_b_ui_amount: f64, ) -> f64 { println!( ">> Add liquidity to {}: {}, {}", vault_name, max_token_a_ui_amount, max_token_b_ui_amount ); let (token_a_str, token_b_str, vt_token_name) = client.get_vault_token_names(vault_name).unwrap(); let vt_balance = utils::get_token_or_native_balance(client, &keypair.pubkey(), &vt_token_name); println!( " Done: {}", client .add_liquidity_vault( keypair, vault_name, max_token_a_ui_amount, max_token_b_ui_amount, ) .unwrap() ); let _ = utils::get_balances( client, &keypair.pubkey(), &token_a_str, &token_b_str, "After add liquidity", ); let _ = utils::get_balance(client, &keypair.pubkey(), &vt_token_name, "VT"); let _ = utils::get_vault_stake_balance(client, vault_name); utils::get_token_or_native_balance(client, &keypair.pubkey(), &vt_token_name) - vt_balance } pub fn do_crank(client: &FarmClient, keypair: &Keypair, vault_name: &str, step: u64) { println!(">> Crank {} with step {}", vault_name, step); let initial_info = client.get_vault_info(vault_name).unwrap(); println!( " Done: {}", client.crank_vault(keypair, vault_name, step).unwrap() ); let after_crank_info = client.get_vault_info(vault_name).unwrap(); println!( " Rewards received: {}, {}", after_crank_info.tokens_a_rewards - initial_info.tokens_a_rewar
pub fn do_remove_liquidity(client: &FarmClient, keypair: &Keypair, vault_name: &str, amount: f64) { println!(">> Remove liquidity from {}: {}", vault_name, amount); let (token_a_str, token_b_str, vt_token_name) = client.get_vault_token_names(vault_name).unwrap(); println!( " Done: {}", client .remove_liquidity_vault(keypair, vault_name, amount) .unwrap() ); let _ = utils::get_balances( client, &keypair.pubkey(), &token_a_str, &token_b_str, "After remove liquidity", ); let _ = utils::get_balance(client, &keypair.pubkey(), &vt_token_name, "VT"); let _ = utils::get_vault_stake_balance(client, vault_name); } pub fn cleanup(client: &FarmClient, keypair: &Keypair, vault_name: &str, cleanup_swaps: Vec<Swap>) { println!("\n>>> Clean-up {}...", vault_name); let wallet = keypair.pubkey(); let (token_a_str, token_b_str, vt_token_name) = client.get_vault_token_names(vault_name).unwrap(); let vt_token_balance = utils::get_token_or_native_balance(client, &wallet, &vt_token_name); if vt_token_balance > 0.0 { do_remove_liquidity(client, keypair, vault_name, vt_token_balance); } for swap in cleanup_swaps { do_swap(client, keypair, &swap); } if token_a_str != "SOL" { let token_a_balance = utils::get_token_or_native_balance(client, &wallet, &token_a_str); if token_a_balance > 0.0 { do_swap( client, keypair, &Swap { protocol: "RDM", from_token: token_a_str.as_str(), to_token: "SOL", amount: token_a_balance, }, ); } } if token_b_str != "SOL" { let token_b_balance = utils::get_token_or_native_balance(client, &wallet, &token_b_str); if token_b_balance > 0.0 { do_swap( client, keypair, &Swap { protocol: "RDM", from_token: token_b_str.as_str(), to_token: "SOL", amount: token_b_balance, }, ); } } let _ = utils::get_vault_stake_balance(client, vault_name); } pub fn run_test(vault_name: &str, swaps: Vec<Swap>, cleanup_swaps: Vec<Swap>) { let (endpoint, keypair) = utils::get_endpoint_and_keypair(); let client = FarmClient::new_with_commitment(&endpoint, CommitmentConfig::confirmed()); let wallet = keypair.pubkey(); cleanup(&client, &keypair, vault_name, cleanup_swaps.clone()); println!("\n>>> Testing {}...", vault_name); let (token_a_str, token_b_str, _) = client.get_vault_token_names(vault_name).unwrap(); let (_, _) = utils::get_balances(&client, &wallet, &token_a_str, &token_b_str, "Initial"); let _ = utils::get_vault_stake_balance(&client, vault_name); for swap in swaps { do_swap(&client, &keypair, &swap); } let token_a_balance = if token_a_str == "SOL" { MAX_SOL_BALANCE_TO_USE.min(utils::get_token_or_native_balance( &client, &wallet, &token_a_str, )) } else { utils::get_token_or_native_balance(&client, &wallet, &token_a_str) }; let token_b_balance = if token_b_str == "SOL" { MAX_SOL_BALANCE_TO_USE.min(utils::get_token_or_native_balance( &client, &wallet, &token_b_str, )) } else { utils::get_token_or_native_balance(&client, &wallet, &token_b_str) }; let mut vt_received; if vault_name.starts_with("SBR.") { if token_a_str == "USDC" { assert!(token_a_balance > 0.0); vt_received = do_add_liquidity( &client, &keypair, vault_name, token_a_balance * 2.0 / 3.0, 0.0, ); } else { assert!(token_b_balance > 0.0); vt_received = do_add_liquidity( &client, &keypair, vault_name, 0.0, token_b_balance * 2.0 / 3.0, ); } } else { assert!(token_a_balance > 0.0 && token_b_balance > 0.0); vt_received = do_add_liquidity(&client, &keypair, vault_name, token_a_balance / 3.0, 0.0); assert!(vt_received > 0.0); vt_received += do_add_liquidity(&client, &keypair, vault_name, 0.0, token_b_balance / 3.0); } println!("Waiting {} secs for 
rewards...", INITIAL_CRANK_DELAY); thread::sleep(time::Duration::from_secs(INITIAL_CRANK_DELAY)); do_crank(&client, &keypair, vault_name, 1); let cranks = if vault_name.starts_with("SBR.") { 6 } else { 4 }; for step in 2..cranks { println!("Waiting {} secs before next crank...", CRANK_INTERVAL); thread::sleep(time::Duration::from_secs(CRANK_INTERVAL)); do_crank(&client, &keypair, vault_name, step); } do_remove_liquidity(&client, &keypair, vault_name, vt_received / 2.0); do_remove_liquidity(&client, &keypair, vault_name, 0.0); cleanup(&client, &keypair, vault_name, cleanup_swaps); let (_, _) = utils::get_balances(&client, &wallet, &token_a_str, &token_b_str, "Final"); let _ = utils::get_vault_stake_balance(&client, vault_name); }
ds, after_crank_info.tokens_b_rewards - initial_info.tokens_b_rewards ); let _ = utils::get_vault_stake_balance(client, vault_name); }
function_block-function_prefixed
[ { "content": "pub fn do_remove_liquidity(client: &FarmClient, keypair: &Keypair, pool_name: &str, amount: f64) {\n\n println!(\">> Remove liquidity from {}: {}\", pool_name, amount);\n\n let (token_a_str, token_b_str, lp_token_name) = client.get_pool_token_names(pool_name).unwrap();\n\n println!(\n\n \" Done: {}\",\n\n client\n\n .remove_liquidity_pool(keypair, pool_name, amount)\n\n .unwrap()\n\n );\n\n let _ = utils::get_balances(\n\n client,\n\n &keypair.pubkey(),\n\n &token_a_str,\n\n &token_b_str,\n\n \"After remove liquidity\",\n\n );\n\n let _ = utils::get_balance(client, &keypair.pubkey(), &lp_token_name, \"LP\");\n\n}\n\n\n", "file_path": "farms/farm-client/tests/pool_actions/mod.rs", "rank": 1, "score": 514441.5784455505 }, { "content": "pub fn do_stake(client: &FarmClient, keypair: &Keypair, farm_name: &str, amount: f64) {\n\n println!(\">> Stake liquidity to {}: {}\", farm_name, amount);\n\n let (token_a_str, token_b_str, lp_token_name) = client.get_farm_token_names(farm_name).unwrap();\n\n println!(\n\n \" Done: {}\",\n\n client.stake(keypair, farm_name, amount).unwrap()\n\n );\n\n let _ = utils::get_balances(\n\n client,\n\n &keypair.pubkey(),\n\n &token_a_str,\n\n &token_b_str,\n\n \"After stake\",\n\n );\n\n let _ = utils::get_balance(client, &keypair.pubkey(), &lp_token_name, \"LP after stake\");\n\n}\n\n\n", "file_path": "farms/farm-client/tests/pool_actions/mod.rs", "rank": 3, "score": 480028.7433134046 }, { "content": "pub fn do_unstake(client: &FarmClient, keypair: &Keypair, farm_name: &str, amount: f64) {\n\n println!(\">> Unstake liquidity from {}: {}\", farm_name, amount);\n\n let (token_a_str, token_b_str, lp_token_name) = client.get_farm_token_names(farm_name).unwrap();\n\n println!(\n\n \" Done: {}\",\n\n client.unstake(keypair, farm_name, amount).unwrap()\n\n );\n\n let _ = utils::get_balances(\n\n client,\n\n &keypair.pubkey(),\n\n &token_a_str,\n\n &token_b_str,\n\n \"After unstake\",\n\n );\n\n let _ = utils::get_balance(\n\n client,\n\n &keypair.pubkey(),\n\n &lp_token_name,\n\n \"LP after unstake\",\n\n );\n\n}\n\n\n", "file_path": "farms/farm-client/tests/pool_actions/mod.rs", "rank": 4, "score": 480028.7433134046 }, { "content": "pub fn crank(client: &FarmClient, config: &Config, vault_names: &str, step: u64) {\n\n let vaults = vault_names.split(',').collect::<Vec<_>>();\n\n for vault in vaults {\n\n info!(\"Cranking step {} for Vault {}...\", step, vault);\n\n info!(\n\n \"Signature: {}\",\n\n client\n\n .crank_vault(config.keypair.as_ref(), vault, step)\n\n .unwrap()\n\n );\n\n }\n\n info!(\"Done.\")\n\n}\n\n\n", "file_path": "farms/farm-ctrl/src/vault.rs", "rank": 5, "score": 456522.8014963593 }, { "content": "pub fn do_swap(client: &FarmClient, keypair: &Keypair, swap: &Swap) {\n\n let amount = if swap.amount == 0.0 {\n\n utils::get_token_or_native_balance(client, &keypair.pubkey(), swap.from_token)\n\n } else if swap.amount < 0.0 {\n\n -1.0 * swap.amount\n\n * utils::get_token_or_native_balance(client, &keypair.pubkey(), swap.from_token)\n\n } else {\n\n swap.amount\n\n };\n\n if amount < 0.0001 {\n\n return;\n\n }\n\n println!(\n\n \">> Swap {} {} to {}\",\n\n amount, swap.from_token, swap.to_token\n\n );\n\n println!(\n\n \" Done: {}\",\n\n client\n\n .swap(\n", "file_path": "farms/farm-client/tests/pool_actions/mod.rs", "rank": 6, "score": 425748.2826373804 }, { "content": "pub fn crank_all(client: &FarmClient, config: &Config, step: u64) {\n\n let vaults = client.get_vaults().unwrap();\n\n for (vault_name, _) in vaults.iter() {\n\n 
info!(\"Cranking step {} for Vault {}...\", step, vault_name);\n\n info!(\n\n \"Signature: {}\",\n\n client\n\n .crank_vault(config.keypair.as_ref(), vault_name, step)\n\n .unwrap()\n\n );\n\n }\n\n info!(\"Done.\")\n\n}\n\n\n", "file_path": "farms/farm-ctrl/src/vault.rs", "rank": 9, "score": 409823.93406203185 }, { "content": "pub fn init(client: &FarmClient, config: &Config, vault_names: &str, step: u64) {\n\n let vaults = vault_names.split(',').collect::<Vec<_>>();\n\n for vault in vaults {\n\n info!(\"Initializing Vault {}...\", vault);\n\n info!(\n\n \"Signature: {}\",\n\n client\n\n .init_vault(config.keypair.as_ref(), vault, step)\n\n .unwrap()\n\n );\n\n }\n\n info!(\"Done.\")\n\n}\n\n\n", "file_path": "farms/farm-ctrl/src/vault.rs", "rank": 10, "score": 395676.5233710612 }, { "content": "/// Convert a raw amount to its UI representation (using the decimals field defined in its mint)\n\npub fn amount_to_ui_amount(amount: u64) -> f64 {\n\n amount as f64 / 10_usize.pow(spl_token::native_mint::DECIMALS as u32) as f64\n\n}\n", "file_path": "feature-proposal/program/src/lib.rs", "rank": 11, "score": 390579.48409847403 }, { "content": "pub fn do_harvest(client: &FarmClient, keypair: &Keypair, farm_name: &str) {\n\n println!(\">> Harvest from {}\", farm_name);\n\n let (token_a_str, token_b_str, lp_token_name) = client.get_farm_token_names(farm_name).unwrap();\n\n println!(\" Done: {}\", client.harvest(keypair, farm_name).unwrap());\n\n let _ = utils::get_balances(\n\n client,\n\n &keypair.pubkey(),\n\n &token_a_str,\n\n &token_b_str,\n\n \"After harvest\",\n\n );\n\n let _ = utils::get_balance(\n\n client,\n\n &keypair.pubkey(),\n\n &lp_token_name,\n\n \"LP after harvest\",\n\n );\n\n}\n\n\n", "file_path": "farms/farm-client/tests/pool_actions/mod.rs", "rank": 12, "score": 386462.33821875823 }, { "content": "/// Convert a raw amount to its UI representation (using the decimals field defined in its mint)\n\npub fn amount_to_ui_amount(amount: u64, decimals: u8) -> f64 {\n\n amount as f64 / 10_usize.pow(decimals as u32) as f64\n\n}\n\n\n", "file_path": "token/program-2022/src/lib.rs", "rank": 13, "score": 364722.08396522474 }, { "content": "/// Convert a raw amount to its UI representation (using the decimals field defined in its mint)\n\npub fn amount_to_ui_amount(amount: u64, decimals: u8) -> f64 {\n\n amount as f64 / 10_usize.pow(decimals as u32) as f64\n\n}\n\n\n", "file_path": "token/program/src/lib.rs", "rank": 14, "score": 364722.08396522474 }, { "content": "#[allow(dead_code)]\n\npub fn get_vault_stake_balance(client: &FarmClient, vault_name: &str) -> f64 {\n\n let stake_balance = client.get_vault_stake_balance(vault_name).unwrap();\n\n println!(\" Stake balance. 
{}\", stake_balance);\n\n stake_balance\n\n}\n", "file_path": "farms/farm-client/tests/utils/mod.rs", "rank": 15, "score": 364309.3980525822 }, { "content": "/// Convert the UI representation of a token amount (using the decimals field defined in its mint)\n\n/// to the raw amount\n\npub fn ui_amount_to_amount(ui_amount: f64) -> u64 {\n\n (ui_amount * 10_usize.pow(spl_token::native_mint::DECIMALS as u32) as f64) as u64\n\n}\n\n\n", "file_path": "feature-proposal/program/src/lib.rs", "rank": 16, "score": 358545.15611325786 }, { "content": "pub fn to_ui_amount(amount: u64, decimals: u8) -> f64 {\n\n let mut ui_amount = amount;\n\n for _ in 0..decimals {\n\n ui_amount /= 10;\n\n }\n\n ui_amount as f64\n\n}\n\n\n", "file_path": "farms/farm-sdk/src/program/account.rs", "rank": 17, "score": 352161.37051225116 }, { "content": "#[allow(dead_code)]\n\npub fn get_token_or_native_balance(client: &FarmClient, wallet: &Pubkey, token_name: &str) -> f64 {\n\n if token_name != \"SOL\" {\n\n if let Ok(balance) = client.get_token_account_balance(wallet, token_name) {\n\n balance\n\n } else {\n\n 0.0\n\n }\n\n } else if let Ok(balance) = client.get_account_balance(wallet) {\n\n balance\n\n } else {\n\n 0.0\n\n }\n\n}\n\n\n", "file_path": "farms/farm-client/tests/utils/mod.rs", "rank": 18, "score": 338236.07344764494 }, { "content": "/// Convert the UI representation of a token amount (using the decimals field defined in its mint)\n\n/// to the raw amount\n\npub fn ui_amount_to_amount(ui_amount: f64, decimals: u8) -> u64 {\n\n (ui_amount * 10_usize.pow(decimals as u32) as f64) as u64\n\n}\n\n\n", "file_path": "token/program-2022/src/lib.rs", "rank": 19, "score": 337125.3915489282 }, { "content": "/// Convert the UI representation of a token amount (using the decimals field defined in its mint)\n\n/// to the raw amount\n\npub fn ui_amount_to_amount(ui_amount: f64, decimals: u8) -> u64 {\n\n (ui_amount * 10_usize.pow(decimals as u32) as f64) as u64\n\n}\n\n\n", "file_path": "token/program/src/lib.rs", "rank": 20, "score": 337125.3915489282 }, { "content": "pub fn get_amount_val<'a>(matches: &ArgMatches<'a>, argname: &str) -> f64 {\n\n matches.value_of(argname).unwrap().parse::<f64>().unwrap()\n\n}\n\n\n", "file_path": "farms/farm-client/src/cli/config.rs", "rank": 21, "score": 335404.68913342274 }, { "content": "pub fn unwrap_token(accounts: &[AccountInfo], amount: u64) -> ProgramResult {\n\n msg!(\"Processing AmmInstruction::UnwrapToken\");\n\n msg!(\"amount {} \", amount);\n\n\n\n #[allow(clippy::deprecated_cfg_attr)]\n\n #[cfg_attr(rustfmt, rustfmt_skip)]\n\n if let [\n\n user_account,\n\n user_underlying_token_account,\n\n underlying_token_mint,\n\n _spl_token_id,\n\n decimal_wrapper_program,\n\n user_wrapped_token_account,\n\n wrapped_token_mint,\n\n wrapped_token_vault,\n\n decimal_wrapper\n\n ] = accounts\n\n {\n\n let initial_underlying_token_user_balance =\n\n account::get_token_balance(user_underlying_token_account)?;\n", "file_path": "farms/router-saber/src/unwrap_token.rs", "rank": 22, "score": 320000.9682083027 }, { "content": "pub fn remove_liquidity(accounts: &[AccountInfo], amount: u64) -> ProgramResult {\n\n msg!(\"Processing AmmInstruction::RemoveLiquidity\");\n\n msg!(\"amount {} \", amount);\n\n\n\n #[allow(clippy::deprecated_cfg_attr)]\n\n #[cfg_attr(rustfmt, rustfmt_skip)]\n\n if let [\n\n user_account,\n\n user_token_a_account,\n\n user_token_b_account,\n\n user_lp_token_account,\n\n pool_program_id,\n\n pool_withdraw_queue,\n\n pool_temp_lp_token_account,\n\n pool_coin_token_account,\n\n 
pool_pc_token_account,\n\n lp_token_mint,\n\n spl_token_id,\n\n amm_id,\n\n amm_authority,\n", "file_path": "farms/router-raydium/src/remove_liquidity.rs", "rank": 23, "score": 319882.7714911975 }, { "content": "pub fn remove_liquidity(accounts: &[AccountInfo], amount: u64) -> ProgramResult {\n\n msg!(\"Processing AmmInstruction::RemoveLiquidity\");\n\n msg!(\"amount {} \", amount);\n\n\n\n #[allow(clippy::deprecated_cfg_attr)]\n\n #[cfg_attr(rustfmt, rustfmt_skip)]\n\n if let [\n\n user_account,\n\n user_token_a_account,\n\n user_token_b_account,\n\n user_lp_token_account,\n\n pool_program_id,\n\n pool_token_a_account,\n\n pool_token_b_account,\n\n lp_token_mint,\n\n _spl_token_id,\n\n amm_id,\n\n amm_authority,\n\n fees_account\n\n ] = accounts\n", "file_path": "farms/router-orca/src/remove_liquidity.rs", "rank": 24, "score": 319882.7714911976 }, { "content": "pub fn remove_liquidity(accounts: &[AccountInfo], amount: u64) -> ProgramResult {\n\n msg!(\"Processing AmmInstruction::RemoveLiquidity\");\n\n msg!(\"amount {} \", amount);\n\n\n\n #[allow(clippy::deprecated_cfg_attr)]\n\n #[cfg_attr(rustfmt, rustfmt_skip)]\n\n if let [\n\n user_account,\n\n user_token_a_account,\n\n user_token_b_account,\n\n user_lp_token_account,\n\n pool_program_id,\n\n pool_token_a_account,\n\n pool_token_b_account,\n\n lp_token_mint,\n\n _spl_token_id,\n\n swap_account,\n\n swap_authority,\n\n fees_account_a,\n\n fees_account_b\n", "file_path": "farms/router-saber/src/remove_liquidity.rs", "rank": 25, "score": 319882.7714911976 }, { "content": "pub fn do_add_liquidity(\n\n client: &FarmClient,\n\n keypair: &Keypair,\n\n pool_name: &str,\n\n max_token_a_ui_amount: f64,\n\n max_token_b_ui_amount: f64,\n\n) -> f64 {\n\n println!(\n\n \">> Add liquidity to {}: {}, {}\",\n\n pool_name, max_token_a_ui_amount, max_token_b_ui_amount\n\n );\n\n let (token_a_str, token_b_str, lp_token_name) = client.get_pool_token_names(pool_name).unwrap();\n\n let lp_balance = utils::get_token_or_native_balance(client, &keypair.pubkey(), &lp_token_name);\n\n println!(\n\n \" Done: {}\",\n\n client\n\n .add_liquidity_pool(\n\n keypair,\n\n pool_name,\n\n max_token_a_ui_amount,\n", "file_path": "farms/farm-client/tests/pool_actions/mod.rs", "rank": 27, "score": 318233.1389576422 }, { "content": "fn lam_to_sol(amount: u64) -> f64 {\n\n (amount as f64) / 10f64.powi(9)\n\n}\n", "file_path": "farms/farm-ctrl/src/print.rs", "rank": 28, "score": 315355.6754062193 }, { "content": "pub fn print(client: &FarmClient, config: &Config, target: &str, object: &str) {\n\n match target {\n\n \"program\" => {\n\n println!(\"{}: {}\", object, client.get_program_id(object).unwrap());\n\n }\n\n \"vault\" => {\n\n print_object(\n\n config,\n\n &client.get_vault_ref(&object.to_uppercase()).unwrap(),\n\n &client.get_vault(&object.to_uppercase()).unwrap(),\n\n );\n\n }\n\n \"farm\" => {\n\n print_object(\n\n config,\n\n &client.get_farm_ref(&object.to_uppercase()).unwrap(),\n\n &client.get_farm(&object.to_uppercase()).unwrap(),\n\n );\n\n }\n\n \"pool\" => {\n", "file_path": "farms/farm-client/src/cli/printer.rs", "rank": 30, "score": 309020.9981229449 }, { "content": "pub fn init(client: &FarmClient, config: &Config, dao_program: &Pubkey, mint_ui_amount: f64) {\n\n info!(\"Initializing DAO...\");\n\n\n\n let wallet = config.keypair.pubkey();\n\n if main_router_admin::id() != wallet {\n\n panic!(\n\n \"DAO must be initialized with the admin account {}\",\n\n main_router_admin::id()\n\n );\n\n }\n\n if mint_ui_amount < 100.0 {\n\n panic!(\"Mint amount must be 
>= 100\");\n\n }\n\n\n\n let mut inst = vec![];\n\n\n\n info!(\" Writing Program \\\"{}\\\" to on-chain RefDB...\", dao_program);\n\n client\n\n .add_program_id(\n\n config.keypair.as_ref(),\n", "file_path": "farms/farm-ctrl/src/governance.rs", "rank": 31, "score": 308143.2085781486 }, { "content": "pub fn print_with_ref(client: &FarmClient, config: &Config, target: &str, object: &str) {\n\n let ref_key = Pubkey::from_str(object).unwrap();\n\n match target {\n\n \"program\" => {\n\n println!(\"{}: {}\", client.get_program_name(&ref_key).unwrap(), object);\n\n }\n\n \"vault\" => {\n\n print_object(\n\n config,\n\n &ref_key,\n\n &client.get_vault_by_ref(&ref_key).unwrap(),\n\n );\n\n }\n\n \"farm\" => {\n\n print_object(config, &ref_key, &client.get_farm_by_ref(&ref_key).unwrap());\n\n }\n\n \"pool\" => {\n\n print_object(config, &ref_key, &client.get_pool_by_ref(&ref_key).unwrap());\n\n }\n\n \"token\" => {\n", "file_path": "farms/farm-client/src/cli/printer.rs", "rank": 32, "score": 305788.7463914957 }, { "content": "pub fn add_liquidity(\n\n accounts: &[AccountInfo],\n\n max_token_a_amount: u64,\n\n max_token_b_amount: u64,\n\n) -> ProgramResult {\n\n msg!(\"Processing AmmInstruction::AddLiquidity\");\n\n msg!(\"max_token_a_amount {} \", max_token_a_amount);\n\n msg!(\"max_token_b_amount {} \", max_token_b_amount);\n\n\n\n #[allow(clippy::deprecated_cfg_attr)]\n\n #[cfg_attr(rustfmt, rustfmt_skip)]\n\n if let [\n\n user_account,\n\n user_token_a_account,\n\n user_token_b_account,\n\n user_lp_token_account,\n\n pool_program_id,\n\n pool_token_a_account,\n\n pool_token_b_account,\n\n lp_token_mint,\n", "file_path": "farms/router-orca/src/add_liquidity.rs", "rank": 33, "score": 302917.97782625846 }, { "content": "pub fn add_liquidity(\n\n accounts: &[AccountInfo],\n\n max_coin_token_amount: u64,\n\n max_pc_token_amount: u64,\n\n) -> ProgramResult {\n\n msg!(\"Processing AmmInstruction::AddLiquidity\");\n\n msg!(\"max_coin_token_amount {} \", max_coin_token_amount);\n\n msg!(\"max_pc_token_amount {} \", max_pc_token_amount);\n\n\n\n #[allow(clippy::deprecated_cfg_attr)]\n\n #[cfg_attr(rustfmt, rustfmt_skip)]\n\n if let [\n\n user_account,\n\n user_token_a_account,\n\n user_token_b_account,\n\n user_lp_token_account,\n\n pool_program_id,\n\n pool_coin_token_account,\n\n pool_pc_token_account,\n\n lp_token_mint,\n", "file_path": "farms/router-raydium/src/add_liquidity.rs", "rank": 34, "score": 302917.97782625846 }, { "content": "pub fn add_liquidity(\n\n accounts: &[AccountInfo],\n\n max_token_a_amount: u64,\n\n max_token_b_amount: u64,\n\n) -> ProgramResult {\n\n msg!(\"Processing AmmInstruction::AddLiquidity\");\n\n msg!(\"max_token_a_amount {} \", max_token_a_amount);\n\n msg!(\"max_token_b_amount {} \", max_token_b_amount);\n\n\n\n #[allow(clippy::deprecated_cfg_attr)]\n\n #[cfg_attr(rustfmt, rustfmt_skip)]\n\n if let [\n\n user_account,\n\n user_token_a_account,\n\n user_token_b_account,\n\n user_lp_token_account,\n\n pool_program_id,\n\n pool_token_a_account,\n\n pool_token_b_account,\n\n lp_token_mint,\n", "file_path": "farms/router-saber/src/add_liquidity.rs", "rank": 35, "score": 302917.97782625846 }, { "content": "pub fn print_all(client: &FarmClient, config: &Config, target: &str) {\n\n info!(\"Loading {} objects...\", target);\n\n\n\n match target {\n\n \"program\" => {\n\n let storage = client.get_program_ids().unwrap();\n\n for (name, key) in storage.iter() {\n\n println!(\"{}: {}\", name, key);\n\n }\n\n }\n\n \"vault\" => {\n\n let storage = client.get_vaults().unwrap();\n\n for 
(name, key) in storage.iter() {\n\n print_object(config, &client.get_vault_ref(name).unwrap(), key);\n\n }\n\n }\n\n \"farm\" => {\n\n let storage = client.get_farms().unwrap();\n\n for (name, key) in storage.iter() {\n\n print_object(config, &client.get_farm_ref(name).unwrap(), key);\n", "file_path": "farms/farm-client/src/cli/printer.rs", "rank": 36, "score": 302136.71085316327 }, { "content": "pub fn list_all(client: &FarmClient, _config: &Config, target: &str) {\n\n info!(\"Loading {} objects...\", target);\n\n\n\n match target {\n\n \"program\" => {\n\n let storage = client.get_program_ids().unwrap();\n\n for (name, key) in storage.iter() {\n\n println!(\"{}: {}\", name, key);\n\n }\n\n }\n\n \"vault\" => {\n\n let storage = client.get_vault_refs().unwrap();\n\n for (name, key) in storage.iter() {\n\n println!(\"{}: {}\", name, key);\n\n }\n\n }\n\n \"farm\" => {\n\n let storage = client.get_farm_refs().unwrap();\n\n for (name, key) in storage.iter() {\n\n println!(\"{}: {}\", name, key);\n", "file_path": "farms/farm-client/src/cli/printer.rs", "rank": 37, "score": 302136.71085316327 }, { "content": "pub fn to_token_amount(ui_amount: f64, decimals: u8) -> Result<u64, ProgramError> {\n\n let mut amount = ui_amount;\n\n for _ in 0..decimals {\n\n amount *= 10.0;\n\n }\n\n math::checked_as_u64(amount)\n\n}\n\n\n", "file_path": "farms/farm-sdk/src/program/account.rs", "rank": 38, "score": 299245.737498419 }, { "content": "pub fn shutdown(client: &FarmClient, config: &Config, vault_names: &str) {\n\n let vaults = vault_names.split(',').collect::<Vec<_>>();\n\n for vault in vaults {\n\n info!(\"Shutting down Vault {}...\", vault);\n\n info!(\n\n \"Signature: {}\",\n\n client\n\n .shutdown_vault(config.keypair.as_ref(), vault)\n\n .unwrap()\n\n );\n\n }\n\n info!(\"Done.\")\n\n}\n\n\n", "file_path": "farms/farm-ctrl/src/vault.rs", "rank": 39, "score": 294399.1245811544 }, { "content": "pub fn run_test(pool_name: &str, swaps: Vec<Swap>, cleanup_swaps: Vec<Swap>, pool_only: bool) {\n\n let (endpoint, keypair) = utils::get_endpoint_and_keypair();\n\n let client = FarmClient::new_with_commitment(&endpoint, CommitmentConfig::confirmed());\n\n let wallet = keypair.pubkey();\n\n\n\n cleanup(\n\n &client,\n\n &keypair,\n\n pool_name,\n\n cleanup_swaps.clone(),\n\n pool_only,\n\n );\n\n\n\n println!(\"\\n>>> Testing {}...\", pool_name);\n\n let (token_a_str, token_b_str, lp_token_name) = client.get_pool_token_names(pool_name).unwrap();\n\n\n\n let (_, _) = utils::get_balances(&client, &wallet, &token_a_str, &token_b_str, \"Initial\");\n\n //initial swaps\n\n for swap in swaps {\n\n do_swap(&client, &keypair, &swap);\n", "file_path": "farms/farm-client/tests/pool_actions/mod.rs", "rank": 40, "score": 294346.9638375782 }, { "content": "pub fn get_integer_val<'a>(matches: &ArgMatches<'a>, argname: &str) -> u64 {\n\n matches.value_of(argname).unwrap().parse::<u64>().unwrap()\n\n}\n\n\n", "file_path": "farms/farm-client/src/cli/config.rs", "rank": 41, "score": 292053.68899546994 }, { "content": "pub fn convert_serum_program_id(client: &FarmClient, program_id: &str) -> Pubkey {\n\n match program_id {\n\n \"SERUM_PROGRAM_ID_V2\" => client.get_program_id(&\"SerumV2\".to_string()).unwrap(),\n\n \"SERUM_PROGRAM_ID_V3\" => client.get_program_id(&\"SerumV3\".to_string()).unwrap(),\n\n _ => convert_pubkey(program_id),\n\n }\n\n}\n\n\n", "file_path": "farms/farm-ctrl/src/loaders/utils.rs", "rank": 42, "score": 291932.74105300114 }, { "content": "pub fn convert_raydium_program_id(client: &FarmClient, program_id: 
&str) -> Pubkey {\n\n match program_id {\n\n \"LIQUIDITY_POOL_PROGRAM_ID_V2\" => client.get_program_id(&\"RaydiumV2\".to_string()).unwrap(),\n\n \"LIQUIDITY_POOL_PROGRAM_ID_V3\" => client.get_program_id(&\"RaydiumV3\".to_string()).unwrap(),\n\n \"LIQUIDITY_POOL_PROGRAM_ID_V4\" => client.get_program_id(&\"RaydiumV4\".to_string()).unwrap(),\n\n \"STAKE_PROGRAM_ID\" => client.get_program_id(&\"RaydiumStake\".to_string()).unwrap(),\n\n \"STAKE_PROGRAM_ID_V4\" => client\n\n .get_program_id(&\"RaydiumStakeV4\".to_string())\n\n .unwrap(),\n\n \"STAKE_PROGRAM_ID_V5\" => client\n\n .get_program_id(&\"RaydiumStakeV5\".to_string())\n\n .unwrap(),\n\n _ => convert_pubkey(program_id),\n\n }\n\n}\n\n\n", "file_path": "farms/farm-ctrl/src/loaders/utils.rs", "rank": 43, "score": 291932.74105300114 }, { "content": "/// Convert a raw amount to its UI representation (using the decimals field defined in its mint)\n\npub fn amount_to_ui_amount_string(amount: u64, decimals: u8) -> String {\n\n let decimals = decimals as usize;\n\n if decimals > 0 {\n\n // Left-pad zeros to decimals + 1, so we at least have an integer zero\n\n let mut s = format!(\"{:01$}\", amount, decimals + 1);\n\n // Add the decimal point (Sorry, \",\" locales!)\n\n s.insert(s.len() - decimals, '.');\n\n s\n\n } else {\n\n amount.to_string()\n\n }\n\n}\n\n\n", "file_path": "token/program/src/lib.rs", "rank": 44, "score": 291850.7590017716 }, { "content": "/// Convert a raw amount to its UI representation (using the decimals field defined in its mint)\n\npub fn amount_to_ui_amount_string(amount: u64, decimals: u8) -> String {\n\n let decimals = decimals as usize;\n\n if decimals > 0 {\n\n // Left-pad zeros to decimals + 1, so we at least have an integer zero\n\n let mut s = format!(\"{:01$}\", amount, decimals + 1);\n\n // Add the decimal point (Sorry, \",\" locales!)\n\n s.insert(s.len() - decimals, '.');\n\n s\n\n } else {\n\n amount.to_string()\n\n }\n\n}\n\n\n", "file_path": "token/program-2022/src/lib.rs", "rank": 45, "score": 291850.7590017716 }, { "content": "pub fn enable_withdrawal(client: &FarmClient, config: &Config, vault_names: &str) {\n\n let vaults = vault_names.split(',').collect::<Vec<_>>();\n\n for vault in vaults {\n\n info!(\"Enabling withdrawals for Vault {}...\", vault);\n\n info!(\n\n \"Signature: {}\",\n\n client\n\n .enable_withdrawal_vault(config.keypair.as_ref(), vault)\n\n .unwrap()\n\n );\n\n }\n\n info!(\"Done.\")\n\n}\n\n\n", "file_path": "farms/farm-ctrl/src/vault.rs", "rank": 46, "score": 290732.0355116704 }, { "content": "pub fn disable_deposit(client: &FarmClient, config: &Config, vault_names: &str) {\n\n let vaults = vault_names.split(',').collect::<Vec<_>>();\n\n for vault in vaults {\n\n info!(\"Disabling deposits for Vault {}...\", vault);\n\n info!(\n\n \"Signature: {}\",\n\n client\n\n .disable_deposit_vault(config.keypair.as_ref(), vault)\n\n .unwrap()\n\n );\n\n }\n\n info!(\"Done.\")\n\n}\n\n\n", "file_path": "farms/farm-ctrl/src/vault.rs", "rank": 47, "score": 290732.0355116704 }, { "content": "pub fn enable_deposit(client: &FarmClient, config: &Config, vault_names: &str) {\n\n let vaults = vault_names.split(',').collect::<Vec<_>>();\n\n for vault in vaults {\n\n info!(\"Enabling deposits for Vault {}...\", vault);\n\n info!(\n\n \"Signature: {}\",\n\n client\n\n .enable_deposit_vault(config.keypair.as_ref(), vault)\n\n .unwrap()\n\n );\n\n }\n\n info!(\"Done.\")\n\n}\n\n\n", "file_path": "farms/farm-ctrl/src/vault.rs", "rank": 48, "score": 290732.0355116704 }, { "content": "pub fn 
disable_withdrawal(client: &FarmClient, config: &Config, vault_names: &str) {\n\n let vaults = vault_names.split(',').collect::<Vec<_>>();\n\n for vault in vaults {\n\n info!(\"Disabling withdrawals for Vault {}...\", vault);\n\n info!(\n\n \"Signature: {}\",\n\n client\n\n .disable_withdrawal_vault(config.keypair.as_ref(), vault)\n\n .unwrap()\n\n );\n\n }\n\n info!(\"Done.\")\n\n}\n\n\n", "file_path": "farms/farm-ctrl/src/vault.rs", "rank": 49, "score": 290732.0355116704 }, { "content": "pub fn get_info(client: &FarmClient, config: &Config, vault_names: &str) {\n\n let vaults = vault_names.split(',').collect::<Vec<_>>();\n\n for vault in vaults {\n\n info!(\"Retreiving stats for Vault {}...\", vault);\n\n\n\n let info = client.get_vault_info(vault).unwrap();\n\n\n\n if config.no_pretty_print {\n\n println!(\"{}\", info);\n\n } else {\n\n println!(\"{}\", to_pretty_json(&info).unwrap());\n\n }\n\n }\n\n info!(\"Done.\")\n\n}\n", "file_path": "farms/farm-ctrl/src/vault.rs", "rank": 50, "score": 290732.0355116704 }, { "content": "#[allow(dead_code)]\n\npub fn get_endpoint_and_keypair() -> (String, Keypair) {\n\n let cli_config = if let Some(ref config_file) = *solana_cli_config::CONFIG_FILE {\n\n solana_cli_config::Config::load(config_file).unwrap()\n\n } else {\n\n solana_cli_config::Config::default()\n\n };\n\n\n\n (\n\n cli_config.json_rpc_url.to_string(),\n\n read_keypair_file(&cli_config.keypair_path).unwrap_or_else(|_| {\n\n panic!(\"Filed to read keypair from \\\"{}\\\"\", cli_config.keypair_path)\n\n }),\n\n )\n\n}\n\n\n", "file_path": "farms/farm-client/tests/utils/mod.rs", "rank": 51, "score": 289629.4100884284 }, { "content": "/// Convert a raw amount to its UI representation using the given decimals field\n\n/// Excess zeroes or unneeded decimal point are trimmed.\n\npub fn amount_to_ui_amount_string_trimmed(amount: u64, decimals: u8) -> String {\n\n let mut s = amount_to_ui_amount_string(amount, decimals);\n\n if decimals > 0 {\n\n let zeros_trimmed = s.trim_end_matches('0');\n\n s = zeros_trimmed.trim_end_matches('.').to_string();\n\n }\n\n s\n\n}\n\n\n", "file_path": "token/program-2022/src/lib.rs", "rank": 52, "score": 288157.9191911729 }, { "content": "/// Convert a raw amount to its UI representation using the given decimals field\n\n/// Excess zeroes or unneeded decimal point are trimmed.\n\npub fn amount_to_ui_amount_string_trimmed(amount: u64, decimals: u8) -> String {\n\n let mut s = amount_to_ui_amount_string(amount, decimals);\n\n if decimals > 0 {\n\n let zeros_trimmed = s.trim_end_matches('0');\n\n s = zeros_trimmed.trim_end_matches('.').to_string();\n\n }\n\n s\n\n}\n\n\n", "file_path": "token/program/src/lib.rs", "rank": 53, "score": 288157.9191911729 }, { "content": "/// Creates a 'swap' instruction.\n\npub fn swap(\n\n program_id: &Pubkey,\n\n token_program_id: &Pubkey,\n\n swap_pubkey: &Pubkey,\n\n authority_pubkey: &Pubkey,\n\n user_transfer_authority_pubkey: &Pubkey,\n\n source_pubkey: &Pubkey,\n\n swap_source_pubkey: &Pubkey,\n\n swap_destination_pubkey: &Pubkey,\n\n destination_pubkey: &Pubkey,\n\n pool_mint_pubkey: &Pubkey,\n\n pool_fee_pubkey: &Pubkey,\n\n host_fee_pubkey: Option<&Pubkey>,\n\n instruction: Swap,\n\n) -> Result<Instruction, ProgramError> {\n\n let data = SwapInstruction::Swap(instruction).pack();\n\n\n\n let mut accounts = vec![\n\n AccountMeta::new_readonly(*swap_pubkey, false),\n\n AccountMeta::new_readonly(*authority_pubkey, false),\n", "file_path": "token-swap/program/src/instruction.rs", "rank": 54, "score": 281979.5303702885 }, { 
"content": "pub fn swap(\n\n accounts: &[AccountInfo],\n\n token_a_amount_in: u64,\n\n token_b_amount_in: u64,\n\n min_token_amount_out: u64,\n\n) -> ProgramResult {\n\n msg!(\"Processing AmmInstruction::Swap\");\n\n msg!(\"token_a_amount_in {} \", token_a_amount_in);\n\n msg!(\"token_b_amount_in {} \", token_b_amount_in);\n\n msg!(\"min_token_amount_out {} \", min_token_amount_out);\n\n\n\n #[allow(clippy::deprecated_cfg_attr)]\n\n #[cfg_attr(rustfmt, rustfmt_skip)]\n\n if let [\n\n user_account,\n\n user_token_a_account,\n\n user_token_b_account,\n\n pool_program_id,\n\n pool_coin_token_account,\n\n pool_pc_token_account,\n", "file_path": "farms/router-raydium/src/swap.rs", "rank": 55, "score": 281972.86146066425 }, { "content": "pub fn swap(\n\n accounts: &[AccountInfo],\n\n token_a_amount_in: u64,\n\n token_b_amount_in: u64,\n\n min_token_amount_out: u64,\n\n) -> ProgramResult {\n\n msg!(\"Processing AmmInstruction::Swap\");\n\n msg!(\"token_a_amount_in {} \", token_a_amount_in);\n\n msg!(\"token_b_amount_in {} \", token_b_amount_in);\n\n msg!(\"min_token_amount_out {} \", min_token_amount_out);\n\n\n\n #[allow(clippy::deprecated_cfg_attr)]\n\n #[cfg_attr(rustfmt, rustfmt_skip)]\n\n if let [\n\n user_account,\n\n user_token_a_account,\n\n user_token_b_account,\n\n pool_program_id,\n\n pool_token_a_account,\n\n pool_token_b_account,\n", "file_path": "farms/router-saber/src/swap.rs", "rank": 56, "score": 281972.86146066425 }, { "content": "pub fn swap(\n\n accounts: &[AccountInfo],\n\n token_a_amount_in: u64,\n\n token_b_amount_in: u64,\n\n min_token_amount_out: u64,\n\n) -> ProgramResult {\n\n msg!(\"Processing AmmInstruction::Swap\");\n\n msg!(\"token_a_amount_in {} \", token_a_amount_in);\n\n msg!(\"token_b_amount_in {} \", token_b_amount_in);\n\n msg!(\"min_token_amount_out {} \", min_token_amount_out);\n\n\n\n #[allow(clippy::deprecated_cfg_attr)]\n\n #[cfg_attr(rustfmt, rustfmt_skip)]\n\n if let [\n\n user_account,\n\n user_token_a_account,\n\n user_token_b_account,\n\n pool_program_id,\n\n pool_token_a_account,\n\n pool_token_b_account,\n", "file_path": "farms/router-orca/src/swap.rs", "rank": 57, "score": 281972.86146066425 }, { "content": "pub fn get_time_as_u64() -> Result<u64, ProgramError> {\n\n math::checked_as_u64(sysvar::clock::Clock::get()?.unix_timestamp)\n\n}\n\n\n", "file_path": "farms/vaults/src/clock.rs", "rank": 58, "score": 281945.73528837203 }, { "content": "/// The constant product swap calculation, factored out of its class for reuse.\n\n///\n\n/// This is guaranteed to work for all values such that:\n\n/// - 1 <= swap_source_amount * swap_destination_amount <= u128::MAX\n\n/// - 1 <= source_amount <= u64::MAX\n\npub fn swap(\n\n source_amount: u128,\n\n swap_source_amount: u128,\n\n swap_destination_amount: u128,\n\n) -> Option<SwapWithoutFeesResult> {\n\n let invariant = swap_source_amount.checked_mul(swap_destination_amount)?;\n\n\n\n let new_swap_source_amount = swap_source_amount.checked_add(source_amount)?;\n\n let (new_swap_destination_amount, new_swap_source_amount) =\n\n invariant.checked_ceil_div(new_swap_source_amount)?;\n\n\n\n let source_amount_swapped = new_swap_source_amount.checked_sub(swap_source_amount)?;\n\n let destination_amount_swapped =\n\n map_zero_to_none(swap_destination_amount.checked_sub(new_swap_destination_amount)?)?;\n\n\n\n Some(SwapWithoutFeesResult {\n\n source_amount_swapped,\n\n destination_amount_swapped,\n\n })\n\n}\n\n\n", "file_path": "token-swap/program/src/curve/constant_product.rs", "rank": 59, "score": 
274517.20875768736 }, { "content": "pub fn remove(client: &FarmClient, config: &Config, target: StorageType, object: &str) {\n\n info!(\"Removing {} object {}...\", target, object);\n\n\n\n match target {\n\n StorageType::Program => {\n\n client\n\n .remove_program_id(config.keypair.as_ref(), object, None)\n\n .unwrap();\n\n }\n\n StorageType::Vault => {\n\n client\n\n .remove_vault(config.keypair.as_ref(), &object.to_uppercase())\n\n .unwrap();\n\n }\n\n StorageType::Farm => {\n\n client\n\n .remove_farm(config.keypair.as_ref(), &object.to_uppercase())\n\n .unwrap();\n\n }\n\n StorageType::Pool => {\n", "file_path": "farms/farm-ctrl/src/remove.rs", "rank": 60, "score": 274064.57437365537 }, { "content": "pub fn get(client: &FarmClient, config: &Config, target: StorageType, object: &str) {\n\n info!(\"Querying {} object {}...\", target, object);\n\n\n\n match target {\n\n StorageType::Program => {\n\n println!(\"{}: {}\", object, client.get_program_id(object).unwrap());\n\n }\n\n StorageType::Vault => {\n\n print_object(\n\n config,\n\n &client.get_vault_ref(&object.to_uppercase()).unwrap(),\n\n &client.get_vault(&object.to_uppercase()).unwrap(),\n\n );\n\n }\n\n StorageType::Farm => {\n\n print_object(\n\n config,\n\n &client.get_farm_ref(&object.to_uppercase()).unwrap(),\n\n &client.get_farm(&object.to_uppercase()).unwrap(),\n\n );\n", "file_path": "farms/farm-ctrl/src/get.rs", "rank": 61, "score": 274064.57437365537 }, { "content": "pub fn get_ref(client: &FarmClient, config: &Config, target: StorageType, object: &str) {\n\n info!(\"Querying {} object {}...\", target, object);\n\n\n\n let pubkey = Pubkey::from_str(object).unwrap();\n\n\n\n match target {\n\n StorageType::Program => {\n\n println!(\"{}: {}\", client.get_program_name(&pubkey).unwrap(), object);\n\n }\n\n StorageType::Vault => {\n\n print_object(config, &pubkey, &client.get_vault_by_ref(&pubkey).unwrap());\n\n }\n\n StorageType::Farm => {\n\n print_object(config, &pubkey, &client.get_farm_by_ref(&pubkey).unwrap());\n\n }\n\n StorageType::Pool => {\n\n print_object(config, &pubkey, &client.get_pool_by_ref(&pubkey).unwrap());\n\n }\n\n StorageType::Token => {\n\n print_object(config, &pubkey, &client.get_token_by_ref(&pubkey).unwrap());\n\n }\n\n _ => {\n\n unreachable!();\n\n }\n\n }\n\n\n\n info!(\"Done.\")\n\n}\n\n\n", "file_path": "farms/farm-ctrl/src/get.rs", "rank": 62, "score": 270672.9447723853 }, { "content": "pub fn load(client: &FarmClient, config: &Config, data: &str, remove_mode: bool) {\n\n let parsed: serde_json::Value = serde_json::from_str(data).unwrap();\n\n let mut last_index = client\n\n .get_refdb_last_index(&StorageType::Vault.to_string())\n\n .expect(\"Vault RefDB query error\");\n\n\n\n if parsed[\"name\"] != \"Solana Vaults List\" {\n\n panic!(\"Unsupported vaults file\");\n\n }\n\n let vaults = parsed[\"vaults\"].as_array().unwrap();\n\n for val in vaults {\n\n let json_vault: Vault = serde_json::from_value(val.clone()).unwrap();\n\n if !remove_mode {\n\n if config.skip_existing && client.get_vault(&json_vault.name).is_ok() {\n\n info!(\"Skipping existing Vault \\\"{}\\\"...\", json_vault.name);\n\n continue;\n\n }\n\n info!(\"Writing Vault \\\"{}\\\" to on-chain RefDB...\", json_vault.name);\n\n } else {\n\n info!(\n", "file_path": "farms/farm-ctrl/src/loaders/vault.rs", "rank": 63, "score": 270672.9447723853 }, { "content": "pub fn load(client: &FarmClient, config: &Config, data: &str, remove_mode: bool) {\n\n let parsed: Value = serde_json::from_str(data).unwrap();\n\n let last_index = client\n\n 
.get_refdb_last_index(&StorageType::Pool.to_string())\n\n .expect(\"Pool RefDB query error\");\n\n\n\n if parsed[\"name\"] == \"Raydium Pools\" {\n\n load_raydium_pool(client, config, remove_mode, &parsed, last_index);\n\n } else if parsed[\"name\"] == \"Orca Pools\" {\n\n load_orca_pool(client, config, remove_mode, &parsed, last_index);\n\n } else if parsed[\"pools\"] != json!(null) && parsed[\"addresses\"] != json!(null) {\n\n load_saber_pool(client, config, remove_mode, &parsed, last_index);\n\n } else {\n\n panic!(\"Unsupported pools file\");\n\n }\n\n}\n\n\n", "file_path": "farms/farm-ctrl/src/loaders/pool.rs", "rank": 64, "score": 270672.9447723853 }, { "content": "pub fn load(client: &FarmClient, config: &Config, data: &str, remove_mode: bool) {\n\n let parsed: JsonPrograms = serde_json::from_str(data).unwrap();\n\n\n\n for program in parsed.programs.iter() {\n\n if remove_mode {\n\n info!(\n\n \"Removing Program \\\"{}\\\" from on-chain RefDB...\",\n\n program.name\n\n );\n\n client\n\n .remove_program_id(config.keypair.as_ref(), &program.name, None)\n\n .unwrap();\n\n } else {\n\n if config.skip_existing && client.get_program_id(&program.name).is_ok() {\n\n info!(\"Skipping existing Program \\\"{}\\\"...\", program.name);\n\n continue;\n\n }\n\n info!(\"Writing Program \\\"{}\\\" to on-chain RefDB...\", program.name);\n\n client\n\n .add_program_id(\n", "file_path": "farms/farm-ctrl/src/loaders/program.rs", "rank": 65, "score": 270672.9447723853 }, { "content": "pub fn load(client: &FarmClient, config: &Config, data: &str, remove_mode: bool) {\n\n let parsed: Value = serde_json::from_str(data).unwrap();\n\n let last_index = client\n\n .get_refdb_last_index(&StorageType::Farm.to_string())\n\n .expect(\"Farm RefDB query error\");\n\n\n\n if parsed[\"name\"] == \"Raydium Farms\" {\n\n load_raydium_farm(client, config, remove_mode, &parsed, last_index);\n\n } else if parsed[\"name\"] == \"Orca Farms\" {\n\n load_orca_farm(client, config, remove_mode, &parsed, last_index);\n\n } else if parsed[\"pools\"] != json!(null) && parsed[\"addresses\"] != json!(null) {\n\n load_saber_farm(client, config, remove_mode, &parsed, last_index);\n\n } else {\n\n panic!(\"Unsupported farms file\");\n\n }\n\n}\n\n\n", "file_path": "farms/farm-ctrl/src/loaders/farm.rs", "rank": 66, "score": 270672.9447723853 }, { "content": "pub fn load(client: &FarmClient, config: &Config, data: &str, remove_mode: bool) {\n\n let parsed: Value = serde_json::from_str(data).unwrap();\n\n let last_index = client\n\n .get_refdb_last_index(&StorageType::Token.to_string())\n\n .expect(\"Token RefDB query error\");\n\n let is_saber = parsed[\"name\"] == \"Saber Tokens\";\n\n\n\n if parsed[\"name\"] == \"Solana Token List\" || is_saber {\n\n load_solana_tokens(client, config, remove_mode, &parsed, last_index);\n\n } else if parsed[\"name\"] == \"Raydium LP Tokens\" {\n\n load_raydium_tokens(client, config, remove_mode, &parsed, last_index);\n\n } else if parsed[\"name\"] == \"Orca Pools\" {\n\n load_orca_pool_tokens(client, config, remove_mode, &parsed, last_index);\n\n } else if parsed[\"name\"] == \"Orca Farms\" {\n\n load_orca_farm_tokens(client, config, remove_mode, &parsed, last_index);\n\n } else {\n\n panic!(\"Unsupported tokens file\");\n\n }\n\n}\n\n\n", "file_path": "farms/farm-ctrl/src/loaders/token.rs", "rank": 67, "score": 270672.9447723853 }, { "content": "pub fn remove_ref(client: &FarmClient, config: &Config, target: StorageType, object: &str) {\n\n info!(\"Removing {} reference {}...\", target, object);\n\n\n\n 
let refdb_index = client.get_refdb_index(&target.to_string(), object).unwrap();\n\n client\n\n .remove_reference(config.keypair.as_ref(), target, object, refdb_index)\n\n .unwrap();\n\n\n\n info!(\"Done.\")\n\n}\n\n\n", "file_path": "farms/farm-ctrl/src/remove.rs", "rank": 68, "score": 270672.9447723853 }, { "content": "pub fn add_liquidity(\n\n accounts: &[AccountInfo],\n\n max_token_a_amount: u64,\n\n max_token_b_amount: u64,\n\n) -> ProgramResult {\n\n if let [user_account, user_token_a_account, user_token_b_account, user_lp_token_account, pool_program_id, pool_token_a_account, pool_token_b_account, lp_token_mint, _spl_token_id, _clock_id, swap_account, swap_authority] =\n\n accounts\n\n {\n\n if &stable_swap_client::id() != pool_program_id.key {\n\n return Err(ProgramError::IncorrectProgramId);\n\n }\n\n\n\n let instruction = instruction::deposit(\n\n &spl_token::id(),\n\n swap_account.key,\n\n swap_authority.key,\n\n user_account.key,\n\n user_token_a_account.key,\n\n user_token_b_account.key,\n\n pool_token_a_account.key,\n", "file_path": "farms/farm-sdk/src/program/protocol/saber.rs", "rank": 69, "score": 269041.14565870754 }, { "content": "pub fn add_liquidity(\n\n accounts: &[AccountInfo],\n\n max_coin_token_amount: u64,\n\n max_pc_token_amount: u64,\n\n) -> ProgramResult {\n\n if let [user_account, user_token_a_account, user_token_b_account, user_lp_token_account, pool_program_id, pool_coin_token_account, pool_pc_token_account, lp_token_mint, spl_token_id, amm_id, amm_authority, amm_open_orders, amm_target, serum_market] =\n\n accounts\n\n {\n\n if !check_pool_program_id(pool_program_id.key) {\n\n return Err(ProgramError::IncorrectProgramId);\n\n }\n\n let raydium_accounts = vec![\n\n AccountMeta::new_readonly(*spl_token_id.key, false),\n\n AccountMeta::new(*amm_id.key, false),\n\n AccountMeta::new_readonly(*amm_authority.key, false),\n\n AccountMeta::new_readonly(*amm_open_orders.key, false),\n\n AccountMeta::new(*amm_target.key, false),\n\n AccountMeta::new(*lp_token_mint.key, false),\n\n AccountMeta::new(*pool_coin_token_account.key, false),\n\n AccountMeta::new(*pool_pc_token_account.key, false),\n", "file_path": "farms/farm-sdk/src/program/protocol/raydium.rs", "rank": 70, "score": 269041.14565870754 }, { "content": "pub fn unstake(accounts: &[AccountInfo], amount: u64) -> ProgramResult {\n\n msg!(\"Processing AmmInstruction::Unstake\");\n\n msg!(\"amount {} \", amount);\n\n\n\n #[allow(clippy::deprecated_cfg_attr)]\n\n #[cfg_attr(rustfmt, rustfmt_skip)]\n\n if let [\n\n user_account,\n\n user_info_account,\n\n user_lp_token_account,\n\n user_reward_token_account,\n\n user_farm_lp_token_account,\n\n farm_lp_token_mint,\n\n farm_program_id,\n\n base_token_vault,\n\n reward_token_vault,\n\n _spl_token_id,\n\n farm_id,\n\n farm_authority\n\n ] = accounts\n", "file_path": "farms/router-orca/src/unstake.rs", "rank": 71, "score": 268892.4264763087 }, { "content": "pub fn unstake(accounts: &[AccountInfo], amount: u64) -> ProgramResult {\n\n msg!(\"Processing AmmInstruction::Unstake\");\n\n msg!(\"amount {} \", amount);\n\n\n\n #[allow(clippy::deprecated_cfg_attr)]\n\n #[cfg_attr(rustfmt, rustfmt_skip)]\n\n if let [\n\n user_account,\n\n user_info_account,\n\n user_lp_token_account,\n\n user_reward_token_a_account,\n\n user_reward_token_b_account,\n\n farm_program_id,\n\n farm_lp_token_account,\n\n farm_reward_token_a_account,\n\n farm_reward_token_b_account,\n\n clock_id,\n\n spl_token_id,\n\n farm_id,\n\n farm_authority\n", "file_path": "farms/router-raydium/src/unstake.rs", 
"rank": 72, "score": 268892.4264763087 }, { "content": "pub fn unstake(accounts: &[AccountInfo], amount: u64) -> ProgramResult {\n\n msg!(\"Processing AmmInstruction::Unstake\");\n\n msg!(\"amount {} \", amount);\n\n\n\n #[allow(clippy::deprecated_cfg_attr)]\n\n #[cfg_attr(rustfmt, rustfmt_skip)]\n\n if let [\n\n user_account,\n\n user_lp_token_account,\n\n farm_program_id,\n\n _spl_token_id,\n\n miner,\n\n miner_vault,\n\n quarry,\n\n rewarder\n\n ] = accounts\n\n {\n\n if &quarry_mine::id() != farm_program_id.key {\n\n return Err(ProgramError::IncorrectProgramId);\n\n }\n", "file_path": "farms/router-saber/src/unstake.rs", "rank": 73, "score": 268892.4264763087 }, { "content": "pub fn stake(accounts: &[AccountInfo], amount: u64) -> ProgramResult {\n\n msg!(\"Processing AmmInstruction::Stake\");\n\n msg!(\"amount {} \", amount);\n\n\n\n #[allow(clippy::deprecated_cfg_attr)]\n\n #[cfg_attr(rustfmt, rustfmt_skip)]\n\n if let [\n\n user_account,\n\n user_lp_token_account,\n\n farm_program_id,\n\n _spl_token_id,\n\n miner,\n\n miner_vault,\n\n quarry,\n\n rewarder\n\n ] = accounts\n\n {\n\n if &quarry_mine::id() != farm_program_id.key {\n\n return Err(ProgramError::IncorrectProgramId);\n\n }\n", "file_path": "farms/router-saber/src/stake.rs", "rank": 74, "score": 268892.4264763087 }, { "content": "pub fn stake(accounts: &[AccountInfo], amount: u64) -> ProgramResult {\n\n msg!(\"Processing AmmInstruction::Stake\");\n\n msg!(\"amount {} \", amount);\n\n\n\n #[allow(clippy::deprecated_cfg_attr)]\n\n #[cfg_attr(rustfmt, rustfmt_skip)]\n\n if let [\n\n user_account,\n\n user_info_account,\n\n user_lp_token_account,\n\n user_reward_token_account,\n\n user_farm_lp_token_account,\n\n farm_lp_token_mint,\n\n farm_program_id,\n\n base_token_vault,\n\n reward_token_vault,\n\n _spl_token_id,\n\n farm_id,\n\n farm_authority\n\n ] = accounts\n", "file_path": "farms/router-orca/src/stake.rs", "rank": 75, "score": 268892.4264763087 }, { "content": "pub fn set_fee(client: &FarmClient, config: &Config, vault_names: &str, fee_percent: f32) {\n\n let vaults = vault_names.split(',').collect::<Vec<_>>();\n\n for vault in vaults {\n\n info!(\"Setting fee to {} for Vault {}...\", fee_percent, vault);\n\n info!(\n\n \"Signature: {}\",\n\n client\n\n .set_fee_vault(config.keypair.as_ref(), vault, fee_percent)\n\n .unwrap()\n\n );\n\n }\n\n info!(\"Done.\")\n\n}\n\n\n", "file_path": "farms/farm-ctrl/src/vault.rs", "rank": 76, "score": 267407.3238283196 }, { "content": "pub fn get_step(matches: &ArgMatches) -> u64 {\n\n matches.value_of(\"step\").unwrap().parse().unwrap()\n\n}\n\n\n", "file_path": "farms/farm-ctrl/src/config.rs", "rank": 77, "score": 266919.7872307572 }, { "content": "pub fn add_liquidity_with_seeds(\n\n accounts: &[AccountInfo],\n\n seeds: &[&[&[u8]]],\n\n max_token_a_amount: u64,\n\n max_token_b_amount: u64,\n\n) -> ProgramResult {\n\n if let [authority_account, token_a_custody_account, token_b_custody_account, lp_token_custody_account, pool_program_id, pool_token_a_account, pool_token_b_account, lp_token_mint, _spl_token_id, _clock_id, swap_account, swap_authority] =\n\n accounts\n\n {\n\n if &stable_swap_client::id() != pool_program_id.key {\n\n return Err(ProgramError::IncorrectProgramId);\n\n }\n\n\n\n let instruction = instruction::deposit(\n\n &spl_token::id(),\n\n swap_account.key,\n\n swap_authority.key,\n\n authority_account.key,\n\n token_a_custody_account.key,\n\n token_b_custody_account.key,\n", "file_path": "farms/farm-sdk/src/program/protocol/saber.rs", "rank": 78, "score": 
264325.1508644995 }, { "content": "pub fn add_liquidity_with_seeds(\n\n accounts: &[AccountInfo],\n\n seeds: &[&[&[u8]]],\n\n max_coin_token_amount: u64,\n\n max_pc_token_amount: u64,\n\n) -> ProgramResult {\n\n if let [authority_account, token_a_custody_account, token_b_custody_account, lp_token_custody_account, pool_program_id, pool_coin_token_account, pool_pc_token_account, lp_token_mint, spl_token_id, amm_id, amm_authority, amm_open_orders, amm_target, serum_market] =\n\n accounts\n\n {\n\n if !check_pool_program_id(pool_program_id.key) {\n\n return Err(ProgramError::IncorrectProgramId);\n\n }\n\n let raydium_accounts = vec![\n\n AccountMeta::new_readonly(*spl_token_id.key, false),\n\n AccountMeta::new(*amm_id.key, false),\n\n AccountMeta::new_readonly(*amm_authority.key, false),\n\n AccountMeta::new_readonly(*amm_open_orders.key, false),\n\n AccountMeta::new(*amm_target.key, false),\n\n AccountMeta::new(*lp_token_mint.key, false),\n\n AccountMeta::new(*pool_coin_token_account.key, false),\n", "file_path": "farms/farm-sdk/src/program/protocol/raydium.rs", "rank": 79, "score": 264325.1508644995 }, { "content": "pub fn wrap_token(accounts: &[AccountInfo], amount: u64) -> ProgramResult {\n\n msg!(\"Processing AmmInstruction::WrapToken\");\n\n msg!(\"amount {} \", amount);\n\n\n\n #[allow(clippy::deprecated_cfg_attr)]\n\n #[cfg_attr(rustfmt, rustfmt_skip)]\n\n if let [\n\n user_account,\n\n user_underlying_token_account,\n\n underlying_token_mint,\n\n _spl_token_id,\n\n decimal_wrapper_program,\n\n user_wrapped_token_account,\n\n wrapped_token_mint,\n\n wrapped_token_vault,\n\n decimal_wrapper\n\n ] = accounts\n\n {\n\n let initial_underlying_token_user_balance =\n\n account::get_token_balance(user_underlying_token_account)?;\n", "file_path": "farms/router-saber/src/wrap_token.rs", "rank": 80, "score": 261213.73565329897 }, { "content": "/// Creates a 'withdraw_single_token_type_exact_amount_out' instruction.\n\npub fn withdraw_single_token_type_exact_amount_out(\n\n program_id: &Pubkey,\n\n token_program_id: &Pubkey,\n\n swap_pubkey: &Pubkey,\n\n authority_pubkey: &Pubkey,\n\n user_transfer_authority_pubkey: &Pubkey,\n\n pool_mint_pubkey: &Pubkey,\n\n fee_account_pubkey: &Pubkey,\n\n pool_token_source_pubkey: &Pubkey,\n\n swap_token_a_pubkey: &Pubkey,\n\n swap_token_b_pubkey: &Pubkey,\n\n destination_pubkey: &Pubkey,\n\n instruction: WithdrawSingleTokenTypeExactAmountOut,\n\n) -> Result<Instruction, ProgramError> {\n\n let data = SwapInstruction::WithdrawSingleTokenTypeExactAmountOut(instruction).pack();\n\n\n\n let accounts = vec![\n\n AccountMeta::new_readonly(*swap_pubkey, false),\n\n AccountMeta::new_readonly(*authority_pubkey, false),\n\n AccountMeta::new_readonly(*user_transfer_authority_pubkey, true),\n", "file_path": "token-swap/program/src/instruction.rs", "rank": 81, "score": 259472.50609779428 }, { "content": "/// Creates a 'deposit_single_token_type_exact_amount_in' instruction.\n\npub fn deposit_single_token_type_exact_amount_in(\n\n program_id: &Pubkey,\n\n token_program_id: &Pubkey,\n\n swap_pubkey: &Pubkey,\n\n authority_pubkey: &Pubkey,\n\n user_transfer_authority_pubkey: &Pubkey,\n\n source_token_pubkey: &Pubkey,\n\n swap_token_a_pubkey: &Pubkey,\n\n swap_token_b_pubkey: &Pubkey,\n\n pool_mint_pubkey: &Pubkey,\n\n destination_pubkey: &Pubkey,\n\n instruction: DepositSingleTokenTypeExactAmountIn,\n\n) -> Result<Instruction, ProgramError> {\n\n let data = SwapInstruction::DepositSingleTokenTypeExactAmountIn(instruction).pack();\n\n\n\n let accounts = vec![\n\n 
AccountMeta::new_readonly(*swap_pubkey, false),\n\n AccountMeta::new_readonly(*authority_pubkey, false),\n\n AccountMeta::new_readonly(*user_transfer_authority_pubkey, true),\n\n AccountMeta::new(*source_token_pubkey, false),\n", "file_path": "token-swap/program/src/instruction.rs", "rank": 82, "score": 259472.50609779428 }, { "content": "pub fn clone_keypair(source: &Keypair) -> Keypair {\n\n Keypair::from_bytes(&source.to_bytes()).unwrap()\n\n}\n\n\n\n/// NOP (No Operation) Override function\n", "file_path": "governance/test-sdk/src/tools.rs", "rank": 83, "score": 258401.87043100665 }, { "content": "pub fn drop_all(client: &FarmClient, config: &Config) {\n\n drop(client, config, StorageType::Vault);\n\n drop(client, config, StorageType::Farm);\n\n drop(client, config, StorageType::Pool);\n\n drop(client, config, StorageType::Token);\n\n drop(client, config, StorageType::Program);\n\n}\n", "file_path": "farms/farm-ctrl/src/refdb.rs", "rank": 84, "score": 251986.93806768642 }, { "content": "pub fn init_all(client: &FarmClient, config: &Config) {\n\n init(client, config, StorageType::Program);\n\n init(client, config, StorageType::Token);\n\n init(client, config, StorageType::Pool);\n\n init(client, config, StorageType::Farm);\n\n init(client, config, StorageType::Vault);\n\n}\n\n\n", "file_path": "farms/farm-ctrl/src/refdb.rs", "rank": 85, "score": 251986.93806768642 }, { "content": "pub fn stake(accounts: &[AccountInfo], amount: u64, harvest: bool) -> ProgramResult {\n\n msg!(\"Processing AmmInstruction::Stake\");\n\n msg!(\"amount {} \", amount);\n\n\n\n #[allow(clippy::deprecated_cfg_attr)]\n\n #[cfg_attr(rustfmt, rustfmt_skip)]\n\n if let [\n\n user_account,\n\n user_info_account,\n\n user_lp_token_account,\n\n user_reward_token_a_account,\n\n user_reward_token_b_account,\n\n farm_program_id,\n\n farm_lp_token_account,\n\n farm_reward_token_a_account,\n\n farm_reward_token_b_account,\n\n clock_id,\n\n spl_token_id,\n\n farm_id,\n\n farm_authority\n", "file_path": "farms/router-raydium/src/stake.rs", "rank": 86, "score": 249159.33411652708 }, { "content": "pub fn print_pda_all(client: &FarmClient, config: &Config) {\n\n print_pda(client, config, StorageType::Program);\n\n print_pda(client, config, StorageType::Token);\n\n print_pda(client, config, StorageType::Pool);\n\n print_pda(client, config, StorageType::Farm);\n\n print_pda(client, config, StorageType::Vault);\n\n}\n\n\n", "file_path": "farms/farm-ctrl/src/print.rs", "rank": 87, "score": 248916.0951105176 }, { "content": "pub fn print_size_all(client: &FarmClient, config: &Config) {\n\n print_size(client, config, StorageType::Program);\n\n print_size(client, config, StorageType::Token);\n\n print_size(client, config, StorageType::Pool);\n\n print_size(client, config, StorageType::Farm);\n\n print_size(client, config, StorageType::Vault);\n\n}\n\n\n", "file_path": "farms/farm-ctrl/src/print.rs", "rank": 88, "score": 248916.0951105176 }, { "content": "pub fn get_token_balance(account_data: &NativeAccountData) -> u64 {\n\n let account = TokenAccount::unpack(&account_data.data).unwrap();\n\n account.amount\n\n}\n\n\n", "file_path": "token-swap/program/fuzz/src/native_token.rs", "rank": 89, "score": 246812.06409848033 }, { "content": "/// Try to convert a UI represenation of a token amount to its raw amount using the given decimals\n\n/// field\n\npub fn try_ui_amount_into_amount(ui_amount: String, decimals: u8) -> Result<u64, ProgramError> {\n\n let decimals = decimals as usize;\n\n let mut parts = ui_amount.split('.');\n\n let mut 
amount_str = parts.next().unwrap().to_string(); // splitting a string, even an empty one, will always yield an iterator of at least len == 1\n\n let after_decimal = parts.next().unwrap_or(\"\");\n\n let after_decimal = after_decimal.trim_end_matches('0');\n\n if (amount_str.is_empty() && after_decimal.is_empty())\n\n || parts.next().is_some()\n\n || after_decimal.len() > decimals\n\n {\n\n return Err(ProgramError::InvalidArgument);\n\n }\n\n\n\n amount_str.push_str(after_decimal);\n\n for _ in 0..decimals.saturating_sub(after_decimal.len()) {\n\n amount_str.push('0');\n\n }\n\n amount_str\n\n .parse::<u64>()\n\n .map_err(|_| ProgramError::InvalidArgument)\n\n}\n\n\n\nsolana_program::declare_id!(\"TokenzQdBNbLqP5VEhdkAS6EPFLC1PHnBqCXEpPxuEb\");\n\n\n", "file_path": "token/program-2022/src/lib.rs", "rank": 90, "score": 246204.75163302792 }, { "content": "/// Try to convert a UI represenation of a token amount to its raw amount using the given decimals\n\n/// field\n\npub fn try_ui_amount_into_amount(ui_amount: String, decimals: u8) -> Result<u64, ProgramError> {\n\n let decimals = decimals as usize;\n\n let mut parts = ui_amount.split('.');\n\n let mut amount_str = parts.next().unwrap().to_string(); // splitting a string, even an empty one, will always yield an iterator of at least len == 1\n\n let after_decimal = parts.next().unwrap_or(\"\");\n\n let after_decimal = after_decimal.trim_end_matches('0');\n\n if (amount_str.is_empty() && after_decimal.is_empty())\n\n || parts.next().is_some()\n\n || after_decimal.len() > decimals\n\n {\n\n return Err(ProgramError::InvalidArgument);\n\n }\n\n\n\n amount_str.push_str(after_decimal);\n\n for _ in 0..decimals.saturating_sub(after_decimal.len()) {\n\n amount_str.push('0');\n\n }\n\n amount_str\n\n .parse::<u64>()\n\n .map_err(|_| ProgramError::InvalidArgument)\n\n}\n\n\n\nsolana_program::declare_id!(\"TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA\");\n\n\n", "file_path": "token/program/src/lib.rs", "rank": 91, "score": 246204.75163302792 }, { "content": "pub fn get_pool_swap_amounts<'a, 'b>(\n\n pool_coin_token_account: &'a AccountInfo<'b>,\n\n pool_pc_token_account: &'a AccountInfo<'b>,\n\n amm_open_orders: &'a AccountInfo<'b>,\n\n amm_id: &'a AccountInfo<'b>,\n\n coin_token_amount_in: u64,\n\n pc_token_amount_in: u64,\n\n) -> Result<(u64, u64), ProgramError> {\n\n if (coin_token_amount_in == 0 && pc_token_amount_in == 0)\n\n || (coin_token_amount_in > 0 && pc_token_amount_in > 0)\n\n {\n\n msg!(\"Error: One and only one of token amounts must be non-zero\");\n\n return Err(ProgramError::InvalidArgument);\n\n }\n\n let (coin_balance, pc_balance) = get_pool_token_balances(\n\n pool_coin_token_account,\n\n pool_pc_token_account,\n\n amm_open_orders,\n\n amm_id,\n\n )?;\n", "file_path": "farms/farm-sdk/src/program/protocol/raydium.rs", "rank": 92, "score": 244793.11484168674 }, { "content": "pub fn get_pool_swap_amounts<'a, 'b>(\n\n pool_token_a_account: &'a AccountInfo<'b>,\n\n pool_token_b_account: &'a AccountInfo<'b>,\n\n token_a_amount_in: u64,\n\n token_b_amount_in: u64,\n\n) -> Result<(u64, u64), ProgramError> {\n\n if (token_a_amount_in == 0 && token_b_amount_in == 0)\n\n || (token_a_amount_in > 0 && token_b_amount_in > 0)\n\n {\n\n msg!(\"Error: One and only one of token amounts must be non-zero\");\n\n return Err(ProgramError::InvalidArgument);\n\n }\n\n let (token_a_balance, token_b_balance) =\n\n get_pool_token_balances(pool_token_a_account, pool_token_b_account)?;\n\n if token_a_balance == 0 || token_b_balance == 0 {\n\n msg!(\"Error: Can't 
swap in an empty pool\");\n\n return Err(ProgramError::Custom(412));\n\n }\n\n let token_a_balance = token_a_balance as f64;\n\n let token_b_balance = token_b_balance as f64;\n", "file_path": "farms/farm-sdk/src/program/protocol/orca.rs", "rank": 93, "score": 244793.11484168674 }, { "content": "pub fn get_str_val<'a>(matches: &ArgMatches<'a>, argname: &str) -> String {\n\n matches\n\n .value_of(argname)\n\n .unwrap()\n\n .parse::<String>()\n\n .unwrap()\n\n .to_uppercase()\n\n}\n\n\n", "file_path": "farms/farm-client/src/cli/config.rs", "rank": 94, "score": 244592.31291790056 }, { "content": "/// Create U64 Multiplication instruction\n\npub fn u64_multiply(multiplicand: u64, multiplier: u64) -> Instruction {\n\n Instruction {\n\n program_id: id(),\n\n accounts: vec![],\n\n data: MathInstruction::U64Multiply {\n\n multiplicand,\n\n multiplier,\n\n }\n\n .try_to_vec()\n\n .unwrap(),\n\n }\n\n}\n\n\n", "file_path": "libraries/math/src/instruction.rs", "rank": 95, "score": 242241.569675927 }, { "content": "/// Create U64 Division instruction\n\npub fn u64_divide(dividend: u64, divisor: u64) -> Instruction {\n\n Instruction {\n\n program_id: id(),\n\n accounts: vec![],\n\n data: MathInstruction::U64Divide { dividend, divisor }\n\n .try_to_vec()\n\n .unwrap(),\n\n }\n\n}\n\n\n", "file_path": "libraries/math/src/instruction.rs", "rank": 96, "score": 242241.569675927 }, { "content": "/// Creates a `UiAmountToAmount` instruction\n\npub fn ui_amount_to_amount(\n\n token_program_id: &Pubkey,\n\n mint_pubkey: &Pubkey,\n\n ui_amount: &str,\n\n) -> Result<Instruction, ProgramError> {\n\n check_spl_token_program_account(token_program_id)?;\n\n\n\n Ok(Instruction {\n\n program_id: *token_program_id,\n\n accounts: vec![AccountMeta::new_readonly(*mint_pubkey, false)],\n\n data: TokenInstruction::UiAmountToAmount { ui_amount }.pack(),\n\n })\n\n}\n\n\n", "file_path": "token/program-2022/src/instruction.rs", "rank": 97, "score": 241334.71432302316 }, { "content": "/// Creates an `AmountToUiAmount` instruction\n\npub fn amount_to_ui_amount(\n\n token_program_id: &Pubkey,\n\n mint_pubkey: &Pubkey,\n\n amount: u64,\n\n) -> Result<Instruction, ProgramError> {\n\n check_program_account(token_program_id)?;\n\n\n\n Ok(Instruction {\n\n program_id: *token_program_id,\n\n accounts: vec![AccountMeta::new_readonly(*mint_pubkey, false)],\n\n data: TokenInstruction::AmountToUiAmount { amount }.pack(),\n\n })\n\n}\n\n\n", "file_path": "token/program/src/instruction.rs", "rank": 98, "score": 241334.71432302316 }, { "content": "/// Creates an `AmountToUiAmount` instruction\n\npub fn amount_to_ui_amount(\n\n token_program_id: &Pubkey,\n\n mint_pubkey: &Pubkey,\n\n amount: u64,\n\n) -> Result<Instruction, ProgramError> {\n\n check_spl_token_program_account(token_program_id)?;\n\n\n\n Ok(Instruction {\n\n program_id: *token_program_id,\n\n accounts: vec![AccountMeta::new_readonly(*mint_pubkey, false)],\n\n data: TokenInstruction::AmountToUiAmount { amount }.pack(),\n\n })\n\n}\n\n\n", "file_path": "token/program-2022/src/instruction.rs", "rank": 99, "score": 241334.71432302316 } ]
Rust
.cargo-task/generate-drivers/src/main.rs
flott-motion/stepper
88ff5a41251078943d33d4c3495d9b9103cbe8eb
use std::{ env, error::Error, fs::{create_dir_all, remove_dir_all, File}, io::prelude::*, path::PathBuf, }; use cargo_task_util::ct_info; use serde_derive::Serialize; use serde_json::Value; use tinytemplate::{format_unescaped, TinyTemplate}; mod config; use config::{load_cargo_toml, load_drivers_toml, Driver}; fn main() -> Result<(), Box<dyn Error>> { let root = env::current_dir()?; let drivers = root.join("drivers"); let templates = root.join("templates").join("driver"); let mut tt = TinyTemplate::new(); tt.set_default_formatter(&format_unescaped); tt.add_formatter("upper", format_upper); let cargo_toml = load_template(&templates.join("Cargo.toml.tmpl"))?; tt.add_template("cargo_toml", cargo_toml.as_str())?; let lib_rs = load_template(&templates.join("src").join("lib.rs.tmpl"))?; tt.add_template("lib_rs", lib_rs.as_str())?; let readme_md = load_template(&templates.join("README.md.tmpl"))?; tt.add_template("readme_md", readme_md.as_str())?; let manifest = load_cargo_toml(&root)?; let version = manifest.package.version; let authors = manifest.package.authors; let config = load_drivers_toml(&root)?; for driver in config.drivers { ct_info!("generating '{}' driver...", driver.name); let ctx = &Context::new(driver, &version, &authors); let driver_path = drivers.join(&ctx.name); if driver_path.exists() { remove_dir_all(&driver_path)?; } create_dir_all(&driver_path.join("src"))?; let cargo_toml_output = tt.render("cargo_toml", ctx)?; let lib_rs_output = tt.render("lib_rs", ctx)?; let readme_md_output = tt.render("readme_md", ctx)?; File::create(&driver_path.join("Cargo.toml"))? .write_all(cargo_toml_output.as_ref())?; File::create(&driver_path.join("src").join("lib.rs"))? .write_all(lib_rs_output.as_ref())?; File::create(&driver_path.join("README.md"))? .write_all(readme_md_output.as_ref())?; } Ok(()) } #[derive(Serialize)] struct Context { pub name: String, pub version: String, pub authors: Vec<String>, pub product_url: String, pub pololu_url: String, } impl Context { pub fn new( driver: Driver, version: &String, authors: &Vec<String>, ) -> Self { Self { name: driver.name, version: version.to_owned(), authors: authors.to_owned(), product_url: driver.product_url, pololu_url: driver.pololu_url, } } } fn format_upper( value: &Value, output: &mut String, ) -> Result<(), tinytemplate::error::Error> { let mut s = String::new(); format_unescaped(value, &mut s)?; output.push_str(&s.to_uppercase()); Ok(()) } fn load_template(path: &PathBuf) -> std::io::Result<String> { let mut contents = String::new(); File::open(path)?.read_to_string(&mut contents)?; Ok(contents) }
use std::{ env, error::Error, fs::{create_dir_all, remove_dir_all, File}, io::prelude::*, path::PathBuf, }; use cargo_task_util::ct_info; use serde_derive::Serialize; use serde_json::Value; use tinytemplate::{format_unescaped, TinyTemplate}; mod config; use config::{load_cargo_toml, load_drivers_toml, Driver}; fn main() -> Result<(), Box<dyn Error>> { let root = env::current_dir()?; let drivers = root.join("drivers"); let templates = root.join("templates").join("driver"); let mut tt = TinyTemplate::new(); tt.set_default_formatter(&format_unescaped); tt.add_formatter("upper", format_upper); let cargo_toml = load_template(&templates.join("Cargo.toml.tmpl"))?; tt.add_template("cargo_toml", cargo_toml.as_str())?; let lib_rs = load_template(&templates.join("src").join("lib.rs.tmpl"))?; tt.add_template("lib_rs", lib_rs.as_str())?; let readme_md = load_template(&templates.join("README.md.tmpl"))?; tt.add_template("readme_md", readme_md.as_str())?; let manifest = load_cargo_toml(&root)?; let version = manifest.package.version; let authors = manifest.package.authors; let config = load_drivers_toml(&root)?; for driver in config.drivers { ct_info!("generating '{}' driver...", driver.name); let ctx = &Context::new(driver, &version, &authors); let driver_path = drivers.join(&ctx.name); if driver_path.exists() { remove_dir_all(&driver_path)?; } create_dir_all(&driver_path.join("src"))?; let cargo_toml_output = tt.render("cargo_toml", ctx)?; let lib_rs_output = tt.render("lib_rs", ctx)?; let readme_md_output = tt.render("readme_md", ctx)?; File::create(&driver_path.join("Cargo.toml"))? .write_all(cargo_toml_output.as_ref())?; File::create(&driver_path.join("src").join("lib.rs"))? .write_all(lib_rs_output.as_ref())?; File::create(&driver_path.join("README.md"))? .write_all(readme_md_output.as_ref())?; } Ok(()) } #[derive(Serialize)] struct Context { pub name: String, pub version: String, pub authors: Vec<String>, pub product_url: String, pub pololu_url: String, } impl Context { pub fn new( driver: Driver, version: &String, authors: &Vec<String>, ) -> Self { Self { name: driver.name, version: version.to_owned(), authors: authors.to_owned(), product_url: driver.product_url, pololu_url: driver.pololu_url, } } } fn format_upper( value: &Value,
fn load_template(path: &PathBuf) -> std::io::Result<String> { let mut contents = String::new(); File::open(path)?.read_to_string(&mut contents)?; Ok(contents) }
output: &mut String, ) -> Result<(), tinytemplate::error::Error> { let mut s = String::new(); format_unescaped(value, &mut s)?; output.push_str(&s.to_uppercase()); Ok(()) }
function_block-function_prefix_line
[ { "content": "pub fn update<Driver, Timer, Profile, Convert>(\n\n mut state: State<Driver, Timer, Profile>,\n\n new_motion: &mut Option<Direction>,\n\n profile: &mut Profile,\n\n current_step: &mut i32,\n\n current_direction: &mut Direction,\n\n convert: &Convert,\n\n) -> (\n\n Result<\n\n bool,\n\n Error<\n\n <Driver as SetDirection>::Error,\n\n <<Driver as SetDirection>::Dir as OutputPin>::Error,\n\n <Driver as Step>::Error,\n\n <<Driver as Step>::Step as OutputPin>::Error,\n\n Timer::Error,\n\n <Timer::Time as TryFrom<Nanoseconds>>::Error,\n\n Convert::Error,\n\n >,\n\n >,\n", "file_path": "src/motion_control/state.rs", "rank": 0, "score": 83782.71780607826 }, { "content": "//! Parent module for all driver implementations\n\n//!\n\n//! This module contains the driver implementations that are currently supported\n\n//! by Stepper. Each sub-module is behind a feature gate, to allow users to only\n\n//! enable the drivers they actually need. By default, all drivers are enabled.\n\n\n\n#[cfg(feature = \"drv8825\")]\n\npub mod drv8825;\n\n\n\n#[cfg(feature = \"stspin220\")]\n\npub mod stspin220;\n", "file_path": "src/drivers/mod.rs", "rank": 1, "score": 55710.024640800366 }, { "content": "/// Implemented by drivers that support controlling the STEP signal\n\npub trait Step {\n\n /// The minimum length of a STEP pulse\n\n const PULSE_LENGTH: Nanoseconds;\n\n\n\n /// The type of the STEP pin\n\n type Step: OutputPin;\n\n\n\n /// The error that can occur while accessing the STEP pin\n\n type Error;\n\n\n\n /// Provides access to the STEP pin\n\n fn step(&mut self) -> Result<&mut Self::Step, Self::Error>;\n\n}\n\n\n", "file_path": "src/traits.rs", "rank": 2, "score": 38364.48287523482 }, { "content": "/// Implemented by drivers that have motion control capabilities\n\n///\n\n/// A software-based fallback implementation exists in the [`motion_control`]\n\n/// module, for drivers that implement [SetDirection] and [Step].\n\n///\n\n/// [`motion_control`]: crate::motion_control\n\npub trait MotionControl {\n\n /// The type used by the driver to represent velocity\n\n type Velocity: Copy;\n\n\n\n /// The type error that can happen when using this trait\n\n type Error;\n\n\n\n /// Move to the given position\n\n ///\n\n /// This method must arrange for the motion to start, but must not block\n\n /// until it is completed. 
If more attention is required during the motion,\n\n /// this should be handled in [`MotionControl::update`].\n\n fn move_to_position(\n\n &mut self,\n\n max_velocity: Self::Velocity,\n\n target_step: i32,\n\n ) -> Result<(), Self::Error>;\n\n\n\n /// Reset internal position to the given value\n\n ///\n", "file_path": "src/traits.rs", "rank": 3, "score": 36979.98394135244 }, { "content": "/// Implemented by drivers that support controlling the DIR signal\n\npub trait SetDirection {\n\n /// The time that the DIR signal must be held for a change to apply\n\n const SETUP_TIME: Nanoseconds;\n\n\n\n /// The type of the DIR pin\n\n type Dir: OutputPin;\n\n\n\n /// The error that can occur while accessing the DIR pin\n\n type Error;\n\n\n\n /// Provides access to the DIR pin\n\n fn dir(&mut self) -> Result<&mut Self::Dir, Self::Error>;\n\n}\n\n\n", "file_path": "src/traits.rs", "rank": 4, "score": 36979.91834765849 }, { "content": "/// Implemented by drivers that support controlling the microstepping mode\n\npub trait SetStepMode {\n\n /// The time the mode signals need to be held before re-enabling the driver\n\n const SETUP_TIME: Nanoseconds;\n\n\n\n /// The time the mode signals need to be held after re-enabling the driver\n\n const HOLD_TIME: Nanoseconds;\n\n\n\n /// The error that can occur while using this trait\n\n type Error;\n\n\n\n /// The type that defines the microstepping mode\n\n ///\n\n /// This crate includes a number of enums that can be used for this purpose.\n\n type StepMode: StepMode;\n\n\n\n /// Apply the new step mode configuration\n\n ///\n\n /// Typically this puts the driver into reset and sets the mode pins\n\n /// according to the new step mode.\n\n fn apply_mode_config(\n\n &mut self,\n\n step_mode: Self::StepMode,\n\n ) -> Result<(), Self::Error>;\n\n\n\n /// Re-enable the driver after the mode has been set\n\n fn enable_driver(&mut self) -> Result<(), Self::Error>;\n\n}\n\n\n", "file_path": "src/traits.rs", "rank": 5, "score": 35729.152110148585 }, { "content": "/// Implemented for all step mode enums\n\npub trait StepMode:\n\n Into<u16> + TryFrom<u16, Error = InvalidStepModeError> + Copy\n\n{\n\n /// The type of the iterator returned by [`StepMode::iter`]\n\n type Iter: Iterator<Item = Self>;\n\n\n\n /// Returns an iterator over all supported modes\n\n ///\n\n /// Starts at the mode for configuring full steps and ends at the highest\n\n /// supported number of microsteps per step.\n\n fn iter() -> Self::Iter;\n\n}\n\n\n\nmacro_rules! generate_step_mode_enums {\n\n (\n\n $(\n\n $max:expr => $($variant:expr),*;\n\n )*\n\n ) => {\n\n $(\n", "file_path": "src/step_mode.rs", "rank": 6, "score": 35726.1247299393 }, { "content": "```\n\n\n\nThis should generate a new driver crate in `drivers/` from the template in `templates/driver/`. Please note that the template is fairly specific to the currently existing drivers, and it might be necessary to adapt it for new ones.\n\n\n\nIf in doubt, feel free to skip creating the driver crate. The important part is having the driver in Stepper. An external crate can always be created later.\n", "file_path": "documentation/how-to-write-a-driver.md", "rank": 7, "score": 33669.239787793966 }, { "content": "### Driver Module\n\n\n\nWhen starting to write a new driver, start with copying an existing one. All existing drivers are located in `src/drivers/`, with each having a separate file. 
Choose one that you think is most similar to what you're going to need (if in doubt, pick `drv8825.rs`).\n\n\n\nAs an aside, it would be nice if there was less duplicated code between the drivers. A lot has already been done to reduce this, by simplifying the driver traits and moving non-trivial code out of the drivers, but a lot of boilerplate code for managing driver state remains. It should be possible to reduce this to a bare minimum using procedural macros, but that remains an area for future exploration.\n\n\n\nGo through the driver you copied, and change it as required. Most changes should fall into one of the following categories:\n\n\n\n- Update names that refer to hardware (the stepper driver itself and its pins).\n\n- Make sure the timing constants match the required timing, as per the documentation of the stepper driver.\n\n- Make sure all traits that the stepper driver can support are implemented.\n\n\n\nThe last item is the least trivial, as different driver hardware has different capabilities, and thus allow the driver to implement a different set of traits. Please use the information on the traits from the previous section as a guideline.\n\n\n\n### Driver Crate\n\n\n\nIn addition to the modules in Stepper itself, drivers can be used via external crates that re-export the relevant parts of Stepper. This has been done to provide more convenience to users that only need support for a specific driver, and to make it easier to discover Stepper through the driver hardware it supports.\n\n\n\nTo create an external driver crate, add an entry to `drivers.toml`, then run the `generate-drivers` task to generate the code for it:\n\n\n\n``` bash\n\ncargo install cargo-task # only required once\n\ncargo task generate-drivers\n", "file_path": "documentation/how-to-write-a-driver.md", "rank": 8, "score": 33667.24162577055 }, { "content": "# How to Write a Driver\n\n\n\n## Introduction\n\n\n\nStepper is a software library for controlling stepper motors. To actually do that, it needs to interface with hardware that can drive the motor. To simplify things, we're going to refer to this as \"driver hardware\" going forward.\n\n\n\nStepper provides a unified interface, so the code that uses it does not need to know about the specific driver hardware being used. Under the hood, Stepper needs to know all those specifics, of course. The code that handles that for a given model of driver hardware is called a \"driver\".\n\n\n\nTo add support for additional driver hardware, a new driver needs to be written. The next section introduces some basics that apply to all drivers. Later sections go into the different types of driver hardware, mainly stepper drivers and motion controllers, and how to write drivers for them.\n\n\n\n\n\n## Driver Traits\n\n\n\nThe `traits` module contains traits that can be implemented by drivers. The `Stepper` API uses these traits to abstract over drivers and provide a unified interface.\n\n\n\nTwo kinds of traits that are available:\n\n\n\n- Traits that represent hardware capabilities. `Step` or `SetDirection` are examples of this. Your driver should implement all capability traits that can be supported by the hardware.\n\n- Traits that allow enabling a given capability from software. For example `EnableStepControl` for `Step` or `EnableDirectionControl` for `SetDirection`. 
If your driver implements a capability trait, it should also implement the corresponding \"enable\" trait.\n\n\n", "file_path": "documentation/how-to-write-a-driver.md", "rank": 9, "score": 33666.636799036656 }, { "content": "# STSPIN220 Driver [![crates.io](https://img.shields.io/crates/v/stspin220.svg)](https://crates.io/crates/stspin220) [![Documentation](https://docs.rs/stspin220/badge.svg)](https://docs.rs/stspin220) ![CI Build](https://github.com/flott-motion/stepper/workflows/CI%20Build/badge.svg)\n\n\n\n## About\n\n\n\nRust driver crate for the [STSPIN220] stepper motor driver. Carrier boards for this chip are [available from Pololu].\n\n\n\nThis crate is a specialized facade for the [Stepper] library. Please consider using Stepper directly, as it provides drivers for more stepper motor drivers, as well as an interface to abstract over them.\n\n\n\nSee [Stepper] for more documentation and usage examples.\n\n\n\n## License\n\n\n\nThis project is open source software, licensed under the terms of the [Zero Clause BSD License] (0BSD, for short). This basically means you can do anything with the software, without any restrictions, but you can't hold the authors liable for problems.\n\n\n\nSee [LICENSE.md] for full details.\n\n\n\n[stspin220]: https://www.st.com/en/motor-drivers/stspin220.html\n\n[available from pololu]: https://www.pololu.com/category/260/\n\n[Stepper]: https://crates.io/crates/stepper\n\n[zero clause bsd license]: https://opensource.org/licenses/0BSD\n\n[license.md]: https://github.com/flott-motion/stepper/blob/main/LICENSE.md\n", "file_path": "drivers/stspin220/README.md", "rank": 10, "score": 33664.65778785808 }, { "content": "# DRV8825 Driver [![crates.io](https://img.shields.io/crates/v/drv8825.svg)](https://crates.io/crates/drv8825) [![Documentation](https://docs.rs/drv8825/badge.svg)](https://docs.rs/drv8825) ![CI Build](https://github.com/flott-motion/stepper/workflows/CI%20Build/badge.svg)\n\n\n\n## About\n\n\n\nRust driver crate for the [DRV8825] stepper motor driver. Carrier boards for this chip are [available from Pololu].\n\n\n\nThis crate is a specialized facade for the [Stepper] library. Please consider using Stepper directly, as it provides drivers for more stepper motor drivers, as well as an interface to abstract over them.\n\n\n\nSee [Stepper] for more documentation and usage examples.\n\n\n\n## License\n\n\n\nThis project is open source software, licensed under the terms of the [Zero Clause BSD License] (0BSD, for short). This basically means you can do anything with the software, without any restrictions, but you can't hold the authors liable for problems.\n\n\n\nSee [LICENSE.md] for full details.\n\n\n\n[drv8825]: https://www.ti.com/product/DRV8825\n\n[available from pololu]: https://www.pololu.com/category/154/\n\n[Stepper]: https://crates.io/crates/stepper\n\n[zero clause bsd license]: https://opensource.org/licenses/0BSD\n\n[license.md]: https://github.com/flott-motion/stepper/blob/main/LICENSE.md\n", "file_path": "drivers/drv8825/README.md", "rank": 11, "score": 33664.5382420446 }, { "content": "### Motion Controllers\n\n\n\nThe defining feature of motion controllers is that they provide a high-level motion control interface that allows for moving a specific number of steps, or at a specific speed. They also provide smooth acceleration between different speeds. 
This is functionality that would otherwise have to be implemented in software, if using just a low-level stepper driver.\n\n\n\nAs of this writing, Stepper does not support any motion controllers. However, the `MotionControl` trait was written with them in mind. Hopefully, it should be possible to implement `MotionControl` for any motion controllers, with little to no changes to the trait required.\n\n\n\nSince motion controllers should be able to make a single step, it should be possible to implement `Step`/`SetDirection` for them too. Whether that is desirable is a subject for future exploration. `SetStepMode`/`EnableStepModeControl` should be implemented, if the hardware can support them.\n\n\n\n### Other Hardware\n\n\n\nThere is more driver hardware that doesn't fall neatly into one of the two groups (stepper drivers and motion controllers) outlined above. At this point, Stepper doesn't support any of them. Whether they can be made to fit the existing traits, or wether modifications or entirely new traits are required, are open questions.\n\n\n\n\n\n## Writing a Driver\n\n\n", "file_path": "documentation/how-to-write-a-driver.md", "rank": 12, "score": 33663.6230467701 }, { "content": "## Types of Driver Hardware\n\n\n\n### Stepper Drivers\n\n\n\nLots of driver hardware implements a standard interface, consisting of digital STEP and DIR signals. Those allow for making steps and controlling direction. This type of driver hardware is often called \"stepper driver\".\n\n\n\nStepper drivers tend to be fairly low-level, so their drivers typically only need to implement the lower-level traits:\n\n\n\n- `Step`/`EnableStepControl` and `SetDirection`/`EnableDirectionControl`: This is an ubiquitous capability with stepper drivers. If driver hardware has STEP and DIR signals, it should implement these traits.\n\n- `SetStepMode`/`EnableStepModeControl`: If the hardware supports microstepping, and that configuration can be controlled from software, the driver should implement these traits.\n\n- `MotionControl`/`EnableMotionControl`: Typical stepper drivers don't have motion control capability and can't support these traits natively. A software-based fallback implementation based on `Step` and `SetDirection` is available, but as a driver author, you don't have to worry about that.\n\n\n\nPlease note that some driver hardware is a hybrid between a typical stepper driver (i.e. it provides STEP and DIR signals) and a higher-level motion controller. If you're faced with hardware like this, you can implement support for its low-level features, as laid out in this section. The next section goes into how to support motion control capability.\n\n\n", "file_path": "documentation/how-to-write-a-driver.md", "rank": 13, "score": 33663.24352821702 }, { "content": "Drivers use a pattern called \"type state\" to encode, at compile-time, which capabilities are available. To provide maximum flexibility to users, Stepper makes no assumptions about which capabilities can be controlled from software in a given situation. For example, if a user uses Stepper to generate step signals, but wants to wire up a physical switch to control the direction, this should be possible.\n\n\n\nTo that end, drivers start without any capabilities. To enable a capability (e.g. setting direction) the user needs to pass the resource required to do that (e.g. the `OutputPin` implementation that controls the DIR signal) to the driver. 
This happens through the unified API in `Stepper`, and the various \"enable\" traits make this possible.\n\n\n\nHere's an overview of all of the traits:\n\n\n\n- `Step`/`EnableStepControl`: `Step` is a fairly low-level trait that abstracts over making single steps.\n\n`SetDirection`/`EnableDirectionControl`: `SetDirection` controls the direction of steps made with `Step`. Implementing it only makes sense, if the driver implements `Step`.\n\n- `SetStepMode`/`EnableStepModeControl`: Microstepping is a technique for more fine-grained control of stepper motors. Most driver hardware seems to support it these days, but some might not. Some driver hardware has physical switches to control microstepping configuration, meaning that it can't be changed from software.\n\n- `MotionControl`/`EnableMotionControl`: `MotionControl` abstracts over high-level motion control capability, for example moving a specific number of steps while smoothly accelerating/deceleration to/from the maximum velocity.\n\n\n\nYour driver should implement all traits whose capabilities the hardware can support. The following sections have some more notes on what that might look like for different kinds of driver hardware.\n\n\n\n\n", "file_path": "documentation/how-to-write-a-driver.md", "rank": 14, "score": 33661.526339637596 }, { "content": "fn delay_left<Delay, Convert>(\n\n delay: Delay,\n\n pulse_length: Nanoseconds,\n\n convert: &Convert,\n\n) -> Result<\n\n Convert::Ticks,\n\n TimeConversionError<\n\n <Convert::Ticks as TryFrom<Nanoseconds>>::Error,\n\n Convert::Error,\n\n >,\n\n>\n\nwhere\n\n Convert: DelayToTicks<Delay>,\n\n Convert::Ticks: TryFrom<Nanoseconds> + ops::Sub<Output = Convert::Ticks>,\n\n{\n\n let delay: Convert::Ticks = convert\n\n .delay_to_ticks(delay)\n\n .map_err(|err| TimeConversionError::DelayToTicks(err))?;\n\n let pulse_length: Convert::Ticks = pulse_length\n\n .try_into()\n\n .map_err(|err| TimeConversionError::NanosecondsToTicks(err))?;\n\n\n\n let delay_left = delay - pulse_length;\n\n Ok(delay_left)\n\n}\n", "file_path": "src/motion_control/state.rs", "rank": 15, "score": 33646.574152781555 }, { "content": "/// Enable step control for a driver\n\n///\n\n/// The `Resources` type parameter defines the hardware resources required for\n\n/// step control.\n\npub trait EnableStepControl<Resources> {\n\n /// The type of the driver after step control has been enabled\n\n type WithStepControl: Step;\n\n\n\n /// Enable step control\n\n fn enable_step_control(self, res: Resources) -> Self::WithStepControl;\n\n}\n\n\n", "file_path": "src/traits.rs", "rank": 16, "score": 33031.960656651936 }, { "content": "/// Enable motion control for a driver\n\n///\n\n/// The `Resources` type parameter defines the hardware resources required for\n\n/// motion control.\n\npub trait EnableMotionControl<Resources> {\n\n /// The type of the driver after motion control has been enabled\n\n type WithMotionControl: MotionControl;\n\n\n\n /// Enable step control\n\n fn enable_motion_control(self, res: Resources) -> Self::WithMotionControl;\n\n}\n\n\n", "file_path": "src/traits.rs", "rank": 17, "score": 33031.960656651936 }, { "content": "/// Enable direction control for a driver\n\n///\n\n/// The `Resources` type parameter defines the hardware resources required for\n\n/// direction control.\n\npub trait EnableDirectionControl<Resources> {\n\n /// The type of the driver after direction control has been enabled\n\n type WithDirectionControl: SetDirection;\n\n\n\n /// Enable direction control\n\n fn enable_direction_control(\n\n 
self,\n\n res: Resources,\n\n ) -> Self::WithDirectionControl;\n\n}\n\n\n", "file_path": "src/traits.rs", "rank": 18, "score": 33031.960656651936 }, { "content": "/// Converts delay values from RampMaker into timer ticks\n\n///\n\n/// RampMaker is agnostic over the units used, and the unit of the timer ticks\n\n/// depend on the target platform. This trait allows Stepper to convert between\n\n/// both types. The user must supply an implementation that matches their\n\n/// environment.\n\n///\n\n/// The `Delay` parameter specifies the type of delay value used by RampMaker.\n\npub trait DelayToTicks<Delay> {\n\n /// The timer ticks the delay is being converted into\n\n type Ticks;\n\n\n\n /// The error that can happen during conversion\n\n type Error;\n\n\n\n /// Convert delay value into timer ticks\n\n fn delay_to_ticks(&self, delay: Delay) -> Result<Self::Ticks, Self::Error>;\n\n}\n", "file_path": "src/motion_control/conversion.rs", "rank": 19, "score": 32000.252443224927 }, { "content": "/// Enable microstepping mode control for a driver\n\n///\n\n/// The `Resources` type parameter defines the hardware resources required for\n\n/// controlling microstepping mode.\n\npub trait EnableStepModeControl<Resources> {\n\n /// The type of the driver after microstepping mode control has been enabled\n\n type WithStepModeControl: SetStepMode;\n\n\n\n /// Enable microstepping mode control\n\n fn enable_step_mode_control(\n\n self,\n\n res: Resources,\n\n ) -> Self::WithStepModeControl;\n\n}\n\n\n", "file_path": "src/traits.rs", "rank": 20, "score": 31996.476495689414 }, { "content": "{\n\n fn from(\n\n err: motion_control::Error<\n\n PinUnavailableError,\n\n PinError,\n\n PinUnavailableError,\n\n PinError,\n\n TimerError,\n\n NanosecondsToTicksError,\n\n DelayToTicksError,\n\n >,\n\n ) -> Self {\n\n Self::MotionControl(err)\n\n }\n\n}\n\n\n\n/// An error that can occur while using this API\n\n#[derive(Debug, Eq, PartialEq)]\n\npub enum SignalError<\n\n PinUnavailableError,\n", "file_path": "src/stepper/error.rs", "rank": 21, "score": 28853.270185071407 }, { "content": " TimerError,\n\n >\n\n{\n\n fn from(\n\n err: SignalError<\n\n PinUnavailableError,\n\n PinError,\n\n NanosecondsToTicksError,\n\n TimerError,\n\n >,\n\n ) -> Self {\n\n Self::Signal(err)\n\n }\n\n}\n\n\n\nimpl<\n\n PinUnavailableError,\n\n PinError,\n\n NanosecondsToTicksError,\n\n DelayToTicksError,\n", "file_path": "src/stepper/error.rs", "rank": 22, "score": 28851.802839636257 }, { "content": "use crate::motion_control;\n\n\n\n/// Unified error type\n\n///\n\n/// Unifies the two types of errors that can happen while using [`Stepper`]:\n\n/// Signal errors and motion control errors.\n\n///\n\n/// [`Stepper`]: crate::Stepper\n\n#[derive(Debug, Eq, PartialEq)]\n\npub enum Error<\n\n PinUnavailableError,\n\n PinError,\n\n NanosecondsToTicksError,\n\n DelayToTicksError,\n\n TimerError,\n\n> {\n\n /// A signal error\n\n Signal(\n\n SignalError<\n\n PinUnavailableError,\n", "file_path": "src/stepper/error.rs", "rank": 23, "score": 28850.86530578532 }, { "content": "impl<\n\n PinUnavailableError,\n\n PinError,\n\n NanosecondsToTicksError,\n\n DelayToTicksError,\n\n TimerError,\n\n >\n\n From<\n\n SignalError<\n\n PinUnavailableError,\n\n PinError,\n\n NanosecondsToTicksError,\n\n TimerError,\n\n >,\n\n >\n\n for Error<\n\n PinUnavailableError,\n\n PinError,\n\n NanosecondsToTicksError,\n\n DelayToTicksError,\n", "file_path": "src/stepper/error.rs", "rank": 24, "score": 28848.414868074356 }, { "content": " PinError,\n\n 
NanosecondsToTicksError,\n\n TimerError,\n\n> {\n\n /// A pin was not accessible\n\n PinUnavailable(PinUnavailableError),\n\n\n\n /// An error originated from using the [`OutputPin`] trait\n\n ///\n\n /// [`OutputPin`]: embedded_hal::digital::blocking::OutputPin\n\n Pin(PinError),\n\n\n\n /// An error occurred while converting nanoseconds to timer ticks\n\n NanosecondsToTicks(NanosecondsToTicksError),\n\n\n\n /// An error originated from working with a timer\n\n Timer(TimerError),\n\n}\n", "file_path": "src/stepper/error.rs", "rank": 25, "score": 28847.86598208791 }, { "content": " TimerError,\n\n >\n\n From<\n\n motion_control::Error<\n\n PinUnavailableError,\n\n PinError,\n\n PinUnavailableError,\n\n PinError,\n\n TimerError,\n\n NanosecondsToTicksError,\n\n DelayToTicksError,\n\n >,\n\n >\n\n for Error<\n\n PinUnavailableError,\n\n PinError,\n\n NanosecondsToTicksError,\n\n DelayToTicksError,\n\n TimerError,\n\n >\n", "file_path": "src/stepper/error.rs", "rank": 26, "score": 28846.049270729764 }, { "content": " PinError,\n\n NanosecondsToTicksError,\n\n TimerError,\n\n >,\n\n ),\n\n\n\n /// A motion control error\n\n MotionControl(\n\n motion_control::Error<\n\n PinUnavailableError,\n\n PinError,\n\n PinUnavailableError,\n\n PinError,\n\n TimerError,\n\n NanosecondsToTicksError,\n\n DelayToTicksError,\n\n >,\n\n ),\n\n}\n\n\n", "file_path": "src/stepper/error.rs", "rank": 27, "score": 28846.004569899065 }, { "content": " type Error = Infallible;\n\n\n\n fn dir(&mut self) -> Result<&mut Self::Dir, Self::Error> {\n\n Ok(&mut self.dir_mode4)\n\n }\n\n}\n\n\n\nimpl<\n\n EnableFault,\n\n StandbyReset,\n\n Mode1,\n\n Mode2,\n\n StepMode3,\n\n DirMode4,\n\n OutputPinError,\n\n > EnableStepControl<StepMode3>\n\n for STSPIN220<EnableFault, StandbyReset, Mode1, Mode2, (), DirMode4>\n\nwhere\n\n StepMode3: OutputPin<Error = OutputPinError>,\n\n{\n", "file_path": "src/drivers/stspin220.rs", "rank": 28, "score": 28025.981373670267 }, { "content": " self.mode2.set_state(mode2)?;\n\n self.step_mode3.set_state(mode3)?;\n\n self.dir_mode4.set_state(mode4)?;\n\n\n\n Ok(())\n\n }\n\n\n\n fn enable_driver(&mut self) -> Result<(), Self::Error> {\n\n // Leave standby mode.\n\n self.standby_reset.set_high()\n\n }\n\n}\n\n\n\nimpl<\n\n EnableFault,\n\n StandbyReset,\n\n Mode1,\n\n Mode2,\n\n StepMode3,\n\n DirMode4,\n", "file_path": "src/drivers/stspin220.rs", "rank": 29, "score": 28025.542660437473 }, { "content": "\n\nimpl<Reset, Mode0, Mode1, Mode2, Step, Dir, OutputPinError> StepTrait\n\n for DRV8825<(), (), (), Reset, Mode0, Mode1, Mode2, Step, Dir>\n\nwhere\n\n Step: OutputPin<Error = OutputPinError>,\n\n{\n\n // 7.6 Timing Requirements (page 7)\n\n // https://www.ti.com/lit/ds/symlink/drv8825.pdf\n\n const PULSE_LENGTH: Nanoseconds = Nanoseconds(1900);\n\n\n\n type Step = Step;\n\n type Error = Infallible;\n\n\n\n fn step(&mut self) -> Result<&mut Self::Step, Self::Error> {\n\n Ok(&mut self.step)\n\n }\n\n}\n", "file_path": "src/drivers/drv8825.rs", "rank": 30, "score": 28025.280610668546 }, { "content": " const HOLD_TIME: Nanoseconds = Nanoseconds(650);\n\n\n\n type Error = OutputPinError;\n\n type StepMode = StepMode32;\n\n\n\n fn apply_mode_config(\n\n &mut self,\n\n step_mode: Self::StepMode,\n\n ) -> Result<(), Self::Error> {\n\n // Reset the device's internal logic and disable the h-bridge drivers.\n\n self.reset.set_low()?;\n\n\n\n use PinState::*;\n\n use StepMode32::*;\n\n let (mode0, mode1, mode2) = match step_mode {\n\n Full => (Low, Low, Low),\n\n M2 => (High, Low, Low),\n\n M4 => (Low, 
High, Low),\n\n M8 => (High, High, Low),\n\n M16 => (Low, Low, High),\n", "file_path": "src/drivers/drv8825.rs", "rank": 31, "score": 28025.0573430956 }, { "content": "}\n\n\n\nimpl DRV8825<(), (), (), (), (), (), (), (), ()> {\n\n /// Create a new instance of `DRV8825`\n\n pub fn new() -> Self {\n\n Self {\n\n enable: (),\n\n fault: (),\n\n sleep: (),\n\n reset: (),\n\n mode0: (),\n\n mode1: (),\n\n mode2: (),\n\n step: (),\n\n dir: (),\n\n }\n\n }\n\n}\n\n\n\nimpl<Reset, Mode0, Mode1, Mode2, Step, Dir, OutputPinError>\n", "file_path": "src/drivers/drv8825.rs", "rank": 32, "score": 28024.907115377886 }, { "content": " M32 => (High, High, High),\n\n };\n\n\n\n // Set mode signals.\n\n self.mode0.set_state(mode0)?;\n\n self.mode1.set_state(mode1)?;\n\n self.mode2.set_state(mode2)?;\n\n\n\n Ok(())\n\n }\n\n\n\n fn enable_driver(&mut self) -> Result<(), Self::Error> {\n\n self.reset.set_high()\n\n }\n\n}\n\n\n\nimpl<Reset, Mode0, Mode1, Mode2, Step, Dir, OutputPinError>\n\n EnableDirectionControl<Dir>\n\n for DRV8825<(), (), (), Reset, Mode0, Mode1, Mode2, Step, ()>\n\nwhere\n", "file_path": "src/drivers/drv8825.rs", "rank": 33, "score": 28024.892334126424 }, { "content": "impl<Reset, Mode0, Mode1, Mode2, Step, Dir, OutputPinError> SetDirection\n\n for DRV8825<(), (), (), Reset, Mode0, Mode1, Mode2, Step, Dir>\n\nwhere\n\n Dir: OutputPin<Error = OutputPinError>,\n\n{\n\n // 7.6 Timing Requirements (page 7)\n\n // https://www.ti.com/lit/ds/symlink/drv8825.pdf\n\n const SETUP_TIME: Nanoseconds = Nanoseconds(650);\n\n\n\n type Dir = Dir;\n\n type Error = Infallible;\n\n\n\n fn dir(&mut self) -> Result<&mut Self::Dir, Self::Error> {\n\n Ok(&mut self.dir)\n\n }\n\n}\n\n\n\nimpl<Reset, Mode0, Mode1, Mode2, Step, Dir, OutputPinError>\n\n EnableStepControl<Step>\n\n for DRV8825<(), (), (), Reset, Mode0, Mode1, Mode2, (), Dir>\n", "file_path": "src/drivers/drv8825.rs", "rank": 34, "score": 28024.61104252774 }, { "content": " StandbyReset,\n\n Mode1,\n\n Mode2,\n\n StepMode3,\n\n DirMode4,\n\n OutputPinError,\n\n > Step\n\n for STSPIN220<EnableFault, StandbyReset, Mode1, Mode2, StepMode3, DirMode4>\n\nwhere\n\n StepMode3: OutputPin<Error = OutputPinError>,\n\n{\n\n const PULSE_LENGTH: Nanoseconds = Nanoseconds(100);\n\n\n\n type Step = StepMode3;\n\n type Error = Infallible;\n\n\n\n fn step(&mut self) -> Result<&mut Self::Step, Self::Error> {\n\n Ok(&mut self.step_mode3)\n\n }\n\n}\n", "file_path": "src/drivers/stspin220.rs", "rank": 35, "score": 28023.699617697985 }, { "content": " mode2: Mode2,\n\n step_mode3: StepMode3,\n\n dir_mode4: DirMode4,\n\n}\n\n\n\nimpl STSPIN220<(), (), (), (), (), ()> {\n\n /// Create a new instance of `STSPIN220`\n\n pub fn new() -> Self {\n\n Self {\n\n enable_fault: (),\n\n standby_reset: (),\n\n mode1: (),\n\n mode2: (),\n\n step_mode3: (),\n\n dir_mode4: (),\n\n }\n\n }\n\n}\n\n\n\nimpl<\n", "file_path": "src/drivers/stspin220.rs", "rank": 36, "score": 28022.28730094965 }, { "content": " SetDirection, SetStepMode, Step,\n\n },\n\n};\n\n\n\n/// The STSPIN220 driver API\n\n///\n\n/// Users are not expected to use this API directly, except to create an\n\n/// instance using [`STSPIN220::new`]. 
Please check out\n\n/// [`Stepper`](crate::Stepper) instead.\n\npub struct STSPIN220<\n\n EnableFault,\n\n StandbyReset,\n\n Mode1,\n\n Mode2,\n\n StepMode3,\n\n DirMode4,\n\n> {\n\n enable_fault: EnableFault,\n\n standby_reset: StandbyReset,\n\n mode1: Mode1,\n", "file_path": "src/drivers/stspin220.rs", "rank": 37, "score": 28018.738975438657 }, { "content": " DirMode4,\n\n OutputPinError,\n\n > SetStepMode\n\n for STSPIN220<EnableFault, StandbyReset, Mode1, Mode2, StepMode3, DirMode4>\n\nwhere\n\n StandbyReset: OutputPin<Error = OutputPinError>,\n\n Mode1: OutputPin<Error = OutputPinError>,\n\n Mode2: OutputPin<Error = OutputPinError>,\n\n StepMode3: OutputPin<Error = OutputPinError>,\n\n DirMode4: OutputPin<Error = OutputPinError>,\n\n{\n\n const SETUP_TIME: Nanoseconds = Nanoseconds(1_000);\n\n const HOLD_TIME: Nanoseconds = Nanoseconds(100_000);\n\n\n\n type Error = OutputPinError;\n\n type StepMode = StepMode256;\n\n\n\n fn apply_mode_config(\n\n &mut self,\n\n step_mode: Self::StepMode,\n", "file_path": "src/drivers/stspin220.rs", "rank": 38, "score": 28018.660617179943 }, { "content": " ) -> Result<(), Self::Error> {\n\n // Force driver into standby mode.\n\n self.standby_reset.set_low()?;\n\n\n\n use PinState::*;\n\n use StepMode256::*;\n\n let (mode1, mode2, mode3, mode4) = match step_mode {\n\n Full => (Low, Low, Low, Low),\n\n M2 => (High, Low, High, Low),\n\n M4 => (Low, High, Low, High),\n\n M8 => (High, High, High, Low),\n\n M16 => (High, High, High, High),\n\n M32 => (Low, High, Low, Low),\n\n M64 => (High, High, Low, High),\n\n M128 => (High, Low, Low, Low),\n\n M256 => (High, High, Low, Low),\n\n };\n\n\n\n // Set mode signals.\n\n self.mode1.set_state(mode1)?;\n", "file_path": "src/drivers/stspin220.rs", "rank": 39, "score": 28018.624664173978 }, { "content": " SetDirection, SetStepMode, Step as StepTrait,\n\n },\n\n};\n\n\n\n/// The DRV8825 driver API\n\n///\n\n/// Users are not expected to use this API directly, except to create an\n\n/// instance using [`DRV8825::new`]. Please check out\n\n/// [`Stepper`](crate::Stepper) instead.\n\npub struct DRV8825<Enable, Fault, Sleep, Reset, Mode0, Mode1, Mode2, Step, Dir>\n\n{\n\n enable: Enable,\n\n fault: Fault,\n\n sleep: Sleep,\n\n reset: Reset,\n\n mode0: Mode0,\n\n mode1: Mode1,\n\n mode2: Mode2,\n\n step: Step,\n\n dir: Dir,\n", "file_path": "src/drivers/drv8825.rs", "rank": 40, "score": 28018.111069769195 }, { "content": "//! DRV8825 Driver\n\n//!\n\n//! Platform-agnostic driver API for the DRV8825 stepper motor driver. Can be\n\n//! used on any platform for which implementations of the required\n\n//! [embedded-hal] traits are available.\n\n//!\n\n//! For the most part, users are not expected to use this API directly. Please\n\n//! check out [`Stepper`](crate::Stepper) instead.\n\n//!\n\n//! [embedded-hal]: https://crates.io/crates/embedded-hal\n\n\n\nuse core::convert::Infallible;\n\n\n\nuse embedded_hal::digital::{blocking::OutputPin, PinState};\n\nuse embedded_time::duration::Nanoseconds;\n\n\n\nuse crate::{\n\n step_mode::StepMode32,\n\n traits::{\n\n EnableDirectionControl, EnableStepControl, EnableStepModeControl,\n", "file_path": "src/drivers/drv8825.rs", "rank": 41, "score": 28016.10959768053 }, { "content": "//! STSPIN220 Driver\n\n//!\n\n//! Platform-agnostic driver API for the STSPIN220 stepper motor driver. Can be\n\n//! used on any platform for which implementations of the required\n\n//! [embedded-hal] traits are available.\n\n//!\n\n//! For the most part, users are not expected to use this API directly. 
Please\n\n//! check out [`Stepper`](crate::Stepper) instead.\n\n//!\n\n//! [embedded-hal]: https://crates.io/crates/embedded-hal\n\n\n\nuse core::convert::Infallible;\n\n\n\nuse embedded_hal::digital::{blocking::OutputPin, PinState};\n\nuse embedded_time::duration::Nanoseconds;\n\n\n\nuse crate::{\n\n step_mode::StepMode256,\n\n traits::{\n\n EnableDirectionControl, EnableStepControl, EnableStepModeControl,\n", "file_path": "src/drivers/stspin220.rs", "rank": 42, "score": 28016.10959768053 }, { "content": " mode0,\n\n mode1,\n\n mode2,\n\n step: self.step,\n\n dir: self.dir,\n\n }\n\n }\n\n}\n\n\n\nimpl<Reset, Mode0, Mode1, Mode2, Step, Dir, OutputPinError> SetStepMode\n\n for DRV8825<(), (), (), Reset, Mode0, Mode1, Mode2, Step, Dir>\n\nwhere\n\n Reset: OutputPin<Error = OutputPinError>,\n\n Mode0: OutputPin<Error = OutputPinError>,\n\n Mode1: OutputPin<Error = OutputPinError>,\n\n Mode2: OutputPin<Error = OutputPinError>,\n\n{\n\n // 7.6 Timing Requirements (page 7)\n\n // https://www.ti.com/lit/ds/symlink/drv8825.pdf\n\n const SETUP_TIME: Nanoseconds = Nanoseconds(650);\n", "file_path": "src/drivers/drv8825.rs", "rank": 43, "score": 28015.414406420845 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl<\n\n EnableFault,\n\n StandbyReset,\n\n Mode1,\n\n Mode2,\n\n StepMode3,\n\n DirMode4,\n\n OutputPinError,\n\n > SetDirection\n\n for STSPIN220<EnableFault, StandbyReset, Mode1, Mode2, StepMode3, DirMode4>\n\nwhere\n\n DirMode4: OutputPin<Error = OutputPinError>,\n\n{\n\n const SETUP_TIME: Nanoseconds = Nanoseconds(100);\n\n\n\n type Dir = DirMode4;\n", "file_path": "src/drivers/stspin220.rs", "rank": 44, "score": 28015.36966102261 }, { "content": " EnableStepModeControl<(Reset, Mode0, Mode1, Mode2)>\n\n for DRV8825<(), (), (), (), (), (), (), Step, Dir>\n\nwhere\n\n Reset: OutputPin<Error = OutputPinError>,\n\n Mode0: OutputPin<Error = OutputPinError>,\n\n Mode1: OutputPin<Error = OutputPinError>,\n\n Mode2: OutputPin<Error = OutputPinError>,\n\n{\n\n type WithStepModeControl =\n\n DRV8825<(), (), (), Reset, Mode0, Mode1, Mode2, Step, Dir>;\n\n\n\n fn enable_step_mode_control(\n\n self,\n\n (reset, mode0, mode1, mode2): (Reset, Mode0, Mode1, Mode2),\n\n ) -> Self::WithStepModeControl {\n\n DRV8825 {\n\n enable: self.enable,\n\n fault: self.fault,\n\n sleep: self.sleep,\n\n reset,\n", "file_path": "src/drivers/drv8825.rs", "rank": 45, "score": 28015.240026676696 }, { "content": " Dir: OutputPin<Error = OutputPinError>,\n\n{\n\n type WithDirectionControl =\n\n DRV8825<(), (), (), Reset, Mode0, Mode1, Mode2, Step, Dir>;\n\n\n\n fn enable_direction_control(self, dir: Dir) -> Self::WithDirectionControl {\n\n DRV8825 {\n\n enable: self.enable,\n\n fault: self.fault,\n\n sleep: self.sleep,\n\n reset: self.reset,\n\n mode0: self.mode0,\n\n mode1: self.mode1,\n\n mode2: self.mode2,\n\n step: self.step,\n\n dir,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/drivers/drv8825.rs", "rank": 46, "score": 28014.885899922865 }, { "content": "where\n\n Step: OutputPin<Error = OutputPinError>,\n\n{\n\n type WithStepControl =\n\n DRV8825<(), (), (), Reset, Mode0, Mode1, Mode2, Step, Dir>;\n\n\n\n fn enable_step_control(self, step: Step) -> Self::WithStepControl {\n\n DRV8825 {\n\n enable: self.enable,\n\n fault: self.fault,\n\n sleep: self.sleep,\n\n reset: self.reset,\n\n mode0: self.mode0,\n\n mode1: self.mode1,\n\n mode2: self.mode2,\n\n step,\n\n dir: self.dir,\n\n }\n\n }\n\n}\n", "file_path": "src/drivers/drv8825.rs", "rank": 47, "score": 28014.885899922865 }, { "content": " OutputPinError,\n\n > 
EnableDirectionControl<DirMode4>\n\n for STSPIN220<EnableFault, StandbyReset, Mode1, Mode2, StepMode3, ()>\n\nwhere\n\n DirMode4: OutputPin<Error = OutputPinError>,\n\n{\n\n type WithDirectionControl =\n\n STSPIN220<EnableFault, StandbyReset, Mode1, Mode2, StepMode3, DirMode4>;\n\n\n\n fn enable_direction_control(\n\n self,\n\n dir_mode4: DirMode4,\n\n ) -> Self::WithDirectionControl {\n\n STSPIN220 {\n\n enable_fault: self.enable_fault,\n\n standby_reset: self.standby_reset,\n\n mode1: self.mode1,\n\n mode2: self.mode2,\n\n step_mode3: self.step_mode3,\n\n dir_mode4,\n", "file_path": "src/drivers/stspin220.rs", "rank": 48, "score": 28014.487936406025 }, { "content": " self,\n\n (standby_reset, mode1, mode2): (StandbyReset, Mode1, Mode2),\n\n ) -> Self::WithStepModeControl {\n\n STSPIN220 {\n\n enable_fault: self.enable_fault,\n\n standby_reset,\n\n mode1,\n\n mode2,\n\n step_mode3: self.step_mode3,\n\n dir_mode4: self.dir_mode4,\n\n }\n\n }\n\n}\n\n\n\nimpl<\n\n EnableFault,\n\n StandbyReset,\n\n Mode1,\n\n Mode2,\n\n StepMode3,\n", "file_path": "src/drivers/stspin220.rs", "rank": 49, "score": 28014.445707851344 }, { "content": " type WithStepControl =\n\n STSPIN220<EnableFault, StandbyReset, Mode1, Mode2, StepMode3, DirMode4>;\n\n\n\n fn enable_step_control(\n\n self,\n\n step_mode3: StepMode3,\n\n ) -> Self::WithStepControl {\n\n STSPIN220 {\n\n enable_fault: self.enable_fault,\n\n standby_reset: self.standby_reset,\n\n mode1: self.mode1,\n\n mode2: self.mode2,\n\n step_mode3,\n\n dir_mode4: self.dir_mode4,\n\n }\n\n }\n\n}\n\n\n\nimpl<\n\n EnableFault,\n", "file_path": "src/drivers/stspin220.rs", "rank": 50, "score": 28014.13333888644 }, { "content": " EnableFault,\n\n StandbyReset,\n\n Mode1,\n\n Mode2,\n\n StepMode3,\n\n DirMode4,\n\n OutputPinError,\n\n > EnableStepModeControl<(StandbyReset, Mode1, Mode2)>\n\n for STSPIN220<EnableFault, (), (), (), StepMode3, DirMode4>\n\nwhere\n\n StandbyReset: OutputPin<Error = OutputPinError>,\n\n Mode1: OutputPin<Error = OutputPinError>,\n\n Mode2: OutputPin<Error = OutputPinError>,\n\n StepMode3: OutputPin<Error = OutputPinError>,\n\n DirMode4: OutputPin<Error = OutputPinError>,\n\n{\n\n type WithStepModeControl =\n\n STSPIN220<EnableFault, StandbyReset, Mode1, Mode2, StepMode3, DirMode4>;\n\n\n\n fn enable_step_mode_control(\n", "file_path": "src/drivers/stspin220.rs", "rank": 51, "score": 28013.678945812815 }, { "content": " Driver: MotionControl,\n\n {\n\n MoveToFuture::new(RefMut(&mut self.driver), max_velocity, target_step)\n\n }\n\n\n\n /// Reset the position to the given value\n\n ///\n\n /// This should never result in a movement, as this method only overwrites\n\n /// the internal position counter of the driver. 
However, it might influence\n\n /// an already ongoing movement.\n\n ///\n\n /// You might need to call [`Stepper::enable_motion_control`] to make this\n\n /// method available.\n\n pub fn reset_position(&mut self, step: i32) -> Result<(), Driver::Error>\n\n where\n\n Driver: MotionControl,\n\n {\n\n self.driver.reset_position(step)\n\n }\n\n}\n", "file_path": "src/stepper/mod.rs", "rank": 52, "score": 27707.342371646617 }, { "content": "/// required.\n\n///\n\n/// [RFC 2632]: https://github.com/rust-lang/rfcs/pull/2632\n\n/// [RFC 2920]: https://github.com/rust-lang/rfcs/pull/2920\n\npub struct Stepper<Driver> {\n\n driver: Driver,\n\n}\n\n\n\nimpl<Driver> Stepper<Driver> {\n\n /// Create a new `Stepper` instance from a driver\n\n pub fn from_driver(driver: Driver) -> Self {\n\n Self { driver }\n\n }\n\n\n\n /// Access a reference to the wrapped driver\n\n ///\n\n /// Can be used to access driver-specific functionality that can't be\n\n /// provided by `Stepper`'s abstract interface.\n\n pub fn driver(&self) -> &Driver {\n\n &self.driver\n", "file_path": "src/stepper/mod.rs", "rank": 53, "score": 27706.053885619425 }, { "content": " }\n\n\n\n /// Access a mutable reference to the wrapped driver\n\n ///\n\n /// Can be used to access driver-specific functionality that can't be\n\n /// provided by `Stepper`'s abstract interface.\n\n pub fn driver_mut(&mut self) -> &mut Driver {\n\n &mut self.driver\n\n }\n\n\n\n /// Release the wrapped driver\n\n ///\n\n /// Drops this instance of `Stepper` and returns the wrapped driver.\n\n pub fn release(self) -> Driver {\n\n self.driver\n\n }\n\n\n\n /// Enable microstepping mode control\n\n ///\n\n /// Consumes this instance of `Stepper` and returns a new instance that\n", "file_path": "src/stepper/mod.rs", "rank": 54, "score": 27705.067436495545 }, { "content": "mod error;\n\nmod move_to;\n\nmod set_direction;\n\nmod set_step_mode;\n\nmod step;\n\n\n\npub use self::{\n\n error::{Error, SignalError},\n\n move_to::MoveToFuture,\n\n set_direction::SetDirectionFuture,\n\n set_step_mode::SetStepModeFuture,\n\n step::StepFuture,\n\n};\n\n\n\nuse core::convert::{Infallible, TryFrom};\n\n\n\nuse embedded_hal::{digital::blocking::OutputPin, timer::nb as timer};\n\nuse embedded_time::duration::Nanoseconds;\n\n\n\nuse crate::{\n", "file_path": "src/stepper/mod.rs", "rank": 55, "score": 27703.071769505405 }, { "content": " res: Resources,\n\n initial: Direction,\n\n timer: &mut Timer,\n\n ) -> Result<\n\n Stepper<Driver::WithDirectionControl>,\n\n SignalError<\n\n <Driver::WithDirectionControl as SetDirection>::Error,\n\n <<Driver::WithDirectionControl as SetDirection>::Dir\n\n as OutputPin>::Error,\n\n <Timer::Time as TryFrom<Nanoseconds>>::Error,\n\n Timer::Error,\n\n >,\n\n >\n\n where\n\n Driver: EnableDirectionControl<Resources>,\n\n Timer: timer::CountDown,\n\n Timer::Time: TryFrom<Nanoseconds>,\n\n {\n\n let mut self_ = Stepper {\n\n driver: self.driver.enable_direction_control(res),\n", "file_path": "src/stepper/mod.rs", "rank": 56, "score": 27701.982406989944 }, { "content": " <Driver::WithStepModeControl as SetStepMode>::Error,\n\n <Timer::Time as TryFrom<Nanoseconds>>::Error,\n\n Timer::Error,\n\n >,\n\n >\n\n where\n\n Driver: EnableStepModeControl<Resources>,\n\n Timer: timer::CountDown,\n\n Timer::Time: TryFrom<Nanoseconds>,\n\n {\n\n let mut self_ = Stepper {\n\n driver: self.driver.enable_step_mode_control(res),\n\n };\n\n self_.set_step_mode(initial, timer).wait()?;\n\n\n\n Ok(self_)\n\n }\n\n\n\n /// Sets the microstepping mode\n\n ///\n", 
"file_path": "src/stepper/mod.rs", "rank": 57, "score": 27700.972287295488 }, { "content": " /// method available.\n\n pub fn step<'r, Timer>(\n\n &'r mut self,\n\n timer: &'r mut Timer,\n\n ) -> StepFuture<RefMut<'r, Driver>, RefMut<'r, Timer>>\n\n where\n\n Driver: Step,\n\n Timer: timer::CountDown,\n\n Timer::Time: TryFrom<Nanoseconds>,\n\n {\n\n StepFuture::new(RefMut(&mut self.driver), RefMut(timer))\n\n }\n\n\n\n /// Returns the step pulse length of the wrapped driver/controller\n\n ///\n\n /// The pulse length is also available through the [`Step`] trait. This\n\n /// method provides a more convenient way to access it.\n\n ///\n\n /// You might need to call [`Stepper::enable_step_control`] to make this\n\n /// method available.\n", "file_path": "src/stepper/mod.rs", "rank": 58, "score": 27700.888219806173 }, { "content": " };\n\n self_.set_direction(initial, timer).wait()?;\n\n\n\n Ok(self_)\n\n }\n\n\n\n /// Set direction for future movements\n\n ///\n\n /// You might need to call [`Stepper::enable_direction_control`] to make\n\n /// this method available.\n\n pub fn set_direction<'r, Timer>(\n\n &'r mut self,\n\n direction: Direction,\n\n timer: &'r mut Timer,\n\n ) -> SetDirectionFuture<RefMut<'r, Driver>, RefMut<'r, Timer>>\n\n where\n\n Driver: SetDirection,\n\n Timer: timer::CountDown,\n\n Timer::Time: TryFrom<Nanoseconds>,\n\n {\n", "file_path": "src/stepper/mod.rs", "rank": 59, "score": 27700.671698318372 }, { "content": " /// This method is only available, if the wrapped driver supports\n\n /// microstepping, and supports setting the step mode through software. Some\n\n /// hardware might not support microstepping at all, or only allow setting\n\n /// the step mode by changing physical switches.\n\n ///\n\n /// You might need to call [`Stepper::enable_step_mode_control`] to make\n\n /// this method available.\n\n pub fn set_step_mode<'r, Timer>(\n\n &'r mut self,\n\n step_mode: Driver::StepMode,\n\n timer: &'r mut Timer,\n\n ) -> SetStepModeFuture<RefMut<'r, Driver>, RefMut<'r, Timer>>\n\n where\n\n Driver: SetStepMode,\n\n Timer: timer::CountDown,\n\n Timer::Time: TryFrom<Nanoseconds>,\n\n {\n\n SetStepModeFuture::new(\n\n step_mode,\n\n RefMut(&mut self.driver),\n", "file_path": "src/stepper/mod.rs", "rank": 60, "score": 27700.24678203847 }, { "content": "//! Utility module for miscellaneous stuff that the rest of the crate needs\n\n\n\npub mod ref_mut;\n", "file_path": "src/util/mod.rs", "rank": 61, "score": 27700.048626854164 }, { "content": " /// provides control over the microstepping mode. Once this method has been\n\n /// called, the [`Stepper::set_step_mode`] method becomes available.\n\n ///\n\n /// Takes the hardware resources that are required for controlling the\n\n /// microstepping mode as an argument. What exactly those are depends on the\n\n /// specific driver. Typically they are the output pins that are connected\n\n /// to the mode pins of the driver.\n\n ///\n\n /// This method is only available, if the driver supports enabling step mode\n\n /// control. 
It might no longer be available, once step mode control has\n\n /// been enabled.\n\n pub fn enable_step_mode_control<Resources, Timer>(\n\n self,\n\n res: Resources,\n\n initial: <Driver::WithStepModeControl as SetStepMode>::StepMode,\n\n timer: &mut Timer,\n\n ) -> Result<\n\n Stepper<Driver::WithStepModeControl>,\n\n SignalError<\n\n Infallible, // only applies to `SetDirection`, `Step`\n", "file_path": "src/stepper/mod.rs", "rank": 62, "score": 27699.531456530378 }, { "content": " SetDirectionFuture::new(\n\n direction,\n\n RefMut(&mut self.driver),\n\n RefMut(timer),\n\n )\n\n }\n\n\n\n /// Enable step control\n\n ///\n\n /// Consumes this instance of `Stepper` and returns a new instance that\n\n /// provides control over stepping the motor. Once this method has been\n\n /// called, the [`Stepper::step`] method becomes available.\n\n ///\n\n /// Takes the hardware resources that are required for controlling the\n\n /// stepping as an argument. What exactly those are depends on the specific\n\n /// driver. Typically it's going to be the output pin that is connected to\n\n /// the hardware's STEP pin.\n\n ///\n\n /// This method is only available, if the driver/controller supports\n\n /// enabling step control. It might no longer be available, once step\n", "file_path": "src/stepper/mod.rs", "rank": 63, "score": 27698.894424565064 }, { "content": " RefMut(timer),\n\n )\n\n }\n\n\n\n /// Enable direction control\n\n ///\n\n /// Consumes this instance of `Stepper` and returns a new instance that\n\n /// provides control over the motor direction. Once this method has been\n\n /// called, the [`Stepper::set_direction`] method becomes available.\n\n ///\n\n /// Takes the hardware resources that are required for controlling the\n\n /// direction as an argument. What exactly those are depends on the specific\n\n /// driver. Typically it's going to be the output pin that is connected to\n\n /// the hardware's DIR pin.\n\n ///\n\n /// This method is only available, if the driver supports enabling direction\n\n /// control. It might no longer be available, once direction control has\n\n /// been enabled.\n\n pub fn enable_direction_control<Resources, Timer>(\n\n self,\n", "file_path": "src/stepper/mod.rs", "rank": 64, "score": 27698.19588952447 }, { "content": " traits::{\n\n EnableDirectionControl, EnableMotionControl, EnableStepControl,\n\n EnableStepModeControl, MotionControl, SetDirection, SetStepMode, Step,\n\n },\n\n util::ref_mut::RefMut,\n\n Direction,\n\n};\n\n\n\n/// Unified stepper motor interface\n\n///\n\n/// Wraps a driver that interfaces with the motor-controlling hardware and\n\n/// abstracts over it, providing an interface that works the same, no matter\n\n/// what kind of hardware controls the stepper motor.\n\n///\n\n/// You can construct an instance of this type using [`Stepper::from_driver`].\n\n///\n\n/// # Nomenclature\n\n///\n\n/// This structs wraps a software component that interfaces with hardware that\n\n/// controls a stepper motor. That software component is called a \"driver\",\n", "file_path": "src/stepper/mod.rs", "rank": 65, "score": 27697.45908928582 }, { "content": " /// Moves the motor to the given position (`target_step`), while respecting\n\n /// the maximum velocity (`max_velocity`). 
The specifics of the motion\n\n /// profile (like acceleration and jerk) are driver-defined.\n\n ///\n\n /// It might be possible to influence the parameters of the motion profile\n\n /// through the resources passed to [`Stepper::enable_motion_control`],\n\n /// which might include configuration.\n\n ///\n\n /// To modify on ongoing movement, you can drop the future returned by this\n\n /// method and call it again with different parameters (or call another\n\n /// method).\n\n ///\n\n /// You might need to call [`Stepper::enable_motion_control`] to make this\n\n /// method available.\n\n pub fn move_to_position<'r>(\n\n &'r mut self,\n\n max_velocity: Driver::Velocity,\n\n target_step: i32,\n\n ) -> MoveToFuture<RefMut<'r, Driver>>\n\n where\n", "file_path": "src/stepper/mod.rs", "rank": 66, "score": 27697.04080475099 }, { "content": " pub fn pulse_length(&self) -> Nanoseconds\n\n where\n\n Driver: Step,\n\n {\n\n Driver::PULSE_LENGTH\n\n }\n\n\n\n /// Enable motion control\n\n ///\n\n /// Consumes this instance of `Stepper` and returns a new instance that\n\n /// provides motion control capabilities. Once this method has been called,\n\n /// the motion control API ([`Stepper::move_to_position`],\n\n /// [`Stepper::reset_position`]) becomes available.\n\n ///\n\n /// Takes the hardware resources that are required for motion control as an\n\n /// argument. What exactly those are depends on the specific driver.\n\n /// Typically it's either going to be some kind of communication interface,\n\n /// for drivers that have access to hardware support for motion control, or\n\n /// a motion profile from the RampMaker library, for drivers that have\n\n /// support for setting direction and stepping and require a software\n", "file_path": "src/stepper/mod.rs", "rank": 67, "score": 27696.934665213077 }, { "content": "/// `Stepper` provides a number of `enable_*` methods that enable access to a\n\n/// specific hardware capability, if the hardware and driver support this. Once\n\n/// that method has been called, the methods that control the hardware\n\n/// capability are available.\n\n///\n\n/// ## Step mode control\n\n///\n\n/// Enable this capability with [`Stepper::enable_step_mode_control`] and use it\n\n/// with [`Stepper::set_step_mode`]. Since not all stepper drivers support\n\n/// microstepping and of those that do, not all support setting it from\n\n/// software, this capability might not be available for all drivers.\n\n///\n\n/// ## Direction control & step control\n\n///\n\n/// Enable direction control with [`Stepper::enable_direction_control`] and use\n\n/// it with [`Stepper::set_direction`]. Enable step control with\n\n/// [`Stepper::enable_step_control`] and use ith with [`Stepper::step`].\n\n///\n\n/// These capabilities are supported by virtually all stepper drivers, but might\n\n/// not be available for motion controllers. Where they are available, they are\n", "file_path": "src/stepper/mod.rs", "rank": 68, "score": 27695.887399033883 }, { "content": " /// fallback for motion control.\n\n ///\n\n /// This method should be available for virtually all drivers, either via\n\n /// hardware support, or through the aforementioned software fallback. 
It\n\n /// might no longer be available, once motion control support has been\n\n /// enabled.\n\n pub fn enable_motion_control<Resources>(\n\n self,\n\n res: Resources,\n\n ) -> Stepper<Driver::WithMotionControl>\n\n where\n\n Driver: EnableMotionControl<Resources>,\n\n {\n\n Stepper {\n\n driver: self.driver.enable_motion_control(res),\n\n }\n\n }\n\n\n\n /// Move the motor to the given position\n\n ///\n", "file_path": "src/stepper/mod.rs", "rank": 69, "score": 27695.66969995781 }, { "content": " /// control has been enabled.\n\n pub fn enable_step_control<Resources>(\n\n self,\n\n res: Resources,\n\n ) -> Stepper<Driver::WithStepControl>\n\n where\n\n Driver: EnableStepControl<Resources>,\n\n {\n\n Stepper {\n\n driver: self.driver.enable_step_control(res),\n\n }\n\n }\n\n\n\n /// Rotates the motor one (micro-)step in the given direction\n\n ///\n\n /// Steps the motor one step in the direction that was previously set,\n\n /// according to current microstepping configuration. To achieve a specific\n\n /// speed, the user must call this method at an appropriate frequency.\n\n ///\n\n /// You might need to call [`Stepper::enable_step_control`] to make this\n", "file_path": "src/stepper/mod.rs", "rank": 70, "score": 27695.16765003252 }, { "content": "/// typically available together. They are modeled as separate capabilities, as\n\n/// to not make any assumptions. If you want to generate steps from software,\n\n/// for example, but control direction via some other means, then you can.\n\n///\n\n/// ## Motion control\n\n///\n\n/// Enable motion control with [`Stepper::enable_motion_control`] and use it\n\n/// with [`Stepper::move_to_position`] and [`Stepper::reset_position`].\n\n///\n\n/// Motion control capability is directly supported by motion control chips, but\n\n/// a software implementation based on direction and step control exists in the\n\n/// [`motion_control`] module, to make the capability available for all drivers.\n\n///\n\n/// [`motion_control`]: crate::motion_control\n\n///\n\n/// # Notes on timer use\n\n///\n\n/// Some of this struct's methods take a timer argument. This is expected to be\n\n/// an implementation of [`embedded_hal::timer::nb::CountDown`], with the\n\n/// additional requirement that `CountDown::Time` has a `TryFrom<Nanoseconds>`\n", "file_path": "src/stepper/mod.rs", "rank": 71, "score": 27694.070843268644 }, { "content": "/// implementation, where `Nanoseconds` refers to\n\n/// [`embedded_time::duration::Nanoseconds`].\n\n///\n\n/// Not every `CountDown` implementation provides this for its `Time` type, so\n\n/// it might be necessary that the user either adds this `embedded_time`\n\n/// integration to the HAL library they are using, or provides a wrapper around\n\n/// the `CountDown` implementation in their own code, adding the conversion\n\n/// there.\n\n///\n\n/// Every method that takes a timer argument internally performs the conversion\n\n/// from `Nanoseconds` to the timers `Time` type. Since the nanosecond values\n\n/// are constant and the `CountDown` implementation is known statically, the\n\n/// compiler should have enough information to perform this conversion at\n\n/// compile-time.\n\n///\n\n/// Unfortunately there is currently no way to make sure that this optimization\n\n/// actually happens. Additions like [RFC 2632], [RFC 2920], and possibly others\n\n/// along those lines, could help with this in the future. 
For now, users must\n\n/// manually inspect the generated code and tweak optimization settings (and\n\n/// possibly the HAL-specific conversion code), if this level of performance is\n", "file_path": "src/stepper/mod.rs", "rank": 72, "score": 27693.25933688425 }, { "content": "/// because it \"drives\" the hardware it interfaces with.\n\n///\n\n/// The driven hardware typically comes in two forms:\n\n///\n\n/// - A low-level chip controlled by STEP and DIR signals, often called a\n\n/// stepper driver (yes, somewhat confusing) or stepper controller.\n\n/// - A higher-level chip, typically controlled through some serial interface,\n\n/// often called a motion controller.\n\n///\n\n/// In practice, a given product can cleanly fall into one of the two camps,\n\n/// both, or anything in between.\n\n///\n\n/// # Hardware capabilities\n\n///\n\n/// Depending on the actual hardware we're interfacing with, we might only have\n\n/// access to the bare minimum functionality (STEP and DIR pins) or high-level\n\n/// motion control features. Since `Stepper` is agnostic on the driver and the\n\n/// hardware it interfaces with, there must be a way to deal with those\n\n/// differing capabilities.\n\n///\n", "file_path": "src/stepper/mod.rs", "rank": 73, "score": 27692.405443427542 }, { "content": "{\n\n const SETUP_TIME: Nanoseconds = T::SETUP_TIME;\n\n const HOLD_TIME: Nanoseconds = T::HOLD_TIME;\n\n\n\n type Error = T::Error;\n\n type StepMode = T::StepMode;\n\n\n\n fn apply_mode_config(\n\n &mut self,\n\n step_mode: Self::StepMode,\n\n ) -> Result<(), Self::Error> {\n\n self.0.apply_mode_config(step_mode)\n\n }\n\n\n\n fn enable_driver(&mut self) -> Result<(), Self::Error> {\n\n self.0.enable_driver()\n\n }\n\n}\n\n\n\nimpl<'r, T> Step for RefMut<'r, T>\n", "file_path": "src/util/ref_mut.rs", "rank": 74, "score": 27431.72014951331 }, { "content": "impl<'r, T> timer::CountDown for RefMut<'r, T>\n\nwhere\n\n T: timer::CountDown,\n\n{\n\n type Error = T::Error;\n\n type Time = T::Time;\n\n\n\n fn start<Time>(&mut self, count: Time) -> Result<(), Self::Error>\n\n where\n\n Time: Into<Self::Time>,\n\n {\n\n self.0.start(count)\n\n }\n\n\n\n fn wait(&mut self) -> nb::Result<(), Self::Error> {\n\n self.0.wait()\n\n }\n\n}\n\n\n\nimpl<'r, T> MotionControl for RefMut<'r, T>\n", "file_path": "src/util/ref_mut.rs", "rank": 75, "score": 27427.95355024864 }, { "content": " }\n\n}\n\n\n\nimpl<'r, T> SetDirection for RefMut<'r, T>\n\nwhere\n\n T: SetDirection,\n\n{\n\n const SETUP_TIME: Nanoseconds = T::SETUP_TIME;\n\n\n\n type Dir = T::Dir;\n\n type Error = T::Error;\n\n\n\n fn dir(&mut self) -> Result<&mut Self::Dir, Self::Error> {\n\n self.0.dir()\n\n }\n\n}\n\n\n\nimpl<'r, T> SetStepMode for RefMut<'r, T>\n\nwhere\n\n T: SetStepMode,\n", "file_path": "src/util/ref_mut.rs", "rank": 76, "score": 27426.400842558287 }, { "content": "where\n\n T: MotionControl,\n\n{\n\n type Velocity = T::Velocity;\n\n type Error = T::Error;\n\n\n\n fn move_to_position(\n\n &mut self,\n\n max_velocity: Self::Velocity,\n\n target_step: i32,\n\n ) -> Result<(), Self::Error> {\n\n self.0.move_to_position(max_velocity, target_step)\n\n }\n\n\n\n fn reset_position(&mut self, step: i32) -> Result<(), Self::Error> {\n\n self.0.reset_position(step)\n\n }\n\n\n\n fn update(&mut self) -> Result<bool, Self::Error> {\n\n self.0.update()\n", "file_path": "src/util/ref_mut.rs", "rank": 77, "score": 27425.271255478077 }, { "content": "where\n\n T: Step,\n\n{\n\n const PULSE_LENGTH: Nanoseconds = T::PULSE_LENGTH;\n\n\n\n type Step = T::Step;\n\n 
type Error = T::Error;\n\n\n\n fn step(&mut self) -> Result<&mut Self::Step, Self::Error> {\n\n self.0.step()\n\n }\n\n}\n", "file_path": "src/util/ref_mut.rs", "rank": 78, "score": 27424.427020005056 }, { "content": "//! Generic wrapper around a mutable reference\n\n//!\n\n//! See [`RefMut`] for more information.\n\n\n\nuse embedded_hal::timer::nb as timer;\n\nuse embedded_time::duration::Nanoseconds;\n\n\n\nuse crate::traits::{MotionControl, SetDirection, SetStepMode, Step};\n\n\n\n/// Generic wrapper around a mutable reference\n\n///\n\n/// This is used as a means of implementing traits that are already implemented\n\n/// for `T` for `&mut T` too. While this is redundant for the traits from this\n\n/// crate, we couldn't do this for `embedded_hal::timer::CountDown` without a\n\n/// crate-local type.\n\n///\n\n/// The purpose of this is to make the future types more flexible, making it\n\n/// possible to move types into them, or just provide mutable references.\n\npub struct RefMut<'r, T>(pub &'r mut T);\n\n\n", "file_path": "src/util/ref_mut.rs", "rank": 79, "score": 27420.10752631719 }, { "content": "}\n\n\n\n/// An error occurred while converting between time formats\n\n#[derive(Debug, Eq, PartialEq)]\n\npub enum TimeConversionError<NanosecondsToTicksError, DelayToTicksError> {\n\n /// Error converting from nanoseconds to timer ticks\n\n NanosecondsToTicks(NanosecondsToTicksError),\n\n\n\n /// Error converting from RampMaker delay value to timer ticks\n\n DelayToTicks(DelayToTicksError),\n\n}\n\n\n\n/// The software motion control was busy, or another generic error occurred\n\n#[derive(Debug, Eq, PartialEq)]\n\npub enum BusyError<T> {\n\n /// The software motion control was busy\n\n ///\n\n /// This happens while a movement is going on, and the driver is not\n\n /// available.\n\n Busy,\n\n\n\n /// Another error has occurred\n\n Other(T),\n\n}\n", "file_path": "src/motion_control/error.rs", "rank": 80, "score": 27308.78918164436 }, { "content": "/// An error that can occur while using [`SoftwareMotionControl`]\n\n///\n\n/// [`SoftwareMotionControl`]: super::SoftwareMotionControl\n\n#[derive(Debug, Eq, PartialEq)]\n\npub enum Error<\n\n SetDirectionPinUnavailable,\n\n SetDirectionError,\n\n StepPinUnavailable,\n\n StepError,\n\n TimerError,\n\n NanosecondsToTicksError,\n\n DelayToTicksError,\n\n> {\n\n /// Error while setting direction\n\n SetDirection(\n\n crate::SignalError<\n\n SetDirectionPinUnavailable,\n\n SetDirectionError,\n\n NanosecondsToTicksError,\n\n TimerError,\n", "file_path": "src/motion_control/error.rs", "rank": 81, "score": 27306.866361577355 }, { "content": " >,\n\n ),\n\n\n\n /// Error while stepping the motor\n\n Step(\n\n crate::SignalError<\n\n StepPinUnavailable,\n\n StepError,\n\n NanosecondsToTicksError,\n\n TimerError,\n\n >,\n\n ),\n\n\n\n /// Error while converting between time formats\n\n TimeConversion(\n\n TimeConversionError<NanosecondsToTicksError, DelayToTicksError>,\n\n ),\n\n\n\n /// Error while waiting for a step to finish\n\n StepDelay(TimerError),\n", "file_path": "src/motion_control/error.rs", "rank": 82, "score": 27303.27807056388 }, { "content": "//! STSPIN220 Driver\n\n//!\n\n//! Platform-agnostic driver library for the STSPIN220 stepper motor driver.\n\n//! This crate is a specialized facade for the [Stepper] library. Please\n\n//! consider using Stepper directly, as it provides drivers for more stepper\n\n//! motor drivers, as well as an interface to abstract over them.\n\n//!\n\n//! 
See [Stepper] for more documentation and usage examples.\n\n//!\n\n//! [Stepper]: https://crates.io/crates/stepper\n\n\n\n#![no_std]\n\n#![deny(missing_docs)]\n\n\n\npub use stepper::{drivers::stspin220::*, *};\n", "file_path": "drivers/stspin220/src/lib.rs", "rank": 83, "score": 26522.447343413678 }, { "content": "//! DRV8825 Driver\n\n//!\n\n//! Platform-agnostic driver library for the DRV8825 stepper motor driver.\n\n//! This crate is a specialized facade for the [Stepper] library. Please\n\n//! consider using Stepper directly, as it provides drivers for more stepper\n\n//! motor drivers, as well as an interface to abstract over them.\n\n//!\n\n//! See [Stepper] for more documentation and usage examples.\n\n//!\n\n//! [Stepper]: https://crates.io/crates/stepper\n\n\n\n#![no_std]\n\n#![deny(missing_docs)]\n\n\n\npub use stepper::{drivers::drv8825::*, *};\n", "file_path": "drivers/drv8825/src/lib.rs", "rank": 84, "score": 26522.447343413678 }, { "content": " ///\n\n /// [`Stepper::step`]: crate::Stepper::step\n\n pub fn step(\n\n &mut self,\n\n ) -> Result<StepFuture<RefMut<Driver>, RefMut<Timer>>, BusyError<Infallible>>\n\n where\n\n Driver: Step,\n\n Timer: timer::CountDown,\n\n Timer::Time: TryFrom<Nanoseconds>,\n\n {\n\n let future = match &mut self.state {\n\n State::Idle { driver, timer } => {\n\n StepFuture::new(RefMut(driver), RefMut(timer))\n\n }\n\n _ => return Err(BusyError::Busy),\n\n };\n\n\n\n Ok(future)\n\n }\n\n}\n", "file_path": "src/motion_control/mod.rs", "rank": 85, "score": 26227.190071741872 }, { "content": " }\n\n\n\n fn reset_position(&mut self, step: i32) -> Result<(), Self::Error> {\n\n self.current_step = step;\n\n Ok(())\n\n }\n\n\n\n fn update(&mut self) -> Result<bool, Self::Error> {\n\n // Otherwise the closure will borrow all of `self`.\n\n let new_motion = &mut self.new_motion;\n\n let profile = &mut self.profile;\n\n let current_step = &mut self.current_step;\n\n let current_direction = &mut self.current_direction;\n\n let convert = &self.convert;\n\n\n\n replace_with_and_return(\n\n &mut self.state,\n\n || State::Invalid,\n\n |state| {\n\n state::update(\n", "file_path": "src/motion_control/mod.rs", "rank": 86, "score": 26226.793815330035 }, { "content": " Driver: SetStepMode,\n\n Profile: MotionProfile,\n\n{\n\n const SETUP_TIME: Nanoseconds = Driver::SETUP_TIME;\n\n const HOLD_TIME: Nanoseconds = Driver::HOLD_TIME;\n\n\n\n type Error = BusyError<Driver::Error>;\n\n type StepMode = Driver::StepMode;\n\n\n\n fn apply_mode_config(\n\n &mut self,\n\n step_mode: Self::StepMode,\n\n ) -> Result<(), Self::Error> {\n\n match self.driver_mut() {\n\n Some(driver) => driver\n\n .apply_mode_config(step_mode)\n\n .map_err(|err| BusyError::Other(err)),\n\n None => Err(BusyError::Busy),\n\n }\n\n }\n", "file_path": "src/motion_control/mod.rs", "rank": 87, "score": 26226.50082004285 }, { "content": " type Error = BusyError<Driver::Error>;\n\n\n\n fn dir(&mut self) -> Result<&mut Self::Dir, Self::Error> {\n\n match self.driver_mut() {\n\n Some(driver) => driver.dir().map_err(|err| BusyError::Other(err)),\n\n None => Err(BusyError::Busy),\n\n }\n\n }\n\n}\n\n\n\nimpl<Driver, Timer, Profile, Convert> Step\n\n for SoftwareMotionControl<Driver, Timer, Profile, Convert>\n\nwhere\n\n Driver: Step,\n\n Profile: MotionProfile,\n\n{\n\n const PULSE_LENGTH: Nanoseconds = Driver::PULSE_LENGTH;\n\n\n\n type Step = Driver::Step;\n\n type Error = BusyError<Driver::Error>;\n", "file_path": "src/motion_control/mod.rs", "rank": 88, "score": 26226.422986647915 }, { "content": "\n\n fn 
enable_driver(&mut self) -> Result<(), Self::Error> {\n\n match self.driver_mut() {\n\n Some(driver) => {\n\n driver.enable_driver().map_err(|err| BusyError::Other(err))\n\n }\n\n None => Err(BusyError::Busy),\n\n }\n\n }\n\n}\n\n\n\nimpl<Driver, Timer, Profile, Convert> SetDirection\n\n for SoftwareMotionControl<Driver, Timer, Profile, Convert>\n\nwhere\n\n Driver: SetDirection,\n\n Profile: MotionProfile,\n\n{\n\n const SETUP_TIME: Nanoseconds = Driver::SETUP_TIME;\n\n\n\n type Dir = Driver::Dir;\n", "file_path": "src/motion_control/mod.rs", "rank": 89, "score": 26224.9777222298 }, { "content": " /// Returns [`BusyError::Busy`], if a motion is ongoing.\n\n ///\n\n /// [`Stepper::set_direction`]: crate::Stepper::set_direction\n\n pub fn set_direction(\n\n &mut self,\n\n direction: Direction,\n\n ) -> Result<\n\n SetDirectionFuture<RefMut<Driver>, RefMut<Timer>>,\n\n BusyError<Infallible>,\n\n >\n\n where\n\n Driver: SetDirection,\n\n Timer: timer::CountDown,\n\n Timer::Time: TryFrom<Nanoseconds>,\n\n {\n\n let future = match &mut self.state {\n\n State::Idle { driver, timer } => SetDirectionFuture::new(\n\n direction,\n\n RefMut(driver),\n\n RefMut(timer),\n", "file_path": "src/motion_control/mod.rs", "rank": 90, "score": 26224.28826499805 }, { "content": "\n\n fn step(&mut self) -> Result<&mut Self::Step, Self::Error> {\n\n match self.driver_mut() {\n\n Some(driver) => driver.step().map_err(|err| BusyError::Other(err)),\n\n None => Err(BusyError::Busy),\n\n }\n\n }\n\n}\n\n\n\n// Blanket implementation of `EnableMotionControl` for all STEP/DIR stepper\n\n// drivers.\n\nimpl<Driver, Timer, Profile, Convert>\n\n EnableMotionControl<(Timer, Profile, Convert)> for Driver\n\nwhere\n\n Driver: SetDirection + Step,\n\n Profile: MotionProfile,\n\n Timer: timer::CountDown,\n\n Profile::Velocity: Copy,\n\n Convert: DelayToTicks<Profile::Delay, Ticks = Timer::Time>,\n\n Convert::Ticks: TryFrom<Nanoseconds> + ops::Sub<Output = Convert::Ticks>,\n", "file_path": "src/motion_control/mod.rs", "rank": 91, "score": 26224.182810404986 }, { "content": " >;\n\n\n\n fn move_to_position(\n\n &mut self,\n\n max_velocity: Self::Velocity,\n\n target_step: i32,\n\n ) -> Result<(), Self::Error> {\n\n let steps_from_here = target_step - self.current_step;\n\n\n\n self.profile\n\n .enter_position_mode(max_velocity, steps_from_here.abs() as u32);\n\n\n\n let direction = if steps_from_here > 0 {\n\n Direction::Forward\n\n } else {\n\n Direction::Backward\n\n };\n\n self.new_motion = Some(direction);\n\n\n\n Ok(())\n", "file_path": "src/motion_control/mod.rs", "rank": 92, "score": 26223.996839710602 }, { "content": " ///\n\n /// # Errors\n\n ///\n\n /// Returns [`BusyError::Busy`], if a motion is ongoing.\n\n ///\n\n /// [`Stepper::set_step_mode`]: crate::Stepper::set_step_mode\n\n pub fn set_step_mode(\n\n &mut self,\n\n step_mode: Driver::StepMode,\n\n ) -> Result<\n\n SetStepModeFuture<RefMut<Driver>, RefMut<Timer>>,\n\n BusyError<Infallible>,\n\n >\n\n where\n\n Driver: SetStepMode,\n\n Timer: timer::CountDown,\n\n Timer::Time: TryFrom<Nanoseconds>,\n\n {\n\n let future = match &mut self.state {\n\n State::Idle { driver, timer } => {\n", "file_path": "src/motion_control/mod.rs", "rank": 93, "score": 26222.600052659513 }, { "content": " ///\n\n /// Instead of using this constructor directly, you can instead use\n\n /// [`Stepper::enable_motion_control`] with any driver that implements\n\n /// [`SetDirection`] and [`Step`], providing timer and a motion profile.\n\n /// This module provides a blanket implementation of 
[`EnableMotionControl`]\n\n /// to make this work.\n\n ///\n\n /// [`Stepper::enable_motion_control`]: crate::Stepper::enable_motion_control\n\n pub fn new(\n\n driver: Driver,\n\n timer: Timer,\n\n profile: Profile,\n\n convert: Convert,\n\n ) -> Self {\n\n Self {\n\n state: State::Idle { driver, timer },\n\n new_motion: None,\n\n profile,\n\n current_step: 0,\n\n // Doesn't matter what we initialize it with. We're only using it\n", "file_path": "src/motion_control/mod.rs", "rank": 94, "score": 26222.46658682032 }, { "content": "//! Software implementation of motion control capability\n\n//!\n\n//! See [`SoftwareMotionControl`] for more information.\n\n\n\nmod conversion;\n\nmod error;\n\nmod state;\n\n\n\npub use self::{\n\n conversion::DelayToTicks,\n\n error::{BusyError, Error, TimeConversionError},\n\n};\n\n\n\nuse core::{\n\n convert::{Infallible, TryFrom},\n\n ops,\n\n};\n\n\n\nuse embedded_hal::{digital::blocking::OutputPin, timer::nb as timer};\n\nuse embedded_time::duration::Nanoseconds;\n", "file_path": "src/motion_control/mod.rs", "rank": 95, "score": 26222.06756625618 }, { "content": "/// You can use `SoftwareMotionControl` directly, but like a driver, it is\n\n/// designed to be used through the [`Stepper`] API.\n\n///\n\n/// [`Stepper`]: crate::Stepper\n\npub struct SoftwareMotionControl<Driver, Timer, Profile: MotionProfile, Convert>\n\n{\n\n state: State<Driver, Timer, Profile>,\n\n new_motion: Option<Direction>,\n\n profile: Profile,\n\n current_step: i32,\n\n current_direction: Direction,\n\n convert: Convert,\n\n}\n\n\n\nimpl<Driver, Timer, Profile, Convert>\n\n SoftwareMotionControl<Driver, Timer, Profile, Convert>\n\nwhere\n\n Profile: MotionProfile,\n\n{\n\n /// Construct a new instance of `SoftwareMotionControl`\n", "file_path": "src/motion_control/mod.rs", "rank": 96, "score": 26221.332490641205 }, { "content": " SetStepModeFuture::new(step_mode, RefMut(driver), RefMut(timer))\n\n }\n\n _ => return Err(BusyError::Busy),\n\n };\n\n\n\n Ok(future)\n\n }\n\n\n\n /// Set direction of the wrapped driver\n\n ///\n\n /// This method is a more convenient alternative to\n\n /// [`Stepper::set_direction`], which requires a timer, while this methods\n\n /// reuses the timer that `SoftwareMotionControl` already owns.\n\n ///\n\n /// However, while [`Stepper::set_direction`] is part of the generic API,\n\n /// this method is only available, if you statically know that you're\n\n /// working with a driver wrapped by `SoftwareMotionControl`.\n\n ///\n\n /// # Errors\n\n ///\n", "file_path": "src/motion_control/mod.rs", "rank": 97, "score": 26221.292671685824 }, { "content": " /// This is only possible if there is no ongoing movement.\n\n pub fn driver_mut(&mut self) -> Option<&mut Driver> {\n\n if let State::Idle { driver, .. } = &mut self.state {\n\n return Some(driver);\n\n }\n\n\n\n None\n\n }\n\n\n\n /// Access a reference to the wrapped timer\n\n ///\n\n /// This is only possible if there is no ongoing movement.\n\n pub fn timer(&self) -> Option<&Timer> {\n\n if let State::Idle { timer, .. 
} = &self.state {\n\n return Some(timer);\n\n }\n\n\n\n None\n\n }\n\n\n", "file_path": "src/motion_control/mod.rs", "rank": 98, "score": 26219.644131125064 }, { "content": "use ramp_maker::MotionProfile;\n\nuse replace_with::replace_with_and_return;\n\n\n\nuse crate::{\n\n traits::{\n\n EnableMotionControl, MotionControl, SetDirection, SetStepMode, Step,\n\n },\n\n util::ref_mut::RefMut,\n\n Direction, SetDirectionFuture, SetStepModeFuture, StepFuture,\n\n};\n\n\n\nuse self::state::State;\n\n\n\n/// Software implementation of motion control capability\n\n///\n\n/// Some driver natively support motion control capability. This is a software\n\n/// implementation of the [`MotionControl`] trait for those drivers that don't.\n\n/// It wraps a driver that implements [`SetDirection`] and [`Step`], and in turn\n\n/// acts like a driver itself, adding to the wrapped driver's capabilities.\n\n///\n", "file_path": "src/motion_control/mod.rs", "rank": 99, "score": 26218.140166128927 } ]
Rust
turbo-txpool/src/error.rs
quilt/turbo
d4efba95f82669d3976120295c50090b31c08428
pub(crate) mod decode_error {
    use snafu::{ResultExt, Snafu};

    use ethereum_interfaces::txpool::ImportResult;

    #[derive(Debug, Snafu)]
    #[non_exhaustive]
    #[snafu(visibility = "pub(crate)")]
    pub enum DecodeError {
        RlpDecode {
            source: Box<dyn std::error::Error + Send + Sync>,
            field: Option<&'static str>,
        },
        IntegerOverflow,
    }

    pub(crate) trait RlpResultExt<T> {
        fn context_field(self, field: &'static str) -> Result<T, DecodeError>;
    }

    impl<T> RlpResultExt<T> for Result<T, rlp::DecoderError> {
        fn context_field(self, field: &'static str) -> Result<T, DecodeError> {
            self.map_err(|e| Box::new(e).into())
                .context(RlpDecode { field })
        }
    }

    impl From<DecodeError> for ImportResult {
        fn from(e: DecodeError) -> ImportResult {
            match e {
                DecodeError::RlpDecode { .. } => ImportResult::Invalid,
                DecodeError::IntegerOverflow => ImportResult::InternalError,
            }
        }
    }
}

pub(crate) mod import_error {
    use ethereum_types::{Address, H256, U256};

    use snafu::Snafu;

    use ethereum_interfaces::txpool::ImportResult;

    #[derive(Debug, Snafu)]
    #[non_exhaustive]
    #[snafu(visibility = "pub(crate)")]
    pub enum ImportError {
        NonceUsed {
            tx_hash: H256,
        },

        #[snafu(display(
            "nonce is too far in the future tx_nonce={} from={} tx={}",
            from,
            tx_nonce,
            tx_hash,
        ))]
        NonceGap {
            from: Address,
            tx_nonce: u64,
            tx_hash: H256,
        },

        InsufficientBalance {
            tx_hash: H256,
        },

        NotReady,

        FeeTooLow {
            minimum: U256,
        },

        Ecdsa {
            source: Box<dyn std::error::Error + Send + Sync>,
        },

        #[snafu(context(false))]
        Decode {
            source: super::DecodeError,
        },

        AlreadyExists {
            tx_hash: H256,
        },

        RequestFailed {
            source: tonic::Status,
        },

        IncompleteMessage,
    }

    impl From<ImportError> for ImportResult {
        fn from(e: ImportError) -> ImportResult {
            match e {
                ImportError::NonceGap { .. } => ImportResult::Invalid,
                ImportError::NonceUsed { .. } => ImportResult::Invalid,
                ImportError::FeeTooLow { .. } => ImportResult::FeeTooLow,
                ImportError::NotReady => ImportResult::InternalError,
                ImportError::Ecdsa { .. } => ImportResult::Invalid,
                ImportError::Decode { .. } => ImportResult::Invalid,
                ImportError::IncompleteMessage => ImportResult::Invalid,
                ImportError::AlreadyExists { .. } => {
                    ImportResult::AlreadyExists
                }
                ImportError::RequestFailed { .. } => {
                    ImportResult::InternalError
                }
                ImportError::InsufficientBalance { .. } => {
                    ImportResult::Invalid
                }
            }
        }
    }
}

pub use self::decode_error::DecodeError;
pub use self::import_error::ImportError;
pub(crate) mod decode_error {
    use snafu::{ResultExt, Snafu};

    use ethereum_interfaces::txpool::ImportResult;

    #[derive(Debug, Snafu)]
    #[non_exhaustive]
    #[snafu(visibility = "pub(crate)")]
    pub enum DecodeError {
        RlpDecode {
            source: Box<dyn std::error::Error + Send + Sync>,
            field: Option<&'static str>,
        },
        IntegerOverflow,
    }

    pub(crate) trait RlpResultExt<T> {
        fn context_field(self, field: &'static str) -> Result<T, DecodeError>;
    }

    impl<T> RlpResultExt<T> for Result<T, rlp::DecoderError> {
        fn context_field(self, field: &'static str) -> Result<T, DecodeError> {
            self.map_err(|e| Box::new(e).into())
                .context(RlpDecode { field })
        }
    }

    impl From<DecodeError> for ImportResult {
    }
}

pub(crate) mod import_error {
    use ethereum_types::{Address, H256, U256};

    use snafu::Snafu;

    use ethereum_interfaces::txpool::ImportResult;

    #[derive(Debug, Snafu)]
    #[non_exhaustive]
    #[snafu(visibility = "pub(crate)")]
    pub enum ImportError {
        NonceUsed {
            tx_hash: H256,
        },

        #[snafu(display(
            "nonce is too far in the future tx_nonce={} from={} tx={}",
            from,
            tx_nonce,
            tx_hash,
        ))]
        NonceGap {
            from: Address,
            tx_nonce: u64,
            tx_hash: H256,
        },

        InsufficientBalance {
            tx_hash: H256,
        },

        NotReady,

        FeeTooLow {
            minimum: U256,
        },

        Ecdsa {
            source: Box<dyn std::error::Error + Send + Sync>,
        },

        #[snafu(context(false))]
        Decode {
            source: super::DecodeError,
        },

        AlreadyExists {
            tx_hash: H256,
        },

        RequestFailed {
            source: tonic::Status,
        },

        IncompleteMessage,
    }

    impl From<ImportError> for ImportResult {
        fn from(e: ImportError) -> ImportResult {
            match e {
                ImportError::NonceGap { .. } => ImportResult::Invalid,
                ImportError::NonceUsed { .. } => ImportResult::Invalid,
                ImportError::FeeTooLow { .. } => ImportResult::FeeTooLow,
                ImportError::NotReady => ImportResult::InternalError,
                ImportError::Ecdsa { .. } => ImportResult::Invalid,
                ImportError::Decode { .. } => ImportResult::Invalid,
                ImportError::IncompleteMessage => ImportResult::Invalid,
                ImportError::AlreadyExists { .. } => {
                    ImportResult::AlreadyExists
                }
                ImportError::RequestFailed { .. } => {
                    ImportResult::InternalError
                }
                ImportError::InsufficientBalance { .. } => {
                    ImportResult::Invalid
                }
            }
        }
    }
}

pub use self::decode_error::DecodeError;
pub use self::import_error::ImportError;
        fn from(e: DecodeError) -> ImportResult {
            match e {
                DecodeError::RlpDecode { .. } => ImportResult::Invalid,
                DecodeError::IntegerOverflow => ImportResult::InternalError,
            }
        }
function_block-full_function
[ { "content": "#[async_trait]\n\npub trait Control {\n\n type BlockStream: futures_core::stream::Stream<\n\n Item = Result<BlockDiff, Status>,\n\n >;\n\n\n\n async fn block_stream(\n\n &mut self,\n\n request: BlockStreamRequest,\n\n ) -> Result<Self::BlockStream, Status>;\n\n\n\n async fn account_info(\n\n &mut self,\n\n request: AccountInfoRequest,\n\n ) -> Result<AccountInfoReply, Status>;\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct PbControl {\n\n client: TxpoolControlClient<Channel>,\n\n}\n", "file_path": "turbo-txpool/src/control.rs", "rank": 0, "score": 50362.41261133185 }, { "content": " #[async_trait::async_trait]\n\n trait Sign {\n\n async fn sign_with(self, idx: usize) -> Tx;\n\n }\n\n\n\n #[async_trait::async_trait]\n\n impl Sign for TransactionRequest {\n\n async fn sign_with(self, idx: usize) -> Tx {\n\n let sig = wallet(idx).sign_transaction(&self).await.unwrap();\n\n let to = match self.to {\n\n Some(NameOrAddress::Address(to)) => Some(to),\n\n Some(NameOrAddress::Name(_)) => panic!(\"address only\"),\n\n None => None,\n\n };\n\n\n\n Tx {\n\n to,\n\n value: self.value.unwrap_or_default(),\n\n gas_limit: self.gas.unwrap().as_u64(),\n\n gas_price: self.gas_price.unwrap(),\n\n input: self.data.map(|x| x.to_vec()).unwrap_or_default(),\n", "file_path": "turbo-txpool/src/lib.rs", "rank": 1, "score": 28102.18037321889 }, { "content": "fn keccak(input: &[u8]) -> [u8; 32] {\n\n let mut keccak = Keccak::v256();\n\n keccak.update(input);\n\n\n\n let mut output = [0u8; 32];\n\n keccak.finalize(&mut output);\n\n output\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub(crate) struct VerifiedTx {\n\n hash: H256,\n\n from: Address,\n\n tx: Tx,\n\n}\n\n\n\nimpl VerifiedTx {\n\n pub fn new(tx: Tx) -> Result<Self, EcdsaError> {\n\n let v = 1 - (tx.v % 2);\n\n let r = tx.r.to_fixed_bytes();\n", "file_path": "turbo-txpool/src/tx.rs", "rank": 2, "score": 21156.023298119977 }, { "content": " use std::pin::Pin;\n\n\n\n use tokio::sync::broadcast::{self, Sender};\n\n\n\n #[derive(Clone, Debug)]\n\n pub struct TestControl {\n\n block_send: Sender<Result<BlockDiff, Status>>,\n\n }\n\n\n\n impl TestControl {\n\n pub const BALANCE: [u8; 32] = hex!(\n\n \"000000000000000000000000000000000000000000000000FFFFFFFFFFFFFFFF\"\n\n );\n\n\n\n pub fn new() -> Self {\n\n let (block_send, _) = broadcast::channel(1);\n\n Self { block_send }\n\n }\n\n\n\n #[allow(unused)] // TODO: Use this\n", "file_path": "turbo-txpool/src/control.rs", "rank": 4, "score": 13.988993333761538 }, { "content": " .account_info(request)\n\n .await\n\n .map(|r| r.into_inner())\n\n }\n\n}\n\n\n\nimpl PbControl {\n\n pub fn new(client: TxpoolControlClient<Channel>) -> Self {\n\n Self { client }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\npub(crate) mod tests {\n\n use ethereum_types::U256;\n\n\n\n use hex_literal::hex;\n\n\n\n use super::*;\n\n\n", "file_path": "turbo-txpool/src/control.rs", "rank": 7, "score": 10.931720454608717 }, { "content": "mod control;\n\npub mod error;\n\npub mod tx;\n\n\n\n#[cfg(feature = \"arbitrary\")]\n\nextern crate arbitrary_dep as arbitrary;\n\n\n\nuse crate::config::Config;\n\nuse crate::control::{Control, PbControl};\n\nuse crate::error::{import_error, ImportError};\n\nuse crate::tx::{Tx, VerifiedTx};\n\n\n\nuse ethereum_types::{Address, H256, U256};\n\n\n\nuse slab::Slab;\n\n\n\nuse snafu::{ensure, OptionExt, ResultExt};\n\n\n\nuse std::cmp::Reverse;\n\nuse std::collections::{hash_map, BTreeSet, BinaryHeap, HashMap};\n", "file_path": "turbo-txpool/src/lib.rs", "rank": 8, "score": 10.820287906707014 }, { "content": " 
pub fn stream_block(&self, result: Result<BlockDiff, Status>) {\n\n self.block_send.send(result).unwrap();\n\n }\n\n }\n\n\n\n #[async_trait]\n\n impl Control for TestControl {\n\n type BlockStream = Pin<\n\n Box<\n\n dyn futures_core::stream::Stream<\n\n Item = Result<BlockDiff, Status>,\n\n >,\n\n >,\n\n >;\n\n\n\n async fn block_stream(\n\n &mut self,\n\n _request: BlockStreamRequest,\n\n ) -> Result<Self::BlockStream, Status> {\n\n let recv = self.block_send.subscribe();\n", "file_path": "turbo-txpool/src/control.rs", "rank": 9, "score": 9.10172520663263 }, { "content": "\n\npub use k256::ecdsa::Error as EcdsaError;\n\nuse k256::ecdsa::{self, recoverable};\n\nuse k256::EncodedPoint;\n\n\n\nuse snafu::ensure;\n\n\n\nuse std::convert::{TryFrom, TryInto};\n\n\n\nuse tiny_keccak::{Hasher, Keccak};\n\n\n", "file_path": "turbo-txpool/src/tx.rs", "rank": 10, "score": 8.871146711788132 }, { "content": " /// Given a set of transaction hashes, return the full transaction details.\n\n pub async fn get_transactions(\n\n &self,\n\n request: Request<GetTransactionsRequest>,\n\n ) -> Result<Response<GetTransactionsReply>, Status> {\n\n self.inner.read().await.get_transactions(request)\n\n }\n\n}\n\n\n\nimpl server::Txpool for TxPool {\n\n fn find_unknown_transactions<'a, 'async_trait>(\n\n &'a self,\n\n request: Request<TxHashes>,\n\n ) -> Pin<\n\n Box<\n\n dyn Future<Output = Result<Response<TxHashes>, Status>>\n\n + Send\n\n + 'async_trait,\n\n >,\n\n >\n", "file_path": "turbo-txpool/src/lib.rs", "rank": 12, "score": 8.317453341600029 }, { "content": "}\n\n\n\nimpl Config {\n\n fn default_max_txs() -> usize {\n\n 1024\n\n }\n\n\n\n /// The maximum number of transactions to store in the pool at any time.\n\n pub fn max_txs(&self) -> usize {\n\n self.max_txs\n\n }\n\n\n\n /// The URL of the `txpool_control` server.\n\n pub fn control(&self) -> &str {\n\n &self.control\n\n }\n\n}\n", "file_path": "turbo-txpool/src/config.rs", "rank": 13, "score": 8.172517516594318 }, { "content": " {\n\n if let Some(stripped) = txt.strip_prefix(\"0x\") {\n\n T::from_hex(stripped).map_err(HexError::FromHex)\n\n } else {\n\n Err(HexError::NotHex)\n\n }\n\n }\n\n\n\n #[derive(Debug, StructOpt)]\n\n pub struct TxAccountInfo {\n\n #[structopt(parse(try_from_str=hex))]\n\n block_hash: [u8; 32],\n\n\n\n #[structopt(parse(try_from_str=hex))]\n\n account: [u8; 20],\n\n }\n\n\n\n impl TxAccountInfo {\n\n pub async fn run(\n\n self,\n", "file_path": "turbo-cli/src/main.rs", "rank": 14, "score": 7.742853419144182 }, { "content": " impl TxControl {\n\n pub async fn run(\n\n self,\n\n dst: Uri,\n\n ) -> Result<(), Box<dyn std::error::Error>> {\n\n let client = TxpoolControlClient::connect(dst).await?;\n\n\n\n match self {\n\n TxControl::AccountInfo(acct) => acct.run(client).await,\n\n }\n\n }\n\n }\n\n\n\n #[derive(Debug, StructOpt)]\n\n pub struct TxUnknown {\n\n #[structopt(parse(try_from_str=hex))]\n\n hashes: Vec<[u8; 32]>,\n\n }\n\n\n\n impl TxUnknown {\n", "file_path": "turbo-cli/src/main.rs", "rank": 15, "score": 7.5049751253006605 }, { "content": " }\n\n\n\n #[derive(Debug, StructOpt)]\n\n pub struct TxGet {\n\n #[structopt(parse(try_from_str=hex))]\n\n hashes: Vec<[u8; 32]>,\n\n }\n\n\n\n impl TxGet {\n\n pub async fn run(\n\n self,\n\n dst: Uri,\n\n ) -> Result<(), Box<dyn std::error::Error>> {\n\n let hashes: Vec<_> = self\n\n .hashes\n\n .into_iter()\n\n .map(H256::from)\n\n .map(Into::into)\n\n .collect();\n\n\n", "file_path": "turbo-cli/src/main.rs", "rank": 16, "score": 7.432804770311345 }, { "content": " let mut 
client = TxpoolClient::connect(dst).await?;\n\n let txs = client\n\n .get_transactions(GetTransactionsRequest { hashes })\n\n .await?;\n\n\n\n println!(\"{:?}\", txs);\n\n\n\n Ok(())\n\n }\n\n }\n\n\n\n #[derive(Debug, StructOpt)]\n\n pub struct TxImport {\n\n #[structopt(parse(try_from_str=hex))]\n\n txs: Vec<Vec<u8>>,\n\n }\n\n\n\n impl TxImport {\n\n pub async fn run(\n\n self,\n", "file_path": "turbo-cli/src/main.rs", "rank": 17, "score": 7.17420686551097 }, { "content": "\n\nimpl PartialEq for VerifiedTx {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.hash == other.hash\n\n }\n\n}\n\n\n\n/// An Ethereum transaction.\n\n#[derive(Debug, Eq, PartialEq, Clone)]\n\n#[cfg_attr(feature = \"arbitrary\", derive(arbitrary::Arbitrary))]\n\n#[non_exhaustive]\n\npub struct Tx {\n\n /// The unique index of the transaction, used for replay protection.\n\n pub nonce: u64,\n\n\n\n /// The destination address of the transaction, or `None` when creating a\n\n /// contract.\n\n pub to: Option<Address>,\n\n\n\n /// The value, in Wei, sent along with this transaction.\n", "file_path": "turbo-txpool/src/tx.rs", "rank": 18, "score": 6.870371667051241 }, { "content": " where\n\n 'a: 'async_trait,\n\n Self: 'async_trait,\n\n {\n\n Box::pin(self.find_unknown_transactions(request))\n\n }\n\n\n\n fn import_transactions<'a, 'async_trait>(\n\n &'a self,\n\n request: Request<ImportRequest>,\n\n ) -> Pin<\n\n Box<\n\n dyn Future<Output = Result<Response<ImportReply>, Status>>\n\n + Send\n\n + 'async_trait,\n\n >,\n\n >\n\n where\n\n 'a: 'async_trait,\n\n Self: 'async_trait,\n", "file_path": "turbo-txpool/src/lib.rs", "rank": 20, "score": 6.28024265799069 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::control::tests::TestControl;\n\n\n\n use ethers_core::types::{NameOrAddress, TransactionRequest};\n\n\n\n use ethers_signers::{LocalWallet, Signer};\n\n\n\n use std::convert::TryInto;\n\n\n\n use super::*;\n\n\n\n use ethereum_interfaces::txpool::AccountInfo;\n\n\n", "file_path": "turbo-txpool/src/lib.rs", "rank": 21, "score": 6.252918924590052 }, { "content": " }\n\n }\n\n stream.append(&self.value);\n\n stream.append(&self.input);\n\n stream.append(&self.v);\n\n stream.append(&self.r);\n\n stream.append(&self.s);\n\n }\n\n\n\n pub(crate) fn decode(stream: &rlp::Rlp) -> Result<Self, DecodeError> {\n\n let to = {\n\n let field = stream.at(3).context_field(\"to\")?;\n\n if field.is_empty() {\n\n if field.is_data() {\n\n None\n\n } else {\n\n return Err(DecodeError::RlpDecode {\n\n source: Box::new(\n\n rlp::DecoderError::RlpExpectedToBeData,\n\n )\n", "file_path": "turbo-txpool/src/tx.rs", "rank": 22, "score": 6.193018347168929 }, { "content": "// Copyright 2021 ConsenSys\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse async_trait::async_trait;\n\n\n\nuse tonic::transport::Channel;\n\nuse tonic::{Status, Streaming};\n\n\n\nuse ethereum_interfaces::txpool::txpool_control_client::TxpoolControlClient;\n\nuse ethereum_interfaces::txpool::{\n\n 
AccountInfoReply, AccountInfoRequest, BlockDiff, BlockStreamRequest,\n\n};\n\n\n\n#[async_trait]\n", "file_path": "turbo-txpool/src/control.rs", "rank": 24, "score": 6.092744985851217 }, { "content": "use std::future::Future;\n\nuse std::net::SocketAddr;\n\nuse std::pin::Pin;\n\nuse std::sync::Arc;\n\n\n\nuse tokio::sync::RwLock;\n\nuse tokio::task::JoinHandle;\n\n\n\nuse tokio_stream::StreamExt;\n\n\n\nuse tonic::transport::Server;\n\nuse tonic::{Request, Response, Status};\n\n\n\nuse tracing::{debug, info};\n\n\n\nuse ethereum_interfaces::txpool::block_stream_request::StartWith;\n\nuse ethereum_interfaces::txpool::txpool_control_client as client;\n\nuse ethereum_interfaces::txpool::txpool_server as server;\n\nuse ethereum_interfaces::txpool::{\n\n block_diff, AccountInfo, AccountInfoRequest, AppliedBlock, BlockDiff,\n\n BlockStreamRequest, GetTransactionsReply, GetTransactionsRequest,\n\n ImportReply, ImportRequest, ImportResult, RevertedBlock, TxHashes,\n\n};\n\n\n\n#[derive(Debug, Clone, Eq, PartialEq)]\n", "file_path": "turbo-txpool/src/lib.rs", "rank": 25, "score": 5.9714283129403665 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use hex_literal::hex;\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn encode_deploy() {\n\n let input: &[_] = &hex!(\n\n \"\n\n 6080604052336000806101000a81548173ffffffffffffffffffffffffffffffff\n\n ffffffff021916908373ffffffffffffffffffffffffffffffffffffffff160217\n\n 90555034801561005057600080fd5b50610207806100606000396000f3fe608060\n\n 405234801561001057600080fd5b50600436106100415760003560e01c8063445d\n\n f0ac146100465780638da5cb5b14610064578063fdacd57614610098575b600080\n\n fd5b61004e6100c6565b6040518082815260200191505060405180910390f35b61\n\n 006c6100cc565b604051808273ffffffffffffffffffffffffffffffffffffffff\n\n 16815260200191505060405180910390f35b6100c4600480360360208110156100\n", "file_path": "turbo-txpool/src/tx.rs", "rank": 26, "score": 5.960815801667472 }, { "content": " {\n\n Box::pin(self.import_transactions(request))\n\n }\n\n\n\n fn get_transactions<'a, 'async_trait>(\n\n &'a self,\n\n request: Request<GetTransactionsRequest>,\n\n ) -> Pin<\n\n Box<\n\n dyn Future<Output = Result<Response<GetTransactionsReply>, Status>>\n\n + Send\n\n + 'async_trait,\n\n >,\n\n >\n\n where\n\n 'a: 'async_trait,\n\n Self: 'async_trait,\n\n {\n\n Box::pin(self.get_transactions(request))\n\n }\n", "file_path": "turbo-txpool/src/lib.rs", "rank": 27, "score": 5.955228687074583 }, { "content": "\n\nuse std::fmt;\n\n\n\nuse structopt::StructOpt;\n\n\n\nuse tonic::transport::channel::Channel;\n\nuse tonic::transport::Uri;\n\n\n\nuse ethereum_interfaces::txpool::txpool_client::TxpoolClient;\n\nuse ethereum_interfaces::txpool::txpool_control_client::TxpoolControlClient;\n\nuse ethereum_interfaces::txpool::{\n\n AccountInfoRequest, GetTransactionsRequest, ImportRequest, TxHashes,\n\n};\n\n\n\nuse ethereum_types::{Address, H256};\n\n\n\nmod cmd {\n\n use super::*;\n\n\n\n #[derive(Debug)]\n", "file_path": "turbo-cli/src/main.rs", "rank": 28, "score": 5.792813141037217 }, { "content": " &self.from\n\n }\n\n\n\n pub fn nonce(&self) -> u64 {\n\n self.tx.nonce\n\n }\n\n\n\n // TODO: Add getters as needed.\n\n\n\n pub fn is_runnable(\n\n &self,\n\n account_nonce: U256,\n\n balance: U256,\n\n ) -> Result<bool, ImportError> {\n\n let verified_nonce = U256::from(self.nonce());\n\n ensure!(\n\n verified_nonce >= account_nonce,\n\n import_error::NonceUsed { tx_hash: self.hash }\n\n );\n\n\n", "file_path": "turbo-txpool/src/tx.rs", "rank": 30, "score": 5.135590969110101 }, { 
"content": " impl TxPool {\n\n pub async fn run(\n\n self,\n\n dst: Uri,\n\n ) -> Result<(), Box<dyn std::error::Error>> {\n\n match self {\n\n TxPool::Unknown(un) => un.run(dst).await,\n\n TxPool::Import(import) => import.run(dst).await,\n\n TxPool::Get(get) => get.run(dst).await,\n\n TxPool::Control(ctrl) => ctrl.run(dst).await,\n\n }\n\n }\n\n }\n\n\n\n #[derive(Debug, StructOpt)]\n\n pub enum Command {\n\n TxPool(TxPool),\n\n }\n\n\n\n impl Command {\n", "file_path": "turbo-cli/src/main.rs", "rank": 31, "score": 5.108201647344094 }, { "content": "// Copyright 2020 ConsenSys\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n//! An implementation of turbo-geth's transaction pool interfaces for the\n\n//! Ethereum network.\n\n\n\n#![deny(unsafe_code, missing_docs, missing_debug_implementations)]\n\n\n\npub mod config;\n", "file_path": "turbo-txpool/src/lib.rs", "rank": 32, "score": 5.017919831474218 }, { "content": "//! let config = Config::builder()\n\n//! .max_txs(1024)\n\n//! .control(\"http://127.0.0.1:9092\")\n\n//! .build();\n\n//! ```\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse typed_builder::TypedBuilder;\n\n\n\n/// Configuration for the transaction pool. See the module documentation for an\n\n/// example.\n\n#[derive(Debug, TypedBuilder, Serialize, Deserialize)]\n\npub struct Config {\n\n #[builder(default = Config::default_max_txs())]\n\n #[serde(default = \"Config::default_max_txs\")]\n\n max_txs: usize,\n\n\n\n #[builder(setter(into))]\n\n control: String,\n", "file_path": "turbo-txpool/src/config.rs", "rank": 33, "score": 4.659184204222194 }, { "content": "/// let config = Config::builder()\n\n/// .max_txs(1024)\n\n/// .control(\"http://127.0.0.1:9092\")\n\n/// .build();\n\n///\n\n/// let pool = TxPool::with_config(config).await?;\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\n#[derive(Debug)]\n\npub struct TxPool {\n\n inner: Arc<RwLock<Inner<PbControl>>>,\n\n background: Option<JoinHandle<()>>,\n\n}\n\n\n\nimpl TxPool {\n\n /// Construct a new `TxPool` instance with the given configuration.\n\n pub async fn with_config(\n\n config: Config,\n\n ) -> Result<Self, tonic::transport::Error> {\n", "file_path": "turbo-txpool/src/lib.rs", "rank": 34, "score": 4.569218952684 }, { "content": "\n\n#[async_trait]\n\nimpl Control for PbControl {\n\n type BlockStream = Streaming<BlockDiff>;\n\n\n\n async fn block_stream(\n\n &mut self,\n\n request: BlockStreamRequest,\n\n ) -> Result<Self::BlockStream, Status> {\n\n self.client\n\n .block_stream(request)\n\n .await\n\n .map(|r| r.into_inner())\n\n }\n\n\n\n async fn account_info(\n\n &mut self,\n\n request: AccountInfoRequest,\n\n ) -> Result<AccountInfoReply, Status> {\n\n self.client\n", "file_path": "turbo-txpool/src/control.rs", "rank": 35, "score": 4.145568296556037 }, { "content": " enum HexError<E> {\n\n NotHex,\n\n FromHex(E),\n\n }\n\n\n\n impl<E> fmt::Display for HexError<E>\n\n where\n\n E: fmt::Display,\n\n {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match 
self {\n\n HexError::NotHex => write!(f, \"argument requires `0x` prefix\"),\n\n HexError::FromHex(e) => e.fmt(f),\n\n }\n\n }\n\n }\n\n\n\n fn hex<T>(txt: &str) -> Result<T, HexError<T::Error>>\n\n where\n\n T: FromHex,\n", "file_path": "turbo-cli/src/main.rs", "rank": 36, "score": 4.1067337674621935 }, { "content": " .into(),\n\n field: Some(\"to\"),\n\n });\n\n }\n\n } else {\n\n Some(field.as_val().context_field(\"to\")?)\n\n }\n\n };\n\n\n\n let v = stream.val_at(6).context_field(\"v\")?;\n\n let r = stream.val_at(7).context_field(\"r\")?;\n\n let s = stream.val_at(8).context_field(\"s\")?;\n\n\n\n let nonce = match stream.val_at::<U256>(0).context_field(\"nonce\")? {\n\n x if x > u64::max_value().into() => {\n\n return Err(DecodeError::IntegerOverflow);\n\n }\n\n x => x.as_u64(),\n\n };\n\n\n", "file_path": "turbo-txpool/src/tx.rs", "rank": 37, "score": 3.965155809165827 }, { "content": " pub value: U256,\n\n\n\n /// The amount of Wei to transfer to the miner, per unit of gas consumed.\n\n pub gas_price: U256,\n\n\n\n /// The maximum units of gas this transaction is allowed to consume.\n\n pub gas_limit: u64,\n\n\n\n /// The calldata (or contract initialization code) sent with the\n\n /// transaction.\n\n pub input: Vec<u8>,\n\n\n\n /// The `v` component (or recovery id) of the transaction signature.\n\n pub v: u64,\n\n\n\n /// The `r` component of the transaction signature.\n\n pub r: H256,\n\n\n\n /// The `s` component of the transaction signature.\n\n pub s: H256,\n", "file_path": "turbo-txpool/src/tx.rs", "rank": 38, "score": 3.706452545528875 }, { "content": " let gas_limit =\n\n match stream.val_at::<U256>(2).context_field(\"gas_limit\")? {\n\n x if x > u64::max_value().into() => {\n\n return Err(DecodeError::IntegerOverflow);\n\n }\n\n x => x.as_u64(),\n\n };\n\n\n\n Ok(Self {\n\n nonce,\n\n gas_price: stream.val_at(1).context_field(\"gas_price\")?,\n\n gas_limit,\n\n to,\n\n value: stream.val_at(4).context_field(\"value\")?,\n\n input: stream.val_at(5).context_field(\"input\")?,\n\n v,\n\n r,\n\n s,\n\n })\n\n }\n", "file_path": "turbo-txpool/src/tx.rs", "rank": 39, "score": 3.647804756748811 }, { "content": " stream.clear();\n\n tx.encode(&mut stream);\n\n let hash = H256::from(keccak(stream.as_raw()));\n\n\n\n Ok(Self { hash, from, tx })\n\n }\n\n\n\n pub fn gas_price(&self) -> &U256 {\n\n &self.tx.gas_price\n\n }\n\n\n\n pub fn hash(&self) -> &H256 {\n\n &self.hash\n\n }\n\n\n\n pub fn tx(&self) -> &Tx {\n\n &self.tx\n\n }\n\n\n\n pub fn from(&self) -> &Address {\n", "file_path": "turbo-txpool/src/tx.rs", "rank": 40, "score": 3.5744834470623292 }, { "content": "// Copyright 2021 ConsenSys\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n//! 
Transactions as understood by the transaction pool.\n\n\n\nuse crate::error::decode_error::{DecodeError, RlpResultExt};\n\nuse crate::error::import_error::{self, ImportError};\n\n\n\nuse ethereum_types::{Address, H256, U256};\n", "file_path": "turbo-txpool/src/tx.rs", "rank": 41, "score": 3.110555474689959 }, { "content": " };\n\n\n\n imported.push(result as i32);\n\n }\n\n\n\n Ok(ImportReply { imported })\n\n }\n\n}\n\n\n\n/// An implementation of turbo-geth's transaction pool interfaces for the\n\n/// Ethereum network.\n\n///\n\n/// # Example\n\n///\n\n/// ```no_run\n\n/// use turbo_txpool::TxPool;\n\n/// use turbo_txpool::config::Config;\n\n///\n\n/// # #[tokio::main(flavor = \"current_thread\")]\n\n/// # async fn main() -> Result<(), Box<dyn std::error::Error>> {\n", "file_path": "turbo-txpool/src/lib.rs", "rank": 42, "score": 2.9310770788239795 }, { "content": " let required = (self.tx.gas_price * self.tx.gas_limit) + self.tx.value;\n\n\n\n ensure!(\n\n balance >= required,\n\n import_error::InsufficientBalance { tx_hash: self.hash }\n\n );\n\n\n\n Ok(verified_nonce == account_nonce)\n\n }\n\n}\n\n\n\nimpl TryFrom<Tx> for VerifiedTx {\n\n type Error = EcdsaError;\n\n\n\n fn try_from(tx: Tx) -> Result<Self, Self::Error> {\n\n VerifiedTx::new(tx)\n\n }\n\n}\n\n\n\nimpl Eq for VerifiedTx {}\n", "file_path": "turbo-txpool/src/tx.rs", "rank": 43, "score": 2.828328171209649 }, { "content": "// Copyright 2021 ConsenSys\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n//! Configuration definition for the transaction pool. Can be deserialized, or\n\n//! built:\n\n//!\n\n//! ```rust\n\n//! 
use turbo_txpool::config::Config;\n\n//!\n", "file_path": "turbo-txpool/src/config.rs", "rank": 44, "score": 2.535636046334433 }, { "content": " self.inner.read().await.find_unknown_transactions(request)\n\n }\n\n\n\n /// Insert the given transactions `txs` into this transaction pool.\n\n pub async fn insert_transactions(\n\n &mut self,\n\n txs: Vec<Tx>,\n\n ) -> Vec<ImportResult> {\n\n self.inner.write().await.insert_transactions(txs).await\n\n }\n\n\n\n /// Decode the given transactions, and insert them into this transaction\n\n /// pool.\n\n pub async fn import_transactions(\n\n &self,\n\n request: Request<ImportRequest>,\n\n ) -> Result<Response<ImportReply>, Status> {\n\n self.inner.write().await.import_transactions(request).await\n\n }\n\n\n", "file_path": "turbo-txpool/src/lib.rs", "rank": 45, "score": 2.5311382033446614 }, { "content": "\n\n Ok(Response::new(TxHashes { hashes }))\n\n }\n\n\n\n pub fn with_config(control: C, config: Config) -> Self {\n\n Self {\n\n latest_block: None,\n\n max_txs: config.max_txs(),\n\n txs: Slab::with_capacity(config.max_txs()),\n\n by_hash: HashMap::with_capacity(config.max_txs()),\n\n by_price: BTreeSet::new(),\n\n soon: HashMap::with_capacity(config.max_txs()),\n\n control,\n\n }\n\n }\n\n\n\n pub fn get_transactions(\n\n &self,\n\n request: Request<GetTransactionsRequest>,\n\n ) -> Result<Response<GetTransactionsReply>, Status> {\n", "file_path": "turbo-txpool/src/lib.rs", "rank": 46, "score": 2.4564645324862964 }, { "content": "// Copyright 2021 ConsenSys\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n//! Testing harness for interacting with the `txpool` and `txpool_control` gRPC\n\n//! 
endpoints.\n\n\n\n#![deny(unsafe_code, missing_docs, missing_debug_implementations)]\n\n\n\nuse hex::FromHex;\n", "file_path": "turbo-cli/src/main.rs", "rank": 47, "score": 2.4494290006719317 }, { "content": " ptx.tx.nonce(),\n\n ptx.tx.gas_price(),\n\n ptx.tx.hash(),\n\n );\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl<C> Inner<C>\n\nwhere\n\n C: Clone + Control,\n\n{\n\n fn recheck(&mut self, account_infos: &[AccountInfo]) {\n\n #[derive(Default)]\n\n struct Account {\n\n balance: U256,\n\n nonce: U256,\n\n }\n", "file_path": "turbo-txpool/src/lib.rs", "rank": 48, "score": 2.365634109947396 }, { "content": " guard.block_diff(diff).await;\n\n }\n\n }\n\n });\n\n\n\n Ok(Self {\n\n inner,\n\n background: Some(background),\n\n })\n\n }\n\n\n\n /// Begin servicing requests, binding to `addr`.\n\n pub async fn run<I>(\n\n mut self,\n\n addr: I,\n\n ) -> Result<(), tonic::transport::Error>\n\n where\n\n I: Into<SocketAddr>,\n\n {\n\n let background = self.background.take().unwrap();\n", "file_path": "turbo-txpool/src/lib.rs", "rank": 49, "score": 2.2937939864608774 }, { "content": " pub async fn run(\n\n self,\n\n dst: Uri,\n\n ) -> Result<(), Box<dyn std::error::Error>> {\n\n match self {\n\n cmd::Command::TxPool(tx) => tx.run(dst).await,\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, StructOpt)]\n\n#[structopt(setting=structopt::clap::AppSettings::VersionlessSubcommands)]\n", "file_path": "turbo-cli/src/main.rs", "rank": 50, "score": 2.2203922223585417 }, { "content": " pub async fn run(\n\n self,\n\n dst: Uri,\n\n ) -> Result<(), Box<dyn std::error::Error>> {\n\n let hashes: Vec<_> = self\n\n .hashes\n\n .into_iter()\n\n .map(H256::from)\n\n .map(Into::into)\n\n .collect();\n\n\n\n let mut client = TxpoolClient::connect(dst).await?;\n\n let txs = client\n\n .find_unknown_transactions(TxHashes { hashes })\n\n .await?;\n\n\n\n println!(\"{:?}\", txs);\n\n\n\n Ok(())\n\n }\n", "file_path": "turbo-cli/src/main.rs", "rank": 51, "score": 2.1079667812714655 }, { "content": " nonce: self.nonce.unwrap().as_u64(),\n\n v: sig.v,\n\n r: sig.r,\n\n s: sig.s,\n\n }\n\n }\n\n }\n\n\n\n const KEYS: &[&str] =\n\n &[\"dcf2cbdd171a21c480aa7f53d77f31bb102282b3ff099c78e3118b37348c72f7\"];\n\n\n\n fn wallet(idx: usize) -> LocalWallet {\n\n KEYS[idx].parse().unwrap()\n\n }\n\n\n\n fn inner() -> Inner<TestControl> {\n\n let ctrl = TestControl::new();\n\n let cfg = Config::builder().max_txs(10).control(String::new()).build();\n\n let mut out = Inner::with_config(ctrl, cfg);\n\n out.latest_block = Some(Default::default());\n", "file_path": "turbo-txpool/src/lib.rs", "rank": 52, "score": 2.0931164102882582 }, { "content": "}\n\n\n\nimpl Tx {\n\n fn signature_encode(&self, stream: &mut rlp::RlpStream) {\n\n if self.v >= 35 {\n\n self.signature_encode_9(stream);\n\n } else {\n\n self.signature_encode_6(stream);\n\n }\n\n }\n\n\n\n fn signature_encode_6(&self, stream: &mut rlp::RlpStream) {\n\n stream.begin_list(6);\n\n stream.append(&U256::from(self.nonce));\n\n stream.append(&self.gas_price);\n\n stream.append(&U256::from(self.gas_limit));\n\n match self.to {\n\n None => {\n\n stream.append_empty_data();\n\n }\n", "file_path": "turbo-txpool/src/tx.rs", "rank": 54, "score": 2.0720457842038926 }, { "content": " };\n\n\n\n let removed =\n\n Self::remove_soon(&mut self.soon, *ptx.tx.from(), &nonced);\n\n assert!(removed, \"desync in soon\");\n\n }\n\n\n\n ptx\n\n }\n\n\n\n pub fn find_unknown_transactions(\n\n &self,\n\n request: Request<TxHashes>,\n\n ) -> Result<Response<TxHashes>, Status> {\n\n let hashes = request\n\n 
.into_inner()\n\n .hashes\n\n .into_iter()\n\n .filter(|vec| !self.by_hash.contains_key(&H256::from(vec.clone())))\n\n .collect();\n", "file_path": "turbo-txpool/src/lib.rs", "rank": 55, "score": 2.045815148386796 }, { "content": " );\n\n\n\n // TODO: Ensure the tx can actually fit in a block.\n\n\n\n let runnable = verified.is_runnable(nonce, balance)?;\n\n\n\n Ok(PooledTx {\n\n tx: verified,\n\n runnable,\n\n })\n\n }\n\n\n\n pub async fn insert_transactions(\n\n &mut self,\n\n txs: Vec<Tx>,\n\n ) -> Vec<ImportResult> {\n\n let verified: Vec<_> = futures_util::future::join_all(\n\n txs.into_iter().map(|tx| self.log_qualify(tx)),\n\n )\n\n .await;\n", "file_path": "turbo-txpool/src/lib.rs", "rank": 56, "score": 2.025904450428751 }, { "content": " mut client: TxpoolControlClient<Channel>,\n\n ) -> Result<(), Box<dyn std::error::Error>> {\n\n let resp = client\n\n .account_info(AccountInfoRequest {\n\n block_hash: Some(H256::from(&self.block_hash).into()),\n\n account: Some(Address::from(&self.account).into()),\n\n })\n\n .await?;\n\n\n\n println!(\"{:#?}\", resp);\n\n\n\n Ok(())\n\n }\n\n }\n\n\n\n #[derive(Debug, StructOpt)]\n\n pub enum TxControl {\n\n AccountInfo(TxAccountInfo),\n\n }\n\n\n", "file_path": "turbo-cli/src/main.rs", "rank": 57, "score": 2.0063775748388846 }, { "content": " }\n\n }\n\n stream.append(&self.value);\n\n stream.append(&self.input);\n\n stream.append(&chainid);\n\n stream.append(&0u8);\n\n stream.append(&0u8);\n\n }\n\n\n\n pub(crate) fn encode(&self, stream: &mut rlp::RlpStream) {\n\n stream.begin_list(9);\n\n stream.append(&U256::from(self.nonce));\n\n stream.append(&self.gas_price);\n\n stream.append(&U256::from(self.gas_limit));\n\n match self.to {\n\n None => {\n\n stream.append_empty_data();\n\n }\n\n Some(to) => {\n\n stream.append(&to);\n", "file_path": "turbo-txpool/src/tx.rs", "rank": 58, "score": 1.931894565864991 }, { "content": "\n\n let mut result = Vec::with_capacity(verified.len());\n\n for res in verified.into_iter() {\n\n let ins = match res.and_then(|vx| self.log_insert(vx)) {\n\n Ok(()) => ImportResult::Success,\n\n Err(e) => e.into(),\n\n };\n\n result.push(ins);\n\n }\n\n\n\n result\n\n }\n\n\n\n pub async fn import_transactions(\n\n &mut self,\n\n request: Request<ImportRequest>,\n\n ) -> Result<Response<ImportReply>, Status> {\n\n self.import_transactions_request(request.into_inner())\n\n .await\n\n .map(Response::new)\n", "file_path": "turbo-txpool/src/lib.rs", "rank": 59, "score": 1.9141299437463566 }, { "content": " dst: Uri,\n\n ) -> Result<(), Box<dyn std::error::Error>> {\n\n let txs: Vec<_> = self.txs.into_iter().map(Into::into).collect();\n\n let mut client = TxpoolClient::connect(dst).await?;\n\n let txs = client.import_transactions(ImportRequest { txs }).await?;\n\n\n\n println!(\"{:?}\", txs);\n\n\n\n Ok(())\n\n }\n\n }\n\n\n\n #[derive(Debug, StructOpt)]\n\n pub enum TxPool {\n\n Unknown(TxUnknown),\n\n Import(TxImport),\n\n Get(TxGet),\n\n Control(TxControl),\n\n }\n\n\n", "file_path": "turbo-cli/src/main.rs", "rank": 60, "score": 1.8627436955927106 }, { "content": "turbo-txpool\n\n============\n\n\n\n`turbo-txpool` is a modular transaction pool for Ethereum clients based on the [turbo-geth interfaces][tg].\n\n\n\n[tg]: https://github.com/ledgerwatch/interfaces\n\n\n\n## Configuration\n\n\n\nThe configuration file is written using the [TOML][toml] format, and has the following options:\n\n\n\n```toml\n\nbind = \"127.0.0.1:54001\" # Expose the txpool service on this endpoint.\n\ncontrol = \"http://127.0.0.1:9092\" # Location 
of the txpool_control service.\n\nmax_txs = 1024 # Maximum number of transactions to store in the pool.\n\n```\n\n\n\n[toml]: https://toml.io/\n\n\n\n## Usage\n\n\n\n```\n\ncargo run -- -c /path/to/configuration/file.toml\n\n```\n", "file_path": "turbo-txpool/README.md", "rank": 61, "score": 1.789965925515351 }, { "content": "\n\n let server = Server::builder()\n\n .add_service(server::TxpoolServer::new(self))\n\n .serve(addr.into());\n\n\n\n tokio::select! {\n\n server_result = server => server_result,\n\n bg_result = background => {\n\n bg_result.expect(\"background task exited with panic\");\n\n panic!(\"background task exited unexpectedly (without panic)\");\n\n }\n\n }\n\n }\n\n\n\n /// Given a list of transaction hashes, return the ones unknown to this\n\n /// transaction pool.\n\n pub async fn find_unknown_transactions(\n\n &self,\n\n request: Request<TxHashes>,\n\n ) -> Result<Response<TxHashes>, Status> {\n", "file_path": "turbo-txpool/src/lib.rs", "rank": 62, "score": 1.7678264523116525 } ]
Rust
src/world/chunk/chunkmanager.rs
Ducolnd/ludwig-world-3d
3eb034d5c14cc95a29be7dab5d564c47c8423daf
use std::collections::HashMap;
use std::time::Instant;

use crate::world::{
    chunk::{chunk::Chunk, pos::*},
    constants::*,
    block::blocks::BlockID,
    world::World,
};
use crate::render::{
    low::{
        renderer::Renderer,
        context::Context,
    },
    meshing::chunkmeshing::ChunkMesh,
    drawables::chunk::ChunkDrawable,
};

pub struct ChunkManager {
    loaded_chunks: HashMap<ChunkPos, Chunk>,
    chunks_meshes: HashMap<ChunkPos, ChunkMesh>,
    pub chunk_buffers: HashMap<ChunkPos, ChunkDrawable>,
    load_queue: Vec<ChunkPos>,
    render_distance: u32,
    center_chunk: ChunkPos,
    updated: bool,
    chunk_meshing_time: u128,
    chunk_loading_time: u128,
}

impl ChunkManager {
    pub fn new(render_distance: u32) -> Self {
        let loaded_chunks = HashMap::new();
        let chunks_meshes = HashMap::new();
        let chunk_buffers = HashMap::new();

        Self {
            loaded_chunks,
            chunks_meshes,
            chunk_buffers,
            load_queue: vec![],
            render_distance,
            center_chunk: ChunkPos::new(0, 0, 0),
            updated: false,
            chunk_meshing_time: 1,
            chunk_loading_time: 1,
        }
    }

    pub fn set_camera_location(&mut self, coord: WorldCoord, renderer: &mut Renderer) {
        let chunkpos = coord.to_chunk_coord();
        if self.center_chunk != chunkpos {
            self.center_around(chunkpos, renderer);
            self.updated = false;
        }
    }

    pub fn center_around(&mut self, pos: ChunkPos, renderer: &mut Renderer) {
        let mut targets = vec![];
        self.center_chunk = pos;

        for x in -1 * (self.render_distance as i32)..self.render_distance as i32 {
            for z in -1 * (self.render_distance as i32)..self.render_distance as i32 {
                targets.push(ChunkPos::new(pos.x + x, 0, pos.z + z));
            }
        }

        for pos in self.loaded_chunks.keys().cloned().collect::<Vec<_>>() {
            if !targets.contains(&pos) {
                self.unload_chunk(&pos, renderer);
            }
        }

        for pos in targets {
            if !self.loaded_chunks.contains_key(&pos) {
                self.queue_chunk_load(pos)
            }
        }
    }

    pub fn load_chunk(&mut self, pos: ChunkPos, height: [u32; CHUNKSIZE * CHUNKSIZE], renderer: &mut Renderer) {
        let mut chunk = Chunk::new(pos);
        renderer.chunkpos_uniform.add(&renderer.queue, pos, pos.to_raw());

        let now = Instant::now();
        chunk.generate(height);
        let lapsed = now.elapsed();

        self.loaded_chunks.insert(
            pos,
            chunk,
        );

        self.chunk_loading_time += lapsed.as_micros();

        self.mesh_neighbors(pos);
    }

    pub fn mesh_neighbors(&mut self, pos: ChunkPos) {
        self.mesh_chunk(pos);
        self.mesh_chunk(ChunkPos {x: pos.x + 1, ..pos});
        self.mesh_chunk(ChunkPos {x: pos.x - 1, ..pos});
        self.mesh_chunk(ChunkPos {z: pos.z + 1, ..pos});
        self.mesh_chunk(ChunkPos {z: pos.z - 1, ..pos});
    }

    pub fn mesh_chunk(&mut self, pos: ChunkPos) {
        let c = &self.loaded_chunks.get(&pos);
        if !c.is_none() {
            let mut mesh = ChunkMesh::new();

            let now = Instant::now();
            mesh.create_simple_mesh(c.unwrap(), &self);
            let elapsed = now.elapsed();

            self.chunks_meshes.insert(
                pos,
                mesh
            );

            self.chunk_meshing_time += elapsed.as_micros();
        }
    }

    pub fn get_neighbors(&self, center: ChunkPos) -> [Option<&Chunk>; 4]{
        [
            self.get_chunk_option(center + ChunkPos::new(0, 0, 1)),
            self.get_chunk_option(center + ChunkPos::new(1, 0, 0)),
            self.get_chunk_option(center + ChunkPos::new(0, 0, -1)),
            self.get_chunk_option(center + ChunkPos::new(-1, 0, 0)),
        ]
    }

    pub fn queue_chunk_load(&mut self, pos: ChunkPos) {
        self.load_queue.push(pos);
    }

    pub fn load_queue(&mut self, world: &World, renderer: &mut Renderer) {
        if self.load_queue.len() > 0 {
            for pos in self.load_queue.clone() {
                self.load_chunk(pos.clone(), world.map.create_heightmap(&pos), renderer);
            }
            self.load_queue.clear();
        }
    }

    pub fn unload_chunk(&mut self, pos: &ChunkPos, renderer: &mut Renderer) {
        self.chunks_meshes.remove(pos);
        self.loaded_chunks.remove(pos);
        self.chunk_buffers.remove(pos);
        renderer.chunkpos_uniform.remove(pos);
    }

    pub fn update(&mut self, context: &mut Context, encoder: &mut wgpu::CommandEncoder) {
        if !self.updated {
            self.updated = true;

            for (pos, chunk) in &self.chunks_meshes {
                let mut c = ChunkDrawable::new(&context.renderer.device, *pos);
                c.from_chunk_mesh(&chunk, &context.renderer.device, encoder);
                self.chunk_buffers.insert(*pos, c);
            }
        }
    }

    pub fn get_chunk(&self, pos: ChunkPos) -> &Chunk {
        self.loaded_chunks.get(&pos).unwrap()
    }

    pub fn get_chunk_option(&self, pos: ChunkPos) -> Option<&Chunk> {
        self.loaded_chunks.get(&pos)
    }

    pub fn get_chunk_mut_option(&mut self, pos: ChunkPos) -> Option<&mut Chunk> {
        self.loaded_chunks.get_mut(&pos)
    }

    pub fn get_mesh(&self, pos: ChunkPos) -> &ChunkMesh {
        self.chunks_meshes.get(&pos).unwrap()
    }

    pub fn get_block_at_coord(&self, coord: WorldCoord) -> Option<BlockID> {
        if let Some(chunk) = self.loaded_chunks.get(&coord.to_chunk_coord()) {
            return Some(chunk.at_coord(coord.to_chunk_local()))
        } else {
            return None
        }
    }

    pub fn meshing_time(&self) -> u128 {
        self.chunk_meshing_time / self.chunks_meshes.len() as u128
    }

    pub fn loading_time(&self) -> u128 {
        self.chunk_loading_time / self.chunks_meshes.len() as u128
    }
}
use std::collections::HashMap; use std::time::Instant; use crate::world::{ chunk::{chunk::Chunk, pos::*}, constants::*, block::blocks::BlockID, world::World, }; use crate::render::{ low::{ renderer::Renderer, context::Context, }, meshing::chunkmeshing::ChunkMesh, drawables::chunk::ChunkDrawable, }; pub struct ChunkManager { loaded_chunks: HashMap<ChunkPos, Chunk>, chunks_meshes: HashMap<ChunkPos, ChunkMesh>, pub chunk_buffers: HashMap<ChunkPos, ChunkDrawable>, load_queue: Vec<ChunkPos>, render_distance: u32, center_chunk: ChunkPos, updated: bool, chunk_meshing_time: u128, chunk_loading_time: u128, } impl ChunkManager { pub fn new(render_distance: u32) -> Self { let loaded_chunks = HashMap::new(); let chunks_meshes = HashMap::new(); let chunk_buffers = HashMap::new(); Self { loaded_chunks, chunks_meshes, chunk_buffers, load_queue: vec![], render_distance, center_chunk: ChunkPos::new(0, 0, 0), updated: false, chunk_meshing_time: 1, chunk_loading_time: 1, } } pub fn set_camera_location(&mut self, coord: WorldCoord, renderer: &mut Renderer) { let chunkpos = coord.to_chunk_coord(); if self.center_chunk != chunkpos { self.center_around(chunkpos, renderer); self.updated = false; } } pub fn center_around(&mut self, pos: ChunkPos, renderer: &mut Renderer) { let mut targets = vec![]; self.center_chunk = pos; for x in -1 * (self.render_distance as i32)..self.render_distance as i32 { for z in -1 * (self.render_distance as i32)..self.render_distance as i32 { targets.push(ChunkPos::new(pos.x + x, 0, pos.z + z)); } } for pos in self.loaded_chunks.keys().cloned().collect::<Vec<_>>() { if !targets.contains(&pos) { self.unload_chunk(&pos, renderer); } } for pos in targets { if !self.loaded_chunks.contains_key(&pos) { self.queue_chunk_load(pos) } } } pub fn load_chunk(&mut self, pos: ChunkPos, height: [u32; CHUNKSIZE * CHUNKSIZE], renderer: &mut Renderer) { let mut chunk = Chunk::new(pos); renderer.chunkpos_uniform.add(&renderer.queue, pos, pos.to_raw()); let now = Instant::now(); chunk.generate(height); let lapsed = now.elapsed(); self.loaded_chunks.insert( pos, chunk, ); self.chunk_loading_time += lapsed.as_micros(); self.mesh_neighbors(pos); } pub fn mesh_neighbors(&mut self, pos: ChunkPos) { self.mesh_chunk(pos); self.mesh_chunk(ChunkPos {x: pos.x + 1, ..pos}); self.mesh_chunk(ChunkPos {x: pos.x - 1, ..pos}); self.mesh_chunk(ChunkPos {z: pos.z + 1, ..pos}); self.mesh_chunk(ChunkPos {z: pos.z - 1, ..pos}); } pub fn mesh_chunk(&mut self, pos: ChunkPos) { let c = &self.loaded_chunks.get(&pos); if !c.is_none() { let mut mesh = ChunkMesh::new(); let now = Instant::now(); mesh.create_simple_mesh(c.unwrap(), &self); let elapsed = now.elapsed(); self.chunks_meshes.insert( pos, mesh ); self.chunk_meshing_time += elapsed.as_micros(); } } pub fn get_neighbors(&self, center: ChunkPos) -> [Option<&Chunk>;
pub fn queue_chunk_load(&mut self, pos: ChunkPos) { self.load_queue.push(pos); } pub fn load_queue(&mut self, world: &World, renderer: &mut Renderer) { if self.load_queue.len() > 0 { for pos in self.load_queue.clone() { self.load_chunk(pos.clone(), world.map.create_heightmap(&pos), renderer); } self.load_queue.clear(); } } pub fn unload_chunk(&mut self, pos: &ChunkPos, renderer: &mut Renderer) { self.chunks_meshes.remove(pos); self.loaded_chunks.remove(pos); self.chunk_buffers.remove(pos); renderer.chunkpos_uniform.remove(pos); } pub fn update(&mut self, context: &mut Context, encoder: &mut wgpu::CommandEncoder) { if !self.updated { self.updated = true; for (pos, chunk) in &self.chunks_meshes { let mut c = ChunkDrawable::new(&context.renderer.device, *pos); c.from_chunk_mesh(&chunk, &context.renderer.device, encoder); self.chunk_buffers.insert(*pos, c); } } } pub fn get_chunk(&self, pos: ChunkPos) -> &Chunk { self.loaded_chunks.get(&pos).unwrap() } pub fn get_chunk_option(&self, pos: ChunkPos) -> Option<&Chunk> { self.loaded_chunks.get(&pos) } pub fn get_chunk_mut_option(&mut self, pos: ChunkPos) -> Option<&mut Chunk> { self.loaded_chunks.get_mut(&pos) } pub fn get_mesh(&self, pos: ChunkPos) -> &ChunkMesh { self.chunks_meshes.get(&pos).unwrap() } pub fn get_block_at_coord(&self, coord: WorldCoord) -> Option<BlockID> { if let Some(chunk) = self.loaded_chunks.get(&coord.to_chunk_coord()) { return Some(chunk.at_coord(coord.to_chunk_local())) } else { return None } } pub fn meshing_time(&self) -> u128 { self.chunk_meshing_time / self.chunks_meshes.len() as u128 } pub fn loading_time(&self) -> u128 { self.chunk_loading_time / self.chunks_meshes.len() as u128 } }
4]{ [ self.get_chunk_option(center + ChunkPos::new(0, 0, 1)), self.get_chunk_option(center + ChunkPos::new(1, 0, 0)), self.get_chunk_option(center + ChunkPos::new(0, 0, -1)), self.get_chunk_option(center + ChunkPos::new(-1, 0, 0)), ] }
function_block-function_prefixed
[ { "content": "// Helper function\n\npub fn coords_to_float(coords: [u32; 2]) -> [f32; 2] {\n\n [\n\n coords[0] as f32 / TEXTURE_IMAGE_WIDTH as f32,\n\n coords[1] as f32 / TEXTURE_IMAGE_HEIGHT as f32,\n\n ]\n\n}\n\n\n\n#[derive(Debug, Copy, Clone)]\n\npub struct TextureTile {\n\n pub coords: Point2<u32>, // Index coord of texture\n\n}\n\n\n\nimpl TextureTile {\n\n pub fn to_usable(&self) -> [[f32; 2]; 4] {\n\n [\n\n coords_to_float([self.coords.x * TEXTURE_WIDTH + TEXTURE_WIDTH, self.coords.y * TEXTURE_HEIGHT]),\n\n coords_to_float([self.coords.x * TEXTURE_WIDTH, self.coords.y * TEXTURE_HEIGHT]),\n\n coords_to_float([self.coords.x * TEXTURE_WIDTH, self.coords.y * TEXTURE_HEIGHT + TEXTURE_HEIGHT]),\n\n coords_to_float([self.coords.x * TEXTURE_WIDTH + TEXTURE_WIDTH, self.coords.y * TEXTURE_HEIGHT + TEXTURE_HEIGHT]),\n\n ]\n", "file_path": "src/render/low/textures.rs", "rank": 0, "score": 167331.92123119996 }, { "content": "pub fn index_to_coord(index: usize) -> (u32, u32, u32) {\n\n let x = index % CHUNKSIZE;\n\n let y = (index / (CHUNKSIZE * CHUNKSIZE)) as u32;\n\n let z = (index / CHUNKSIZE) % CHUNKSIZE;\n\n\n\n (x as u32, y as u32, z as u32)\n\n}", "file_path": "src/world/chunk/chunk.rs", "rank": 1, "score": 161755.21956752994 }, { "content": "/// Y represents height, Z depth and X width\n\npub fn coord_to_index(x: i16, y: i16, z: i16) -> usize {\n\n ((x + z * CHUNKSIZE as i16) as i32 + y as i32 * (CHUNKSIZE * CHUNKSIZE) as i32) as usize\n\n}\n\n\n", "file_path": "src/world/chunk/chunk.rs", "rank": 2, "score": 128117.3510296266 }, { "content": "pub fn default_depth_texture(\n\n device: &wgpu::Device, \n\n sc_desc: &wgpu::SwapChainDescriptor,\n\n) -> (wgpu::Texture, wgpu::TextureView, wgpu::Sampler) {\n\n\n\n let desc = wgpu::TextureDescriptor {\n\n label: Some(\"depth texture\"),\n\n size : wgpu::Extent3d {\n\n width: sc_desc.width,\n\n height: sc_desc.height,\n\n depth: 1,\n\n },\n\n mip_level_count: 1,\n\n sample_count: 1,\n\n dimension: wgpu::TextureDimension::D2,\n\n format: wgpu::TextureFormat::Depth32Float,\n\n usage: wgpu::TextureUsage::RENDER_ATTACHMENT // 3.\n\n | wgpu::TextureUsage::SAMPLED,\n\n };\n\n\n", "file_path": "src/render/low/init.rs", "rank": 3, "score": 114764.42718758393 }, { "content": "struct Segment {\n\n size: usize,\n\n free: bool,\n\n}", "file_path": "src/render/low/buffer.rs", "rank": 4, "score": 92156.51599525004 }, { "content": "use crate::render::{\n\n vertexarray::VertexArray,\n\n shapes::shapes::Quad,\n\n meshing::meshing::*,\n\n};\n\nuse crate::world::{\n\n chunk::chunk::{Chunk, index_to_coord},\n\n chunk::chunkmanager::ChunkManager,\n\n constants::*,\n\n block::blocks::{get_block, BlockID, Blocks, Sides},\n\n chunk::pos::*,\n\n};\n\n\n\npub struct ChunkMesh {\n\n pub mesh: Mesh, // ToDo make private\n\n}\n\n\n\nimpl ChunkMesh {\n\n pub fn new() -> Self {\n\n Self {mesh: Mesh::new()}\n", "file_path": "src/render/meshing/chunkmeshing.rs", "rank": 5, "score": 81973.27260839209 }, { "content": " }\n\n\n\n pub fn to_vertex_array(&self) -> &VertexArray<Quad> {\n\n &self.mesh.quads\n\n }\n\n\n\n #[allow(dead_code)]\n\n /// Create mesh with every block representing 6 quads.\n\n /// This is extremely inefficient and should only be used for testing\n\n pub fn create_dumb_mesh(&mut self, chunk: &Chunk) {\n\n\n\n let mut mesh = Mesh::new();\n\n\n\n for i in 0..(CHUNKSIZE * CHUNKSIZE * WORLDHEIGHT) {\n\n let (x, y, z) = index_to_coord(i);\n\n\n\n let b = chunk.at_coord_bounds(ChunkCoord {x: x as i16, y: y as i16, z: z as i16});\n\n \n\n if get_block(b).transparent 
{\n\n continue\n", "file_path": "src/render/meshing/chunkmeshing.rs", "rank": 6, "score": 81969.80724147444 }, { "content": " pub fn create_simple_mesh(&mut self, chunk: &Chunk, chunk_manager: &ChunkManager) {\n\n\n\n let mut mesh = Mesh::new();\n\n\n\n for i in 0..(CHUNKSIZE * CHUNKSIZE * WORLDHEIGHT) {\n\n let (x, y, z) = index_to_coord(i);\n\n\n\n let coord = ChunkCoord {x: x as i16, y: y as i16, z: z as i16};\n\n\n\n let blockid = chunk.at_coord(coord);\n\n let block = get_block(blockid);\n\n\n\n if block.transparent {\n\n continue\n\n }\n\n\n\n // Left\n\n ChunkMesh::add_if_needed(chunk, &mut mesh, ChunkCoord {x: coord.x + 1, ..coord}, coord, Sides::LEFT, blockid, chunk_manager);\n\n // Right\n\n ChunkMesh::add_if_needed(chunk, &mut mesh, ChunkCoord {x: coord.x - 1, ..coord}, coord, Sides::RIGHT, blockid, chunk_manager);\n", "file_path": "src/render/meshing/chunkmeshing.rs", "rank": 7, "score": 81968.66515536228 }, { "content": " // Top\n\n ChunkMesh::add_if_needed(chunk, &mut mesh, ChunkCoord {y: coord.y + 1, ..coord}, coord, Sides::TOP, blockid, chunk_manager);\n\n // Bottom\n\n ChunkMesh::add_if_needed(chunk, &mut mesh, ChunkCoord {y: coord.y - 1, ..coord}, coord, Sides::BOTTOM, blockid, chunk_manager);\n\n // Back\n\n ChunkMesh::add_if_needed(chunk, &mut mesh, ChunkCoord {z: coord.z + 1, ..coord}, coord, Sides::BACK, blockid, chunk_manager);\n\n // Front\n\n ChunkMesh::add_if_needed(chunk, &mut mesh, ChunkCoord {z: coord.z - 1, ..coord}, coord, Sides::FRONT, blockid, chunk_manager); \n\n }\n\n\n\n self.mesh = mesh;\n\n }\n\n\n\n fn add_if_needed(\n\n chunk: &Chunk,\n\n mesh: &mut Mesh,\n\n neighbor_block: ChunkCoord,\n\n coord: ChunkCoord,\n\n face: Sides,\n\n block: BlockID,\n", "file_path": "src/render/meshing/chunkmeshing.rs", "rank": 8, "score": 81965.58329454921 }, { "content": " manager: &ChunkManager,\n\n ) {\n\n\n\n let blockid: BlockID;\n\n\n\n // If in bounds, get just get it from the current chunk (faster)\n\n if Chunk::in_bounds(neighbor_block) {\n\n blockid = chunk.at_coord(neighbor_block);\n\n\n\n // If not in bounds, request the block from the chunkmanager\n\n } else {\n\n // println!(\"Block {:?} in chunk {:?}\", neighbor_block, chunk.pos);\n\n blockid = manager.get_block_at_coord(WorldCoord::from_chunk_pos(chunk.pos, neighbor_block)).unwrap_or(Blocks::AIR as BlockID)\n\n }\n\n \n\n if get_block(blockid).transparent {\n\n mesh.add_face(MeshFace {\n\n coordinate: [coord.x as u32, coord.y as u32, coord.z as u32],\n\n face: face,\n\n blocktype: block,\n", "file_path": "src/render/meshing/chunkmeshing.rs", "rank": 9, "score": 81964.34458399547 }, { "content": " });\n\n }\n\n }\n\n\n\n #[allow(dead_code)]\n\n /// Create an optimized mesh where all adjecent block of the same type are\n\n /// formed into a single quad. This is only useful if GPU memory usage is high\n\n /// because generating this mesh is slower\n\n pub fn create_greedy_mesh() {\n\n // Todo implement this\n\n }\n\n}", "file_path": "src/render/meshing/chunkmeshing.rs", "rank": 10, "score": 81958.46794503405 }, { "content": " blocktype: b,\n\n });\n\n mesh.add_face(MeshFace {\n\n coordinate: [x, y, z],\n\n face: Sides::TOP,\n\n blocktype: b,\n\n });\n\n mesh.add_face(MeshFace {\n\n coordinate: [x, y, z],\n\n face: Sides::BOTTOM,\n\n blocktype: b,\n\n });\n\n }\n\n\n\n self.mesh = mesh;\n\n }\n\n\n\n #[allow(dead_code)]\n\n /// Creates a culled mesh. 
Faces that are not adjecent to a transparent\n\n /// will not be added to the mesh buffer\n", "file_path": "src/render/meshing/chunkmeshing.rs", "rank": 11, "score": 81954.64479557362 }, { "content": " }\n\n\n\n mesh.add_face(MeshFace {\n\n coordinate: [x, y, z],\n\n face: Sides::RIGHT,\n\n blocktype: b,\n\n });\n\n mesh.add_face(MeshFace {\n\n coordinate: [x, y, z],\n\n face: Sides::FRONT,\n\n blocktype: b,\n\n });\n\n mesh.add_face(MeshFace {\n\n coordinate: [x, y, z],\n\n face: Sides::BACK,\n\n blocktype: b,\n\n });\n\n mesh.add_face(MeshFace {\n\n coordinate: [x, y, z],\n\n face: Sides::LEFT,\n", "file_path": "src/render/meshing/chunkmeshing.rs", "rank": 12, "score": 81954.61090543604 }, { "content": "use crate::render::vertexarray::VertexArray;\n\nuse crate::render::shapes::shapes::Quad;\n\nuse crate::world::block::blocks::{BlockID, get_block, Sides};\n\n\n\n// Used for creating the corresponding faces. These represent coordinates of the 4 vertices in the correct order\n\npub const FACES: [Face; 6] = [\n\n Face {interval: [[0, 1, 0], [1, 1, 0], [1, 0, 0], [0, 0, 0]]},\n\n Face {interval: [[1, 1, 0], [1, 1, 1], [1, 0, 1], [1, 0, 0]]},\n\n Face {interval: [[1, 1, 1], [0, 1, 1], [0, 0, 1], [1, 0, 1]]},\n\n Face {interval: [[0, 1, 1], [0, 1, 0], [0, 0, 0], [0, 0, 1]]},\n\n Face {interval: [[0, 1, 1], [1, 1, 1], [1, 1, 0], [0, 1, 0]]},\n\n Face {interval: [[0, 0, 0], [1, 0, 0], [1, 0, 1], [0, 0, 1]]},\n\n];\n\n\n\n/// A mesh holds quads and renders these quads\n\npub struct Mesh {\n\n pub quads: VertexArray<Quad>,\n\n}\n\n\n\nimpl Mesh {\n", "file_path": "src/render/meshing/meshing.rs", "rank": 13, "score": 60233.13636421418 }, { "content": "}\n\n\n\npub struct Face {\n\n interval: [[u32; 3]; 4],\n\n}\n\n\n\npub struct MeshFace {\n\n pub coordinate: [u32; 3],\n\n pub face: Sides, // Which face of block\n\n pub blocktype: BlockID,\n\n}", "file_path": "src/render/meshing/meshing.rs", "rank": 14, "score": 60229.480385582945 }, { "content": " pub fn new() -> Self {\n\n let quads = VertexArray::new();\n\n\n\n Self {\n\n quads,\n\n }\n\n }\n\n\n\n pub fn add_face(&mut self, face: MeshFace) {\n\n let interval = &FACES[face.face as usize];\n\n self.quads.push(Quad {\n\n coords: [\n\n [(face.coordinate[0] + interval.interval[0][0]) as f32, (face.coordinate[1] + interval.interval[0][1]) as f32, (face.coordinate[2] + interval.interval[0][2]) as f32],\n\n [(face.coordinate[0] + interval.interval[1][0]) as f32, (face.coordinate[1] + interval.interval[1][1]) as f32, (face.coordinate[2] + interval.interval[1][2]) as f32],\n\n [(face.coordinate[0] + interval.interval[2][0]) as f32, (face.coordinate[1] + interval.interval[2][1]) as f32, (face.coordinate[2] + interval.interval[2][2]) as f32],\n\n [(face.coordinate[0] + interval.interval[3][0]) as f32, (face.coordinate[1] + interval.interval[3][1]) as f32, (face.coordinate[2] + interval.interval[3][2]) as f32],\n\n ],\n\n text_coords: get_block(face.blocktype).texture.sides[face.face as usize].to_usable(),\n\n })\n\n }\n", "file_path": "src/render/meshing/meshing.rs", "rank": 15, "score": 60228.12896075743 }, { "content": "pub fn get_block<'a>(id: BlockID) -> &'a Block {\n\n &BLOCKS[id as usize]\n\n}", "file_path": "src/world/block/blocks.rs", "rank": 16, "score": 59578.21838907353 }, { "content": "pub trait Drawable {\n\n /// Initialize all related things such as a uniform buffer specific for the Drawable\n\n /// and the renderpipeline.\n\n fn create_pipeline(renderer: &Renderer) -> RenderPipeline\n\n where Self: Sized;\n\n \n\n fn draw<'a>(&'a self, pass: 
&mut RenderPass<'a>, renderer: &'a Renderer);\n\n}", "file_path": "src/render/drawables/mod.rs", "rank": 17, "score": 58767.41918048846 }, { "content": "pub trait Shape {\n\n fn num_indices() -> usize;\n\n fn num_vertices() -> usize;\n\n fn vertices(&self) -> Vec<Vertex>;\n\n fn indexes(&self, offset: u32) -> Vec<u32>;\n\n}\n", "file_path": "src/render/shapes/shape.rs", "rank": 18, "score": 58767.41918048846 }, { "content": "use crate::world::chunk::pos::ChunkPos;\n\n\n\npub struct Renderer {\n\n // General gpu setup\n\n pub surface: wgpu::Surface,\n\n pub device: wgpu::Device,\n\n pub queue: wgpu::Queue,\n\n pub sc_desc: wgpu::SwapChainDescriptor,\n\n pub swap_chain: wgpu::SwapChain,\n\n pub size: PhysicalSize<u32>,\n\n\n\n // Other\n\n pub camera: Camera,\n\n pub textures: TextureManager,\n\n pub chunkpos_uniform: MultiUniform<ChunkPos, ChunkPositionUniform>,\n\n\n\n // Used when rendering\n\n pub pipelines: HashMap<TypeId, wgpu::RenderPipeline>,\n\n pub depth_view: wgpu::TextureView,\n\n}\n", "file_path": "src/render/low/renderer.rs", "rank": 31, "score": 56768.63707861329 }, { "content": " self.camera.update(&self.queue);\n\n }\n\n\n\n pub fn input(&mut self, event: &DeviceEvent) -> bool {\n\n self.camera.input(event)\n\n }\n\n\n\n pub fn default_pipeline(\n\n &self,\n\n vertex: wgpu::ShaderModuleDescriptor,\n\n fragment: wgpu::ShaderModuleDescriptor,\n\n bind_group_layouts: &[&wgpu::BindGroupLayout],\n\n ) -> wgpu::RenderPipeline {\n\n\n\n let render_pipeline_layout =\n\n self.device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {\n\n label: Some(\"Render Pipeline Layout\"),\n\n bind_group_layouts: bind_group_layouts,\n\n push_constant_ranges: &[],\n\n });\n", "file_path": "src/render/low/renderer.rs", "rank": 32, "score": 56766.33017015775 }, { "content": "\n\n pub fn render(\n\n &mut self,\n\n objs: Vec<&dyn Drawable>,\n\n encoder: &mut wgpu::CommandEncoder,\n\n frame: &wgpu::SwapChainFrame,\n\n ) { \n\n let mut render_pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {\n\n label: Some(\"Render pass descriptor in renderer\"),\n\n color_attachments: &[\n\n wgpu::RenderPassColorAttachmentDescriptor {\n\n attachment: &frame.output.view,\n\n resolve_target: None,\n\n ops: wgpu::Operations {\n\n load: wgpu::LoadOp::Clear(wgpu::Color { // Clear color\n\n r: 0.1,\n\n g: 0.2,\n\n b: 0.3,\n\n a: 1.0,\n\n }),\n", "file_path": "src/render/low/renderer.rs", "rank": 33, "score": 56765.96442961137 }, { "content": "\n\n t\n\n }\n\n\n\n pub fn resize(&mut self, new_size: PhysicalSize<u32>) {\n\n // Update swap chain\n\n self.size = new_size;\n\n self.sc_desc.width = new_size.width;\n\n self.sc_desc.height = new_size.height;\n\n self.swap_chain = self.device.create_swap_chain(&self.surface, &self.sc_desc);\n\n\n\n // Update depth texture\n\n let (_, depth_view, _) = default_depth_texture(&self.device, &self.sc_desc);\n\n self.depth_view = depth_view;\n\n\n\n // Update camera\n\n self.camera.projection.resize(new_size.width, new_size.height);\n\n\n\n println!(\"New screensize: {}x{}\", new_size.width, new_size.height);\n\n }\n", "file_path": "src/render/low/renderer.rs", "rank": 34, "score": 56765.00421099684 }, { "content": "use wgpu;\n\nuse winit::{\n\n event::DeviceEvent,\n\n dpi::PhysicalSize,\n\n window::Window,\n\n};\n\n\n\nuse std::any::TypeId;\n\nuse std::collections::HashMap;\n\n\n\nuse crate::render::{\n\n low::{\n\n init::default_depth_texture,\n\n vertex::Vertex,\n\n textures::TextureManager,\n\n uniforms::{MultiUniform, ChunkPositionUniform},\n\n },\n\n 
camera::Camera,\n\n drawables::{Drawable, texture_vertex::TextureVertex, chunk::ChunkDrawable},\n\n};\n", "file_path": "src/render/low/renderer.rs", "rank": 35, "score": 56764.886686953265 }, { "content": " store: true,\n\n }\n\n }\n\n ],\n\n depth_stencil_attachment: Some(wgpu::RenderPassDepthStencilAttachmentDescriptor {\n\n attachment: &self.depth_view,\n\n depth_ops: Some(wgpu::Operations {\n\n load: wgpu::LoadOp::Clear(1.0),\n\n store: true,\n\n }),\n\n stencil_ops: None,\n\n }),\n\n });\n\n\n\n for obj in objs {\n\n obj.draw(&mut render_pass, &self);\n\n } \n\n }\n\n pub fn update(&mut self, dt: std::time::Duration) {\n\n self.camera.controller.update_camera(&mut self.camera.view, dt);\n", "file_path": "src/render/low/renderer.rs", "rank": 36, "score": 56764.37454217078 }, { "content": " }\n\n}\n\n\n\n// Helper functions\n\nimpl Renderer {\n\n pub fn start_frame(&self) -> wgpu::CommandEncoder {\n\n self.device.create_command_encoder(&wgpu::CommandEncoderDescriptor::default())\n\n }\n\n\n\n pub fn end_frame(&self, encoder: wgpu::CommandEncoder) {\n\n self.queue.submit(vec![encoder.finish()]);\n\n }\n\n\n\n pub fn get_pipeline<T: 'static + Drawable>(&self) -> &wgpu::RenderPipeline {\n\n &self\n\n .pipelines\n\n .get(&std::any::TypeId::of::<T>())\n\n .expect(\"Pipeline was not registered in context\")\n\n }\n\n\n\n pub fn register_pipeline<T: 'static + Drawable>(&mut self) {\n\n self.pipelines\n\n .insert(std::any::TypeId::of::<T>(), T::create_pipeline(self));\n\n }\n\n}", "file_path": "src/render/low/renderer.rs", "rank": 37, "score": 56764.32531205497 }, { "content": " let chunkpos_uniform = MultiUniform::new(&device, 3, 2);\n\n\n\n let mut t = Self {\n\n size,\n\n surface,\n\n queue,\n\n sc_desc,\n\n swap_chain,\n\n device,\n\n\n\n camera,\n\n textures: textures,\n\n chunkpos_uniform,\n\n\n\n pipelines: HashMap::new(),\n\n depth_view,\n\n };\n\n\n\n t.register_pipeline::<TextureVertex>();\n\n t.register_pipeline::<ChunkDrawable>();\n", "file_path": "src/render/low/renderer.rs", "rank": 38, "score": 56764.07434821858 }, { "content": " },\n\n None,\n\n ).await.unwrap();\n\n\n\n let sc_desc = wgpu::SwapChainDescriptor { // How should the swap chain be used?\n\n usage: wgpu::TextureUsage::RENDER_ATTACHMENT, // Texture usage\n\n format: wgpu::TextureFormat::Bgra8UnormSrgb,\n\n width: size.width,\n\n height: size.height,\n\n present_mode: wgpu::PresentMode::Fifo,\n\n };\n\n let swap_chain = device.create_swap_chain(&surface, &sc_desc);\n\n\n\n let (_, depth_view, _) = default_depth_texture(&device, &sc_desc);\n\n let camera = Camera::new(&device, sc_desc.width, sc_desc.height, cgmath::Point3 {x: 0.0, y: 0.0, z: 0.0});\n\n\n\n // Load a texture\n\n let mut textures = TextureManager::new(&device);\n\n textures.load(\"/home/duco/development/rust/gamedev/luwdigengine2d/assets/terrain.png\", &device, &queue);\n\n\n", "file_path": "src/render/low/renderer.rs", "rank": 39, "score": 56761.222110410716 }, { "content": "\n\nimpl Renderer {\n\n pub async fn new(window: &Window) -> Self {\n\n\n\n let size = window.inner_size();\n\n\n\n let instance = wgpu::Instance::new(wgpu::BackendBit::PRIMARY);\n\n let surface = unsafe { instance.create_surface(window) };\n\n let adapter = instance.request_adapter(\n\n &wgpu::RequestAdapterOptions {\n\n power_preference: wgpu::PowerPreference::default(),\n\n compatible_surface: Some(&surface),\n\n },\n\n ).await.unwrap();\n\n\n\n let (device, queue) = adapter.request_device(\n\n &wgpu::DeviceDescriptor {\n\n features: wgpu::Features::empty(),\n\n limits: 
wgpu::Limits::default(),\n\n label: Some(\"Device descriptior\"),\n", "file_path": "src/render/low/renderer.rs", "rank": 40, "score": 56760.75386241624 }, { "content": "\n\n self.device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {\n\n label: Some(\"Render Pipeline\"),\n\n layout: Some(&render_pipeline_layout),\n\n multisample: wgpu::MultisampleState {\n\n count: 1,\n\n mask: !0,\n\n alpha_to_coverage_enabled: false,\n\n },\n\n vertex: wgpu::VertexState {\n\n module: &self.device.create_shader_module(&vertex),\n\n entry_point: \"main\", \n\n buffers: &[Vertex::desc()],\n\n },\n\n fragment: Some(wgpu::FragmentState { \n\n module: &self.device.create_shader_module(&fragment),\n\n entry_point: \"main\",\n\n targets: &[wgpu::ColorTargetState {\n\n format: self.sc_desc.format,\n\n color_blend: wgpu::BlendState::REPLACE,\n", "file_path": "src/render/low/renderer.rs", "rank": 41, "score": 56760.268280210556 }, { "content": " alpha_blend: wgpu::BlendState::REPLACE,\n\n write_mask: wgpu::ColorWrite::ALL,\n\n }],\n\n }),\n\n primitive: wgpu::PrimitiveState {\n\n front_face: wgpu::FrontFace::Ccw,\n\n cull_mode: wgpu::CullMode::None,\n\n polygon_mode: wgpu::PolygonMode::Fill,\n\n topology: wgpu::PrimitiveTopology::TriangleList,\n\n strip_index_format: None,\n\n },\n\n depth_stencil: Some(wgpu::DepthStencilState {\n\n format: wgpu::TextureFormat::Depth32Float,\n\n depth_write_enabled: true,\n\n depth_compare: wgpu::CompareFunction::Less,\n\n stencil: wgpu::StencilState::default(),\n\n bias: wgpu::DepthBiasState::default(),\n\n clamp_depth: false,\n\n }),\n\n })\n", "file_path": "src/render/low/renderer.rs", "rank": 42, "score": 56754.77613955025 }, { "content": "pub mod meshing;\n\npub mod chunkmeshing;", "file_path": "src/render/meshing/mod.rs", "rank": 43, "score": 52698.3956296617 }, { "content": "use crate::render::{\n\n low::{\n\n buffer::DynamicBuffer,\n\n renderer::Renderer,\n\n vertex,\n\n },\n\n meshing::chunkmeshing::ChunkMesh,\n\n};\n\nuse crate::world::chunk::pos::ChunkPos;\n\nuse super::Drawable;\n\n\n\npub struct ChunkDrawable {\n\n vertex_buffer: DynamicBuffer<vertex::Vertex>,\n\n index_buffer: DynamicBuffer<u32>,\n\n pos: ChunkPos,\n\n}\n\n\n\nimpl ChunkDrawable {\n\n pub fn new(device: &wgpu::Device, pos: ChunkPos) -> Self {\n\n let vertex_buffer = DynamicBuffer::new(\n", "file_path": "src/render/drawables/chunk.rs", "rank": 44, "score": 51948.09087532717 }, { "content": " device, \n\n encoder,\n\n &mesh.to_vertex_array().to_vertices(),\n\n );\n\n\n\n self.index_buffer.insert_back(\n\n device, \n\n encoder,\n\n &mesh.to_vertex_array().to_indices(),\n\n );\n\n }\n\n}\n\n\n\nimpl Drawable for ChunkDrawable {\n\n fn create_pipeline(renderer: &Renderer) -> wgpu::RenderPipeline {\n\n renderer.default_pipeline(\n\n wgpu::include_spirv!(\"../low/shaders/chunk.vert.spv\"),\n\n wgpu::include_spirv!(\"../low/shaders/chunk.frag.spv\"), \n\n &[\n\n &renderer.camera.uniform.uniform_bind_group_layout, // set = 0\n", "file_path": "src/render/drawables/chunk.rs", "rank": 45, "score": 51938.939737883105 }, { "content": " 16000,\n\n device,\n\n wgpu::BufferUsage::VERTEX,\n\n );\n\n\n\n let index_buffer = DynamicBuffer::new(\n\n 16000,\n\n device,\n\n wgpu::BufferUsage::INDEX,\n\n );\n\n\n\n Self {\n\n vertex_buffer,\n\n index_buffer,\n\n pos,\n\n }\n\n }\n\n\n\n pub fn from_chunk_mesh(&mut self, mesh: &ChunkMesh, device: &wgpu::Device, encoder: &mut wgpu::CommandEncoder) {\n\n self.vertex_buffer.insert_back(\n", "file_path": "src/render/drawables/chunk.rs", "rank": 46, "score": 
51938.0051054926 }, { "content": " &renderer.textures.texture_bind_group_layout, // set = 1\n\n &renderer.chunkpos_uniform.uniform_bind_group_layout, // set = 2\n\n ],\n\n )\n\n }\n\n\n\n fn draw<'a>(&'a self, pass: &mut wgpu::RenderPass<'a>, renderer: &'a Renderer) {\n\n pass.set_pipeline(renderer.get_pipeline::<Self>());\n\n pass.set_bind_group(renderer.camera.uniform.index, &renderer.camera.uniform.uniform_bind_group, &[]); // Camera\n\n pass.set_bind_group(1, renderer.textures.get_bind_group(), &[]); // Texture\n\n \n\n // Set correct chunkpos uniform\n\n let a = renderer.chunkpos_uniform.offset.get(&self.pos).unwrap() * wgpu::BIND_BUFFER_ALIGNMENT as u32;\n\n pass.set_bind_group(renderer.chunkpos_uniform.index, &renderer.chunkpos_uniform.uniform_bind_group, &[a]);\n\n\n\n // Draw\n\n pass.set_vertex_buffer(0, self.vertex_buffer.get_buffer().slice(..));\n\n pass.set_index_buffer(self.index_buffer.get_buffer().slice(..), wgpu::IndexFormat::Uint32);\n\n pass.draw_indexed(0..self.index_buffer.len as u32, 0, 0..1);\n\n }\n\n}", "file_path": "src/render/drawables/chunk.rs", "rank": 47, "score": 51936.17452259694 }, { "content": "\n\n#[repr(C)]\n\n#[derive(Debug, Copy, Clone, bytemuck::Pod, bytemuck::Zeroable)]\n\npub struct ChunkPositionUniform {\n\n pub location: [f32; 3]\n\n}\n\n\n\n#[repr(C)]\n\n#[derive(Debug, Copy, Clone, bytemuck::Pod, bytemuck::Zeroable)]\n\npub struct CameraUniform {\n\n // We can't use cgmath with bytemuck directly so we'll have\n\n // to convert the Matrix4 into a 4x4 f32 array\n\n view_proj: [[f32; 4]; 4],\n\n}\n\n\n\nimpl CameraUniform {\n\n pub fn new() -> Self {\n\n use cgmath::SquareMatrix;\n\n Self {\n\n view_proj: cgmath::Matrix4::identity().into(),\n\n }\n\n }\n\n\n\n pub fn update_view_proj(&mut self, data: cgmath::Matrix4<f32>) {\n\n self.view_proj = data.into();\n\n }\n\n}", "file_path": "src/render/low/uniforms.rs", "rank": 48, "score": 50662.83354556413 }, { "content": " }\n\n\n\n pub const fn new(x: u32, y: u32) -> Self {\n\n Self {\n\n coords: Point2 {x, y},\n\n }\n\n }\n\n}\n\n\n\npub struct TextureManager {\n\n pub textures: Vec<wgpu::BindGroup>, // Todo support multple textures\n\n\n\n pub texture_bind_group_layout: wgpu::BindGroupLayout,\n\n}\n\n\n\nimpl TextureManager {\n\n pub fn new(device: &wgpu::Device) -> Self {\n\n let texture_bind_group_layout = device.create_bind_group_layout(\n\n &wgpu::BindGroupLayoutDescriptor {\n\n entries: &[\n", "file_path": "src/render/low/textures.rs", "rank": 49, "score": 50661.078534204615 }, { "content": "use image::GenericImageView;\n\nuse anyhow::*;\n\nuse cgmath::Point2;\n\nuse image;\n\n\n\npub const TEXTURE_WIDTH: u32 = 16;\n\npub const TEXTURE_HEIGHT: u32 = 16;\n\n\n\npub const TEXTURE_IMAGE_HEIGHT: u32 = 256;\n\npub const TEXTURE_IMAGE_WIDTH: u32 = 256;\n\n\n\n\n\n// Helper function\n", "file_path": "src/render/low/textures.rs", "rank": 50, "score": 50660.80876574776 }, { "content": "use winit::{\n\n event::*,\n\n dpi::PhysicalSize,\n\n event::{Event, WindowEvent},\n\n event_loop::{ControlFlow, EventLoop},\n\n window::WindowBuilder,\n\n window::Window,\n\n};\n\nuse futures::executor::block_on;\n\n\n\nuse crate::render::{\n\n low::renderer::Renderer,\n\n};\n\nuse crate::game::state::State;\n\n\n\npub struct Context {\n\n pub window: Window,\n\n pub event_loop: Option<EventLoop<()>>,\n\n pub renderer: Renderer,\n\n\n", "file_path": "src/render/low/context.rs", "rank": 51, "score": 50658.801999460076 }, { "content": "}\n\n\n\nimpl Context {\n\n pub fn new(window_title: String, window_size: [u32; 2]) -> 
Self {\n\n // Winit\n\n let event_loop = EventLoop::new();\n\n let window = WindowBuilder::new()\n\n .with_title(window_title)\n\n .with_inner_size(PhysicalSize::new(window_size[0], window_size[1]))\n\n .build(&event_loop)\n\n .unwrap();\n\n\n\n let renderer = block_on(Renderer::new(&window));\n\n\n\n Self {\n\n event_loop: Some(event_loop),\n\n window,\n\n renderer,\n\n }\n\n }\n", "file_path": "src/render/low/context.rs", "rank": 52, "score": 50658.18714076233 }, { "content": "use wgpu::util::DeviceExt;\n\nuse std::collections::HashMap;\n\nuse std::hash::Hash;\n\n\n\nconst UNIFORM_SIZE: usize = 200;\n\n\n\n/// A Uniform Buffer that can store multple things of T.\n\n/// In the renderpass the offset should be set accordingly. K is the type for indexing, T the data.\n\npub struct MultiUniform<K: Hash + Eq + Copy, T: bytemuck::Pod + bytemuck::Zeroable> {\n\n pub buffer: wgpu::Buffer,\n\n pub uniform_bind_group_layout: wgpu::BindGroupLayout,\n\n pub uniform_bind_group: wgpu::BindGroup,\n\n pub offset: HashMap<K, u32>, // Array of offsets\n\n open_spots: Vec<u32>,\n\n\n\n pub index: u32,\n\n pub binding: u32,\n\n \n\n phantom: std::marker::PhantomData<T>,\n\n}\n", "file_path": "src/render/low/uniforms.rs", "rank": 53, "score": 50656.953543314026 }, { "content": " Err(e) => println!(\"{:?}\", e),\n\n }\n\n }\n\n Some(swapchainframe) => {\n\n let mut encoder = self.renderer.start_frame();\n\n \n\n state.update(&mut self, &mut encoder);\n\n\n\n self.renderer.render(\n\n state.draw(),\n\n &mut encoder,\n\n &swapchainframe,\n\n );\n\n\n\n // println!(\"FPS: {}\", dt.as_secs_f64());\n\n\n\n self.renderer.end_frame(encoder);\n\n }\n\n }\n\n }\n\n Event::MainEventsCleared => {\n\n self.window.request_redraw();\n\n }\n\n _ => {}\n\n }\n\n });\n\n }\n\n}", "file_path": "src/render/low/context.rs", "rank": 54, "score": 50656.65689913576 }, { "content": "\n\n pub fn run<T: State + 'static>(mut self, mut state: T) {\n\n let mut last_render_time = std::time::Instant::now();\n\n \n\n let mut frame: Option<wgpu::SwapChainFrame> = None;\n\n\n\n self.event_loop.take().unwrap().run(move |event, _, control_flow| { \n\n match event {\n\n Event::DeviceEvent {\n\n ref event,\n\n .. 
// We're not using device_id currently\n\n } => {\n\n self.renderer.input(event);\n\n }\n\n\n\n Event::WindowEvent {\n\n ref event,\n\n window_id,\n\n } if window_id == self.window.id() => {\n\n \n", "file_path": "src/render/low/context.rs", "rank": 55, "score": 50655.89443571204 }, { "content": "#[repr(C)]\n\n#[derive(Copy, Clone, Debug, bytemuck::Pod, bytemuck::Zeroable)]\n\npub struct Vertex {\n\n pub position: [f32; 3],\n\n pub text_coords: [f32; 2],\n\n}\n\n\n\n// Vertex must be described for the pipeline\n\nimpl Vertex {\n\n pub fn desc<'a>() -> wgpu::VertexBufferLayout<'a> {\n\n wgpu::VertexBufferLayout {\n\n array_stride: std::mem::size_of::<Vertex>() as wgpu::BufferAddress, \n\n step_mode: wgpu::InputStepMode::Vertex, \n\n attributes: &[ \n\n wgpu::VertexAttribute {\n\n offset: 0, \n\n shader_location: 0, \n\n format: wgpu::VertexFormat::Float3, \n\n },\n\n wgpu::VertexAttribute {\n\n offset: std::mem::size_of::<[f32; 3]>() as wgpu::BufferAddress,\n\n shader_location: 1,\n\n format: wgpu::VertexFormat::Float2,\n\n }\n\n ]\n\n }\n\n }\n\n}", "file_path": "src/render/low/vertex.rs", "rank": 56, "score": 50654.05908159537 }, { "content": "\n\npub struct Texture {\n\n pub texture: wgpu::Texture,\n\n pub view: wgpu::TextureView,\n\n pub sampler: wgpu::Sampler,\n\n}\n\n\n\nimpl Texture {\n\n pub fn from_image(\n\n device: &wgpu::Device,\n\n queue: &wgpu::Queue,\n\n path: &str,\n\n label: Option<&str>\n\n ) -> Result<Self> {\n\n let img = image::open(path).unwrap();\n\n\n\n let rgba = img.as_rgba8().unwrap();\n\n let dimensions = img.dimensions();\n\n\n\n let size = wgpu::Extent3d {\n", "file_path": "src/render/low/textures.rs", "rank": 57, "score": 50654.02377528492 }, { "content": " label: Some(\"texture_bind_group_layout\"),\n\n }\n\n );\n\n\n\n let textures = Vec::<wgpu::BindGroup>::new();\n\n \n\n Self {\n\n textures,\n\n texture_bind_group_layout,\n\n }\n\n }\n\n\n\n pub fn load(&mut self, path: &str, device: &wgpu::Device, queue: &wgpu::Queue) {\n\n let diffuse_texture = Texture::from_image(&device, &queue, path, Some(\"A texture\")).unwrap();\n\n\n\n let diffuse_bind_group = device.create_bind_group(\n\n &wgpu::BindGroupDescriptor {\n\n layout: &self.texture_bind_group_layout,\n\n entries: &[\n\n wgpu::BindGroupEntry {\n", "file_path": "src/render/low/textures.rs", "rank": 58, "score": 50653.80291775483 }, { "content": "use wgpu;\n\nuse wgpu::util::DeviceExt;\n\n\n\n/// A wrapper around the wgpu::Buffer.\n\n/// Holds multple objects.\n\n/// Write only, not read.\n\npub struct DynamicBuffer <T: bytemuck::Pod + bytemuck::Zeroable> {\n\n buffer: wgpu::Buffer,\n\n usage: wgpu::BufferUsage,\n\n phantom: std::marker::PhantomData<T>,\n\n\n\n size: usize, // Reserved size\n\n pub len: usize, // Actual size\n\n}\n\n\n\nimpl<T: bytemuck::Pod + bytemuck::Zeroable> DynamicBuffer<T> {\n\n\n\n pub fn new (\n\n initial_size: usize, \n\n device: &wgpu::Device,\n", "file_path": "src/render/low/buffer.rs", "rank": 59, "score": 50653.75882400174 }, { "content": " });\n\n \n\n Self { \n\n uniform_buffer, \n\n uniform_bind_group_layout, \n\n uniform_bind_group,\n\n\n\n index,\n\n\n\n data,\n\n }\n\n }\n\n\n\n pub fn update(&self, queue: &wgpu::Queue) {\n\n queue.write_buffer(&self.uniform_buffer, 0, bytemuck::cast_slice(&[self.data]));\n\n }\n\n}\n\n\n\n// Uniform buffers are buffers that are available to every\n\n// shader instance. 
\n", "file_path": "src/render/low/uniforms.rs", "rank": 60, "score": 50653.586915391854 }, { "content": "\n\nimpl<K: Hash + Eq + Copy, T: bytemuck::Pod + bytemuck::Zeroable> MultiUniform<K, T> {\n\n pub fn new(device: &wgpu::Device, binding: u32, index: u32) -> Self {\n\n let t_size = std::mem::size_of::<T>() as u64;\n\n\n\n assert!(t_size < wgpu::BIND_BUFFER_ALIGNMENT);\n\n\n\n let uniform_bind_group_layout = device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {\n\n entries: &[\n\n wgpu::BindGroupLayoutEntry {\n\n binding: binding,\n\n visibility: wgpu::ShaderStage::VERTEX,\n\n ty: wgpu::BindingType::Buffer {\n\n ty: wgpu::BufferBindingType::Uniform,\n\n has_dynamic_offset: true,\n\n min_binding_size: wgpu::BufferSize::new(t_size),\n\n },\n\n count: None,\n\n }\n\n ],\n", "file_path": "src/render/low/uniforms.rs", "rank": 61, "score": 50653.2238413818 }, { "content": "pub mod buffer;\n\npub mod init;\n\npub mod context;\n\npub mod vertex;\n\npub mod renderer;\n\npub mod textures;\n\npub mod uniforms;\n\npub mod shaders;", "file_path": "src/render/low/mod.rs", "rank": 62, "score": 50653.196323687815 }, { "content": "impl<T: bytemuck::Pod + bytemuck::Zeroable> Uniform<T> {\n\n\n\n pub fn new(device: &wgpu::Device, data: T, binding: u32, index: u32) -> Self {\n\n let uniform_buffer = device.create_buffer_init(\n\n &wgpu::util::BufferInitDescriptor {\n\n label: Some(\"Unifor Buffer at binding\"),\n\n contents: bytemuck::cast_slice(&[data]),\n\n usage: wgpu::BufferUsage::UNIFORM | wgpu::BufferUsage::COPY_DST,\n\n }\n\n );\n\n \n\n let uniform_bind_group_layout = device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {\n\n entries: &[\n\n wgpu::BindGroupLayoutEntry {\n\n binding: binding,\n\n visibility: wgpu::ShaderStage::VERTEX,\n\n ty: wgpu::BindingType::Buffer {\n\n ty: wgpu::BufferBindingType::Uniform,\n\n has_dynamic_offset: false,\n\n min_binding_size: None,\n", "file_path": "src/render/low/uniforms.rs", "rank": 63, "score": 50652.93129369637 }, { "content": " encoder: &mut wgpu::CommandEncoder,\n\n ) {\n\n\n\n }\n\n\n\n pub fn get_buffer(&self) -> &wgpu::Buffer {\n\n &self.buffer\n\n }\n\n}\n\n\n", "file_path": "src/render/low/buffer.rs", "rank": 64, "score": 50652.674650018336 }, { "content": "\n\n pub fn add(&mut self, queue: &wgpu::Queue, at: K, data: T) {\n\n self.offset.insert(at, self.open_spots[0]);\n\n\n\n queue.write_buffer(\n\n &self.buffer, \n\n self.open_spots[0] as u64 * wgpu::BIND_BUFFER_ALIGNMENT, // This goes with the assumption that T is never bigger than BIND_BUFFER_ALIGNMENT (256 bytes)\n\n bytemuck::cast_slice(&[data])\n\n );\n\n\n\n self.open_spots.remove(0);\n\n }\n\n\n\n pub fn remove(&mut self, at: &K) {\n\n self.open_spots.insert(0, *self.offset.get(at).unwrap());\n\n self.offset.remove(at);\n\n }\n\n \n\n #[allow(dead_code)]\n\n pub fn modify(&mut self, queue: &wgpu::Queue, at: K, data: T) {\n", "file_path": "src/render/low/uniforms.rs", "rank": 65, "score": 50652.469488495764 }, { "content": " }\n\n Event::RedrawRequested(_) => {\n\n let now = std::time::Instant::now();\n\n let dt = now - last_render_time;\n\n last_render_time = now;\n\n \n\n self.renderer.update(dt);\n\n \n\n \n\n match frame.take() {\n\n None => {\n\n match self.renderer.swap_chain.get_current_frame() { \n\n Ok(swapchainframe) => {\n\n frame = Some(swapchainframe);\n\n }\n\n // Recreate the swap_chain if lost\n\n Err(wgpu::SwapChainError::Lost) => self.renderer.resize(self.renderer.size),\n\n // The system is out of memory, we should probably quit\n\n 
Err(wgpu::SwapChainError::OutOfMemory) => *control_flow = ControlFlow::Exit,\n\n // All other errors (Outdated, Timeout) should be resolved by the next frame\n", "file_path": "src/render/low/context.rs", "rank": 66, "score": 50652.40818384137 }, { "content": " let offset = self.offset.get(&at).unwrap();\n\n\n\n queue.write_buffer(\n\n &self.buffer, \n\n *offset as u64 * wgpu::BIND_BUFFER_ALIGNMENT, \n\n bytemuck::cast_slice(&[data])\n\n );\n\n }\n\n}\n\n\n\npub struct Uniform<T: bytemuck::Pod + bytemuck::Zeroable> {\n\n pub uniform_buffer: wgpu::Buffer,\n\n pub uniform_bind_group_layout: wgpu::BindGroupLayout,\n\n pub uniform_bind_group: wgpu::BindGroup,\n\n\n\n pub index: u32,\n\n\n\n pub data: T,\n\n}\n\n\n", "file_path": "src/render/low/uniforms.rs", "rank": 67, "score": 50652.18754747046 }, { "content": " encoder.copy_buffer_to_buffer(\n\n &to_add_buffer,\n\n 0,\n\n &self.buffer,\n\n (self.len * std::mem::size_of::<T>()) as u64,\n\n (data.len() * std::mem::size_of::<T>()) as u64,\n\n );\n\n\n\n\n\n\n\n self.len += data.len();\n\n }\n\n\n\n pub fn resize(\n\n &mut self, \n\n new_size: usize,\n\n device: &wgpu::Device,\n\n encoder: &mut wgpu::CommandEncoder,\n\n ) {\n\n let new_buffer = device.create_buffer(&wgpu::BufferDescriptor {\n", "file_path": "src/render/low/buffer.rs", "rank": 68, "score": 50651.59615757879 }, { "content": " match event {\n\n WindowEvent::CloseRequested => *control_flow = ControlFlow::Exit,\n\n WindowEvent::KeyboardInput { input, .. } => match input {\n\n KeyboardInput {\n\n state: ElementState::Pressed,\n\n virtual_keycode: Some(VirtualKeyCode::Escape),\n\n ..\n\n } => *control_flow = ControlFlow::Exit,\n\n _ => {}\n\n },\n\n WindowEvent::Resized(physical_size) => {\n\n self.renderer.resize(*physical_size);\n\n }\n\n WindowEvent::ScaleFactorChanged { new_inner_size, .. } => {\n\n // new_inner_size is &mut so w have to dereference it twice\n\n self.renderer.resize(**new_inner_size);\n\n }\n\n _ => {}\n\n }\n\n \n", "file_path": "src/render/low/context.rs", "rank": 69, "score": 50651.34257959466 }, { "content": "\n\n // Append to the buffer\n\n pub fn insert_back(\n\n &mut self,\n\n device: &wgpu::Device,\n\n encoder: &mut wgpu::CommandEncoder,\n\n data: &[T],\n\n ) {\n\n if data.len() + self.len > self.size {\n\n self.resize((data.len() + self.len) * 2, device, encoder); // Resize twice as much as is needed\n\n println!(\"Resized\");\n\n }\n\n\n\n let to_add_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {\n\n label: Some(\"To add Buffer\"),\n\n usage: wgpu::BufferUsage::COPY_SRC | self.usage,\n\n contents: bytemuck::cast_slice(data),\n\n });\n\n\n\n // Copy the new data into the buffer\n", "file_path": "src/render/low/buffer.rs", "rank": 70, "score": 50651.07302566565 }, { "content": "use wgpu;\n\n\n", "file_path": "src/render/low/init.rs", "rank": 71, "score": 50650.89716818035 }, { "content": " // Will insert starting at the 'at'th type T\n\n fn insert_at(\n\n &mut self,\n\n device: &wgpu::Device,\n\n encoder: &mut wgpu::CommandEncoder,\n\n data: &[T],\n\n at: usize,\n\n ) {\n\n self.insert_at_byte(device, encoder, data, at * std::mem::size_of::<T>())\n\n }\n\n\n\n // Insert at byte. 
Will replace anything, also through\n\n // object borders\n\n fn insert_at_byte(\n\n &mut self,\n\n device: &wgpu::Device,\n\n encoder: &mut wgpu::CommandEncoder,\n\n data: &[T],\n\n at_byte: usize,\n\n ) {\n", "file_path": "src/render/low/buffer.rs", "rank": 72, "score": 50650.70884587453 }, { "content": " label: None,\n\n mapped_at_creation: false,\n\n size: (new_size * std::mem::size_of::<T>()) as u64,\n\n usage: self.usage | wgpu::BufferUsage::COPY_DST | wgpu::BufferUsage::COPY_SRC,\n\n });\n\n encoder.copy_buffer_to_buffer(\n\n &self.buffer,\n\n 0,\n\n &new_buffer,\n\n 0,\n\n (self.len * std::mem::size_of::<T>()) as u64,\n\n );\n\n self.buffer = new_buffer;\n\n\n\n self.size = new_size;\n\n }\n\n\n\n pub fn remove_at(\n\n &mut self, \n\n device: &wgpu::Device,\n", "file_path": "src/render/low/buffer.rs", "rank": 73, "score": 50650.653332756796 }, { "content": " usage: wgpu::BufferUsage,\n\n ) -> Self {\n\n\n\n let buffer = device.create_buffer(&wgpu::BufferDescriptor {\n\n label: Some(\"Dynamic Buffer\"),\n\n size: (initial_size * std::mem::size_of::<T>()) as u64,\n\n usage: usage | wgpu::BufferUsage::COPY_DST | wgpu::BufferUsage::COPY_SRC,\n\n mapped_at_creation: false,\n\n });\n\n\n\n Self {\n\n buffer,\n\n usage,\n\n size: initial_size,\n\n len: 0,\n\n\n\n phantom: std::marker::PhantomData,\n\n }\n\n }\n\n\n", "file_path": "src/render/low/buffer.rs", "rank": 74, "score": 50650.01379618787 }, { "content": " }\n\n ],\n\n label: Some(\"uniform_bind_group multiuniform\"),\n\n });\n\n\n\n let offset = HashMap::new();\n\n\n\n Self {\n\n buffer,\n\n uniform_bind_group,\n\n uniform_bind_group_layout,\n\n offset,\n\n open_spots: (0..200).collect(),\n\n\n\n index,\n\n binding,\n\n\n\n phantom: std::marker::PhantomData,\n\n }\n\n }\n", "file_path": "src/render/low/uniforms.rs", "rank": 75, "score": 50649.69730410352 }, { "content": " width: dimensions.0,\n\n height: dimensions.1,\n\n depth: 1,\n\n };\n\n let texture = device.create_texture(\n\n &wgpu::TextureDescriptor {\n\n label,\n\n size,\n\n mip_level_count: 1,\n\n sample_count: 1,\n\n dimension: wgpu::TextureDimension::D2,\n\n format: wgpu::TextureFormat::Rgba8UnormSrgb,\n\n usage: wgpu::TextureUsage::SAMPLED | wgpu::TextureUsage::COPY_DST,\n\n }\n\n );\n\n\n\n queue.write_texture(\n\n wgpu::TextureCopyView {\n\n texture: &texture,\n\n mip_level: 0,\n", "file_path": "src/render/low/textures.rs", "rank": 76, "score": 50649.293446605385 }, { "content": " binding: 0,\n\n resource: wgpu::BindingResource::TextureView(&diffuse_texture.view),\n\n },\n\n wgpu::BindGroupEntry {\n\n binding: 1,\n\n resource: wgpu::BindingResource::Sampler(&diffuse_texture.sampler),\n\n }\n\n ],\n\n label: Some(\"diffuse_bind_group\"),\n\n }\n\n );\n\n\n\n self.textures.push(diffuse_bind_group);\n\n }\n\n\n\n pub fn get_bind_group(&self) -> &wgpu::BindGroup {\n\n &self.textures[0]\n\n }\n\n}\n\n\n", "file_path": "src/render/low/textures.rs", "rank": 77, "score": 50649.06076045831 }, { "content": " origin: wgpu::Origin3d::ZERO,\n\n },\n\n rgba,\n\n wgpu::TextureDataLayout {\n\n offset: 0,\n\n bytes_per_row: 4 * dimensions.0,\n\n rows_per_image: dimensions.1,\n\n },\n\n size,\n\n );\n\n\n\n let view = texture.create_view(&wgpu::TextureViewDescriptor::default());\n\n let sampler = device.create_sampler(\n\n &wgpu::SamplerDescriptor {\n\n address_mode_u: wgpu::AddressMode::ClampToEdge,\n\n address_mode_v: wgpu::AddressMode::ClampToEdge,\n\n address_mode_w: wgpu::AddressMode::ClampToEdge,\n\n mag_filter: wgpu::FilterMode::Nearest,\n\n min_filter: 
wgpu::FilterMode::Nearest,\n\n mipmap_filter: wgpu::FilterMode::Nearest,\n\n ..Default::default()\n\n }\n\n );\n\n \n\n Ok(Self { texture, view, sampler })\n\n }\n\n}", "file_path": "src/render/low/textures.rs", "rank": 78, "score": 50648.81145082306 }, { "content": " },\n\n count: None,\n\n }\n\n ],\n\n label: Some(\"uniform_bind_group_layout\"),\n\n });\n\n \n\n let uniform_bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {\n\n layout: &uniform_bind_group_layout,\n\n entries: &[\n\n wgpu::BindGroupEntry {\n\n binding: binding,\n\n resource: wgpu::BindingResource::Buffer {\n\n buffer: &uniform_buffer,\n\n offset: 0,\n\n size: None,\n\n },\n\n }\n\n ],\n\n label: Some(\"uniform_bind_group simple uniform\"),\n", "file_path": "src/render/low/uniforms.rs", "rank": 79, "score": 50647.45283149165 }, { "content": " let texture = device.create_texture(&desc);\n\n let view = texture.create_view(&wgpu::TextureViewDescriptor::default());\n\n\n\n let sampler = device.create_sampler(\n\n &wgpu::SamplerDescriptor { // 4.\n\n address_mode_u: wgpu::AddressMode::ClampToEdge,\n\n address_mode_v: wgpu::AddressMode::ClampToEdge,\n\n address_mode_w: wgpu::AddressMode::ClampToEdge,\n\n mag_filter: wgpu::FilterMode::Linear,\n\n min_filter: wgpu::FilterMode::Linear,\n\n mipmap_filter: wgpu::FilterMode::Nearest,\n\n compare: Some(wgpu::CompareFunction::LessEqual), // 5.\n\n lod_min_clamp: -100.0,\n\n lod_max_clamp: 100.0,\n\n ..Default::default()\n\n }\n\n );\n\n \n\n (texture, view, sampler)\n\n}", "file_path": "src/render/low/init.rs", "rank": 80, "score": 50647.45283149165 }, { "content": " label: Some(\"uniform_bind_group_layout\"),\n\n });\n\n\n\n let buffer = device.create_buffer(&wgpu::BufferDescriptor {\n\n label: Some(\"Uniform Buffer\"),\n\n size: wgpu::BIND_BUFFER_ALIGNMENT * UNIFORM_SIZE as u64,\n\n usage: wgpu::BufferUsage::UNIFORM | wgpu::BufferUsage::COPY_DST,\n\n mapped_at_creation: false,\n\n });\n\n\n\n let uniform_bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {\n\n layout: &uniform_bind_group_layout,\n\n entries: &[\n\n wgpu::BindGroupEntry {\n\n binding: binding,\n\n resource: wgpu::BindingResource::Buffer {\n\n buffer: &buffer,\n\n offset: 0,\n\n size: wgpu::BufferSize::new(wgpu::BIND_BUFFER_ALIGNMENT), // Meaning that T cannot be bigger than 256 bytes\n\n },\n", "file_path": "src/render/low/uniforms.rs", "rank": 81, "score": 50647.45283149165 }, { "content": " if data.len() + self.len > self.size {\n\n self.resize((data.len() + self.len) * 2, device, encoder); // Double size if needed\n\n }\n\n\n\n let to_add_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {\n\n label: Some(\"To add Buffer\"),\n\n usage: wgpu::BufferUsage::COPY_SRC | self.usage,\n\n contents: bytemuck::cast_slice(data),\n\n });\n\n\n\n encoder.copy_buffer_to_buffer(\n\n &to_add_buffer,\n\n 0,\n\n &self.buffer,\n\n at_byte as u64,\n\n (data.len() * std::mem::size_of::<T>()) as u64,\n\n );\n\n\n\n self.len += data.len();\n\n }\n", "file_path": "src/render/low/buffer.rs", "rank": 82, "score": 50647.45283149165 }, { "content": " wgpu::BindGroupLayoutEntry {\n\n binding: 0,\n\n visibility: wgpu::ShaderStage::FRAGMENT,\n\n ty: wgpu::BindingType::Texture {\n\n multisampled: false,\n\n view_dimension: wgpu::TextureViewDimension::D2,\n\n sample_type: wgpu::TextureSampleType::Float {filterable: false,},\n\n },\n\n count: None,\n\n },\n\n wgpu::BindGroupLayoutEntry {\n\n binding: 1,\n\n visibility: wgpu::ShaderStage::FRAGMENT,\n\n ty: wgpu::BindingType::Sampler {\n\n comparison: 
false,\n\n filtering: false,\n\n },\n\n count: None,\n\n },\n\n ],\n", "file_path": "src/render/low/textures.rs", "rank": 83, "score": 50647.45283149165 }, { "content": "pub mod shaders;", "file_path": "src/render/low/shaders/mod.rs", "rank": 84, "score": 48744.10326844298 }, { "content": "", "file_path": "src/render/low/shaders/shaders.rs", "rank": 85, "score": 48741.388518733846 }, { "content": "struct ShaderData {\n\n src: String,\n\n src_path: PathBuf,\n\n spv_path: PathBuf,\n\n kind: shaderc::ShaderKind,\n\n}\n\n\n\nimpl ShaderData {\n\n pub fn load(src_path: PathBuf) -> Result<Self> {\n\n let extension = src_path\n\n .extension()\n\n .context(\"File has no extension\")?\n\n .to_str()\n\n .context(\"Extension cannot be converted to &str\")?;\n\n let kind = match extension {\n\n \"vert\" => shaderc::ShaderKind::Vertex,\n\n \"frag\" => shaderc::ShaderKind::Fragment,\n\n \"comp\" => shaderc::ShaderKind::Compute,\n\n _ => bail!(\"Unsupported shader: {}\", src_path.display()),\n\n };\n", "file_path": "build.rs", "rank": 86, "score": 45726.491748949804 }, { "content": "fn main() { \n\n let mut context = Context::new(String::from(\"Ludwig World 3D\"), [1200, 800]);\n\n let state = MainState::new(&mut context.renderer);\n\n\n\n context.run(state);\n\n}", "file_path": "src/main.rs", "rank": 87, "score": 42235.096970497354 }, { "content": "fn main() -> Result<()> {\n\n // Collect all shaders recursively within /src/\n\n let mut shader_paths = [\n\n glob(\"./src/**/*.vert\")?,\n\n glob(\"./src/**/*.frag\")?,\n\n glob(\"./src/**/*.comp\")?,\n\n ];\n\n\n\n // This could be parallelized\n\n let shaders = shader_paths\n\n .iter_mut()\n\n .flatten()\n\n .map(|glob_result| ShaderData::load(glob_result?))\n\n .collect::<Vec<Result<_>>>()\n\n .into_iter()\n\n .collect::<Result<Vec<_>>>()?;\n\n\n\n let mut compiler = shaderc::Compiler::new().context(\"Unable to create shader compiler\")?;\n\n\n\n // This can't be parallelized. 
The [shaderc::Compiler] is not\n", "file_path": "build.rs", "rank": 88, "score": 41074.03064225614 }, { "content": "pub trait State {\n\n fn new(renderer: &mut Renderer) -> Self;\n\n /// Update all state\n\n fn update(&mut self, context: &mut Context, encoder: &mut wgpu::CommandEncoder);\n\n /// Draw all state such as chunks, ui, players, mobs.\n\n fn draw(&self) -> Vec<&dyn Drawable>;\n\n}\n\n\n\npub mod mainstate;", "file_path": "src/game/state/mod.rs", "rank": 89, "score": 36885.3950082886 }, { "content": " Self {\n\n blocks,\n\n pos,\n\n }\n\n }\n\n\n\n pub fn generate(&mut self, height: [u32; CHUNKSIZE * CHUNKSIZE]) {\n\n let mut rgn = rand::thread_rng();\n\n\n\n // self.blocks[coord_to_index(0, 0, 0)] = Blocks::GRASS as BlockID;\n\n // self.blocks[coord_to_index(0, 1, 0)] = Blocks::GRASS as BlockID;\n\n\n\n for x in 0..(CHUNKSIZE) as i16 {\n\n for z in 0..(CHUNKSIZE) as i16 {\n\n\n\n let grassheight = height[x as usize + z as usize * CHUNKSIZE] as i16;\n\n let dirtheight = grassheight - rgn.gen_range(1..4);\n\n let stoneheight = dirtheight;\n\n\n\n let mut y = 0;\n", "file_path": "src/world/chunk/chunk.rs", "rank": 90, "score": 35683.65439506121 }, { "content": " {\n\n return false\n\n }\n\n\n\n true\n\n }\n\n\n\n pub fn place_block(&mut self, pos: ChunkCoord, block: BlockID) {\n\n self.blocks[coord_to_index(pos.x, pos.y, pos.z)] = block;\n\n }\n\n}\n\n\n\n/// Y represents height, Z depth and X width\n", "file_path": "src/world/chunk/chunk.rs", "rank": 91, "score": 35683.4331458798 }, { "content": "use rand::Rng;\n\n\n\nuse crate::world::block::blocks::{\n\n Blocks,\n\n BlockID\n\n};\n\nuse crate::world::constants::{CHUNKSIZE, WORLDHEIGHT};\n\nuse crate::world::chunk::pos::*;\n\n\n\n#[derive(Debug)]\n\npub struct Chunk {\n\n /// blocks[x][y][z]\n\n blocks: [BlockID; CHUNKSIZE * CHUNKSIZE * WORLDHEIGHT],\n\n pub pos: ChunkPos,\n\n}\n\n\n\nimpl Chunk {\n\n pub fn new(pos: ChunkPos) -> Self {\n\n let blocks = [Blocks::AIR as BlockID; CHUNKSIZE * CHUNKSIZE * WORLDHEIGHT];\n\n\n", "file_path": "src/world/chunk/chunk.rs", "rank": 92, "score": 35682.48844274778 }, { "content": " if !Chunk::in_bounds(coord) {\n\n return 0\n\n }\n\n else {\n\n return self.blocks[coord_to_index(coord.x, coord.y, coord.z)]\n\n } \n\n }\n\n\n\n /// This will panic if x, y or z are not in bounds\n\n pub fn at_coord(&self, coord: ChunkCoord) -> BlockID {\n\n self.blocks[coord_to_index(coord.x, coord.y, coord.z)] \n\n }\n\n\n\n /// Returns true if the given coordinate is in the bounds\n\n /// of a chunk\n\n pub fn in_bounds(coord: ChunkCoord) -> bool {\n\n if \n\n coord.x < 0 || coord.x >= CHUNKSIZE as i16 ||\n\n coord.z < 0 || coord.z >= CHUNKSIZE as i16 ||\n\n coord.y < 0 || coord.y >= WORLDHEIGHT as i16\n", "file_path": "src/world/chunk/chunk.rs", "rank": 93, "score": 35680.69356338664 }, { "content": " while y < stoneheight {\n\n self.blocks[coord_to_index(x, y, z)] = Blocks::STONE as BlockID;\n\n y += 1;\n\n }\n\n\n\n y = dirtheight;\n\n while y < grassheight {\n\n self.blocks[coord_to_index(x, y, z)] = Blocks::DIRT as BlockID;\n\n y += 1;\n\n }\n\n\n\n self.blocks[coord_to_index(x, grassheight, z)] = Blocks::GRASS as BlockID;\n\n }\n\n }\n\n }\n\n\n\n /// Returns the BlockID at a given coordinate inside a chunk\n\n /// Y represents height, Z depth and X width.\n\n /// Also makes sure coordinate is in bounds\n\n pub fn at_coord_bounds(&self, coord: ChunkCoord) -> BlockID {\n", "file_path": "src/world/chunk/chunk.rs", "rank": 94, "score": 35679.5860314618 }, { "content": "pub const CHUNKSIZE: usize = 32;\n\npub 
const WORLDHEIGHT: usize = 64;", "file_path": "src/world/constants.rs", "rank": 95, "score": 30760.94303121369 }, { "content": " pub fn from_chunk_pos(chunkpos: ChunkPos, chunkcoord: ChunkCoord) -> Self {\n\n Self {\n\n x: (chunkpos.x * (CHUNKSIZE as i32 ) + chunkcoord.x as i32) as i64,\n\n y: (chunkpos.y * (1 as i32 ) + chunkcoord.y as i32) as i64,\n\n z: (chunkpos.z * (CHUNKSIZE as i32 ) + chunkcoord.z as i32) as i64,\n\n }\n\n }\n\n\n\n pub fn to_chunk_local(&self) -> ChunkCoord {\n\n ChunkCoord {\n\n x: (CHUNKSIZE as i16 + (self.x % CHUNKSIZE as i64) as i16) % CHUNKSIZE as i16,\n\n y: (WORLDHEIGHT as i16 + (self.y % WORLDHEIGHT as i64) as i16) % WORLDHEIGHT as i16,\n\n z: (CHUNKSIZE as i16+ (self.z % CHUNKSIZE as i64) as i16) % CHUNKSIZE as i16,\n\n }\n\n }\n\n\n\n pub fn to_chunk_coord(&self) -> ChunkPos {\n\n ChunkPos {\n\n x: (self.x as f64 / CHUNKSIZE as f64).floor() as i32,\n\n y: 0,\n\n z: (self.z as f64 / CHUNKSIZE as f64).floor() as i32,\n\n }\n\n }\n\n}\n", "file_path": "src/world/chunk/pos.rs", "rank": 96, "score": 28354.413615146266 }, { "content": "use std::ops::{Add, Sub};\n\n\n\nuse crate::world::constants::*;\n\nuse crate::render::low::uniforms::ChunkPositionUniform;\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\n/// Chunk coordinate in world so (0, 0) is from \n\n/// x 0 to 16 and y 0 to 16\n\npub struct ChunkPos {\n\n pub x: i32,\n\n pub y: i32,\n\n pub z: i32,\n\n}\n\n\n\nimpl ChunkPos {\n\n pub fn new(x: i32, y: i32, z: i32) -> Self {\n\n Self {\n\n x,y,z,\n\n }\n\n }\n", "file_path": "src/world/chunk/pos.rs", "rank": 97, "score": 28353.681936354464 }, { "content": "\n\n pub fn to_raw(&self) -> ChunkPositionUniform {\n\n ChunkPositionUniform{ location: [\n\n (self.x * CHUNKSIZE as i32) as f32, \n\n (self.y * 1 as i32) as f32, \n\n (self.z * CHUNKSIZE as i32) as f32, ] \n\n }\n\n }\n\n}\n\n\n\nimpl Add for ChunkPos {\n\n type Output = Self;\n\n\n\n fn add(self, other: Self) -> Self {\n\n Self {\n\n x: self.x + other.x,\n\n y: self.y + other.y,\n\n z: self.z + other.z,\n\n }\n\n }\n", "file_path": "src/world/chunk/pos.rs", "rank": 98, "score": 28350.512397794402 }, { "content": "}\n\n\n\nimpl Sub for ChunkPos {\n\n type Output = Self;\n\n\n\n fn sub(self, other: Self) -> Self {\n\n Self {\n\n x: self.x - other.x,\n\n y: self.y - other.y,\n\n z: self.z - other.z,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy)]\n\n/// Coordinate inside a chunk ranging from 0 to CHUNKSIZE and WORLDHEIGHT\n\npub struct ChunkCoord {\n\n pub x: i16,\n\n pub y: i16,\n\n pub z: i16,\n", "file_path": "src/world/chunk/pos.rs", "rank": 99, "score": 28349.231742855165 } ]
Rust
src/engine/obj.rs
PistonDevelopers/dyon_asteroids
51f944cac511e26b4d772c57e1ea245ffab280a1
use wavefront_obj::mtl::MtlSet; use wavefront_obj::obj::{self, ObjSet}; use std::sync::Arc; use current::Current; use dyon::*; use dyon::embed::{PushVariable, PopVariable}; pub type Materials = Vec<(Arc<String>, MtlSet)>; pub type ObjSets = Vec<(Arc<String>, ObjSet)>; pub fn register_obj(module: &mut Module) { module.add(Arc::new("load__material".into()), load__material, PreludeFunction { lts: vec![Lt::Default], tys: vec![Type::Text], ret: Type::Result(Box::new(Type::Text)) }); module.add(Arc::new("material".into()), material, PreludeFunction { lts: vec![Lt::Default], tys: vec![Type::Text], ret: Type::Option(Box::new(Type::F64)) }); module.add(Arc::new("materials".into()), materials, PreludeFunction { lts: vec![], tys: vec![], ret: Type::Array(Box::new(Type::Text)) }); module.add(Arc::new("load__obj".into()), load__obj, PreludeFunction { lts: vec![Lt::Default], tys: vec![Type::Text], ret: Type::Result(Box::new(Type::Text)) }); module.add(Arc::new("obj".into()), obj, PreludeFunction { lts: vec![Lt::Default], tys: vec![Type::Text], ret: Type::Option(Box::new(Type::F64)) }); module.add(Arc::new("objs".into()), objs, PreludeFunction { lts: vec![], tys: vec![], ret: Type::Array(Box::new(Type::Text)) }); module.add(Arc::new("material_library__obj".into()), material_library__obj, PreludeFunction { lts: vec![Lt::Default], tys: vec![Type::F64], ret: Type::Option(Box::new(Type::Text)) }); module.add(Arc::new("object_count__obj".into()), object_count__obj, PreludeFunction { lts: vec![Lt::Default], tys: vec![Type::F64], ret: Type::F64 }); module.add(Arc::new("objects__obj".into()), objects__obj, PreludeFunction { lts: vec![Lt::Default], tys: vec![Type::F64], ret: Type::Array(Box::new(Type::Text)) }); module.add(Arc::new("vertex_count__obj_object".into()), vertex_count__obj_object, PreludeFunction { lts: vec![Lt::Default; 2], tys: vec![Type::F64; 2], ret: Type::F64 }); module.add(Arc::new("tex_vertex_count__obj_object".into()), tex_vertex_count__obj_object, PreludeFunction { lts: vec![Lt::Default; 2], tys: vec![Type::F64; 2], ret: Type::F64 }); module.add(Arc::new("normal_count__obj_object".into()), normal_count__obj_object, PreludeFunction { lts: vec![Lt::Default; 2], tys: vec![Type::F64; 2], ret: Type::F64 }); module.add(Arc::new("geometry_count__obj_object".into()), geometry_count__obj_object, PreludeFunction { lts: vec![Lt::Default; 2], tys: vec![Type::F64; 2], ret: Type::F64 }); module.add(Arc::new("vertex__obj_object_vertex".into()), vertex__obj_object_vertex, PreludeFunction { lts: vec![Lt::Default; 3], tys: vec![Type::F64; 3], ret: Type::Vec4 }); module.add(Arc::new("tex_vertex__obj_object_tex_vertex".into()), tex_vertex__obj_object_tex_vertex, PreludeFunction { lts: vec![Lt::Default; 3], tys: vec![Type::F64; 3], ret: Type::Vec4 }); module.add(Arc::new("normal__obj_object_normal".into()), normal__obj_object_normal, PreludeFunction { lts: vec![Lt::Default; 3], tys: vec![Type::F64; 3], ret: Type::Vec4 }); module.add(Arc::new("vertices__obj_object".into()), vertices__obj_object, PreludeFunction { lts: vec![Lt::Default; 2], tys: vec![Type::F64; 2], ret: Type::Array(Box::new(Type::Vec4)) }); module.add(Arc::new("tex_vertices__obj_object".into()), tex_vertices__obj_object, PreludeFunction { lts: vec![Lt::Default; 2], tys: vec![Type::F64; 2], ret: Type::Array(Box::new(Type::Vec4)) }); module.add(Arc::new("normals__obj_object".into()), normals__obj_object, PreludeFunction { lts: vec![Lt::Default; 2], tys: vec![Type::F64; 2], ret: Type::Array(Box::new(Type::Vec4)) }); 
module.add(Arc::new("geometry__obj_object".into()), geometry__obj_object, PreludeFunction { lts: vec![Lt::Default; 2], tys: vec![Type::F64; 2], ret: Type::Array(Box::new(Type::object())) }); } dyon_fn!{fn load__material(file: Arc<String>) -> Result<Arc<String>, String> { use wavefront_obj::mtl::parse; use std::fs::File; use std::io::Read; use std::error::Error; let materials = unsafe { &mut *Current::<Materials>::new() }; let mut f = try!(File::open(&**file).map_err(|err| String::from(err.description()))); let mut s = String::new(); try!(f.read_to_string(&mut s).map_err(|err| String::from(err.description()))); let mtlset = try!(parse(s).map_err(|err| format!("Error when parsing `{}`:\n{}:{}", file, err.line_number, err.message))); materials.push((file.clone(), mtlset)); Ok(file) }} dyon_fn!{fn load__obj(file: Arc<String>) -> Result<Arc<String>, String> { use wavefront_obj::obj::parse; use std::fs::File; use std::io::Read; use std::error::Error; let obj_sets = unsafe { &mut *Current::<ObjSets>::new() }; let mut f = try!(File::open(&**file).map_err(|err| String::from(err.description()))); let mut s = String::new(); try!(f.read_to_string(&mut s).map_err(|err| String::from(err.description()))); let obj_set = try!(parse(s).map_err(|err| format!("Error when parsing `{}`:\n{}:{}", file, err.line_number, err.message))); obj_sets.push((file.clone(), obj_set)); Ok(file) }} dyon_fn!{fn material(file: Arc<String>) -> Option<usize> { let materials = unsafe { &*Current::<Materials>::new() }; for (i, mat) in materials.iter().enumerate() { if &mat.0 == &file { return Some(i); } } None }} dyon_fn!{fn obj(file: Arc<String>) -> Option<usize> { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; for (i, obj_set) in obj_sets.iter().enumerate() { if &obj_set.0 == &file { return Some(i); } } None }} dyon_fn!{fn materials() -> Vec<Arc<String>> { let materials = unsafe { &*Current::<Materials>::new() }; materials.iter().map(|n| n.0.clone()).collect() }} dyon_fn!{fn objs() -> Vec<Arc<String>> { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets.iter().map(|n| n.0.clone()).collect() }} dyon_fn!{fn material_library__obj(ind: usize) -> Option<Arc<String>> { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[ind].1.material_library.as_ref().map(|n| Arc::new(n.clone())) }} dyon_fn!{fn object_count__obj(ind: usize) -> usize { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[ind].1.objects.len() }} dyon_fn!{fn objects__obj(ind: usize) -> Vec<Arc<String>> { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[ind].1.objects.iter().map(|n| Arc::new(n.name.clone())).collect() }} dyon_fn!{fn vertex_count__obj_object(obj: usize, object: usize) -> usize { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[obj].1.objects[object].vertices.len() }} dyon_fn!{fn tex_vertex_count__obj_object(obj: usize, object: usize) -> usize { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[obj].1.objects[object].tex_vertices.len() }} dyon_fn!{fn normal_count__obj_object(obj: usize, object: usize) -> usize { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[obj].1.objects[object].normals.len() }} dyon_fn!{fn geometry_count__obj_object(obj: usize, object: usize) -> usize { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[obj].1.objects[object].geometry.len() }} dyon_fn!{fn vertex__obj_object_vertex (obj: usize, object: usize, vertex: usize) -> Vec4 { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; let vertex = 
obj_sets[obj].1.objects[object].vertices[vertex]; [vertex.x, vertex.y, vertex.z].into() }} dyon_fn!{fn tex_vertex__obj_object_tex_vertex (obj: usize, object: usize, tex_vertex: usize) -> Vec4 { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; let tex_vertex = obj_sets[obj].1.objects[object].tex_vertices[tex_vertex]; [tex_vertex.x, tex_vertex.y].into() }} dyon_fn!{fn normal__obj_object_normal (obj: usize, object: usize, normal: usize) -> Vec4 { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; let normal = obj_sets[obj].1.objects[object].normals[normal]; [normal.x, normal.y, normal.z].into() }} dyon_fn!{fn vertices__obj_object(obj: usize, object: usize) -> Vec<Vec4> { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[obj].1.objects[object].vertices.iter() .map(|vertex| [vertex.x, vertex.y, vertex.z].into()).collect() }} dyon_fn!{fn tex_vertices__obj_object(obj: usize, object: usize) -> Vec<Vec4> { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[obj].1.objects[object].tex_vertices.iter() .map(|tex_vertex| [tex_vertex.x, tex_vertex.y].into()).collect() }} dyon_fn!{fn normals__obj_object(obj: usize, object: usize) -> Vec<Vec4> { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[obj].1.objects[object].normals.iter() .map(|normal| [normal.x, normal.y, normal.z].into()).collect() }} pub struct Geometry { pub material_name: Option<Arc<String>>, pub smooth_shading_group: usize, pub shapes: Vec<Shape>, } impl<'a> From<&'a obj::Geometry> for Geometry { fn from(val: &'a obj::Geometry) -> Geometry { Geometry { material_name: val.material_name.as_ref().map(|n| Arc::new(n.clone())), smooth_shading_group: val.smooth_shading_group, shapes: val.shapes.iter().map(|n| Shape(n.clone())).collect() } } } dyon_obj!{Geometry { material_name, smooth_shading_group, shapes }} dyon_fn!{fn geometry__obj_object(obj: usize, object: usize) -> Vec<Geometry> { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[obj].1.objects[object].geometry.iter() .map(|geometry| geometry.into()).collect() }} pub struct Shape(pub obj::Shape); impl PopVariable for Shape { fn pop_var(rt: &Runtime, var: &Variable) -> Result<Self, String> { if let &Variable::Array(ref arr) = var { Ok(match arr.len() { 1 => { Shape(obj::Shape::Point(try!(rt.var(&arr[0])))) } 2 => { Shape(obj::Shape::Line(try!(rt.var(&arr[0])), try!(rt.var(&arr[1])))) } 3 => { Shape(obj::Shape::Triangle(try!(rt.var(&arr[0])), try!(rt.var(&arr[1])), try!(rt.var(&arr[2])))) } _ => return Err(rt.expected(var, "array of length 1, 2, 3")) }) } else { Err(rt.expected(var, "array")) } } } impl PushVariable for Shape { fn push_var(&self) -> Variable { match self.0 { obj::Shape::Point(ref p) => Variable::Array(Arc::new(vec![p.push_var()])), obj::Shape::Line(ref a, ref b) => Variable::Array(Arc::new(vec![ a.push_var(), b.push_var() ])), obj::Shape::Triangle(ref a, ref b, ref c) => Variable::Array(Arc::new(vec![ a.push_var(), b.push_var(), c.push_var() ])) } } }
use wavefront_obj::mtl::MtlSet; use wavefront_obj::obj::{self, ObjSet}; use std::sync::Arc; use current::Current; use dyon::*; use dyon::embed::{PushVariable, PopVariable}; pub type Materials = Vec<(Arc<String>, MtlSet)>; pub type ObjSets = Vec<(Arc<String>, ObjSet)>; pub fn register_obj(module: &mut Module) { module.add(Arc::new("load__material".into()), load__material, PreludeFunction { lts: vec![Lt::Default], tys: vec![Type::Text], ret: Type::Result(Box::new(Type::Text)) }); module.add(Arc::new("material".into()), material, PreludeFunction { lts: vec![Lt::Default], tys: vec![Type::Text], ret: Type::Option(Box::new(Type::F64)) }); module.add(Arc::new("materials".into()), materials, PreludeFunction { lts: vec![], tys: vec![], ret: Type::Array(Box::new(Type::Text)) }); module.add(Arc::new("load__obj".into()), load__obj, PreludeFunction { lts: vec![Lt::Default], tys: vec![Type::Text], ret: Type::Result(Box::new(Type::Text)) }); module.add(Arc::new("obj".into()), obj, PreludeFunction { lts: vec![Lt::Default], tys: vec![Type::Text], ret: Type::Option(Box::new(Type::F64)) }); module.add(Arc::new("objs".into()), objs, PreludeFunction { lts: vec![], tys: vec![], ret: Type::Array(Box::new(Type::Text)) }); module.add(Arc::new("material_library__obj".into()), material_library__obj, PreludeFunction { lts: vec![Lt::Default], tys: vec![Type::F64], ret: Type::Option(Box::new(Type::Text)) }); module.add(Arc::new("object_count__obj".into()), object_count__obj, PreludeFunction { lts: vec![Lt::Default], tys: vec![Type::F64], ret: Type::F64 }); module.add(Arc::new("objects__obj".into()), objects__obj, PreludeFunction { lts: vec![Lt::Default], tys: vec![Type::F64], ret: Type::Array(Box::new(Type::Text)) }); module.add(Arc::new("vertex_count__obj_object".into()), vertex_count__obj_object, PreludeFunction { lts: vec![Lt::Default; 2], tys: vec![Type::F64; 2], ret: Type::F64 }); module.add(Arc::new("tex_vertex_count__obj_object".into()), tex_vertex_count__obj_object, PreludeFunction { lts: vec![Lt::Default; 2], tys: vec![Type::F64; 2], ret: Type::F64 }); module.add(Arc::new("normal_count__obj_object".into()), normal_count__obj_object, PreludeFunction { lts: vec![Lt::Default; 2], tys: vec![Type::F64; 2], ret: Type::F64 }); module.add(Arc::new("geometry_count__obj_object".into()), geometry_count__obj_object, PreludeFunction { lts: vec![Lt::Default; 2], tys: vec![Type::F64; 2], ret: Type::F64 }); module.add(Arc::new("vertex__obj_object_vertex".into()), vertex__obj_object_vertex, PreludeFunction { lts: vec![Lt::Default; 3], tys: vec![Type::F64; 3], ret: Type::Vec4 }); module.add(Arc::new("tex_vertex__obj_object_tex_vertex".into()), tex_vertex__obj_object_tex_vertex, PreludeFunction { lts: vec![Lt::Default; 3], tys: vec![Type::F64; 3], ret: Type::Vec4 }); module.add(Arc::new("normal__obj_object_normal".into()), normal__obj_object_normal, PreludeFunction { lts: vec![Lt::Default; 3], tys: vec![Type::F64; 3], ret: Type::Vec4 }); module.add(Arc::new("vertices__obj_object".into()), vertices__obj_object, PreludeFunction { lts: vec![Lt::Default; 2], tys: vec![Type::F64; 2], ret: Type::Array(Box::new(Type::Vec4)) }); module.add(Arc::new("tex_vertices__obj_object".into()), tex_vertices__obj_object, PreludeFunction { lts: vec![Lt::Default; 2], tys: vec![Type::F64; 2], ret: Type::Array(Box::new(Type::Vec4)) }); module.add(Arc::new("normals__obj_object".into()), normals__obj_object, PreludeFunction { lts: vec![Lt::Default; 2], tys: vec![Type::F64; 2], ret: Type::Array(Box::new(Type::Vec4)) }); 
module.add(Arc::new("geometry__obj_object".into()), geometry__obj_object, PreludeFunction { lts: vec![Lt::Default; 2], tys: vec![Type::F64; 2], ret: Type::Array(Box::new(Type::object())) }); } dyon_fn!{fn load__material(file: Arc<String>) -> Result<Arc<String>, String> { use wavefront_obj::mtl::parse; use std::fs::File; use std::io::Read; use std::error::Error; let materials = unsafe { &mut *Current::<Materials>::new() }; let mut f = try!(File::open(&**file).map_err(|err| String::from(err.description()))); let mut s = String::new(); try!(f.read_to_string(&mut s).map_err(|err| String::from(err.description()))); let mtlset = try!(parse(s).map_err(|err| format!("Error when parsing `{}`:\n{}:{}", file, err.line_number, err.message))); materials.push((file.clone(), mtlset)); Ok(file) }} dyon_fn!{fn load__obj(file: Arc<String>) -> Result<Arc<String>, String> { use wavefront_obj::obj::parse; use std::fs::File; use std::io::Read; use std::error::Error; let obj_sets = unsafe { &mut *Current::<ObjSets>::new() }; let mut f = try!(File::open(&**file).map_err(|err| String::from(err.description()))); let mut s = String::new(); try!(f.read_to_string(&mut s).map_err(|err| String::from(err.description()))); let obj_set = try!(parse(s).map_err(|err| format!("Error when parsing `{}`:\n{}:{}", file, err.line_number, err.message))); obj_sets.push((file.clone(), obj_set)); Ok(file) }} dyon_fn!{fn material(file: Arc<String>) -> Option<usize> { let materials = unsafe { &*Current::<Materials>::new() }; for (i, mat) in materials.iter().enumerate() { if &mat.0 == &file { return Some(i); } } None }} dyon_fn!{fn obj(file: Arc<String>) -> Option<usize> { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; for (i, obj_set) in obj_sets.iter().enumerate() { if &obj_set.0 == &file { return Some(i); } } None }} dyon_fn!{fn materials() -> Vec<Arc<String>> { let materials = unsafe { &*Current::<Materials>::new() }; materials.iter().map(|n| n.0.clone()).collect() }} dyon_fn!{fn objs() -> Vec<Arc<String>> { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets.iter().map(|n| n.0.clone()).collect() }} dyon_fn!{fn material_library__obj(ind: usize) -> Option<Arc<String>> { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[ind].1.material_library.as_ref().map(|n| Arc::new(n.clone())) }} dyon_fn!{fn object_count__obj(ind: usize) -> usize { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[ind].1.objects.len() }} dyon_fn!{fn objects__obj(ind: usize) -> Vec<Arc<String>> { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[ind].1.objects.iter().map(|n| Arc::new(n.name.clone())).collect() }} dyon_fn!{fn vertex_count__obj_object(obj: usize, object: usize) -> usize { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[obj].1.objects[object].vertices.len() }} dyon_fn!{fn tex_vertex_count__obj_object(obj: usize, object: usize) -> usize { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[obj].1.objects[object].tex_vertices.len() }} dyon_fn!{fn normal_count__obj_object(obj: usize, object: usize) -> usize { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[obj].1.objects[object].normals.len() }} dyon_fn!{fn geometry_count__obj_object(obj: usize, object: usize) -> usize { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[obj].1.objects[object].geometry.len() }} dyon_fn!{fn vertex__obj_object_vertex (obj: usize, object: usize, vertex: usize) -> Vec4 { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; let vertex = 
obj_sets[obj].1.objects[object].vertices[vertex]; [vertex.x, vertex.y, vertex.z].into() }} dyon_fn!{fn tex_vertex__obj_object_tex_vertex (obj: usize, object: usize, tex_vertex: usize) -> Vec4 { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; let tex_vertex = obj_sets[obj].1.objects[object].tex_vertices[tex_vertex]; [tex_vertex.x, tex_vertex.y].into() }} dyon_fn!{fn normal__obj_object_normal (obj: usize, object: usize, normal: usize) -> Vec4 { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; let normal = obj_sets[obj].1.objects[object].normals[normal]; [normal.x, normal.y, normal.z].into() }} dyon_fn!{fn vertices__obj_object(obj: usize, object: usize) -> Vec<Vec4> { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[obj].1.objects[object].vertices.iter() .map(|vertex| [vertex.x, vertex.y, vertex.z].into()).collect() }} dyon_fn!{fn tex_vertices__obj_object(obj: usize, object: usize) -> Vec<Vec4> { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[obj].1.objects[object].tex_vertices.iter() .map(|tex_vertex| [tex_vertex.x, tex_vertex.y].into()).collect() }} dyon_fn!{fn normals__obj_object(obj: usize, object: usize) -> Vec<Vec4> { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[obj].1.objects[object].normals.iter() .map(|normal| [normal.x, normal.y, normal.z].into()).collect() }} pub struct Geometry { pub material_name: Option<Arc<String>>, pub smooth_shading_group: usize, pub shapes: Vec<Shape>, } impl<'a> From<&'a obj::Geometry> for Geometry { fn from(val: &'a obj::Geometry) -> Geometry { Geometry { material_name: val.material_name.as_ref().map(|n| Arc::new(n.clone())), smooth_shading_group: val.smooth_shading_group, shapes: val.shapes.iter().map(|n| Shape(n.clone())).collect() } } } dyon_obj!{Geometry { material_name, smooth_shading_group, shapes }} dyon_fn!{fn geometry__obj_object(obj: usize, object: usize) -> Vec<Geometry> { let obj_sets = unsafe { &*Current::<ObjSets>::new() }; obj_sets[obj].1.objects[object].geometry.iter() .map(|geometry| geometry.into()).collect() }} pub struct Shape(pub obj::Shape); impl PopVariable for Shape { fn pop_var(rt: &Runtime, var: &Variable) -> Result<Self, String> { if let &Variable::Array(ref arr) = var {
try!(rt.var(&arr[2])))) } _ => return Err(rt.expected(var, "array of length 1, 2, 3")) }) } else { Err(rt.expected(var, "array")) } } } impl PushVariable for Shape { fn push_var(&self) -> Variable { match self.0 { obj::Shape::Point(ref p) => Variable::Array(Arc::new(vec![p.push_var()])), obj::Shape::Line(ref a, ref b) => Variable::Array(Arc::new(vec![ a.push_var(), b.push_var() ])), obj::Shape::Triangle(ref a, ref b, ref c) => Variable::Array(Arc::new(vec![ a.push_var(), b.push_var(), c.push_var() ])) } } }
Ok(match arr.len() { 1 => { Shape(obj::Shape::Point(try!(rt.var(&arr[0])))) } 2 => { Shape(obj::Shape::Line(try!(rt.var(&arr[0])), try!(rt.var(&arr[1])))) } 3 => { Shape(obj::Shape::Triangle(try!(rt.var(&arr[0])), try!(rt.var(&arr[1])),
function_block-random_span
[ { "content": "pub fn register_shader(module: &mut Module) {\n\n module.add(Arc::new(\"load_program__name_vshader_fshader\".into()),\n\n load_program__name_vshader_fshader, PreludeFunction {\n\n lts: vec![Lt::Default; 3],\n\n tys: vec![Type::Text; 3],\n\n ret: Type::Result(Box::new(Type::Text))\n\n });\n\n module.add(Arc::new(\"program\".into()),\n\n program, PreludeFunction {\n\n lts: vec![Lt::Default],\n\n tys: vec![Type::Text],\n\n ret: Type::Option(Box::new(Type::F64))\n\n });\n\n module.add(Arc::new(\"count_vertex_buffers\".into()),\n\n count_vertex_buffers, PreludeFunction {\n\n lts: vec![],\n\n tys: vec![],\n\n ret: Type::F64\n\n });\n\n module.add(Arc::new(\"count_index_buffers\".into()),\n", "file_path": "src/engine/shader.rs", "rank": 1, "score": 88758.20219242145 }, { "content": "pub fn register_sound(module: &mut Module) {\n\n module.add(Arc::new(\"load__music\".into()), load__music, PreludeFunction {\n\n lts: vec![Lt::Default],\n\n tys: vec![Type::Text],\n\n ret: Type::Result(Box::new(Type::Text))\n\n });\n\n module.add(Arc::new(\"load__sound\".into()), load__sound, PreludeFunction {\n\n lts: vec![Lt::Default],\n\n tys: vec![Type::Text],\n\n ret: Type::Result(Box::new(Type::Text))\n\n });\n\n module.add(Arc::new(\"music\".into()), music, PreludeFunction {\n\n lts: vec![Lt::Default],\n\n tys: vec![Type::Text],\n\n ret: Type::Option(Box::new(Type::F64))\n\n });\n\n module.add(Arc::new(\"sound\".into()), sound, PreludeFunction {\n\n lts: vec![Lt::Default],\n\n tys: vec![Type::Text],\n\n ret: Type::Option(Box::new(Type::F64))\n", "file_path": "src/engine/sound.rs", "rank": 2, "score": 88758.20219242145 }, { "content": "fn load_module() -> Option<Module> {\n\n use std::sync::Arc;\n\n use dyon_functions::*;\n\n use dyon_interactive::add_functions;\n\n use dyon::{Lt, Module, PreludeFunction, Type};\n\n\n\n let mut module = Module::new();\n\n add_functions::<Window>(&mut module);\n\n module.add(Arc::new(\"draw\".into()), draw, PreludeFunction {\n\n lts: vec![Lt::Default],\n\n tys: vec![Type::array()],\n\n ret: Type::Void\n\n });\n\n module.add(Arc::new(\"next_event\".into()),\n\n next_event, PreludeFunction {\n\n lts: vec![],\n\n tys: vec![],\n\n ret: Type::Bool\n\n });\n\n engine::register_obj(&mut module);\n", "file_path": "src/main.rs", "rank": 3, "score": 47419.69249237707 }, { "content": "pub fn scale(s: f32) -> [[f32; 4]; 4] {\n\n [\n\n [s, 0.0, 0.0, 0.0],\n\n [0.0, s, 0.0, 0.0],\n\n [0.0, 0.0, s, 0.0],\n\n [0.0, 0.0, 0.0, 1.0]\n\n ]\n\n}\n", "file_path": "src/engine/math.rs", "rank": 4, "score": 43382.95428528531 }, { "content": "pub fn rotate_angle_x(angle: f32) -> [[f32; 4]; 4] {\n\n [\n\n [1.0, 0.0, 0.0, 0.0],\n\n [0.0, angle.cos(), angle.sin(), 0.0],\n\n [0.0, -angle.sin(), angle.cos(), 0.0],\n\n [0.0, 0.0, 0.0, 1.0]\n\n ]\n\n}\n\n\n", "file_path": "src/engine/math.rs", "rank": 5, "score": 40446.40556074623 }, { "content": "pub fn rotate_angle(angle: f32) -> [[f32; 4]; 4] {\n\n [\n\n [angle.cos(), angle.sin(), 0.0, 0.0],\n\n [-angle.sin(), angle.cos(), 0.0, 0.0],\n\n [0.0, 0.0, 1.0, 0.0],\n\n [0.0, 0.0, 0.0, 1.0]\n\n ]\n\n}\n\n\n", "file_path": "src/engine/math.rs", "rank": 6, "score": 40446.40556074623 }, { "content": "type Window = GliumWindow<Sdl2Window>;\n\n\n", "file_path": "src/main.rs", "rank": 7, "score": 28692.15730017585 }, { "content": "fn main() {\n\n let opengl = OpenGL::V3_2;\n\n let ref mut window: Window = WindowSettings::new(\"Dyon: Asteroids!\", [512, 512])\n\n .opengl(opengl).samples(4).exit_on_esc(true).build().unwrap();\n\n\n\n let mut runtime = 
Runtime::new();\n\n let module = match load_module() {\n\n None => return,\n\n Some(m) => m\n\n };\n\n\n\n init_audio();\n\n\n\n let (audio, timer) = {\n\n let ref sdl = window.window.borrow().sdl_context;\n\n (sdl.audio().unwrap(), sdl.timer().unwrap())\n\n };\n\n\n\n let mut g2d = Glium2d::new(opengl, window);\n\n let mut e: Option<Event> = None;\n", "file_path": "src/main.rs", "rank": 8, "score": 27107.17836732584 }, { "content": "fn init_audio() {\n\n // Load dynamic libraries.\n\n // Ignore formats that are not built in.\n\n let _ = mix::init(\n\n mix::INIT_MP3\n\n | mix::INIT_FLAC\n\n | mix::INIT_MOD\n\n | mix::INIT_FLUIDSYNTH\n\n | mix::INIT_MODPLUG\n\n | mix::INIT_OGG\n\n );\n\n mix::open_audio(\n\n // Use cd quality to avoid noise artifacts.\n\n mix::DEFAULT_FREQUENCY * 2,\n\n mix::DEFAULT_FORMAT,\n\n mix::DEFAULT_CHANNELS,\n\n 1024\n\n ).unwrap();\n\n // Allow up to 8 sounds playing at the same time.\n\n mix::allocate_channels(8);\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 9, "score": 25957.239924529422 }, { "content": "# dyon_asteroids\n\nA demo game project for Dyon/Piston/Glium to test Dyon and research design for dyon_interactive\n\n\n\nCurrent screenshot (work in progress):\n\n\n\n![screenshot](./images/screenshot.png)\n\n\n\n### How to install and run\n\n\n\nThe demo game is in early development stage and Dyon has not yet added a feature for shipping binaries.\n\nThis means you need to compile the engine and run it from source:\n\n\n\n1. Install [Rust](https://www.rust-lang.org/)\n\n1. Fork the repo\n\n2. Open up the Terminal window and go the project directory\n\n3. Type `cargo run --release`\n", "file_path": "README.md", "rank": 26, "score": 13090.321184533404 }, { "content": "\n\ndyon_fn!{fn count_index_buffers() -> usize {\n\n let index_buffers = unsafe { &*Current::<IndexBuffers>::new() };\n\n index_buffers.len()\n\n}}\n\n\n\ndyon_fn!{fn create_vertex_buffer__size(size: usize) -> Result<usize, String> {\n\n use std::error::Error;\n\n\n\n let vertex_buffers = unsafe { &mut *Current::<VertexBuffers>::new() };\n\n let window = unsafe { &*Current::<Window>::new() };\n\n let n = vertex_buffers.len();\n\n vertex_buffers.push(try!(glium::VertexBuffer::empty(&window.context, size).map_err(|err|\n\n String::from(err.description())\n\n )));\n\n Ok(n)\n\n}}\n\n\n\ndyon_fn!{fn create_index_buffer__size(size: usize) -> Result<usize, String> {\n\n use std::error::Error;\n", "file_path": "src/engine/shader.rs", "rank": 27, "score": 12.628661800650859 }, { "content": " Type::Vec4, Type::F64, Type::F64,\n\n Type::Vec4, Type::F64],\n\n ret: Type::Void\n\n });\n\n}\n\n\n\ndyon_fn!{fn load_program__name_vshader_fshader(\n\n name: Arc<String>,\n\n vshader: Arc<String>,\n\n fshader: Arc<String>\n\n) -> Result<Arc<String>, String> {\n\n use std::error::Error;\n\n\n\n let programs = unsafe { &mut *Current::<Programs>::new() };\n\n let window = unsafe { &*Current::<Window>::new() };\n\n\n\n let program = try!(glium::Program::from_source(\n\n &window.context, &vshader, &fshader, None).map_err(|err|\n\n match err {\n\n glium::program::ProgramCreationError::CompilationError(err) => err,\n", "file_path": "src/engine/shader.rs", "rank": 28, "score": 12.463775183239452 }, { "content": " engine::register_shader(&mut module);\n\n engine::register_sound(&mut module);\n\n if error(load(\"src/main.dyon\", &mut module)) {\n\n None\n\n } else {\n\n Some(module)\n\n }\n\n}\n\n\n\nmod dyon_functions {\n\n use dyon::Runtime;\n\n use dyon_interactive::{draw_2d, NO_EVENT};\n\n use current::Current;\n\n 
use super::Window;\n\n\n\n pub fn draw(rt: &mut Runtime) -> Result<(), String> {\n\n use piston::input::*;\n\n use glium_graphics::Glium2d;\n\n use glium::Frame;\n\n\n", "file_path": "src/main.rs", "rank": 29, "score": 12.038731414028717 }, { "content": " _ => String::from(err.description())\n\n }\n\n ));\n\n programs.push((name.clone(), program));\n\n\n\n Ok(name)\n\n}}\n\n\n\ndyon_fn!{fn program(name: Arc<String>) -> Option<usize> {\n\n let programs = unsafe { &*Current::<Programs>::new() };\n\n for (i, n) in programs.iter().enumerate() {\n\n if &n.0 == &name { return Some(i) }\n\n }\n\n None\n\n}}\n\n\n\ndyon_fn!{fn count_vertex_buffers() -> usize {\n\n let vertex_buffers = unsafe { &*Current::<VertexBuffers>::new() };\n\n vertex_buffers.len()\n\n}}\n", "file_path": "src/engine/shader.rs", "rank": 30, "score": 11.838621640069194 }, { "content": "\n\n let index_buffers = unsafe { &mut *Current::<IndexBuffers>::new() };\n\n let window = unsafe { &*Current::<Window>::new() };\n\n let n = index_buffers.len();\n\n index_buffers.push(try!(glium::IndexBuffer::empty(\n\n &window.context, glium::index::PrimitiveType::TrianglesList, size).map_err(|err| {\n\n String::from(err.description())\n\n })));\n\n Ok(n)\n\n}}\n\n\n\ndyon_fn!{fn fill_vertex_buffer__buffer_pos_norm\n\n (buffer: usize, pos: Vec<Vec4>, norm: Vec<Vec4>) {\n\n let vertex_buffers = unsafe { &mut *Current::<VertexBuffers>::new() };\n\n\n\n let n = pos.len();\n\n let slice = vertex_buffers[buffer].slice(0..n).unwrap();\n\n slice.write({\n\n &(0..n).map(|i| Vertex { pos: pos[i].into(), norm: norm[i].into() }).collect::<Vec<_>>()\n\n });\n", "file_path": "src/engine/shader.rs", "rank": 31, "score": 11.677567122514594 }, { "content": "use std::sync::Arc;\n\nuse glium;\n\nuse current::Current;\n\nuse dyon::*;\n\n\n\nuse Window;\n\n\n\npub type Programs = Vec<(Arc<String>, glium::Program)>;\n\npub type VertexBuffers = Vec<glium::VertexBuffer<Vertex>>;\n\npub type IndexBuffers = Vec<glium::IndexBuffer<u32>>;\n\n\n\n#[derive(Copy, Clone)]\n\npub struct Vertex {\n\n pos: [f32; 3],\n\n norm: [f32; 3],\n\n}\n\n\n\nimplement_vertex!{Vertex, pos, norm}\n\n\n", "file_path": "src/engine/shader.rs", "rank": 32, "score": 11.089170728926936 }, { "content": "}}\n\n\n\ndyon_fn!{fn fill_index_buffer__buffer_data(buffer: usize, data: Vec<u32>) {\n\n let index_buffers = unsafe { &*Current::<IndexBuffers>::new() };\n\n\n\n index_buffers[buffer].write(&data);\n\n}}\n\n\n\ndyon_fn!{fn clear_depth() {\n\n use glium::{Frame, Surface};\n\n\n\n let target = unsafe { &mut *Current::<Frame>::new() };\n\n target.clear_depth(1.0);\n\n}}\n\n\n\ndyon_fn!{fn draw__program_vbuf_ibuf_pos_angle_scale_color_anglex(\n\n program: usize,\n\n vbuf: usize,\n\n ibuf: usize,\n\n pos: Vec4,\n", "file_path": "src/engine/shader.rs", "rank": 33, "score": 10.913396892317676 }, { "content": " });\n\n}\n\n\n\ndyon_fn!{fn load__music(file: Arc<String>) -> Result<Arc<String>, String> {\n\n use std::path::Path;\n\n\n\n let music_tracks = unsafe { &mut *Current::<MusicTracks>::new() };\n\n\n\n let track = {\n\n let path = Path::new(&**file);\n\n try!(mix::Music::from_file(&path))\n\n };\n\n music_tracks.push((file.clone(), track));\n\n\n\n Ok(file)\n\n}}\n\n\n\ndyon_fn!{fn load__sound(file: Arc<String>) -> Result<Arc<String>, String> {\n\n use std::path::Path;\n\n\n", "file_path": "src/engine/sound.rs", "rank": 34, "score": 10.016615514166944 }, { "content": "\n\nuse std::sync::Arc;\n\n\n\nuse sdl2_mixer as mix;\n\nuse current::Current;\n\nuse dyon::*;\n\n\n\npub type MusicTracks = 
Vec<(Arc<String>, mix::Music)>;\n\npub type SoundTracks = Vec<(Arc<String>, mix::Chunk)>;\n\n\n", "file_path": "src/engine/sound.rs", "rank": 35, "score": 9.810941394677027 }, { "content": " count_index_buffers, PreludeFunction {\n\n lts: vec![],\n\n tys: vec![],\n\n ret: Type::F64\n\n });\n\n module.add(Arc::new(\"create_vertex_buffer__size\".into()),\n\n create_vertex_buffer__size, PreludeFunction {\n\n lts: vec![Lt::Default],\n\n tys: vec![Type::F64],\n\n ret: Type::Result(Box::new(Type::F64))\n\n });\n\n module.add(Arc::new(\"create_index_buffer__size\".into()),\n\n create_index_buffer__size, PreludeFunction {\n\n lts: vec![Lt::Default],\n\n tys: vec![Type::F64],\n\n ret: Type::Result(Box::new(Type::F64))\n\n });\n\n module.add(Arc::new(\"fill_vertex_buffer__buffer_pos_norm\".into()),\n\n fill_vertex_buffer__buffer_pos_norm, PreludeFunction {\n\n lts: vec![Lt::Default; 3],\n", "file_path": "src/engine/shader.rs", "rank": 36, "score": 9.554828046750366 }, { "content": " let sound_tracks = unsafe { &mut *Current::<SoundTracks>::new() };\n\n\n\n let track = {\n\n let path = Path::new(&**file);\n\n try!(mix::Chunk::from_file(&path))\n\n };\n\n sound_tracks.push((file.clone(), track));\n\n\n\n Ok(file)\n\n}}\n\n\n\ndyon_fn!{fn music(name: Arc<String>) -> Option<usize> {\n\n let music_tracks = unsafe { &*Current::<MusicTracks>::new() };\n\n\n\n for (i, &(ref track, _)) in music_tracks.iter().enumerate() {\n\n if track == &name { return Some(i); }\n\n }\n\n None\n\n}}\n\n\n", "file_path": "src/engine/sound.rs", "rank": 37, "score": 9.280450332057558 }, { "content": " let e = unsafe { &*Current::<Option<Event>>::new() };\n\n let g2d = unsafe { &mut *Current::<Glium2d>::new() };\n\n let target = unsafe { &mut *Current::<Frame>::new() };\n\n if let &Some(ref e) = e {\n\n if let Some(args) = e.render_args() {\n\n g2d.draw(target, args.viewport(), |c, g| {\n\n draw_2d(rt, c, g)\n\n })\n\n } else {\n\n Ok(())\n\n }\n\n } else {\n\n Err(NO_EVENT.into())\n\n }\n\n }\n\n\n\n pub fn next_event(rt: &mut Runtime) -> Result<(), String> {\n\n use piston::input::*;\n\n use glium::Frame;\n\n\n", "file_path": "src/main.rs", "rank": 38, "score": 9.272144872007134 }, { "content": " let mut target = window.draw();\n\n let mut materials: Materials = vec![];\n\n let mut obj_sets: ObjSets = vec![];\n\n let mut programs: Programs = vec![];\n\n let mut vertex_buffers: VertexBuffers = vec![];\n\n let mut index_buffers: IndexBuffers = vec![];\n\n let mut music_tracks: MusicTracks = vec![];\n\n let mut sound_tracks: SoundTracks = vec![];\n\n\n\n {\n\n let window_guard = CurrentGuard::new(window);\n\n let event_guard: CurrentGuard<Option<Event>> = CurrentGuard::new(&mut e);\n\n let g2d_guard = CurrentGuard::new(&mut g2d);\n\n let target_guard = CurrentGuard::new(&mut target);\n\n let materials_guard = CurrentGuard::new(&mut materials);\n\n let obj_sets_guard = CurrentGuard::new(&mut obj_sets);\n\n let programs_guard = CurrentGuard::new(&mut programs);\n\n let vertex_buffers_guard = CurrentGuard::new(&mut vertex_buffers);\n\n let index_buffers_guard = CurrentGuard::new(&mut index_buffers);\n\n let music_tracks_guard = CurrentGuard::new(&mut music_tracks);\n", "file_path": "src/main.rs", "rank": 39, "score": 9.194644419593267 }, { "content": "dyon_fn!{fn sound(name: Arc<String>) -> Option<usize> {\n\n let sound_tracks = unsafe { &*Current::<SoundTracks>::new() };\n\n\n\n for (i, &(ref track, _)) in sound_tracks.iter().enumerate() {\n\n if track == &name { return Some(i); }\n\n }\n\n None\n\n}}\n\n\n\ndyon_fn!{fn 
play_forever__music(ind: usize) {\n\n let music_tracks = unsafe { &*Current::<MusicTracks>::new() };\n\n\n\n let _ = music_tracks[ind].1.play(-1);\n\n}}\n\n\n\ndyon_fn!{fn play_once__music(ind: usize) {\n\n let music_tracks = unsafe { &*Current::<MusicTracks>::new() };\n\n\n\n let _ = music_tracks[ind].1.play(0);\n\n}}\n", "file_path": "src/engine/sound.rs", "rank": 40, "score": 9.139067999420217 }, { "content": " tys: vec![Type::F64, Type::Array(Box::new(Type::Vec4)),\n\n Type::Array(Box::new(Type::Vec4))],\n\n ret: Type::Void\n\n });\n\n module.add(Arc::new(\"fill_index_buffer__buffer_data\".into()),\n\n fill_index_buffer__buffer_data, PreludeFunction {\n\n lts: vec![Lt::Default; 2],\n\n tys: vec![Type::F64, Type::Array(Box::new(Type::F64))],\n\n ret: Type::Void\n\n });\n\n module.add(Arc::new(\"clear_depth\".into()),\n\n clear_depth, PreludeFunction {\n\n lts: vec![],\n\n tys: vec![],\n\n ret: Type::Void\n\n });\n\n module.add(Arc::new(\"draw__program_vbuf_ibuf_pos_angle_scale_color_anglex\".into()),\n\n draw__program_vbuf_ibuf_pos_angle_scale_color_anglex, PreludeFunction {\n\n lts: vec![Lt::Default; 8],\n\n tys: vec![Type::F64, Type::F64, Type::F64,\n", "file_path": "src/engine/shader.rs", "rank": 41, "score": 8.707885410444879 }, { "content": " });\n\n module.add(Arc::new(\"play_forever__music\".into()), play_forever__music, PreludeFunction {\n\n lts: vec![Lt::Default],\n\n tys: vec![Type::F64],\n\n ret: Type::Void\n\n });\n\n module.add(Arc::new(\"play_once__music\".into()), play_once__music, PreludeFunction {\n\n lts: vec![Lt::Default],\n\n tys: vec![Type::F64],\n\n ret: Type::Void\n\n });\n\n module.add(Arc::new(\"play_once__sound\".into()), play_once__sound, PreludeFunction {\n\n lts: vec![Lt::Default],\n\n tys: vec![Type::F64],\n\n ret: Type::Void\n\n });\n\n module.add(Arc::new(\"set__sound_volume\".into()), set__sound_volume, PreludeFunction {\n\n lts: vec![Lt::Default; 2],\n\n tys: vec![Type::F64; 2],\n\n ret: Type::Void\n", "file_path": "src/engine/sound.rs", "rank": 42, "score": 8.65178166155694 }, { "content": "extern crate glium_graphics;\n\n#[macro_use]\n\nextern crate glium;\n\nextern crate piston;\n\n#[macro_use]\n\nextern crate dyon;\n\nextern crate current;\n\nextern crate dyon_interactive;\n\nextern crate sdl2_window;\n\nextern crate sdl2;\n\nextern crate sdl2_mixer;\n\nextern crate wavefront_obj;\n\nextern crate vecmath;\n\n\n\nuse sdl2_window::Sdl2Window;\n\nuse sdl2_mixer as mix;\n\nuse glium_graphics::{Glium2d, GliumWindow, OpenGL};\n\nuse piston::window::WindowSettings;\n\nuse piston::input::Event;\n\nuse dyon::{error, load, Module, Runtime};\n\nuse current::CurrentGuard;\n\n\n\nuse engine::{IndexBuffers, Materials, MusicTracks, ObjSets, Programs,\n\n SoundTracks, VertexBuffers};\n\n\n\nmod engine;\n\n\n", "file_path": "src/main.rs", "rank": 43, "score": 8.006923751379471 }, { "content": "\n\ndyon_fn!{fn play_once__sound(ind: usize) {\n\n let sound_tracks = unsafe { &*Current::<SoundTracks>::new() };\n\n\n\n let _ = mix::Channel::all().play(&sound_tracks[ind].1, 0);\n\n}}\n\n\n\ndyon_fn!{fn set__sound_volume(ind: usize, volume: f64) {\n\n let sound_tracks = unsafe { &mut *Current::<SoundTracks>::new() };\n\n\n\n sound_tracks[ind].1.set_volume(volume as isize);\n\n}}\n", "file_path": "src/engine/sound.rs", "rank": 44, "score": 7.883979934157158 }, { "content": "pub use self::obj::*;\n\npub use self::shader::*;\n\npub use self::sound::*;\n\n\n\nmod obj;\n\nmod shader;\n\npub mod math;\n\nmod sound;\n", "file_path": "src/engine/mod.rs", "rank": 45, "score": 
7.570566194140936 }, { "content": " let sz = -0.01;\n\n [\n\n [mat[0][0], mat[1][0], 0.0, 0.0],\n\n [mat[0][1], mat[1][1], 0.0, 0.0],\n\n [0.0, 0.0, sz, 0.0],\n\n [mat[0][2], mat[1][2], 0.0, 1.0]\n\n ]\n\n } else {\n\n panic!(\"No render event\");\n\n };\n\n let programs = unsafe { &*Current::<Programs>::new() };\n\n let vertex_buffers = unsafe { &*Current::<VertexBuffers>::new() };\n\n let index_buffers = unsafe { &*Current::<IndexBuffers>::new() };\n\n let target = unsafe { &mut *Current::<Frame>::new() };\n\n let mvp = math::mul(mat, math::mul(\n\n pos_transform,\n\n math::mul(\n\n math::mul(\n\n math::rotate_angle(angle),\n\n math::scale(scale)\n", "file_path": "src/engine/shader.rs", "rank": 46, "score": 7.352314559288416 }, { "content": " angle: f32,\n\n scale: f32,\n\n color: Vec4,\n\n angle_x: f32\n\n) {\n\n use glium::{Depth, DepthTest, Frame, Surface};\n\n use glium::draw_parameters::{DepthClamp, DrawParameters};\n\n use piston::input::{Event, RenderEvent};\n\n use super::math;\n\n\n\n let pos: [f32; 3] = pos.into();\n\n let pos_transform = [\n\n [1.0, 0.0, 0.0, 0.0],\n\n [0.0, 1.0, 0.0, 0.0],\n\n [0.0, 0.0, 1.0, 0.0],\n\n [pos[0], pos[1], pos[2], 1.0]\n\n ];\n\n let e = unsafe { &*Current::<Option<Event>>::new() };\n\n let mat: [[f32; 4]; 4] = if let Some(args) = e.as_ref().unwrap().render_args() {\n\n let mat: [[f32; 3]; 2] = args.viewport().abs_transform();\n", "file_path": "src/engine/shader.rs", "rank": 47, "score": 5.760709127161312 }, { "content": " let window = unsafe { &mut *Current::<Window>::new() };\n\n let e = unsafe { &mut *Current::<Option<Event>>::new() };\n\n let target = unsafe { &mut *Current::<Frame>::new() };\n\n if let Some(new_e) = window.next() {\n\n if new_e.after_render_args().is_some() {\n\n target.set_finish().unwrap();\n\n *target = window.draw();\n\n }\n\n *e = Some(new_e);\n\n rt.push(true);\n\n } else {\n\n *e = None;\n\n rt.push(false);\n\n }\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 48, "score": 5.439155417924134 }, { "content": " let sound_tracks_guard = CurrentGuard::new(&mut sound_tracks);\n\n if error(runtime.run(&module)) {\n\n return;\n\n }\n\n drop(sound_tracks_guard);\n\n drop(music_tracks_guard);\n\n drop(index_buffers_guard);\n\n drop(vertex_buffers_guard);\n\n drop(programs_guard);\n\n drop(obj_sets_guard);\n\n drop(materials_guard);\n\n drop(target_guard);\n\n drop(g2d_guard);\n\n drop(event_guard);\n\n drop(window_guard);\n\n }\n\n\n\n target.finish().unwrap();\n\n\n\n drop(timer);\n\n drop(audio);\n\n drop(window);\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 49, "score": 4.992437198832356 }, { "content": "pub use vecmath::col_mat4_mul as mul;\n\n\n", "file_path": "src/engine/math.rs", "rank": 50, "score": 4.444747201629624 }, { "content": " ),\n\n math::rotate_angle_x(angle_x)\n\n )\n\n ));\n\n /*\n\n let mvp: [[f32; 4]; 4] = [\n\n [1.0, 0.0, 0.0, 0.0],\n\n [0.0, 1.0, 0.0, 0.0],\n\n [0.0, 0.0, 1.0, 0.0],\n\n [0.0, 0.0, 0.0, 1.0]\n\n ];\n\n */\n\n target.draw(&vertex_buffers[vbuf], &index_buffers[ibuf], &programs[program].1,\n\n &uniform!{mvp: mvp, color: color.0}, &DrawParameters {\n\n depth: Depth {\n\n test: DepthTest::IfLess,\n\n write: true,\n\n range: (0.0, 1.0),\n\n clamp: DepthClamp::NoClamp,\n\n },\n\n ..Default::default()\n\n }).unwrap();\n\n}}\n", "file_path": "src/engine/shader.rs", "rank": 51, "score": 1.2339139381320834 } ]
Rust
src/text.rs
zyxw59/conlang_fmt
2c74dd805de38fff5b28c644517c3347ba93d3a5
use std::io::{Result as IoResult, Write}; use crate::blocks::{BlockCommon, BlockType, Parameter, UpdateParam}; use crate::document::Document; use crate::errors::Result as EResult; use crate::html; type OResult<T> = EResult<Option<T>>; pub trait Referenceable { fn reference_text(&self) -> Text; } #[derive(Clone, Debug, Default, Eq, PartialEq)] pub struct Text(pub Vec<Inline>); pub const EMPTY_TEXT: &'static Text = &Text(Vec::new()); impl Text { pub fn new() -> Text { Default::default() } pub fn push(&mut self, element: impl Into<Inline>) { self.0.push(element.into()); } pub fn extend(&mut self, other: &Text) { self.0.extend_from_slice(&other.0) } pub fn with_class(self, class: impl Into<String>) -> Text { Text(vec![Inline { kind: InlineType::Span(self), common: InlineCommon { class: class.into(), }, }]) } pub fn write_inline(&self, w: &mut dyn Write, document: &Document) -> IoResult<()> { for t in &self.0 { t.kind.write(w, &t.common, document)?; } Ok(()) } pub fn write_inline_plain(&self, w: &mut dyn Write, document: &Document) -> IoResult<()> { for t in &self.0 { t.kind.write_plain(w, document)?; } Ok(()) } pub fn starts_with(&self, c: char) -> bool { match self.0.first() { Some(inline) => inline.kind.starts_with(c), None => false, } } pub fn ends_with(&self, c: char) -> bool { match self.0.last() { Some(inline) => inline.kind.ends_with(c), None => false, } } } impl BlockType for Text { fn write(&self, w: &mut dyn Write, _common: &BlockCommon, document: &Document) -> IoResult<()> { write!(w, "<p>")?; self.write_inline(w, document)?; writeln!(w, "</p>\n")?; Ok(()) } } impl<T> From<T> for Text where T: Into<String>, { fn from(s: T) -> Text { let mut t = Text::new(); t.push(s.into()); t } } #[derive(Clone, Debug, Eq, PartialEq)] pub struct Inline { pub kind: InlineType, pub common: InlineCommon, } impl Inline {} impl<T> From<(InlineType, T)> for Inline where T: Into<InlineCommon>, { fn from((kind, common): (InlineType, T)) -> Inline { Inline { kind, common: common.into(), } } } impl From<String> for Inline { fn from(s: String) -> Inline { Inline::from((InlineType::Text(s), String::new())) } } #[derive(Clone, Debug, Default, Eq, PartialEq)] pub struct InlineCommon { pub class: String, } impl InlineCommon { pub fn new() -> InlineCommon { Default::default() } } impl UpdateParam for InlineCommon { fn update_param(&mut self, param: Parameter) -> OResult<Parameter> { Ok(match param.0.as_ref().map(|n| n.as_ref()) { Some("class") | None => { self.class = param.1; None } _ => Some(param), }) } } impl<T> From<T> for InlineCommon where T: Into<String>, { fn from(class: T) -> InlineCommon { InlineCommon { class: class.into(), } } } #[derive(Clone, Debug, Eq, PartialEq)] pub enum InlineType { Emphasis(Text), Strong(Text), Italics(Text), Bold(Text), SmallCaps(Text), Span(Text), Replace(String), Reference(String), Link(Link), Text(String), } impl InlineType { pub fn link() -> InlineType { InlineType::Link(Default::default()) } pub fn reference() -> InlineType { InlineType::Reference(Default::default()) } fn write(&self, w: &mut dyn Write, common: &InlineCommon, document: &Document) -> IoResult<()> { if let Some(tag) = self.tag() { write!(w, "<{} ", tag)?; write!( w, "class=\"{} {}\"", html::Encoder(self.class()), html::Encoder(&common.class) )?; if let InlineType::Link(link) = self { write!(w, " href=\"{}\"", html::Encoder(&link.url))?; } else if let InlineType::Reference(id) = self { write!(w, " href=\"#{}\"", html::Encoder(id))?; } write!(w, ">")?; } match self { InlineType::Emphasis(t) | 
InlineType::Strong(t) | InlineType::Italics(t) | InlineType::Bold(t) | InlineType::SmallCaps(t) | InlineType::Span(t) | InlineType::Link(Link { title: t, .. }) => t.write_inline(w, &document)?, InlineType::Text(s) => write!(w, "{}", html::Encoder(s))?, InlineType::Reference(id) => { if let Some(block) = document.get_id(id) { if let Some(referenceable) = block.kind.as_referenceable() { referenceable.reference_text().write_inline(w, document)?; } else { write!( w, "<span class=\"unreferenceable-block\">#{}</span>", html::Encoder(id) )?; } } else { write!( w, "<span class=\"undefined-reference\">#{}</span>", html::Encoder(id) )?; } } InlineType::Replace(key) => match document.get_replacement(key) { Some(t) => t.write_inline(w, &document)?, None => { write!( w, "<span class=\"undefined-replace\">:{}:</span>", html::Encoder(key) )?; } }, } if let Some(tag) = self.tag() { write!(w, "</{}>", tag)?; } Ok(()) } fn write_plain(&self, w: &mut dyn Write, document: &Document) -> IoResult<()> { match self { InlineType::Emphasis(t) | InlineType::Strong(t) | InlineType::Italics(t) | InlineType::Bold(t) | InlineType::SmallCaps(t) | InlineType::Span(t) | InlineType::Link(Link { title: t, .. }) => t.write_inline_plain(w, &document)?, InlineType::Text(s) => write!(w, "{}", html::Encoder(s))?, InlineType::Reference(id) => { if let Some(block) = document.get_id(id) { if let Some(referenceable) = block.kind.as_referenceable() { referenceable .reference_text() .write_inline_plain(w, document)?; } else { write!(w, "#{}", html::Encoder(id))?; } } else { write!(w, "#{}", html::Encoder(id))?; } } InlineType::Replace(key) => match document.get_replacement(key) { Some(t) => t.write_inline_plain(w, &document)?, None => write!(w, ":{}:", html::Encoder(key))?, }, } Ok(()) } fn tag(&self) -> Option<&'static str> { use self::InlineType::*; match self { Emphasis(_) => Some("em"), Strong(_) => Some("strong"), Italics(_) => Some("i"), Bold(_) => Some("b"), Link(_) | Reference(_) => Some("a"), Text(_) => None, _ => Some("span"), } } fn class(&self) -> &'static str { use self::InlineType::*; match self { SmallCaps(_) => "small-caps", Reference(_) => "reference", _ => "", } } fn starts_with(&self, c: char) -> bool { match self { InlineType::Emphasis(t) | InlineType::Strong(t) | InlineType::Italics(t) | InlineType::Bold(t) | InlineType::SmallCaps(t) | InlineType::Span(t) | InlineType::Link(Link { title: t, .. }) => t.starts_with(c), InlineType::Text(s) => s.starts_with(c), _ => false, } } fn ends_with(&self, c: char) -> bool { match self { InlineType::Emphasis(t) | InlineType::Strong(t) | InlineType::Italics(t) | InlineType::Bold(t) | InlineType::SmallCaps(t) | InlineType::Span(t) | InlineType::Link(Link { title: t, .. }) => t.ends_with(c), InlineType::Text(s) => s.ends_with(c), _ => false, } } } impl UpdateParam for InlineType { fn update_param(&mut self, param: Parameter) -> OResult<Parameter> { Ok(match *self { InlineType::Reference(ref mut s) => match param.0.as_ref().map(|p| p.as_ref()) { Some("ref") | None => { *s = param.1; None } _ => Some(param), }, InlineType::Link(ref mut link) => match param.0.as_ref().map(|p| p.as_ref()) { Some("link") | None => { link.url = param.1; None } Some("title") => { link.title = param.1.into(); None } _ => Some(param), }, _ => Some(param), }) } } #[derive(Clone, Debug, Default, Eq, PartialEq)] pub struct Link { pub url: String, pub title: Text, }
use std::io::{Result as IoResult, Write}; use crate::blocks::{BlockCommon, BlockType, Parameter, UpdateParam}; use crate::document::Document; use crate::errors::Result as EResult; use crate::html; type OResult<T> = EResult<Option<T>>; pub trait Referenceable { fn reference_text(&self) -> Text; } #[derive(Clone, Debug, Default, Eq, PartialEq)] pub struct Text(pub Vec<Inline>); pub const EMPTY_TEXT: &'static Text = &Text(Vec::new()); impl Text { pub fn new() -> Text { Default::default() } pub fn push(&mut self, element: impl Into<Inline>) { self.0.push(element.into()); } pub fn extend(&mut self, other: &Text) { self.0.extend_from_slice(&other.0) } pub fn with_class(self, class: impl Into<String>) -> Text { Text(vec![Inline { kind: InlineType::Span(self), common: InlineCommon { class: class.into(), }, }]) } pub fn write_inline(&self, w: &mut dyn Write, document: &Document) -> IoResult<()> { for t in &self.0 { t.kind.write(w, &t.common, document)?; } Ok(()) } pub fn w
} pub fn starts_with(&self, c: char) -> bool { match self.0.first() { Some(inline) => inline.kind.starts_with(c), None => false, } } pub fn ends_with(&self, c: char) -> bool { match self.0.last() { Some(inline) => inline.kind.ends_with(c), None => false, } } } impl BlockType for Text { fn write(&self, w: &mut dyn Write, _common: &BlockCommon, document: &Document) -> IoResult<()> { write!(w, "<p>")?; self.write_inline(w, document)?; writeln!(w, "</p>\n")?; Ok(()) } } impl<T> From<T> for Text where T: Into<String>, { fn from(s: T) -> Text { let mut t = Text::new(); t.push(s.into()); t } } #[derive(Clone, Debug, Eq, PartialEq)] pub struct Inline { pub kind: InlineType, pub common: InlineCommon, } impl Inline {} impl<T> From<(InlineType, T)> for Inline where T: Into<InlineCommon>, { fn from((kind, common): (InlineType, T)) -> Inline { Inline { kind, common: common.into(), } } } impl From<String> for Inline { fn from(s: String) -> Inline { Inline::from((InlineType::Text(s), String::new())) } } #[derive(Clone, Debug, Default, Eq, PartialEq)] pub struct InlineCommon { pub class: String, } impl InlineCommon { pub fn new() -> InlineCommon { Default::default() } } impl UpdateParam for InlineCommon { fn update_param(&mut self, param: Parameter) -> OResult<Parameter> { Ok(match param.0.as_ref().map(|n| n.as_ref()) { Some("class") | None => { self.class = param.1; None } _ => Some(param), }) } } impl<T> From<T> for InlineCommon where T: Into<String>, { fn from(class: T) -> InlineCommon { InlineCommon { class: class.into(), } } } #[derive(Clone, Debug, Eq, PartialEq)] pub enum InlineType { Emphasis(Text), Strong(Text), Italics(Text), Bold(Text), SmallCaps(Text), Span(Text), Replace(String), Reference(String), Link(Link), Text(String), } impl InlineType { pub fn link() -> InlineType { InlineType::Link(Default::default()) } pub fn reference() -> InlineType { InlineType::Reference(Default::default()) } fn write(&self, w: &mut dyn Write, common: &InlineCommon, document: &Document) -> IoResult<()> { if let Some(tag) = self.tag() { write!(w, "<{} ", tag)?; write!( w, "class=\"{} {}\"", html::Encoder(self.class()), html::Encoder(&common.class) )?; if let InlineType::Link(link) = self { write!(w, " href=\"{}\"", html::Encoder(&link.url))?; } else if let InlineType::Reference(id) = self { write!(w, " href=\"#{}\"", html::Encoder(id))?; } write!(w, ">")?; } match self { InlineType::Emphasis(t) | InlineType::Strong(t) | InlineType::Italics(t) | InlineType::Bold(t) | InlineType::SmallCaps(t) | InlineType::Span(t) | InlineType::Link(Link { title: t, .. 
}) => t.write_inline(w, &document)?, InlineType::Text(s) => write!(w, "{}", html::Encoder(s))?, InlineType::Reference(id) => { if let Some(block) = document.get_id(id) { if let Some(referenceable) = block.kind.as_referenceable() { referenceable.reference_text().write_inline(w, document)?; } else { write!( w, "<span class=\"unreferenceable-block\">#{}</span>", html::Encoder(id) )?; } } else { write!( w, "<span class=\"undefined-reference\">#{}</span>", html::Encoder(id) )?; } } InlineType::Replace(key) => match document.get_replacement(key) { Some(t) => t.write_inline(w, &document)?, None => { write!( w, "<span class=\"undefined-replace\">:{}:</span>", html::Encoder(key) )?; } }, } if let Some(tag) = self.tag() { write!(w, "</{}>", tag)?; } Ok(()) } fn write_plain(&self, w: &mut dyn Write, document: &Document) -> IoResult<()> { match self { InlineType::Emphasis(t) | InlineType::Strong(t) | InlineType::Italics(t) | InlineType::Bold(t) | InlineType::SmallCaps(t) | InlineType::Span(t) | InlineType::Link(Link { title: t, .. }) => t.write_inline_plain(w, &document)?, InlineType::Text(s) => write!(w, "{}", html::Encoder(s))?, InlineType::Reference(id) => { if let Some(block) = document.get_id(id) { if let Some(referenceable) = block.kind.as_referenceable() { referenceable .reference_text() .write_inline_plain(w, document)?; } else { write!(w, "#{}", html::Encoder(id))?; } } else { write!(w, "#{}", html::Encoder(id))?; } } InlineType::Replace(key) => match document.get_replacement(key) { Some(t) => t.write_inline_plain(w, &document)?, None => write!(w, ":{}:", html::Encoder(key))?, }, } Ok(()) } fn tag(&self) -> Option<&'static str> { use self::InlineType::*; match self { Emphasis(_) => Some("em"), Strong(_) => Some("strong"), Italics(_) => Some("i"), Bold(_) => Some("b"), Link(_) | Reference(_) => Some("a"), Text(_) => None, _ => Some("span"), } } fn class(&self) -> &'static str { use self::InlineType::*; match self { SmallCaps(_) => "small-caps", Reference(_) => "reference", _ => "", } } fn starts_with(&self, c: char) -> bool { match self { InlineType::Emphasis(t) | InlineType::Strong(t) | InlineType::Italics(t) | InlineType::Bold(t) | InlineType::SmallCaps(t) | InlineType::Span(t) | InlineType::Link(Link { title: t, .. }) => t.starts_with(c), InlineType::Text(s) => s.starts_with(c), _ => false, } } fn ends_with(&self, c: char) -> bool { match self { InlineType::Emphasis(t) | InlineType::Strong(t) | InlineType::Italics(t) | InlineType::Bold(t) | InlineType::SmallCaps(t) | InlineType::Span(t) | InlineType::Link(Link { title: t, .. }) => t.ends_with(c), InlineType::Text(s) => s.ends_with(c), _ => false, } } } impl UpdateParam for InlineType { fn update_param(&mut self, param: Parameter) -> OResult<Parameter> { Ok(match *self { InlineType::Reference(ref mut s) => match param.0.as_ref().map(|p| p.as_ref()) { Some("ref") | None => { *s = param.1; None } _ => Some(param), }, InlineType::Link(ref mut link) => match param.0.as_ref().map(|p| p.as_ref()) { Some("link") | None => { link.url = param.1; None } Some("title") => { link.title = param.1.into(); None } _ => Some(param), }, _ => Some(param), }) } } #[derive(Clone, Debug, Default, Eq, PartialEq)] pub struct Link { pub url: String, pub title: Text, }
rite_inline_plain(&self, w: &mut dyn Write, document: &Document) -> IoResult<()> { for t in &self.0 { t.kind.write_plain(w, document)?; } Ok(())
function_block-random_span
[ { "content": "pub trait BlockType: Debug {\n\n /// Outputs the block.\n\n fn write(&self, w: &mut dyn Write, common: &BlockCommon, document: &Document) -> IoResult<()>;\n\n\n\n /// Updates with the given parameter. If the parameter was not updated, returns the parameter.\n\n fn update_param(&mut self, param: Parameter) -> OResult<Parameter> {\n\n Ok(Some(param))\n\n }\n\n\n\n /// Returns a `&dyn Referenceable` if the block can be referenced, otherwise returns `None`.\n\n fn as_referenceable(&self) -> Option<&dyn Referenceable> {\n\n None\n\n }\n\n\n\n /// Returns a `&dyn HeadingLike` if the block is a heading, otherwise returns `None`.\n\n fn as_heading(&self) -> Option<&dyn HeadingLike> {\n\n None\n\n }\n\n\n\n /// Returns a `&mut dyn HeadingLike` if the block is a heading, otherwise returns `None`.\n", "file_path": "src/blocks.rs", "rank": 1, "score": 121005.27068411045 }, { "content": "/// Writes a section number recursively.\n\nfn write_section_number(w: &mut dyn Write, number: &[usize]) -> IoResult<()> {\n\n if let Some((last, rest)) = number.split_last() {\n\n write!(w, \"<span class=\\\"secnum\\\">\")?;\n\n write_section_number(w, rest)?;\n\n write!(w, \"{}.</span>\", last)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/blocks/heading.rs", "rank": 2, "score": 109010.94589936384 }, { "content": "pub trait HeadingLike: Debug {\n\n fn numbered(&self) -> bool;\n\n fn toc(&self) -> bool;\n\n fn level(&self) -> usize;\n\n fn children(&self) -> &SectionList;\n\n fn mut_children(&mut self) -> &mut SectionList;\n\n fn number(&self) -> &[usize];\n\n fn push_number(&mut self, value: usize);\n\n fn title(&self) -> &Text;\n\n\n\n #[cfg(test)]\n\n fn eq(&self, other: &dyn HeadingLike) -> bool {\n\n self.numbered() == other.numbered()\n\n && self.toc() == other.toc()\n\n && self.level() == other.level()\n\n && self.children() == other.children()\n\n && self.number() == other.number()\n\n && self.title() == other.title()\n\n }\n\n}\n", "file_path": "src/blocks/heading.rs", "rank": 3, "score": 96361.16481941071 }, { "content": "pub trait UpdateParam {\n\n /// Updates with the given parameter. 
If the parameter was not updated, returns the parameter.\n\n fn update_param(&mut self, param: Parameter) -> OResult<Parameter>;\n\n}\n\n\n\nimpl UpdateParam for String {\n\n fn update_param(&mut self, param: Parameter) -> OResult<Parameter> {\n\n Ok(match param.0.as_ref().map(|n| n.as_ref()) {\n\n Some(\"class\") | None => {\n\n *self = param.1;\n\n None\n\n }\n\n _ => Some(param),\n\n })\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Block {\n\n pub kind: Box<dyn BlockType>,\n", "file_path": "src/blocks.rs", "rank": 4, "score": 88044.29794394226 }, { "content": "/// Returns a section number as a `Text`.\n\nfn section_number_text(number: &[usize]) -> Text {\n\n if let Some((last, rest)) = number.split_last() {\n\n let mut text = section_number_text(rest).with_class(\"secnum\");\n\n text.push(Inline::from(format!(\"{}.\", last)));\n\n text\n\n } else {\n\n Text::new()\n\n }\n\n}\n\n\n", "file_path": "src/blocks/heading.rs", "rank": 5, "score": 65044.63804829998 }, { "content": "fn print_errors(e: &dyn Fail) {\n\n for c in e.iter_chain() {\n\n eprintln!(\"{}\", c);\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 6, "score": 63170.94542949865 }, { "content": "fn get_entity(c: char) -> Option<&'static str> {\n\n match c {\n\n '\"' => Some(\"quot\"),\n\n '&' => Some(\"amp\"),\n\n '\\'' => Some(\"#x27\"),\n\n '<' => Some(\"lt\"),\n\n '>' => Some(\"gt\"),\n\n _ => None,\n\n }\n\n}\n", "file_path": "src/html.rs", "rank": 7, "score": 54345.42920487463 }, { "content": "fn main_result() -> EResult<()> {\n\n // for now, just read from stdin\n\n let stdin = io::stdin();\n\n let mut input = input::Input::new(stdin.lock());\n\n let mut document: document::Document = Default::default();\n\n loop {\n\n let mut block = input.next_block()?;\n\n if let Some(block) = block.parse()? {\n\n document.add_block(block)?;\n\n } else {\n\n break;\n\n }\n\n }\n\n let stdout = io::stdout();\n\n let mut stdout = stdout.lock();\n\n document.write(&mut stdout)\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 9, "score": 49375.04993621932 }, { "content": "fn main() {\n\n if let Err(e) = main_result() {\n\n print_errors(&e);\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 10, "score": 43216.89631257203 }, { "content": "type OResult<T> = EResult<Option<T>>;\n\n\n\n/// A slice of characters representing a block\n\n#[derive(Debug)]\n\npub struct Block<'a> {\n\n slice: &'a [char],\n\n start: Option<usize>,\n\n idx: usize,\n\n}\n\n\n\n/// Update each object `$x` in order with the parameters returned by `$self.parameters()?`.\n\n///\n\n/// Uses `$self` to raise appropriate errors.\n\n///\n\n/// Panics if no argument in `$x` handles all cases where the parameter name is `None`.\n\nmacro_rules! update_multiple {\n\n ( $self:ident, $( $x:expr ),* ) => {\n\n {\n\n for param in $self.parameters()? 
{\n\n // `update_one!` does the heavy lifting.\n", "file_path": "src/parse.rs", "rank": 11, "score": 35786.87829760132 }, { "content": "type OResult<T> = EResult<Option<T>>;\n\n\n\n#[derive(Debug, Default, Eq, PartialEq)]\n\npub struct Parameter(pub Option<String>, pub String);\n\n\n", "file_path": "src/blocks.rs", "rank": 12, "score": 35786.87829760132 }, { "content": "type OResult<T> = EResult<Option<T>>;\n\n\n\n#[derive(Debug, Default, Eq, PartialEq)]\n\npub struct Replacements {\n\n pub replacements: HashMap<String, Text>,\n\n}\n\n\n\nimpl Replacements {\n\n pub fn new() -> Replacements {\n\n Default::default()\n\n }\n\n\n\n /// Inserts the given key/value pair, returning an error if the key is already present.\n\n pub fn insert(&mut self, key: String, value: Text) -> EResult<()> {\n\n if self.replacements.contains_key(&key) {\n\n Err(ErrorKind::Replace(key).into())\n\n } else {\n\n self.replacements.insert(key, value);\n\n Ok(())\n\n }\n", "file_path": "src/blocks/replacements.rs", "rank": 13, "score": 34941.68807329371 }, { "content": "type OResult<T> = EResult<Option<T>>;\n\n\n", "file_path": "src/blocks/heading.rs", "rank": 14, "score": 34941.68807329371 }, { "content": "type OResult<T> = EResult<Option<T>>;\n\n\n\n#[derive(Debug, Eq, PartialEq)]\n\npub struct Table {\n\n pub title: Text,\n\n pub numbered: bool,\n\n pub number: usize,\n\n pub rows: Vec<Row>,\n\n pub columns: Vec<Column>,\n\n}\n\n\n\nimpl Table {\n\n pub fn new() -> Table {\n\n Default::default()\n\n }\n\n}\n\n\n\nimpl BlockType for Table {\n\n fn write(\n\n &self,\n", "file_path": "src/blocks/table.rs", "rank": 15, "score": 34941.68807329371 }, { "content": "type OResult<T> = EResult<Option<T>>;\n\n\n\n#[derive(Debug, Eq, PartialEq)]\n\npub struct Gloss {\n\n pub title: Text,\n\n pub numbered: bool,\n\n pub number: usize,\n\n pub preamble: Vec<Text>,\n\n pub gloss: Vec<GlossLine>,\n\n pub postamble: Vec<Text>,\n\n}\n\n\n\nimpl Gloss {\n\n pub fn new() -> Gloss {\n\n Default::default()\n\n }\n\n}\n\n\n\nimpl BlockType for Gloss {\n\n fn write(&self, w: &mut dyn Write, common: &BlockCommon, document: &Document) -> IoResult<()> {\n", "file_path": "src/blocks/gloss.rs", "rank": 16, "score": 34941.68807329371 }, { "content": "type OResult<T> = EResult<Option<T>>;\n\n\n\n#[derive(Debug, Eq, PartialEq)]\n\npub struct Contents {\n\n pub title: Text,\n\n pub max_level: usize,\n\n}\n\n\n\nimpl Contents {\n\n pub fn new() -> Contents {\n\n Default::default()\n\n }\n\n\n\n fn write_sublist(\n\n &self,\n\n w: &mut dyn Write,\n\n level: usize,\n\n list: &[usize],\n\n document: &Document,\n\n ) -> IoResult<()> {\n", "file_path": "src/blocks/contents.rs", "rank": 17, "score": 34941.68807329371 }, { "content": "type OResult<T> = EResult<Option<T>>;\n\n\n\n#[derive(Debug, Default, Eq, PartialEq)]\n\npub struct List {\n\n pub items: Vec<ListItem>,\n\n pub ordered: bool,\n\n}\n\n\n\nimpl List {\n\n pub fn new() -> List {\n\n Default::default()\n\n }\n\n\n\n fn tag(ordered: bool) -> &'static str {\n\n if ordered {\n\n \"ol\"\n\n } else {\n\n \"ul\"\n\n }\n\n }\n", "file_path": "src/blocks/list.rs", "rank": 18, "score": 34941.68807329371 }, { "content": " }\n\n }\n\n if heading.numbered() {\n\n heading.push_number(self.get_section_list(curr).last_child_number + 1);\n\n if common.id.is_empty() {\n\n common.id = format!(\"sec-{}\", heading.number().iter().format(\"-\"));\n\n }\n\n }\n\n self.get_mut_section_list(curr)\n\n .push(idx, heading.numbered());\n\n Ok(idx)\n\n }\n\n\n\n /// Writes the blocks as HTML.\n\n pub fn write(&self, w: 
&mut impl Write) -> EResult<()> {\n\n self.write_head(w).context(ErrorKind::WriteIoHead)?;\n\n for Block { kind, common } in &self.blocks {\n\n kind.write(w, common, self)\n\n .context(ErrorKind::WriteIo(common.start_line))?;\n\n }\n", "file_path": "src/document.rs", "rank": 19, "score": 27385.31875834192 }, { "content": "use std::collections::hash_map::Entry;\n\nuse std::collections::HashMap;\n\nuse std::default::Default;\n\nuse std::fmt::Debug;\n\nuse std::fs::File;\n\nuse std::io::{BufReader, Result as IoResult, Write};\n\nuse std::path::Path;\n\n\n\nuse failure::ResultExt;\n\nuse itertools::Itertools;\n\n\n\nuse crate::blocks::{\n\n control::DocumentControl,\n\n heading::{FillerHeading, HeadingLike, SectionList},\n\n replacements::Replacements,\n\n Block, BlockCommon,\n\n};\n\nuse crate::errors::{ErrorKind, Result as EResult};\n\nuse crate::input::Input;\n\nuse crate::text::Text;\n", "file_path": "src/document.rs", "rank": 20, "score": 27383.463097399213 }, { "content": " self.author.get_or_insert(text.clone());\n\n }\n\n DocumentControl::Description(text) => {\n\n self.description.get_or_insert(text.clone());\n\n }\n\n DocumentControl::Stylesheet(text) => {\n\n self.stylesheets.push(text.clone());\n\n }\n\n DocumentControl::Lang(text) => {\n\n self.lang.get_or_insert(text.clone());\n\n }\n\n DocumentControl::Import(text) => {\n\n let mut filename = Vec::new();\n\n text.write_inline_plain(&mut filename, self)\n\n .expect(\"Writing to `Vec<u8>` shouldn't fail\");\n\n let filename =\n\n String::from_utf8(filename).expect(\"`Text` should always write valid utf-8\");\n\n let file = Path::new(filename.trim())\n\n .canonicalize()\n\n .and_then(File::open)\n", "file_path": "src/document.rs", "rank": 21, "score": 27382.148432211234 }, { "content": " }\n\n if block.common.id.is_empty() {\n\n block.common.id = format!(\"__no-id-{}\", self.noid_index);\n\n self.noid_index += 1;\n\n }\n\n let id = block.common.id.clone();\n\n match self.ids.entry(id) {\n\n Entry::Occupied(e) => return Err(ErrorKind::Id(e.key().clone()).into()),\n\n Entry::Vacant(e) => e.insert(idx),\n\n };\n\n self.blocks.push(block);\n\n Ok(())\n\n }\n\n\n\n fn control(&mut self, control: &DocumentControl) -> EResult<()> {\n\n match control {\n\n DocumentControl::Title(text) => {\n\n self.title.get_or_insert(text.clone());\n\n }\n\n DocumentControl::Author(text) => {\n", "file_path": "src/document.rs", "rank": 22, "score": 27381.73308933016 }, { "content": " .context(ErrorKind::FileNotFound(filename))?;\n\n let mut input = Input::new(BufReader::new(file));\n\n while let Some(block) = input.next_block()?.parse()? 
{\n\n self.add_block(block)?;\n\n }\n\n }\n\n }\n\n Ok(())\n\n }\n\n\n\n fn add_heading(\n\n &mut self,\n\n heading: &mut dyn HeadingLike,\n\n common: &mut BlockCommon,\n\n ) -> EResult<usize> {\n\n let mut idx = self.blocks.len();\n\n let mut curr = None;\n\n while self.get_section_list(curr).level < heading.level() {\n\n let curr_level = self.get_section_list(curr).level;\n\n if self.get_section_list(curr).is_empty() {\n", "file_path": "src/document.rs", "rank": 23, "score": 27381.09737141961 }, { "content": " /// The first unused number for blocks without an ID.\n\n noid_index: usize,\n\n /// The title of the document.\n\n title: Option<Text>,\n\n /// The author of the document.\n\n author: Option<Text>,\n\n /// The description of the document.\n\n description: Option<Text>,\n\n /// The stylesheets for the document.\n\n stylesheets: Vec<Text>,\n\n /// The global `lang` attribute for the document.\n\n lang: Option<Text>,\n\n}\n\n\n\nimpl Document {\n\n /// Adds the given block to the document.\n\n pub fn add_block(&mut self, mut block: Block) -> EResult<()> {\n\n let mut idx = self.blocks.len();\n\n if let Some(control) = block.kind.as_control() {\n\n self.control(control)?;\n", "file_path": "src/document.rs", "rank": 24, "score": 27380.707262054093 }, { "content": " title.write_inline(w, self)?;\n\n writeln!(w, \"</h1>\")?;\n\n }\n\n Ok(())\n\n }\n\n\n\n fn write_tail(&self, w: &mut impl Write) -> IoResult<()> {\n\n writeln!(w, \"</body>\")?;\n\n writeln!(w, \"</html>\")?;\n\n Ok(())\n\n }\n\n\n\n /// Get a reference to the specified block.\n\n pub fn get_block(&self, idx: usize) -> Option<&Block> {\n\n self.blocks.get(idx)\n\n }\n\n\n\n /// Get a reference to the specified block as a heading.\n\n ///\n\n /// Panics if the specified block doesn't exist or isn't a heading.\n", "file_path": "src/document.rs", "rank": 25, "score": 27379.70809891916 }, { "content": " self.write_tail(w).context(ErrorKind::WriteIoTail)?;\n\n Ok(())\n\n }\n\n\n\n fn write_head(&self, w: &mut impl Write) -> IoResult<()> {\n\n writeln!(w, \"<!doctype html>\")?;\n\n write!(w, \"<html\")?;\n\n if let Some(lang) = &self.lang {\n\n write!(w, \" lang=\\\"\")?;\n\n lang.write_inline_plain(w, self)?;\n\n writeln!(w, \"\\\">\")?;\n\n } else {\n\n writeln!(w, \">\")?;\n\n }\n\n writeln!(w, \"<head>\")?;\n\n writeln!(w, \"<meta charset=\\\"utf-8\\\" />\")?;\n\n if let Some(title) = &self.title {\n\n write!(w, \"<title>\")?;\n\n title.write_inline_plain(w, self)?;\n\n writeln!(w, \"</title>\")?;\n", "file_path": "src/document.rs", "rank": 26, "score": 27379.459662501507 }, { "content": " pub fn get_heading(&self, block_index: usize) -> &dyn HeadingLike {\n\n self.blocks[block_index].kind.as_heading().unwrap()\n\n }\n\n\n\n /// Get a mutable reference to the specified block as a heading.\n\n ///\n\n /// Panics if the specified block doesn't exist or isn't a heading.\n\n fn get_mut_heading(&mut self, block_index: usize) -> &mut dyn HeadingLike {\n\n self.blocks[block_index].kind.as_mut_heading().unwrap()\n\n }\n\n\n\n /// Get a reference to the children of the specified block, or the root section list if none is\n\n /// specified.\n\n ///\n\n /// Panics if the specified block doesn't exist or isn't a heading.\n\n pub fn get_section_list(&self, block_index: Option<usize>) -> &SectionList {\n\n if let Some(idx) = block_index {\n\n self.get_heading(idx).children()\n\n } else {\n\n &self.sections\n", "file_path": "src/document.rs", "rank": 27, "score": 27377.329282222632 }, { "content": " }\n\n if let Some(heading) = 
block.kind.as_mut_heading() {\n\n idx = self.add_heading(heading, &mut block.common)?;\n\n }\n\n if let Some(replacements) = block.kind.as_mut_replacements() {\n\n self.replacements.update(replacements);\n\n }\n\n if let Some(table) = block.kind.as_mut_table() {\n\n if table.numbered {\n\n self.table_number += 1;\n\n table.number = self.table_number;\n\n }\n\n self.tables.push(idx);\n\n }\n\n if let Some(gloss) = block.kind.as_mut_gloss() {\n\n if gloss.numbered {\n\n self.gloss_number += 1;\n\n gloss.number = self.gloss_number;\n\n }\n\n self.glosses.push(idx);\n", "file_path": "src/document.rs", "rank": 28, "score": 27376.232446882925 }, { "content": " }\n\n }\n\n\n\n /// Get a mutable reference to the children of the specified block, or the root section list if\n\n /// none is specified.\n\n ///\n\n /// Panics if the specified block doesn't exist or isn't a heading.\n\n fn get_mut_section_list(&mut self, block_index: Option<usize>) -> &mut SectionList {\n\n if let Some(idx) = block_index {\n\n self.get_mut_heading(idx).mut_children()\n\n } else {\n\n &mut self.sections\n\n }\n\n }\n\n\n\n /// Gets a reference to the block with the specified ID.\n\n pub fn get_id(&self, id: &str) -> Option<&Block> {\n\n self.ids.get(id).map(|&idx| &self.blocks[idx])\n\n }\n\n\n\n /// Gets the replacement text for the given key.\n\n pub fn get_replacement(&self, key: &str) -> Option<&Text> {\n\n self.replacements.get(key)\n\n }\n\n}\n", "file_path": "src/document.rs", "rank": 29, "score": 27375.374292491895 }, { "content": "\n\n#[derive(Debug, Default)]\n\npub struct Document {\n\n /// A list of blocks in the document\n\n blocks: Vec<Block>,\n\n /// A list of indices into the `blocks` field corresponding to the top-level section headings\n\n /// of the document.\n\n sections: SectionList,\n\n /// A map from IDs to indices into the `blocks` field.\n\n ids: HashMap<String, usize>,\n\n /// A map of defined replacements.\n\n replacements: Replacements,\n\n /// A list of indices into the `blocks` field corresponding to the tables.\n\n tables: Vec<usize>,\n\n /// A list of indices into the `blocks` field corresponding to the glosses.\n\n glosses: Vec<usize>,\n\n /// The last table number.\n\n table_number: usize,\n\n /// The last gloss number.\n\n gloss_number: usize,\n", "file_path": "src/document.rs", "rank": 30, "score": 27374.353601596864 }, { "content": " }\n\n if let Some(author) = &self.author {\n\n write!(w, \"<meta name=\\\"author\\\" content=\\\"\")?;\n\n author.write_inline_plain(w, self)?;\n\n writeln!(w, \"\\\" />\")?;\n\n }\n\n if let Some(description) = &self.description {\n\n write!(w, \"<meta name=\\\"description\\\" content=\\\"\")?;\n\n description.write_inline_plain(w, self)?;\n\n writeln!(w, \"\\\" />\")?;\n\n }\n\n for stylesheet in &self.stylesheets {\n\n write!(w, \"<link rel=\\\"stylesheet\\\" type=\\\"text/css\\\" href=\\\"\")?;\n\n stylesheet.write_inline_plain(w, self)?;\n\n writeln!(w, \"\\\" />\")?;\n\n }\n\n writeln!(w, \"</head>\")?;\n\n writeln!(w, \"<body>\")?;\n\n if let Some(title) = &self.title {\n\n write!(w, \"<h1 class=\\\"title\\\">\")?;\n", "file_path": "src/document.rs", "rank": 31, "score": 27373.44325772143 }, { "content": " // insert filler section\n\n self.blocks.push(FillerHeading::new(curr_level + 1).into());\n\n self.get_mut_section_list(curr).push(idx, false);\n\n // since we inserted another block before the one we're working on\n\n idx += 1;\n\n }\n\n if heading.numbered() {\n\n heading.push_number(self.get_section_list(curr).last_child_number);\n\n }\n\n // 
move to next child\n\n curr = self.get_section_list(curr).last().cloned();\n\n }\n\n // now, insert the heading into its direct parent.\n\n if !heading.numbered() {\n\n // if this is a nonumber heading, its last_child_number is the same as it's older\n\n // sibling's, if such a sibling exists (otherwise last_child_number should remain\n\n // the default 0)\n\n if let Some(&older_sibling) = self.get_section_list(curr).last() {\n\n heading.mut_children().last_child_number =\n\n self.get_section_list(Some(older_sibling)).last_child_number;\n", "file_path": "src/document.rs", "rank": 32, "score": 27370.927575452377 }, { "content": "use std::io::{Result as IoResult, Write};\n\n\n\nuse crate::blocks::{BlockCommon, BlockType};\n\nuse crate::document::Document;\n\nuse crate::text::Text;\n\n\n\n#[derive(Debug, Eq, PartialEq)]\n\npub enum DocumentControl {\n\n Title(Text),\n\n Stylesheet(Text),\n\n Author(Text),\n\n Description(Text),\n\n Lang(Text),\n\n Import(Text),\n\n}\n\n\n\nimpl BlockType for DocumentControl {\n\n fn write(&self, _: &mut dyn Write, _: &BlockCommon, _: &Document) -> IoResult<()> {\n\n Ok(())\n\n }\n\n\n\n fn as_control(&self) -> Option<&DocumentControl> {\n\n Some(self)\n\n }\n\n}\n", "file_path": "src/blocks/control.rs", "rank": 50, "score": 32.052097765735745 }, { "content": " pub common: BlockCommon,\n\n}\n\n\n\nimpl UpdateParam for Block {\n\n fn update_param(&mut self, param: Parameter) -> OResult<Parameter> {\n\n self.kind.update_param(param).and_then(|p| match p {\n\n Some(p) => self.common.update_param(p),\n\n None => Ok(None),\n\n })\n\n }\n\n}\n\n\n\nimpl<T: BlockType + 'static> From<T> for Block {\n\n fn from(kind: T) -> Block {\n\n Block {\n\n kind: Box::new(kind),\n\n common: Default::default(),\n\n }\n\n }\n\n}\n", "file_path": "src/blocks.rs", "rank": 51, "score": 27.00659256334274 }, { "content": " Ok(())\n\n }\n\n\n\n fn simple_inline(&mut self, mut kind: text::InlineType) -> EResult<text::Inline> {\n\n let mut common = text::InlineCommon::new();\n\n update_multiple!(self, kind, common);\n\n Ok(text::Inline { kind, common })\n\n }\n\n\n\n fn formatting_inline(\n\n &mut self,\n\n delim: char,\n\n single: impl FnOnce(text::Text) -> text::InlineType,\n\n double: impl FnOnce(text::Text) -> text::InlineType,\n\n ) -> EResult<text::Inline> {\n\n let kind = match self.expect(delim)? 
{\n\n // double\n\n c if c == delim => {\n\n let mut text = text::Text::new();\n\n self.text_until_char(&mut text, delim)?;\n", "file_path": "src/parse.rs", "rank": 52, "score": 26.48932968687674 }, { "content": "}\n\n\n\n#[derive(Debug, Default, Eq, PartialEq)]\n\npub struct ListItem {\n\n pub text: Text,\n\n pub sublist: Vec<ListItem>,\n\n}\n\n\n\nimpl ListItem {\n\n pub fn new() -> ListItem {\n\n Default::default()\n\n }\n\n\n\n fn write(&self, w: &mut dyn Write, ordered: bool, document: &Document) -> IoResult<()> {\n\n write!(w, \"<li>\")?;\n\n self.text.write_inline(w, document)?;\n\n if !self.sublist.is_empty() {\n\n writeln!(w, \"<{}>\", List::tag(ordered))?;\n\n List::write_list(w, &self.sublist, ordered, document)?;\n\n writeln!(w, \"</{}>\", List::tag(ordered))?;\n\n }\n\n writeln!(w, \"</li>\")\n\n }\n\n}\n", "file_path": "src/blocks/list.rs", "rank": 53, "score": 25.675737285394405 }, { "content": "\n\n#[derive(Debug, Default, Eq, PartialEq)]\n\npub struct BlockCommon {\n\n pub class: String,\n\n pub id: String,\n\n pub start_line: usize,\n\n}\n\n\n\nimpl BlockCommon {\n\n pub fn new(start_line: usize) -> BlockCommon {\n\n BlockCommon {\n\n start_line,\n\n ..Default::default()\n\n }\n\n }\n\n}\n\n\n\nimpl UpdateParam for BlockCommon {\n\n fn update_param(&mut self, param: Parameter) -> OResult<Parameter> {\n\n Ok(match param.0.as_ref().map(|n| n.as_ref()) {\n", "file_path": "src/blocks.rs", "rank": 54, "score": 25.61426344503168 }, { "content": " }\n\n Some(_) => Some(param),\n\n })\n\n }\n\n}\n\n\n\n#[derive(Debug, Default, Eq, PartialEq)]\n\npub struct Column {\n\n pub header: bool,\n\n pub class: String,\n\n}\n\n\n\nimpl Column {\n\n pub fn new() -> Column {\n\n Default::default()\n\n }\n\n}\n\n\n\nimpl UpdateParam for Column {\n\n fn update_param(&mut self, param: Parameter) -> OResult<Parameter> {\n", "file_path": "src/blocks/table.rs", "rank": 55, "score": 23.06428586914199 }, { "content": " postamble: Default::default(),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Default, Eq, PartialEq)]\n\npub struct GlossLine {\n\n pub words: Vec<Text>,\n\n pub class: String,\n\n}\n\n\n\nimpl GlossLine {\n\n pub fn new() -> GlossLine {\n\n Default::default()\n\n }\n\n\n\n pub fn push(&mut self, word: Text) {\n\n self.words.push(word);\n\n }\n\n}\n", "file_path": "src/blocks/gloss.rs", "rank": 56, "score": 22.321027578169037 }, { "content": "\n\n#[derive(Debug, Eq, PartialEq)]\n\npub enum GlossLineType {\n\n NoSplit,\n\n Split,\n\n}\n\n\n\nimpl GlossLineType {\n\n /// Updates with the given parameter. 
If the parameter was not updated, returns the parameter.\n\n pub fn update_param(&mut self, param: Parameter) -> OResult<Parameter> {\n\n Ok(match param.0.as_ref() {\n\n Some(_) => Some(param),\n\n None => match param.1.as_ref() {\n\n \"nosplit\" => {\n\n *self = GlossLineType::NoSplit;\n\n None\n\n }\n\n _ => Some(param),\n\n },\n\n })\n\n }\n\n}\n\n\n\nimpl Default for GlossLineType {\n\n fn default() -> GlossLineType {\n\n GlossLineType::Split\n\n }\n\n}\n", "file_path": "src/blocks/gloss.rs", "rank": 57, "score": 22.247094566586487 }, { "content": "use std::fmt::Debug;\n\nuse std::io::{Result as IoResult, Write};\n\nuse std::ops::Deref;\n\n\n\nuse crate::blocks::{BlockCommon, BlockType, Parameter};\n\nuse crate::document::Document;\n\nuse crate::errors::Result as EResult;\n\nuse crate::html;\n\nuse crate::text::{Inline, Referenceable, Text, EMPTY_TEXT};\n\n\n", "file_path": "src/blocks/heading.rs", "rank": 58, "score": 21.949279755006728 }, { "content": "use std::io::{Result as IoResult, Write};\n\n\n\nuse failure::ResultExt;\n\n\n\nuse crate::blocks::{BlockCommon, BlockType, Parameter, UpdateParam};\n\nuse crate::document::Document;\n\nuse crate::errors::{ErrorKind, Result as EResult};\n\nuse crate::html;\n\nuse crate::text::{Referenceable, Text};\n\n\n", "file_path": "src/blocks/table.rs", "rank": 59, "score": 21.923699547120755 }, { "content": "use std::io::{Result as IoResult, Write};\n\n\n\nuse crate::blocks::{BlockCommon, BlockType, Parameter};\n\nuse crate::document::Document;\n\nuse crate::errors::Result as EResult;\n\nuse crate::html;\n\nuse crate::text::{Referenceable, Text};\n\n\n", "file_path": "src/blocks/gloss.rs", "rank": 60, "score": 21.11836796027845 }, { "content": " fn write(&self, _: &mut dyn Write, _: &BlockCommon, _: &Document) -> IoResult<()> {\n\n Ok(())\n\n }\n\n\n\n fn update_param(&mut self, param: Parameter) -> OResult<Parameter> {\n\n Ok(Some(param))\n\n }\n\n\n\n fn as_mut_replacements(&mut self) -> Option<&mut Replacements> {\n\n Some(self)\n\n }\n\n}\n", "file_path": "src/blocks/replacements.rs", "rank": 61, "score": 21.015809189609293 }, { "content": "\n\n fn write_list(\n\n w: &mut dyn Write,\n\n items: &[ListItem],\n\n ordered: bool,\n\n document: &Document,\n\n ) -> IoResult<()> {\n\n for item in items {\n\n item.write(w, ordered, document)?;\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl BlockType for List {\n\n fn write(&self, w: &mut dyn Write, common: &BlockCommon, document: &Document) -> IoResult<()> {\n\n write!(w, \"<{} \", List::tag(self.ordered))?;\n\n write!(w, \"id=\\\"{}\\\" \", html::Encoder(&common.id))?;\n\n write!(w, \"class=\\\"{}\\\">\", html::Encoder(&common.class))?;\n\n List::write_list(w, &self.items, self.ordered, document)?;\n", "file_path": "src/blocks/list.rs", "rank": 62, "score": 20.754070125649307 }, { "content": "use std::collections::HashMap;\n\nuse std::io::{Result as IoResult, Write};\n\n\n\nuse crate::blocks::{BlockCommon, BlockType, Parameter};\n\nuse crate::document::Document;\n\nuse crate::errors::{ErrorKind, Result as EResult};\n\nuse crate::text::Text;\n\n\n", "file_path": "src/blocks/replacements.rs", "rank": 63, "score": 20.441946351989564 }, { "content": " pub cols: usize,\n\n pub class: String,\n\n pub text: Text,\n\n}\n\n\n\nimpl Cell {\n\n pub fn new() -> Cell {\n\n Default::default()\n\n }\n\n\n\n fn write(\n\n &self,\n\n w: &mut impl Write,\n\n row: &Row,\n\n col: Option<&Column>,\n\n document: &Document,\n\n ) -> IoResult<()> {\n\n let header_row = row.header;\n\n let header_col = col.map(|col| 
col.header).unwrap_or(false);\n\n if header_row {\n", "file_path": "src/blocks/table.rs", "rank": 64, "score": 20.236518409883725 }, { "content": "use std::io::{Result as IoResult, Write};\n\n\n\nuse failure::ResultExt;\n\n\n\nuse crate::blocks::{BlockCommon, BlockType, Parameter};\n\nuse crate::document::Document;\n\nuse crate::errors::{ErrorKind, Result as EResult};\n\nuse crate::html;\n\nuse crate::text::Text;\n\n\n", "file_path": "src/blocks/contents.rs", "rank": 65, "score": 20.188744048258805 }, { "content": " fn tag(&self) -> &'static str {\n\n match self.level {\n\n 1 => \"h1\",\n\n 2 => \"h2\",\n\n 3 => \"h3\",\n\n 4 => \"h4\",\n\n 5 => \"h5\",\n\n 6 => \"h6\",\n\n _ => \"p\",\n\n }\n\n }\n\n}\n\n\n\nimpl BlockType for Heading {\n\n fn write(&self, w: &mut dyn Write, common: &BlockCommon, document: &Document) -> IoResult<()> {\n\n // start tag\n\n write!(w, \"<{} \", self.tag())?;\n\n write!(w, \"id=\\\"{}\\\" \", html::Encoder(&common.id))?;\n\n write!(w, \"class=\\\"{} \", html::Encoder(&common.class))?;\n\n if self.level > 6 {\n", "file_path": "src/blocks/heading.rs", "rank": 66, "score": 20.181761156636146 }, { "content": " if heading.toc() {\n\n write!(\n\n w,\n\n \"<a href=\\\"#{}\\\">\",\n\n &document.get_block(e).unwrap().common.id\n\n )?;\n\n heading.title().write_inline(w, document)?;\n\n write!(w, \"</a>\")?;\n\n }\n\n self.write_sublist(w, level + 1, heading.children(), &document)?;\n\n writeln!(w, \"</li>\")?;\n\n }\n\n writeln!(w, \"</ol>\\n\")?;\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl BlockType for Contents {\n\n fn write(&self, w: &mut dyn Write, common: &BlockCommon, document: &Document) -> IoResult<()> {\n", "file_path": "src/blocks/contents.rs", "rank": 67, "score": 19.7350729800528 }, { "content": "}\n\n\n\n#[derive(Debug, Default, Eq, PartialEq)]\n\npub struct FillerHeading {\n\n children: SectionList,\n\n}\n\n\n\nimpl FillerHeading {\n\n pub fn new(level: usize) -> FillerHeading {\n\n FillerHeading {\n\n children: SectionList {\n\n level,\n\n ..Default::default()\n\n },\n\n ..Default::default()\n\n }\n\n }\n\n}\n\n\n\nimpl BlockType for FillerHeading {\n", "file_path": "src/blocks/heading.rs", "rank": 68, "score": 19.530646011292056 }, { "content": " fn write(&self, _: &mut dyn Write, _: &BlockCommon, _: &Document) -> IoResult<()> {\n\n Ok(())\n\n }\n\n\n\n fn as_heading(&self) -> Option<&dyn HeadingLike> {\n\n Some(self)\n\n }\n\n\n\n fn as_mut_heading(&mut self) -> Option<&mut dyn HeadingLike> {\n\n Some(self)\n\n }\n\n}\n\n\n\nimpl HeadingLike for FillerHeading {\n\n fn numbered(&self) -> bool {\n\n false\n\n }\n\n\n\n fn toc(&self) -> bool {\n\n false\n", "file_path": "src/blocks/heading.rs", "rank": 69, "score": 19.174371502837566 }, { "content": " text::InlineType::Italics,\n\n text::InlineType::Bold,\n\n )?);\n\n }\n\n // small caps\n\n '^' => {\n\n push_and_renew!(buffer: String::new(), text);\n\n // rewind\n\n let mut inner = text::Text::new();\n\n self.text_until_char(&mut inner, '^')?;\n\n let kind = text::InlineType::SmallCaps(inner);\n\n text.push(self.simple_inline(kind)?);\n\n }\n\n // generic `span`\n\n '`' => {\n\n push_and_renew!(buffer: String::new(), text);\n\n let mut inner = text::Text::new();\n\n self.text_until_char(&mut inner, '`')?;\n\n let kind = text::InlineType::Span(inner);\n\n let mut common = text::InlineCommon::new();\n", "file_path": "src/parse.rs", "rank": 70, "score": 19.17154918636296 }, { "content": "use std::io::{Result as IoResult, Write};\n\n\n\nuse crate::blocks::{BlockCommon, BlockType, Parameter};\n\nuse 
crate::document::Document;\n\nuse crate::errors::Result as EResult;\n\nuse crate::html;\n\nuse crate::text::Text;\n\n\n", "file_path": "src/blocks/list.rs", "rank": 71, "score": 18.85219537150612 }, { "content": " update_multiple!(self, heading, common);\n\n self.text_rest(&mut heading.title)?;\n\n Ok(blocks::Block {\n\n kind: Box::new(heading),\n\n common,\n\n })\n\n }\n\n\n\n fn parse_paragraph(&mut self, start: usize) -> EResult<blocks::Block> {\n\n self.idx = start;\n\n let mut text = text::Text::new();\n\n let common = blocks::BlockCommon::new(self.start.unwrap());\n\n self.text_rest(&mut text)?;\n\n Ok(blocks::Block {\n\n kind: Box::new(text),\n\n common,\n\n })\n\n }\n\n\n\n /// Recursively appends list items to the given vector\n", "file_path": "src/parse.rs", "rank": 72, "score": 18.73652370199072 }, { "content": " fn title(&self) -> &Text {\n\n EMPTY_TEXT\n\n }\n\n}\n\n\n\n#[derive(Debug, Eq, PartialEq)]\n\npub struct SectionList {\n\n pub headings: Vec<usize>,\n\n pub last_child_number: usize,\n\n pub level: usize,\n\n}\n\n\n\nimpl SectionList {\n\n pub fn new(level: usize) -> SectionList {\n\n SectionList {\n\n level,\n\n ..Default::default()\n\n }\n\n }\n\n\n", "file_path": "src/blocks/heading.rs", "rank": 73, "score": 18.607618609095486 }, { "content": " let mut text = text::Text::new();\n\n self.text_rest(&mut text)?;\n\n Ok(blocks::control::DocumentControl::Stylesheet(text).into())\n\n }\n\n\n\n fn parse_lang(&mut self) -> EResult<blocks::Block> {\n\n let mut text = text::Text::new();\n\n self.text_rest(&mut text)?;\n\n Ok(blocks::control::DocumentControl::Lang(text).into())\n\n }\n\n\n\n fn parse_import(&mut self) -> EResult<blocks::Block> {\n\n let mut text = text::Text::new();\n\n self.text_rest(&mut text)?;\n\n Ok(blocks::control::DocumentControl::Import(text).into())\n\n }\n\n\n\n fn parse_toc(&mut self) -> EResult<blocks::Block> {\n\n let mut toc = blocks::contents::Contents::new();\n\n let mut common = blocks::BlockCommon::new(self.start.unwrap());\n", "file_path": "src/parse.rs", "rank": 74, "score": 18.498984030998816 }, { "content": "\n\n#[derive(Debug, Eq, PartialEq)]\n\npub struct Heading {\n\n pub title: Text,\n\n pub numbered: bool,\n\n pub toc: bool,\n\n pub level: usize,\n\n pub children: SectionList,\n\n pub number: Vec<usize>,\n\n}\n\n\n\nimpl Heading {\n\n pub fn new(level: usize) -> Heading {\n\n Heading {\n\n level,\n\n children: SectionList::new(level + 1),\n\n ..Default::default()\n\n }\n\n }\n\n\n", "file_path": "src/blocks/heading.rs", "rank": 75, "score": 18.248531504352147 }, { "content": "##### Parameters\n\n\n\n- `ref`: The ID to reference in the document.\n\n This parameter is required.\n\n The text for the reference will automatically be set based on the type of\n\n element it refers to: \"section\", \"table\", or \"gloss\"; followed by the number\n\n of that element.\n\n If the reference points to an element with the `nonumber` parameter, then a\n\n warning will be raised, and the text will simply be the type of the element.\n\n\n\n This parameter can be abbreviated; the first parameter to a `:ref:` will be\n\n interpreted as a `ref` parameter rather than a `class` parameter.\n\n\n\n#### External links (`:link:`)\n\n\n\n##### Parameters\n\n\n\n- `url`: The URL to link to.\n\n This parameter is required.\n\n\n\n This parameter can be abbreviated; the first parameter to a `:link:` will be\n\n interpreted as a `url` parameter rather than a `class` parameter.\n\n- `title`: The text to display for the link.\n\n Defaults to the value of the `url` 
parameter.\n", "file_path": "README.md", "rank": 76, "score": 17.97415401994162 }, { "content": " }\n\n gloss.gloss.push(line);\n\n }\n\n }\n\n }\n\n Ok(blocks::Block {\n\n kind: Box::new(gloss),\n\n common,\n\n })\n\n }\n\n\n\n fn parse_replace_block(&mut self) -> EResult<blocks::Block> {\n\n let mut replacements = blocks::replacements::Replacements::new();\n\n let mut common = blocks::BlockCommon::new(self.start.unwrap());\n\n update_multiple!(self, common);\n\n self.skip_whitespace();\n\n while let Some(':') = self.next() {\n\n let directive = self.directive()?;\n\n let mut text = text::Text::new();\n\n self.text_until_char(&mut text, '\\n')?;\n", "file_path": "src/parse.rs", "rank": 77, "score": 17.439357967987956 }, { "content": " fn parse_gloss(&mut self) -> EResult<blocks::Block> {\n\n let mut gloss = blocks::gloss::Gloss::new();\n\n let mut common = blocks::BlockCommon::new(self.start.unwrap());\n\n update_multiple!(self, gloss, common);\n\n self.text_until_hard_line(&mut gloss.title)?;\n\n // now we've matched a hard line; time to start constructing the lines of the\n\n // gloss\n\n while let Some(_) = self.peek() {\n\n self.skip_whitespace();\n\n // skip until after the double colon\n\n self.idx += 2;\n\n let mut class = String::new();\n\n let mut kind = blocks::gloss::GlossLineType::Split;\n\n update_multiple!(self, kind, class);\n\n // check whether it's a nosplit:\n\n match kind {\n\n blocks::gloss::GlossLineType::NoSplit => {\n\n let mut line = Default::default();\n\n // add the rest of the line\n\n self.text_until_hard_line(&mut line)?;\n", "file_path": "src/parse.rs", "rank": 78, "score": 17.269380466115788 }, { "content": "use std::fmt::Debug;\n\nuse std::io::{Result as IoResult, Write};\n\n\n\nuse crate::document::Document;\n\nuse crate::errors::Result as EResult;\n\nuse crate::text::Referenceable;\n\n\n\npub mod contents;\n\npub mod control;\n\npub mod gloss;\n\npub mod heading;\n\npub mod list;\n\npub mod replacements;\n\npub mod table;\n\n\n\nuse control::DocumentControl;\n\nuse gloss::Gloss;\n\nuse heading::HeadingLike;\n\nuse replacements::Replacements;\n\nuse table::Table;\n\n\n\n#[cfg(test)]\n\nuse list::List;\n\n\n", "file_path": "src/blocks.rs", "rank": 79, "score": 17.186046127904866 }, { "content": " &mut self,\n\n text: &mut text::Text,\n\n predicate: impl Fn(&Self, char) -> bool,\n\n ) -> EResult<()> {\n\n let mut buffer = String::new();\n\n while let Some(c) = self.next() {\n\n match c {\n\n // the specified character was found, break\n\n c if predicate(self, c) => break,\n\n // bracketed text\n\n '{' => {\n\n push_and_renew!(buffer: String::new(), text);\n\n self.text_until_char(text, '}')?;\n\n }\n\n // directive\n\n ':' => {\n\n push_and_renew!(buffer: String::new(), text);\n\n text.push(match self.directive()?.as_ref() {\n\n // cross reference\n\n \"ref\" => self.simple_inline(text::InlineType::reference())?,\n", "file_path": "src/parse.rs", "rank": 80, "score": 17.175120197170585 }, { "content": "\n\nimpl Row {\n\n pub fn new() -> Row {\n\n Default::default()\n\n }\n\n}\n\n\n\nimpl UpdateParam for Row {\n\n fn update_param(&mut self, param: Parameter) -> OResult<Parameter> {\n\n Ok(match param.0.as_ref().map(|n| n.as_ref()) {\n\n Some(\"class\") => {\n\n self.class = param.1;\n\n None\n\n }\n\n None => {\n\n match param.1.as_ref() {\n\n \"header\" => self.header = true,\n\n _ => self.class = param.1,\n\n }\n\n None\n", "file_path": "src/blocks/table.rs", "rank": 81, "score": 16.989837597732617 }, { "content": " Ok(blocks::Block {\n\n kind: 
Box::new(list),\n\n common,\n\n })\n\n }\n\n\n\n fn parse_table(&mut self) -> EResult<blocks::Block> {\n\n let mut table = blocks::table::Table::new();\n\n let mut common = blocks::BlockCommon::new(self.start.unwrap());\n\n update_multiple!(self, table, common);\n\n self.text_until_char(&mut table.title, '\\n')?;\n\n // put the newline back on the stack, since it's needed for `match_hard_line`\n\n self.idx -= 1;\n\n // match column parameters\n\n while let Some(c) = self.next() {\n\n match c {\n\n // new cell\n\n '|' => {\n\n let mut col = blocks::table::Column::new();\n\n update_multiple!(self, col);\n", "file_path": "src/parse.rs", "rank": 82, "score": 16.616124446078846 }, { "content": "\n\n fn as_mut_table(&mut self) -> Option<&mut Table> {\n\n Some(self)\n\n }\n\n\n\n fn as_referenceable(&self) -> Option<&dyn Referenceable> {\n\n Some(self)\n\n }\n\n}\n\n\n\nimpl Referenceable for Table {\n\n fn reference_text(&self) -> Text {\n\n let mut text = Text::from(\"table \");\n\n if self.numbered {\n\n text.push(format!(\"{}\", self.number));\n\n } else {\n\n text.extend(&self.title);\n\n }\n\n text\n\n }\n", "file_path": "src/blocks/table.rs", "rank": 83, "score": 16.00149567443813 }, { "content": "\n\n fn parse_title(&mut self) -> EResult<blocks::Block> {\n\n let mut text = text::Text::new();\n\n self.text_rest(&mut text)?;\n\n Ok(blocks::control::DocumentControl::Title(text).into())\n\n }\n\n\n\n fn parse_author(&mut self) -> EResult<blocks::Block> {\n\n let mut text = text::Text::new();\n\n self.text_rest(&mut text)?;\n\n Ok(blocks::control::DocumentControl::Author(text).into())\n\n }\n\n\n\n fn parse_description(&mut self) -> EResult<blocks::Block> {\n\n let mut text = text::Text::new();\n\n self.text_rest(&mut text)?;\n\n Ok(blocks::control::DocumentControl::Description(text).into())\n\n }\n\n\n\n fn parse_stylesheet(&mut self) -> EResult<blocks::Block> {\n", "file_path": "src/parse.rs", "rank": 84, "score": 15.963895943483617 }, { "content": " update_multiple!(self, toc, common);\n\n self.text_rest(&mut toc.title)?;\n\n Ok(blocks::Block {\n\n kind: Box::new(toc),\n\n common,\n\n })\n\n }\n\n\n\n fn parse_list(&mut self) -> EResult<blocks::Block> {\n\n let mut list = blocks::list::List::new();\n\n let mut common = blocks::BlockCommon::new(self.start.unwrap());\n\n update_multiple!(self, list, common);\n\n while self.idx < self.len() {\n\n let indent = self.skip_whitespace_virtual() - self.idx;\n\n self.idx += indent + 2;\n\n let mut item = blocks::list::ListItem::new();\n\n self.text_until_hard_line(&mut item.text)?;\n\n self.list_tree(indent, &mut item.sublist)?;\n\n list.items.push(item);\n\n }\n", "file_path": "src/parse.rs", "rank": 85, "score": 15.641936441411104 }, { "content": "use std::fmt;\n\nuse std::io;\n\nuse std::result;\n\n\n\nuse failure::{Backtrace, Context, Fail};\n\n\n\npub type Result<T> = result::Result<T, Error>;\n\n\n\n#[derive(Debug)]\n\npub struct Error {\n\n inner: Context<ErrorKind>,\n\n}\n\n\n\nimpl Error {\n\n pub fn kind(&self) -> &ErrorKind {\n\n self.inner.get_context()\n\n }\n\n}\n\n\n\nimpl Fail for Error {\n", "file_path": "src/errors.rs", "rank": 86, "score": 15.611200385991113 }, { "content": " write!(w, \"rowspan=\\\"{}\\\" \", self.rows)?;\n\n }\n\n write!(w, \"class=\\\"{}\", html::Encoder(&self.class))?;\n\n if let Some(col) = col {\n\n write!(w, \" {}\", html::Encoder(&col.class))?;\n\n }\n\n write!(w, r#\"\">\"#)?;\n\n self.text.write_inline(w, document)?;\n\n if header_row || header_col {\n\n write!(w, \"</th>\")?;\n\n } else {\n\n write!(w, 
\"</td>\")?;\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl UpdateParam for Cell {\n\n fn update_param(&mut self, param: Parameter) -> OResult<Parameter> {\n\n Ok(match param.0.as_ref().map(|n| n.as_ref()) {\n", "file_path": "src/blocks/table.rs", "rank": 87, "score": 15.482006708964832 }, { "content": "}\n\n\n\nimpl Default for Table {\n\n fn default() -> Table {\n\n Table {\n\n title: Default::default(),\n\n numbered: true,\n\n number: 0,\n\n rows: Default::default(),\n\n columns: Default::default(),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Default, Eq, PartialEq)]\n\npub struct Row {\n\n pub cells: Vec<Cell>,\n\n pub header: bool,\n\n pub class: String,\n\n}\n", "file_path": "src/blocks/table.rs", "rank": 88, "score": 15.470614275133375 }, { "content": " #[fail(display = \"An IO error occurred while writing tail matter\")]\n\n WriteIoTail,\n\n}\n\n\n\nimpl ErrorKind {\n\n pub fn input_error(err: &io::Error, line: usize) -> ErrorKind {\n\n match err.kind() {\n\n io::ErrorKind::InvalidData => ErrorKind::Unicode(line),\n\n _ => ErrorKind::ReadIo(line),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone, Copy, Debug, Eq, Fail, PartialEq)]\n\npub enum EndOfBlockKind {\n\n #[fail(display = \"expected a character after `\\\\`\")]\n\n Escape,\n\n #[fail(display = \"expected `{}`\", _0)]\n\n Expect(char),\n\n}\n", "file_path": "src/errors.rs", "rank": 89, "score": 15.013920932210675 }, { "content": " block.next();\n\n assert_eq!(block.directive().unwrap(), \"foo\");\n\n assert_eq!(block.next(), Some('x'));\n\n }\n\n\n\n macro_rules! text {\n\n ($($($type:ident)? ($text:tt)),*) => {\n\n $crate::text::Text(vec![$(inline!($($type)? ($text))),*])\n\n }\n\n }\n\n\n\n macro_rules! inline {\n\n ($type:ident ($text:tt)) => {\n\n $crate::text::Inline {\n\n kind: $crate::text::InlineType::$type($text.into()),\n\n common: Default::default(),\n\n }\n\n };\n\n (($text:tt)) => {\n\n String::from($text).into()\n", "file_path": "src/parse.rs", "rank": 90, "score": 15.01290178635985 }, { "content": "impl Referenceable for Gloss {\n\n fn reference_text(&self) -> Text {\n\n let mut text = Text::from(\"gloss \");\n\n if self.numbered {\n\n text.push(format!(\"{}\", self.number));\n\n } else {\n\n text.extend(&self.title);\n\n }\n\n text\n\n }\n\n}\n\n\n\nimpl Default for Gloss {\n\n fn default() -> Gloss {\n\n Gloss {\n\n title: Default::default(),\n\n numbered: true,\n\n number: 0,\n\n preamble: Default::default(),\n\n gloss: Default::default(),\n", "file_path": "src/blocks/gloss.rs", "rank": 91, "score": 14.883760420977284 }, { "content": " replacements\n\n .insert(directive, text)\n\n .context(ErrorKind::Block(self.start.unwrap()))?;\n\n }\n\n Ok(blocks::Block {\n\n kind: Box::new(replacements),\n\n common,\n\n })\n\n }\n\n\n\n fn parse_heading(&mut self, start: usize) -> EResult<blocks::Block> {\n\n // count the `#`s\n\n while let Some('#') = self.next() {}\n\n // this is the number of `#`s. 
Subtract 1 because we're now at the char *after* the\n\n // last `#`.\n\n let level = self.idx - start - 1;\n\n // then rewind one character, we don't want to eat the character _after_ the `#`s.\n\n self.idx -= 1;\n\n let mut heading = blocks::heading::Heading::new(level);\n\n let mut common = blocks::BlockCommon::new(self.start.unwrap());\n", "file_path": "src/parse.rs", "rank": 92, "score": 14.780204376055497 }, { "content": "# conlang_fmt\n\n\n\nA program for formatting constructed language documentation.\n\nThis program is bespoke for that purpose; attempting to use it for anything\n\nelse should be done with the knowledge that it will likely be difficult at\n\nbest, and impossible at worst.\n\nOver time, as more features are added, that may change, but for now, don't\n\nexpect it to do everything.\n\n\n\n## Syntax\n\n\n\n### Parameters\n\n\n\nMany syntax elements can take optional parameters, which are denoted by a\n\ncomma-separated list surrounded by square brackets.\n\nIf a parameter takes an argument, it is denoted by an equals sign followed by\n\nthe value of the argument.\n\n\n\n#### Common Parameters\n\n\n\n- `class`: A list of CSS classes to apply to the element.\n\n The `class` parameter can be abbreviated by leaving a space-separated list of\n\n classes as the final parameter.\n\n This does not work when only one class is included and that class conflicts\n\n with a named parameter of that element.\n\n For example, a heading with the parameter string `[notoc]` would be parsed as\n\n having the `notoc` parameter, rather than a class of `notoc`.\n\n- `id`: The ID for the element.\n\n This parameter is only allowed on block-level directives.\n\n If an ID is specified for an element, it must be unique.\n\n To reference a block with the `:ref:` directive, it must have an ID\n\n specified.\n\n\n\n### Directives\n\n\n\nA directive is indicated by text surrounded by colons, with the exception of\n\nheadings (indicated by a series of `#` characters), and some formatting\n\ncommands (indicated by surrounding text with various delimiters).\n\nThe parameters for a directive go directly after the second colon.\n\n\n\n### Blocks\n\n\n\nA block is a paragraph-level element, such as a section header, a table, or a\n\nparagraph of text.\n\nAll blocks must be separated by blank lines, with no exceptions.\n\n\n", "file_path": "README.md", "rank": 93, "score": 14.730357438345951 }, { "content": "use std::fmt;\n\n\n\n/// A structure which when formatted entity-encodes a minimal set of characters:\n\n///\n\n/// - `\"` => `&quot;`\n\n/// - `&` => `&amp;`\n\n/// - `'` => `&#x27;`\n\n/// - `<` => `&lt;`\n\n/// - `>` => `&gt;`\n\npub struct Encoder<'a>(pub &'a str);\n\n\n\nimpl fmt::Display for Encoder<'_> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n for c in self.0.chars() {\n\n if let Some(ent) = get_entity(c) {\n\n write!(f, \"&{};\", ent)?;\n\n } else {\n\n write!(f, \"{}\", c)?;\n\n }\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/html.rs", "rank": 94, "score": 14.699488532312154 }, { "content": " write!(w, \"<div \")?;\n\n write!(w, \"id=\\\"{}\\\" \", html::Encoder(&common.id))?;\n\n write!(w, \"class=\\\"{} toc\\\">\", html::Encoder(&common.class))?;\n\n write!(w, \"<p class=\\\"toc-heading\\\">\")?;\n\n self.title.write_inline(w, &document)?;\n\n writeln!(w, \"</p>\")?;\n\n self.write_sublist(w, 1, document.get_section_list(None), &document)?;\n\n writeln!(w, \"</div>\\n\")\n\n }\n\n\n\n fn update_param(&mut self, param: Parameter) -> OResult<Parameter> {\n\n 
Ok(match param.0.as_ref().map(|n| n.as_ref()) {\n\n Some(\"maxlevel\") => {\n\n self.max_level = param\n\n .1\n\n .parse::<usize>()\n\n .with_context(|_| ErrorKind::Parse)?;\n\n None\n\n }\n\n _ => Some(param),\n", "file_path": "src/blocks/contents.rs", "rank": 95, "score": 14.238373840705535 }, { "content": "use std::io::{BufRead, Lines};\n\nuse std::iter::Enumerate;\n\n\n\nuse failure::ResultExt;\n\n\n\nuse crate::errors::{ErrorKind, Result as EResult};\n\nuse crate::parse::Block;\n\n\n\n#[derive(Debug)]\n\npub struct Input<B> {\n\n lines: Enumerate<Lines<B>>,\n\n buffer: Vec<char>,\n\n}\n\n\n\nimpl<B> Input<B>\n\nwhere\n\n B: BufRead,\n\n{\n\n pub fn new(input: B) -> Input<B> {\n\n Input {\n", "file_path": "src/input.rs", "rank": 96, "score": 14.191041322635295 }, { "content": " fn as_mut_gloss(&mut self) -> Option<&mut Gloss> {\n\n None\n\n }\n\n\n\n /// Returns a `&DocumentControl` if the block is a document control block, otherwise returns `None`.\n\n fn as_control(&self) -> Option<&DocumentControl> {\n\n None\n\n }\n\n}\n\n\n\nimpl<T: BlockType> UpdateParam for T {\n\n fn update_param(&mut self, param: Parameter) -> OResult<Parameter> {\n\n BlockType::update_param(self, param)\n\n }\n\n}\n", "file_path": "src/blocks.rs", "rank": 97, "score": 14.186922909539671 }, { "content": " }\n\n\n\n /// Updates `self` with keys from `other`, replacing duplicates.\n\n pub fn update(&mut self, other: &mut Replacements) {\n\n for (k, v) in other.drain() {\n\n self.replacements.insert(k, v);\n\n }\n\n }\n\n\n\n fn drain(&mut self) -> impl Iterator<Item = (String, Text)> + '_ {\n\n self.replacements.drain()\n\n }\n\n\n\n /// Gets the given key.\n\n pub fn get(&self, key: &str) -> Option<&Text> {\n\n self.replacements.get(key)\n\n }\n\n}\n\n\n\nimpl BlockType for Replacements {\n", "file_path": "src/blocks/replacements.rs", "rank": 98, "score": 14.15846543405565 }, { "content": "#### Formatting\n\n\n\n- Emphasis (usually displayed as italics) is indicated by surrounding the text\n\n with `*`.\n\n- Strong emphasis (usually displayed as bold) is indicated by surrounding the\n\n text with `**`.\n\n- Italics (formatting only, without semantics) is indicated by surrounding the\n\n text with `_` (a single underscore).\n\n- Bold (formatting only, without semantics) is indicated by surrounding the\n\n text with `__` (two underscores).\n\n- Small caps is indicated by surrounding the text with `^`.\n\n- A generic `<span>` element is indicated by surrounding the text with `` ` ``.\n\n\n\nIn each of these cases, parameters come directly after the closing delimiter.\n\n\n\nFormatting elements which use different markers (e.g. emphasis (`*`) and small\n\ncaps (`^`)) can be freely nested.\n\nHowever, to include a formatting element directly inside another which uses the\n\n_same_ marker (e.g. 
emphasis (`*`) and strong emphasis (`**`)), the inner\n\nelement must be surrounded by `{` `}`.\n\n\n\n##### Parameters\n\n\n\n> Note about `class`: In the case of a generic span, this defaults to\n\n> `conlang`.\n\n> Otherwise, defaults to none.\n\n\n\n#### Text replacements (`:replace:`)\n\n\n\nA list of text replacements can be defined in `:replace:` block.\n\nEach replacement in the list should consist of a directive to be used for the\n\nreplacement, followed by the replacement text, which may\n\nitself contain replacements (or any other inline formatting).\n\nIn the text, a replacement is denoted by the directive declared in the\n\n`:replace:` block.\n\nText replacements do not take any parameters other than the `class` parameter.\n\n\n\n> Replacements can only be defined once in a single `:replace:` block, but can\n\n> be redefined in another `:replace:` block, to allow the same replacement to\n\n> have different expansions in different places.\n\n\n\n#### Cross references (`:ref:`)\n\n\n", "file_path": "README.md", "rank": 99, "score": 14.156548621327186 } ]
Rust
rustracts/src/contracts/option.rs
JOE1994/rustracts
c15541d7968aea40d06dadd5e2c5cb57b4d6d341
use std::sync::Mutex; use std::time::Duration; use crate::context::{ContextError, ContextErrorKind, ContractContext}; use crate::park::{WaitMessage, WaitThread}; use crate::time::Timer; use crate::{Contract, ContractExt, Status}; use futures::{ future::{FusedFuture, Future}, task::{Context, Poll}, }; use parc::{LockWeak, ParentArc}; #[must_use = "contracts do nothing unless polled or awaited"] pub struct OptionContract<F, VC, PC, R> where VC: ContractContext, PC: ContractContext, F: FnOnce((VC, PC)) -> R, { runner: WaitThread, timer: Timer, void_context: Option<ParentArc<Mutex<VC>>>, prod_context: Option<ParentArc<Mutex<PC>>>, on_exe: Option<F>, } impl<F, VC, PC, R> OptionContract<F, VC, PC, R> where VC: ContractContext, PC: ContractContext, F: FnOnce((VC, PC)) -> R, { pub fn new(expire: Duration, void_c: VC, prod_c: PC, on_exe: F) -> Self { Self { runner: WaitThread::new(), timer: Timer::new(expire), void_context: Some(ParentArc::new(Mutex::new(void_c))), prod_context: Some(ParentArc::new(Mutex::new(prod_c))), on_exe: Some(on_exe), } } fn poll_prod(&self) -> bool { match &self.prod_context { Some(c) => c.as_ref().lock().unwrap().poll_valid(), None => false, } } pin_utils::unsafe_pinned!(timer: Timer); pin_utils::unsafe_unpinned!(void_context: Option<ParentArc<Mutex<VC>>>); pin_utils::unsafe_unpinned!(prod_context: Option<ParentArc<Mutex<PC>>>); pin_utils::unsafe_unpinned!(on_exe: Option<F>); } impl<F, VC, PC, R> Contract for OptionContract<F, VC, PC, R> where VC: ContractContext, PC: ContractContext, F: FnOnce((VC, PC)) -> R, { fn poll_valid(&self) -> bool { match &self.void_context { Some(c) => c.as_ref().lock().unwrap().poll_valid(), None => false, } } fn execute(mut self: std::pin::Pin<&mut Self>) -> Self::Output { let vlockarc = self .as_mut() .void_context() .take() .expect("Cannot poll after expiration"); let plockarc = self .as_mut() .prod_context() .take() .expect("Cannot poll after expiration"); let vcontext = vlockarc.block_into_inner().into_inner().unwrap(); let pcontext = plockarc.block_into_inner().into_inner().unwrap(); let f = self .as_mut() .on_exe() .take() .expect("Cannot run a contract after expiration"); Status::Completed(f((vcontext, pcontext))) } fn void(self: std::pin::Pin<&mut Self>) -> Self::Output { Status::Terminated } } impl<F, VC, PC, R> ContractExt for OptionContract<F, VC, PC, R> where VC: ContractContext, PC: ContractContext, F: FnOnce((VC, PC)) -> R, { type Context = (LockWeak<Mutex<VC>>, LockWeak<Mutex<PC>>); fn get_context(&self) -> Result<Self::Context, ContextError> { match (&self.void_context, &self.prod_context) { (Some(ref vc), Some(ref pc)) => { Ok((ParentArc::downgrade(vc), ParentArc::downgrade(pc))) } _ => Err(ContextError::from(ContextErrorKind::ExpiredContext)), } } } impl<F, VC, PC, R> Future for OptionContract<F, VC, PC, R> where VC: ContractContext, PC: ContractContext, F: FnOnce((VC, PC)) -> R, { type Output = Status<R>; fn poll(mut self: std::pin::Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> { self.runner .sender() .send(WaitMessage::WakeIn { waker: cx.waker().clone(), duration: Duration::new(0, 100), }) .unwrap(); let mv = ( self.as_mut().timer().poll(cx), self.poll_valid(), self.poll_prod(), ); match mv { (Poll::Ready(_), true, true) => Poll::Ready(self.execute()), (Poll::Ready(_), true, false) => Poll::Ready(self.void()), (Poll::Pending, true, _) => Poll::Pending, (_, false, _) => Poll::Ready(self.void()), } } } impl<F, VC, PC, R> FusedFuture for OptionContract<F, VC, PC, R> where VC: ContractContext, PC: ContractContext, F: 
FnOnce((VC, PC)) -> R, { fn is_terminated(&self) -> bool { self.void_context.is_none() || self.prod_context.is_none() || self.on_exe.is_none() } } #[cfg(test)] mod tests { use super::OptionContract; use crate::context::cmp::EqContext; use crate::{ContractExt, Status}; use std::time::Duration; #[test] fn prod_option_contract() { let vcontext = EqContext(2, 2); let pcontext = EqContext(2, 2); let c = OptionContract::new( Duration::new(1, 0), vcontext, pcontext, |(vcon, pcon)| -> usize { vcon.0 + pcon.0 + 1 }, ); if let Status::Completed(val) = futures::executor::block_on(c) { assert_eq!(val, 5); } else { assert!(false); } } #[test] fn void_option_contract() { let vcontext = EqContext(2, 2); let pcontext = EqContext(2, 2); let c = OptionContract::new( Duration::new(1, 0), vcontext, pcontext, |(vcon, pcon)| -> usize { vcon.0 + pcon.0 + 1 }, ); let handle = std::thread::spawn({ let (vcontext, _) = c.get_context().unwrap(); move || match vcontext.upgrade() { Some(vc) => vc.lock().unwrap().0 += 1, None => {} } }); if let Status::Completed(val) = futures::executor::block_on(c) { assert_ne!(val, 6); } else { assert!(true); } handle.join().unwrap(); } #[test] fn noprod_option_contract() { let vcontext = EqContext(2, 2); let pcontext = EqContext(2, 2); let c = OptionContract::new( Duration::new(1, 0), vcontext, pcontext, |(vcon, pcon)| -> usize { vcon.0 + pcon.0 + 1 }, ); let _ = std::thread::spawn({ let (_, pcontext) = c.get_context().unwrap(); move || match pcontext.upgrade() { Some(pc) => pc.lock().unwrap().0 += 1, None => {} } }) .join(); if let Status::Completed(val) = futures::executor::block_on(c) { assert_ne!(val, 6); } else { assert!(true); } } }
use std::sync::Mutex; use std::time::Duration; use crate::context::{ContextError, ContextErrorKind, ContractContext}; use crate::park::{WaitMessage, WaitThread}; use crate::time::Timer; use crate::{Contract, ContractExt, Status}; use futures::{ future::{FusedFuture, Future}, task::{Context, Poll}, }; use parc::{LockWeak, ParentArc}; #[must_use = "contracts do nothing unless polled or awaited"] pub struct OptionContract<F, VC, PC, R> where VC: ContractContext, PC: ContractContext, F: FnOnce((VC, PC)) -> R, { runner: WaitThread, timer: Timer, void_context: Option<ParentArc<Mutex<VC>>>, prod_context: Option<ParentArc<Mutex<PC>>>, on_exe: Option<F>, } impl<F, VC, PC, R> OptionContract<F, VC, PC, R> where VC: ContractContext, PC: ContractContext, F: FnOnce((VC, PC)) -> R, { pub fn new(expire: Duration, void_c: VC, prod_c: PC, on_exe: F) -> Self { Self { runner: WaitThread::new(), timer: Timer::new(expire), void_context: Some(ParentArc::new(Mutex::new(void_c))), prod_context: Some(ParentArc::new(Mutex::new(prod_c))), on_exe: Some(on_exe), } } fn poll_prod(&self) -> bool { match &self.prod_context { Some(c) => c.as_ref().lock().unwrap().poll_valid(), None => false, } } pin_utils::unsafe_pinned!(timer: Timer); pin_utils::unsafe_unpinned!(void_context: Option<ParentArc<Mutex<VC>>>); pin_utils::unsafe_unpinned!(prod_context: Option<ParentArc<Mutex<PC>>>); pin_utils::unsafe_unpinned!(on_exe: Option<F>); } impl<F, VC, PC, R> Contract for OptionContract<F, VC, PC, R> where VC: ContractContext, PC: ContractContext, F: FnOnce((VC, PC)) -> R, { fn poll_valid(&self) -> bool { match &self.void_context { Some(c) => c.as_ref().lock().unwrap().poll_valid(), None => false, } } fn execute(mut self: std::pin::Pin<&mut Self>) -> Self::Output { let vlockarc = self .as_mut() .void_context() .take() .expect("Cannot poll after expiration"); let plockarc = self .as_mut() .prod_context() .take() .expect("Cannot poll after expiration"); let vcontext = vlockarc.block_into_inner().into_inner().unwrap(); let pcontext = plockarc.block_into_inner().into_inner().unwrap(); let f = self .as_mut() .on_exe() .take() .expect("Cannot run a contract after expiration"); Status::Completed(f((vcontext, pcontext))) } fn void(self: std::pin::Pin<&mut Self>) -> Self::Output { Status::Terminated } } impl<F, VC, PC, R> ContractExt for OptionContract<F, VC, PC, R> where VC: ContractContext, PC: ContractContext, F: FnOnce((VC, PC)) -> R, { type Context = (LockWeak<Mutex<VC>>, LockWeak<Mutex<PC>>); fn get_context(&self) -> Result<Self::Context, ContextError> { match (&self.void_context, &self.prod_context) { (Some(ref vc), Some(ref pc)) => { Ok((ParentArc::downgrade(vc), ParentArc::downgrade(pc))) } _ => Err(ContextError::from(ContextErrorKind::ExpiredContext)), } } } impl<F, VC, PC, R> Future for OptionContract<F, VC, PC, R> where VC: ContractContext, PC: ContractContext, F: FnOnce((VC, PC)) -> R, { type Output = Status<R>; fn poll(mut self: std::pin::Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> { self.runner .sender() .send(WaitMessage::WakeIn { waker: cx.waker().clone(), duration: Duration::new(0, 100), }) .unwrap(); let mv = ( self.as_mut().timer().poll(cx), self.poll_valid(), self.poll_prod(), ); match mv { (Poll::Ready(_), true, true) => Poll::Ready(self.execute()), (Poll::Ready(_), true, false) => Poll::Ready(self.void()), (Poll::Pending, true, _) => Poll::Pending, (_, false, _) => Poll::Ready(self.void()), } } } impl<F, VC, PC, R> FusedFuture for OptionContract<F, VC, PC, R> where VC: ContractContext, PC: ContractContext, F: 
FnOnce((VC, PC)) -> R, { fn is_terminated(&self) -> bool { self.void_context.is_none() || self.prod_context.is_none() || self.on_exe.is_none() } } #[cfg(test)] mod tests { use super::OptionContract; use crate::context::cmp::EqContext; use crate::{ContractExt, Status}; use std::time::Duration; #[test] fn prod_option_contract() { let vcontext = EqContext(2, 2); let pcontext = EqContext(2, 2); let c = OptionContract::new( Duration::new(1, 0), vcontext, pcontext, |(vcon, pcon)| -> usize { vcon.0 + pcon.0 + 1 }, ); if let Status::Completed(val) = futures::executor::block_on(c) { assert_eq!(val, 5); } else { assert!(false); } } #[test]
#[test] fn noprod_option_contract() { let vcontext = EqContext(2, 2); let pcontext = EqContext(2, 2); let c = OptionContract::new( Duration::new(1, 0), vcontext, pcontext, |(vcon, pcon)| -> usize { vcon.0 + pcon.0 + 1 }, ); let _ = std::thread::spawn({ let (_, pcontext) = c.get_context().unwrap(); move || match pcontext.upgrade() { Some(pc) => pc.lock().unwrap().0 += 1, None => {} } }) .join(); if let Status::Completed(val) = futures::executor::block_on(c) { assert_ne!(val, 6); } else { assert!(true); } } }
fn void_option_contract() { let vcontext = EqContext(2, 2); let pcontext = EqContext(2, 2); let c = OptionContract::new( Duration::new(1, 0), vcontext, pcontext, |(vcon, pcon)| -> usize { vcon.0 + pcon.0 + 1 }, ); let handle = std::thread::spawn({ let (vcontext, _) = c.get_context().unwrap(); move || match vcontext.upgrade() { Some(vc) => vc.lock().unwrap().0 += 1, None => {} } }); if let Status::Completed(val) = futures::executor::block_on(c) { assert_ne!(val, 6); } else { assert!(true); } handle.join().unwrap(); }
function_block-full_function
[]
Rust
src/protocol/flv.rs
nintha/river
56a63312dd7fef48f73dbd3985022604c739ba40
use byteorder::{BigEndian, ByteOrder}; use crate::protocol::rtmp::{ChunkMessageType, RtmpMessage}; use crate::util::spawn_and_log_error; use smol::channel::Receiver; use std::convert::TryFrom; use crate::rtmp_server::eventbus_map; use chrono::Local; use smol::io::AsyncWriteExt; use std::time::{Duration, Instant}; pub const FLV_HEADER_WITH_TAG0: [u8; 13] = [ 0x46, 0x4c, 0x56, 0x01, 0x05, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x00, ]; pub const FLV_HEADER_ONLY_VIDEO_WITH_TAG0: [u8; 13] = [ 0x46, 0x4c, 0x56, 0x01, 0x01, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x00, ]; pub struct FlvTag { raw_data: Vec<u8>, } #[allow(unused)] impl FlvTag { pub fn tag_type(&self) -> u8 { self.raw_data[0] } pub fn data_size(&self) -> u32 { BigEndian::read_u24(&self.raw_data[1..4]) } pub fn timestamp(&self) -> u32 { let timestamp_u24 = BigEndian::read_u24(&self.raw_data[4..7]); timestamp_u24 | (self.raw_data[7] as u32) << 24 } pub fn body(&self) -> &[u8] { &self.raw_data[11..] } } impl TryFrom<RtmpMessage> for FlvTag { type Error = anyhow::Error; fn try_from(mut msg: RtmpMessage) -> Result<Self, Self::Error> { let mut raw_data = vec![]; match msg.header.message_type { ChunkMessageType::AudioMessage => raw_data.push(0x08), ChunkMessageType::VideoMessage => raw_data.push(0x09), _ => Err(anyhow::anyhow!( "[FlvTag] invalid message type, {:?}", msg.header.message_type ))?, } raw_data.extend_from_slice(&(msg.body.len() as u32).to_be_bytes()[1..4]); raw_data.extend_from_slice(&(msg.header.timestamp & 0xFFFFFF).to_be_bytes()[1..4]); raw_data.push((msg.header.timestamp >> 24) as u8); raw_data.extend_from_slice(&0u32.to_be_bytes()[1..4]); raw_data.append(&mut msg.body); Ok(FlvTag { raw_data }) } } impl AsRef<[u8]> for FlvTag { fn as_ref(&self) -> &[u8] { self.raw_data.as_ref() } } #[allow(unused)] pub fn save_flv_background(stream_name: &str, peer_addr: String) { if let Some(eventbus) = eventbus_map().get(stream_name) { let flv_rx = eventbus.register_receiver(); spawn_and_log_error(handle_flv_rx(flv_rx, stream_name.to_owned(), peer_addr)); } } async fn handle_flv_rx( flv_rx: Receiver<RtmpMessage>, stream_name: String, peer_addr: String, ) -> anyhow::Result<()> { let tmp_dir = "tmp"; if smol::fs::read_dir(tmp_dir).await.is_err() { smol::fs::create_dir_all(tmp_dir).await?; } let mut file = smol::fs::OpenOptions::new() .create(true) .write(true) .truncate(true) .open("tmp/output.flv") .await?; file.write_all(&FLV_HEADER_WITH_TAG0).await?; let ctx_begin_timestamp = Local::now().timestamp_millis(); let mut last_flush_time = Instant::now(); let min_flush_duration = Duration::from_secs(2); while let Ok(mut msg) = flv_rx.recv().await { msg.header.timestamp = (Local::now().timestamp_millis() - ctx_begin_timestamp) as u32; let flv_tag = FlvTag::try_from(msg)?; file.write_all(flv_tag.as_ref()).await?; file.write_all(&(flv_tag.as_ref().len() as u32).to_be_bytes()) .await?; if last_flush_time.elapsed() > min_flush_duration { last_flush_time = Instant::now(); file.flush().await? } } log::warn!("[peer={}][handle_flv_rx] closed, stream_name={}", peer_addr, stream_name); Ok(()) }
use byteorder::{BigEndian, ByteOrder}; use crate::protocol::rtmp::{ChunkMessageType, RtmpMessage}; use crate::util::spawn_and_log_error; use smol::channel::Receiver; use std::convert::TryFrom; use crate::rtmp_server::eventbus_map; use chrono::Local; use smol::io::AsyncWriteExt; use std::time::{Duration, Instant}; pub const FLV_HEADER_WITH_TAG0: [u8; 13] = [ 0x46, 0x4c, 0x56, 0x01, 0x05, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x00, ]; pub const FLV_HEADER_ONLY_VIDEO_WITH_TAG0: [u8; 13] = [ 0x46, 0x4c, 0x56, 0x01, 0x01, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x00, ]; pub struct FlvTag { raw_data: Vec<u8>, } #[allow(unused)] impl FlvTag { pub fn tag_type(&self) -> u8 { self.raw_data[0] }
file.write_all(&FLV_HEADER_WITH_TAG0).await?; let ctx_begin_timestamp = Local::now().timestamp_millis(); let mut last_flush_time = Instant::now(); let min_flush_duration = Duration::from_secs(2); while let Ok(mut msg) = flv_rx.recv().await { msg.header.timestamp = (Local::now().timestamp_millis() - ctx_begin_timestamp) as u32; let flv_tag = FlvTag::try_from(msg)?; file.write_all(flv_tag.as_ref()).await?; file.write_all(&(flv_tag.as_ref().len() as u32).to_be_bytes()) .await?; if last_flush_time.elapsed() > min_flush_duration { last_flush_time = Instant::now(); file.flush().await? } } log::warn!("[peer={}][handle_flv_rx] closed, stream_name={}", peer_addr, stream_name); Ok(()) }
pub fn data_size(&self) -> u32 { BigEndian::read_u24(&self.raw_data[1..4]) } pub fn timestamp(&self) -> u32 { let timestamp_u24 = BigEndian::read_u24(&self.raw_data[4..7]); timestamp_u24 | (self.raw_data[7] as u32) << 24 } pub fn body(&self) -> &[u8] { &self.raw_data[11..] } } impl TryFrom<RtmpMessage> for FlvTag { type Error = anyhow::Error; fn try_from(mut msg: RtmpMessage) -> Result<Self, Self::Error> { let mut raw_data = vec![]; match msg.header.message_type { ChunkMessageType::AudioMessage => raw_data.push(0x08), ChunkMessageType::VideoMessage => raw_data.push(0x09), _ => Err(anyhow::anyhow!( "[FlvTag] invalid message type, {:?}", msg.header.message_type ))?, } raw_data.extend_from_slice(&(msg.body.len() as u32).to_be_bytes()[1..4]); raw_data.extend_from_slice(&(msg.header.timestamp & 0xFFFFFF).to_be_bytes()[1..4]); raw_data.push((msg.header.timestamp >> 24) as u8); raw_data.extend_from_slice(&0u32.to_be_bytes()[1..4]); raw_data.append(&mut msg.body); Ok(FlvTag { raw_data }) } } impl AsRef<[u8]> for FlvTag { fn as_ref(&self) -> &[u8] { self.raw_data.as_ref() } } #[allow(unused)] pub fn save_flv_background(stream_name: &str, peer_addr: String) { if let Some(eventbus) = eventbus_map().get(stream_name) { let flv_rx = eventbus.register_receiver(); spawn_and_log_error(handle_flv_rx(flv_rx, stream_name.to_owned(), peer_addr)); } } async fn handle_flv_rx( flv_rx: Receiver<RtmpMessage>, stream_name: String, peer_addr: String, ) -> anyhow::Result<()> { let tmp_dir = "tmp"; if smol::fs::read_dir(tmp_dir).await.is_err() { smol::fs::create_dir_all(tmp_dir).await?; } let mut file = smol::fs::OpenOptions::new() .create(true) .write(true) .truncate(true) .open("tmp/output.flv") .await?;
random
[ { "content": "pub fn print_hex(bytes: &[u8]) {\n\n println!(\"{}\", bytes_hex_format(bytes));\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 0, "score": 112056.65758520142 }, { "content": "pub fn bytes_hex_format(bytes: &[u8]) -> String {\n\n const COLUMN: usize = 16;\n\n const COL_SPACE: &str = \" \";\n\n let mut text = String::new();\n\n let mut i = 0;\n\n let mut arr: [char; COLUMN] = ['.'; COLUMN];\n\n for byte in bytes {\n\n text += &format!(\"{:02X}\", byte);\n\n if byte.is_ascii_graphic() {\n\n arr[i % COLUMN] = byte.clone() as char;\n\n } else {\n\n arr[i % COLUMN] = '.';\n\n }\n\n text += &format!(\" \");\n\n i += 1;\n\n // 每8列多一个空格\n\n if i % 8 == 0 {\n\n text += COL_SPACE;\n\n }\n\n if i % COLUMN == 0 {\n", "file_path": "src/util.rs", "rank": 1, "score": 104229.36682933194 }, { "content": "/// 生成随机字节数组\n\npub fn gen_random_bytes(len: u32) -> Vec<u8> {\n\n let mut rng = rand::thread_rng();\n\n let mut vec = Vec::new();\n\n for _ in 0..len {\n\n vec.push(rng.gen());\n\n }\n\n vec\n\n}\n", "file_path": "src/util.rs", "rank": 2, "score": 99697.39447803528 }, { "content": "#[allow(unused)]\n\npub fn handle_video_data(bytes: &[u8], ctx: &RtmpContext) {\n\n let frame_type = bytes[0];\n\n let mut read_index = 1;\n\n let acv_packet_type = bytes[read_index];\n\n read_index += 1;\n\n\n\n // AVC时,全0,无意义(作业时间)\n\n let _composition_time_offset = &bytes[read_index..read_index + 3];\n\n read_index += 3;\n\n\n\n log::debug!(\n\n \"[peer={}] video frame type = {:#04X}, acv_packet_type={:#04X}\",\n\n &ctx.peer_addr,\n\n frame_type,\n\n acv_packet_type\n\n );\n\n\n\n // AVCDecoderConfigurationRecord(AVC sequence header)\n\n if acv_packet_type == 0 {\n\n // let config_version = &bytes[read_index];\n", "file_path": "src/protocol/h264.rs", "rank": 3, "score": 95330.82963983754 }, { "content": "/// 从字节数组中读取全部的AMF值\n\npub fn read_all_amf_value(bytes: &[u8]) -> Option<Vec<Value>> {\n\n let mut read_num = 0;\n\n let mut list = Vec::new();\n\n\n\n loop {\n\n if let Ok(v) = amf::amf0::Value::read_from(&mut &bytes[read_num..]) {\n\n let len = calc_amf_byte_len(&v);\n\n read_num += len;\n\n list.push(v);\n\n\n\n if read_num >= bytes.len() {\n\n break;\n\n }\n\n } else {\n\n return None;\n\n }\n\n }\n\n Some(list)\n\n}\n", "file_path": "src/protocol/rtmp.rs", "rank": 4, "score": 93451.95669963057 }, { "content": "pub fn init_logger() {\n\n let env = env_logger::Env::default().filter_or(env_logger::DEFAULT_FILTER_ENV, \"info\");\n\n // 设置日志打印格式\n\n env_logger::Builder::from_env(env)\n\n .format(|buf, record| {\n\n writeln!(\n\n buf,\n\n \"{} {} - {}\",\n\n Local::now().format(\"%Y-%m-%d %H:%M:%S%.3f\"),\n\n buf.default_styled_level(record.level()),\n\n &record.args()\n\n )\n\n })\n\n .init();\n\n log::info!(\"env_logger initialized.\");\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 5, "score": 81388.48118946911 }, { "content": "/// movie data\n\nfn mdat(data: &[u8]) -> Vec<u8> {\n\n mp4_box(b\"mdat\", vec![data])\n\n}\n\n\n", "file_path": "src/protocol/fmp4.rs", "rank": 6, "score": 79305.09397524307 }, { "content": "/// AVCConfigurationBox\n\nfn avcc(track: &Track, sps: &[u8], pps: &[u8]) -> Vec<u8> {\n\n let mut bytes = vec![\n\n 0x01, // version\n\n sps[3], // profile\n\n sps[4], // profile compat\n\n sps[5], // level\n\n 0xFC | 3, // lengthSizeMinusOne, hard-coded to 4 bytes\n\n 0xE0 | track.sps_list.len() as u8, // 3bit reserved (111) + numOfSequenceParameterSets\n\n ];\n\n bytes.extend_from_slice(sps);\n\n bytes.push(track.pps_list.len() as u8);\n\n bytes.extend_from_slice(pps);\n\n\n\n 
mp4_box(b\"avcC\", vec![&bytes])\n\n}\n\n\n", "file_path": "src/protocol/fmp4.rs", "rank": 7, "score": 76681.25164262221 }, { "content": "fn mp4_box(box_type: &[u8; 4], payloads: Vec<&[u8]>) -> Vec<u8> {\n\n let size: u32 = 8 + payloads.iter().map(|x| x.len() as u32).sum::<u32>();\n\n let mut buffer = Vec::with_capacity(size as usize);\n\n buffer.extend_from_slice(&size.to_be_bytes());\n\n buffer.extend_from_slice(box_type);\n\n\n\n for p in payloads {\n\n buffer.extend_from_slice(p);\n\n }\n\n\n\n buffer\n\n}\n\n\n", "file_path": "src/protocol/fmp4.rs", "rank": 8, "score": 75424.41442358814 }, { "content": "/// file type\n\nfn ftyp() -> Vec<u8> {\n\n const MAJOR_BRAND: [u8; 4] = *b\"isom\";\n\n const MINOR_VERSION: [u8; 4] = [0, 0, 0, 1];\n\n const AVC_BRAND: [u8; 4] = *b\"avc1\";\n\n\n\n mp4_box(b\"ftyp\", vec![&MAJOR_BRAND, &MINOR_VERSION, &MAJOR_BRAND, &AVC_BRAND])\n\n}\n\n\n", "file_path": "src/protocol/fmp4.rs", "rank": 9, "score": 72956.73369732 }, { "content": "fn hdlr() -> Vec<u8> {\n\n const VIDEO_HDLR: [u8; 37] = [\n\n 0x00, // version 0\n\n 0x00, 0x00, 0x00, // flags\n\n 0x00, 0x00, 0x00, 0x00, // pre_defined\n\n 0x76, 0x69, 0x64, 0x65, // handler_type: 'vide'\n\n 0x00, 0x00, 0x00, 0x00, // reserved\n\n 0x00, 0x00, 0x00, 0x00, // reserved\n\n 0x00, 0x00, 0x00, 0x00, // reserved\n\n 0x56, 0x69, 0x64, 0x65,\n\n 0x6f, 0x48, 0x61, 0x6e,\n\n 0x64, 0x6c, 0x65, 0x72, 0x00, // name: 'VideoHandler'\n\n ];\n\n mp4_box(b\"hdlr\", vec![&VIDEO_HDLR])\n\n}\n\n\n", "file_path": "src/protocol/fmp4.rs", "rank": 10, "score": 72956.73369732 }, { "content": "fn btrt() -> Vec<u8> {\n\n const BTRT: [u8; 12] = [\n\n 0x00, 0x1c, 0x9c, 0x80, // bufferSizeDB\n\n 0x00, 0x2d, 0xc6, 0xc0, // maxBitrate\n\n 0x00, 0x2d, 0xc6, 0xc0\n\n ];\n\n mp4_box(b\"btrt\", vec![&BTRT])\n\n}\n\n\n", "file_path": "src/protocol/fmp4.rs", "rank": 11, "score": 72956.73369732 }, { "content": "fn bool2u8(v: bool) -> u8 {\n\n if v { 0x01 } else { 0x00 }\n\n}", "file_path": "src/protocol/aac.rs", "rank": 12, "score": 71490.7276855307 }, { "content": "fn tkhd(track: &Track) -> Vec<u8> {\n\n let bytes = vec![\n\n 0x00, // version 0\n\n 0x00, 0x00, 0x07, // flags\n\n 0x00, 0x00, 0x00, 0x00, // creation_time\n\n 0x00, 0x00, 0x00, 0x00, // modification_time\n\n (track.id >> 24) as u8,\n\n (track.id >> 16) as u8,\n\n (track.id >> 8) as u8,\n\n track.id as u8, // track_ID\n\n 0x00, 0x00, 0x00, 0x00, // reserved\n\n (track.duration >> 24) as u8,\n\n (track.duration >> 16) as u8,\n\n (track.duration >> 8) as u8,\n\n track.duration as u8, // duration\n\n 0x00, 0x00, 0x00, 0x00,\n\n 0x00, 0x00, 0x00, 0x00, // reserved\n\n 0x00, 0x00, // layer\n\n 0x00, 0x00, // alternate_group\n\n (track.volume >> 0) as u8, (((track.volume % 1) * 10) >> 0) as u8, // track volume\n", "file_path": "src/protocol/fmp4.rs", "rank": 13, "score": 66285.29983909018 }, { "content": "/// movie extend\n\nfn mvex(tracks: &[Track]) -> Vec<u8> {\n\n let boxes = tracks.into_iter().map(|t| trex(t)).collect::<Vec<Vec<u8>>>();\n\n mp4_box(b\"mvex\", boxes.iter().map(AsRef::as_ref).collect())\n\n}\n\n\n", "file_path": "src/protocol/fmp4.rs", "rank": 14, "score": 66285.29983909018 }, { "content": "fn trak(track: &Track) -> Vec<u8> {\n\n mp4_box(b\"trak\", vec![&tkhd(&track), &mdia(&track)])\n\n}\n\n\n", "file_path": "src/protocol/fmp4.rs", "rank": 15, "score": 66285.29983909018 }, { "content": "fn trex(track: &Track) -> Vec<u8> {\n\n let bytes = [\n\n 0x00, // version 0\n\n 0x00, 0x00, 0x00, // flags\n\n (track.id >> 24) as u8,\n\n (track.id >> 16) as u8,\n\n (track.id >> 8) 
as u8,\n\n track.id as u8, // track_ID\n\n 0x00, 0x00, 0x00, 0x01, // default_sample_description_index\n\n 0x00, 0x00, 0x00, 0x00, // default_sample_duration\n\n 0x00, 0x00, 0x00, 0x00, // default_sample_size\n\n 0x00, 0x01, 0x00, 0x01, // default_sample_flags\n\n ];\n\n mp4_box(b\"trex\", vec![&bytes])\n\n}\n\n\n", "file_path": "src/protocol/fmp4.rs", "rank": 16, "score": 66285.29983909018 }, { "content": "fn stsd(track: &Track) -> Vec<u8> {\n\n const STSD: [u8; 8] = [\n\n 0x00, // version 0\n\n 0x00, 0x00, 0x00, // flags\n\n 0x00, 0x00, 0x00, 0x01\n\n ];\n\n mp4_box(b\"stsd\", vec![&STSD, &avc1(track)])\n\n}\n\n\n", "file_path": "src/protocol/fmp4.rs", "rank": 17, "score": 66285.29983909018 }, { "content": "fn sdtp(samples: &[Sample]) -> Vec<u8> {\n\n let mut buffer = Vec::with_capacity(samples.len() + 4);\n\n // leave the full box header (4 bytes) all zero\n\n buffer.extend_from_slice(&[0x00, 0x00, 0x00, 0x00, ]);\n\n\n\n for s in samples {\n\n buffer.push(s.flags.as_byte());\n\n }\n\n mp4_box(b\"sdtp\", vec![&buffer])\n\n}\n\n\n", "file_path": "src/protocol/fmp4.rs", "rank": 18, "score": 66285.29983909018 }, { "content": "fn minf(track: &Track) -> Vec<u8> {\n\n const VMHD: [u8; 12] = [\n\n 0x00, // version\n\n 0x00, 0x00, 0x01, // flags\n\n 0x00, 0x00, // graphicsmode\n\n 0x00, 0x00,\n\n 0x00, 0x00,\n\n 0x00, 0x00, // opcolor\n\n ];\n\n const DREF: [u8; 20] = [\n\n 0x00, // version 0\n\n 0x00, 0x00, 0x00, // flags\n\n 0x00, 0x00, 0x00, 0x01, // entry_count\n\n 0x00, 0x00, 0x00, 0x0c, // entry_size\n\n 0x75, 0x72, 0x6c, 0x20, // 'url' type\n\n 0x00, // version 0\n\n 0x00, 0x00, 0x01, // entry_flags\n\n ];\n\n let dinf = mp4_box(b\"dinf\", vec![&mp4_box(b\"dref\", vec![&DREF])]);\n\n mp4_box(b\"minf\", vec![&mp4_box(b\"vmhd\", vec![&VMHD]), &dinf, &stbl(&track)])\n\n}\n\n\n", "file_path": "src/protocol/fmp4.rs", "rank": 19, "score": 66285.29983909018 }, { "content": "fn mfhd(sn: u32) -> Vec<u8> {\n\n let bytes: [u8; 8] = [\n\n 0x00,\n\n 0x00, 0x00, 0x00, // flags\n\n (sn >> 24) as u8,\n\n (sn >> 16) as u8,\n\n (sn >> 8) as u8,\n\n sn as u8, // sequence_number\n\n ];\n\n mp4_box(b\"mfhd\", vec![&bytes])\n\n}\n\n\n", "file_path": "src/protocol/fmp4.rs", "rank": 20, "score": 66285.29983909018 }, { "content": "fn avc1(track: &Track) -> Vec<u8> {\n\n let mut sps = vec![];\n\n let mut pps = vec![];\n\n\n\n for item in &track.sps_list {\n\n let length = item.len() as u16;\n\n sps.extend_from_slice(&length.to_be_bytes());\n\n sps.extend_from_slice(&item);\n\n }\n\n\n\n for item in &track.pps_list {\n\n let length = item.len() as u16;\n\n pps.extend_from_slice(&length.to_be_bytes());\n\n pps.extend_from_slice(&item);\n\n }\n\n\n\n let width = track.width;\n\n let height = track.height;\n\n\n\n let bytes = vec![\n", "file_path": "src/protocol/fmp4.rs", "rank": 21, "score": 66285.29983909018 }, { "content": "fn stbl(track: &Track) -> Vec<u8> {\n\n const STCO: [u8; 8] = [\n\n 0x00, // version\n\n 0x00, 0x00, 0x00, // flags\n\n 0x00, 0x00, 0x00, 0x00, // entry_count\n\n ];\n\n const STTS: [u8; 8] = STCO;\n\n const STSC: [u8; 8] = STCO;\n\n const STSZ: [u8; 12] = [\n\n 0x00, // version\n\n 0x00, 0x00, 0x00, // flags\n\n 0x00, 0x00, 0x00, 0x00, // sample_size\n\n 0x00, 0x00, 0x00, 0x00, // sample_count\n\n ];\n\n\n\n mp4_box(b\"stbl\", vec![\n\n &stsd(track),\n\n &mp4_box(b\"stts\", vec![&STTS]),\n\n &mp4_box(b\"stsc\", vec![&STSC]),\n\n &mp4_box(b\"stsz\", vec![&STSZ]),\n\n &mp4_box(b\"stco\", vec![&STCO])\n\n ])\n\n}\n\n\n", "file_path": "src/protocol/fmp4.rs", "rank": 22, "score": 66285.29983909018 
}, { "content": "fn mdia(track: &Track) -> Vec<u8> {\n\n mp4_box(b\"mdia\", vec![&mdhd(track.timescale, track.duration), &hdlr(), &minf(track)])\n\n}\n\n\n", "file_path": "src/protocol/fmp4.rs", "rank": 23, "score": 66285.29983909018 }, { "content": "/// 执行一个新协程,并且在错误时打印错误信息\n\npub fn spawn_and_log_error<F, E>(fut: F)\n\nwhere\n\n F: Future<Output = Result<(), E>> + Send + 'static,\n\n E: Debug,\n\n{\n\n smol::spawn(async move {\n\n if let Err(e) = fut.await {\n\n log::error!(\"spawn future error, {:?}\", e)\n\n }\n\n })\n\n .detach();\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 24, "score": 65348.56783070675 }, { "content": "/// 计算一个AMF值的字节长度\n\npub fn calc_amf_byte_len(v: &amf0::Value) -> usize {\n\n match v {\n\n Value::Number(_) => 9,\n\n Value::Boolean(_) => 2,\n\n Value::String(s) => (s.len() + 3),\n\n Value::Object { entries, .. } => {\n\n // marker and tail\n\n let mut len = 4;\n\n for en in entries {\n\n len += en.key.len() + 2;\n\n len += calc_amf_byte_len(&en.value);\n\n }\n\n len\n\n }\n\n Value::Null => 1,\n\n Value::Undefined => 1,\n\n Value::EcmaArray { entries } => {\n\n // marker and tail\n\n let mut len = 8;\n\n for en in entries {\n", "file_path": "src/protocol/rtmp.rs", "rank": 25, "score": 63840.9689572867 }, { "content": "#[allow(unused)]\n\npub fn save_fmp4_background(stream_name: &str, peer_addr: String) {\n\n if let Some(eventbus) = eventbus_map().get(stream_name) {\n\n log::warn!(\"[peer={}] save_fmp4_background, stream_name={}\", peer_addr, stream_name);\n\n let rx = eventbus.register_receiver();\n\n spawn_and_log_error(handle_fmp4_rx(rx, stream_name.to_owned(), peer_addr));\n\n }\n\n}\n\n\n\n/// Rtmp流输出到mp4文件\n\nasync fn handle_fmp4_rx(\n\n rx: Receiver<RtmpMessage>,\n\n stream_name: String,\n\n peer_addr: String,\n\n) -> anyhow::Result<()> {\n\n let tmp_dir = \"tmp\";\n\n if smol::fs::read_dir(tmp_dir).await.is_err() {\n\n smol::fs::create_dir_all(tmp_dir).await?;\n\n }\n\n\n\n let mut file = smol::fs::OpenOptions::new()\n", "file_path": "src/protocol/fmp4.rs", "rank": 26, "score": 61124.43341465822 }, { "content": "pub fn audio_header_map() -> &'static DashMap<String, RtmpMessage> {\n\n static INSTANCE: OnceCell<DashMap<String, RtmpMessage>> = OnceCell::new();\n\n INSTANCE.get_or_init(|| DashMap::new())\n\n}\n\n\n", "file_path": "src/rtmp_server.rs", "rank": 27, "score": 61124.43341465822 }, { "content": "pub fn video_header_map() -> &'static DashMap<String, RtmpMessage> {\n\n static INSTANCE: OnceCell<DashMap<String, RtmpMessage>> = OnceCell::new();\n\n INSTANCE.get_or_init(|| DashMap::new())\n\n}\n\n\n", "file_path": "src/rtmp_server.rs", "rank": 29, "score": 61124.43341465822 }, { "content": "fn mdhd(timescale: u32, duration: u32) -> Vec<u8> {\n\n let bytes = vec![\n\n 0x00, // version 0\n\n 0x00, 0x00, 0x00, // flags\n\n 0x00, 0x00, 0x00, 0x02, // creation_time\n\n 0x00, 0x00, 0x00, 0x03, // modification_time\n\n (timescale >> 24) as u8,\n\n (timescale >> 16) as u8,\n\n (timescale >> 8) as u8,\n\n timescale as u8, // timescale\n\n (duration >> 24) as u8,\n\n (duration >> 16) as u8,\n\n (duration >> 8) as u8,\n\n duration as u8, // duration\n\n 0x55, 0xc4, // 'und' language (undetermined)\n\n 0x00, 0x00,\n\n ];\n\n mp4_box(b\"mdhd\", vec![&bytes])\n\n}\n\n\n", "file_path": "src/protocol/fmp4.rs", "rank": 30, "score": 60767.359797980505 }, { "content": "fn mvhd(timescale: u32, duration: u32) -> Vec<u8> {\n\n let bytes = vec![\n\n 0x00, // version 0\n\n 0x00, 0x00, 0x00, // flags\n\n 0x00, 0x00, 0x00, 0x01, // creation_time\n\n 0x00, 0x00, 0x00, 0x02, // 
modification_time\n\n (timescale >> 24) as u8,\n\n (timescale >> 16) as u8,\n\n (timescale >> 8) as u8,\n\n timescale as u8, // timescale\n\n (duration >> 24) as u8,\n\n (duration >> 16) as u8,\n\n (duration >> 8) as u8,\n\n duration as u8, // duration\n\n 0x00, 0x01, 0x00, 0x00, // 1.0 rate\n\n 0x01, 0x00, // 1.0 volume\n\n 0x00, 0x00, // reserved\n\n 0x00, 0x00, 0x00, 0x00, // reserved\n\n 0x00, 0x00, 0x00, 0x00, // reserved\n\n 0x00, 0x01, 0x00, 0x00,\n", "file_path": "src/protocol/fmp4.rs", "rank": 31, "score": 60767.359797980505 }, { "content": "pub fn meta_data_map() -> &'static DashMap<String, RtmpMetaData> {\n\n static INSTANCE: OnceCell<DashMap<String, RtmpMetaData>> = OnceCell::new();\n\n INSTANCE.get_or_init(|| DashMap::new())\n\n}\n\n\n\n/// TCP 连接处理\n\npub async fn accept_loop(addr: &str) -> anyhow::Result<()> {\n\n let listener = TcpListener::bind(addr.clone()).await?;\n\n log::info!(\"RTMP Server is listening to {}\", addr);\n\n\n\n let mut incoming = listener.incoming();\n\n while let Some(stream) = incoming.next().await {\n\n let stream = stream?;\n\n log::info!(\"new connection: {}\", stream.peer_addr()?);\n\n spawn_and_log_error(connection_loop(stream));\n\n }\n\n Ok(())\n\n}\n\n\n\nasync fn connection_loop(stream: TcpStream) -> anyhow::Result<()> {\n", "file_path": "src/rtmp_server.rs", "rank": 32, "score": 59896.404991644005 }, { "content": "pub fn eventbus_map() -> &'static DashMap<String, EventBus<RtmpMessage>> {\n\n static INSTANCE: OnceCell<DashMap<String, EventBus<RtmpMessage>>> = OnceCell::new();\n\n INSTANCE.get_or_init(|| DashMap::new())\n\n}\n\n\n", "file_path": "src/rtmp_server.rs", "rank": 33, "score": 58703.40966071695 }, { "content": "fn trun(_track: &Track, offset: u32, samples: &[Sample]) -> Vec<u8> {\n\n let sample_count = samples.len() as u32;\n\n let data_offset = offset + 8 + 12 + 16 * sample_count;\n\n\n\n let mut buffer = vec![];\n\n buffer.push(0x00); // version 0\n\n buffer.extend_from_slice(&[0x00, 0x0F, 0x01]); // flags\n\n buffer.extend_from_slice(&sample_count.to_be_bytes());\n\n buffer.extend_from_slice(&data_offset.to_be_bytes());\n\n\n\n for s in samples {\n\n buffer.extend_from_slice(&s.duration.to_be_bytes());\n\n buffer.extend_from_slice(&s.size.to_be_bytes());\n\n buffer.extend_from_slice(&s.flags.as_four_byte());\n\n buffer.extend_from_slice(&s.cts.to_be_bytes());\n\n }\n\n\n\n mp4_box(b\"trun\", vec![&buffer])\n\n}\n\n\n", "file_path": "src/protocol/fmp4.rs", "rank": 34, "score": 56119.93941208118 }, { "content": "/// movie box\n\nfn moov(tracks: &[Track], duration: u32, timescale: u32) -> Vec<u8> {\n\n let boxes = tracks.iter().map(|t| trak(t)).collect::<Vec<Vec<u8>>>();\n\n let mvhd = mvhd(timescale, duration);\n\n let mvex = mvex(&tracks);\n\n\n\n let mut payloads: Vec<&[u8]> = vec![];\n\n payloads.push(&mvhd);\n\n boxes.iter().for_each(|x| payloads.push(x));\n\n payloads.push(&mvex);\n\n\n\n mp4_box(b\"moov\", payloads)\n\n}\n\n\n\n/// 后台保存FLV文件\n", "file_path": "src/protocol/fmp4.rs", "rank": 35, "score": 56119.93941208118 }, { "content": "#[derive(Clap, Debug)]\n\n#[clap(version = crate_version ! 
(), author = \"Ninthakeey <[email protected]>\")]\n\nstruct Opts {\n\n #[clap(long, default_value = \"0\", about = \"disabled if port is 0\")]\n\n http_flv_port: u16,\n\n #[clap(long, default_value = \"18000\", about = \"disabled if port is 0\")]\n\n http_player_port: u16,\n\n #[clap(long, default_value = \"18001\", about = \"disabled if port is 0\")]\n\n ws_h264_port: u16,\n\n #[clap(long, default_value = \"0\", about = \"disabled if port is 0\")]\n\n ws_fmp4_port: u16,\n\n #[clap(long, default_value = \"1935\")]\n\n rtmp_port: u16,\n\n}\n\n\n\n\n", "file_path": "src/main.rs", "rank": 36, "score": 54402.61857402894 }, { "content": "// 把RMTP流转换城MIX流,并保证首帧为关键帧\n\nfn rtmp_rx_into_mix_rx(rx: Receiver<RtmpMessage>, stream_name: String) -> impl Stream<Item=Mix> {\n\n stream::unfold((rx, false, stream_name), |(rx, first_key_frame, stream_name)| async move {\n\n while let Ok(msg) = rx.recv().await {\n\n let mixes = Mix::from_rtmp_message(&msg, &stream_name);\n\n if mixes.is_empty() {\n\n continue;\n\n }\n\n\n\n if first_key_frame {\n\n return Some((stream::iter(mixes), (rx, first_key_frame, stream_name)));\n\n }\n\n\n\n let mut mixes = mixes.into_iter().skip_while(|mix| !mix.is_key_frame()).collect::<Vec<Mix>>();\n\n\n\n // 消息堆积,丢弃视频非关键帧\n\n if rx.len() > 30 {\n\n mixes.retain(|x| x.is_audio() || x.is_key_frame());\n\n }\n\n\n\n if mixes.is_empty() {\n\n continue;\n\n }\n\n\n\n return Some((stream::iter(mixes), (rx, true, stream_name)));\n\n }\n\n None\n\n }).flatten()\n\n}\n\n\n", "file_path": "src/ws_h264.rs", "rank": 37, "score": 53205.989668288734 }, { "content": "fn traf(track: &Track, base_media_decode_time: u32, samples: &[Sample]) -> Vec<u8> {\n\n let sample_dependency_table = sdtp(samples);\n\n let id = track.id;\n\n\n\n let tfhd = {\n\n let bytes: [u8; 8] = [\n\n 0x00, // version 0\n\n 0x00, 0x00, 0x00, // flags\n\n (id >> 24) as u8,\n\n (id >> 16) as u8,\n\n (id >> 8) as u8,\n\n (id as u8), // track_ID\n\n ];\n\n mp4_box(b\"tfhd\", vec![&bytes])\n\n };\n\n\n\n let tfdt = {\n\n let bytes: [u8; 8] = [\n\n 0x00, // version 0\n\n 0x00, 0x00, 0x00, // flags\n", "file_path": "src/protocol/fmp4.rs", "rank": 38, "score": 52466.072545599476 }, { "content": "fn moof(sn: u32, base_media_decode_time: u32, track: &Track, samples: &[Sample]) -> Vec<u8> {\n\n mp4_box(b\"moof\", vec![&mfhd(sn), &traf(track, base_media_decode_time, samples)])\n\n}\n\n\n", "file_path": "src/protocol/fmp4.rs", "rank": 39, "score": 48926.805397118245 }, { "content": "fn main() -> anyhow::Result<()> {\n\n util::init_logger();\n\n\n\n let opts: Opts = Opts::parse();\n\n log::info!(\"{:?}\", &opts);\n\n\n\n let player_html = include_str!(\"../static/player.html\");\n\n let player_html = player_html.replace(\"{/*$INJECTED_CONTEXT*/}\", &format!(\"{{port: {}}}\", opts.ws_h264_port));\n\n\n\n if opts.http_player_port > 0 {\n\n spawn_and_log_error(http_player::run_server(format!(\"0.0.0.0:{}\", opts.http_player_port), player_html));\n\n }\n\n if opts.http_flv_port > 0 {\n\n spawn_and_log_error(http_flv::run_server(format!(\"0.0.0.0:{}\", opts.http_flv_port)));\n\n }\n\n if opts.ws_h264_port > 0 {\n\n spawn_and_log_error(ws_h264::run_server(format!(\"0.0.0.0:{}\", opts.ws_h264_port)));\n\n }\n\n if opts.ws_fmp4_port > 0 {\n\n spawn_and_log_error(ws_fmp4::run_server(format!(\"0.0.0.0:{}\", opts.ws_fmp4_port)));\n\n }\n\n smol::block_on(accept_loop(&format!(\"0.0.0.0:{}\", opts.rtmp_port)))\n\n}\n", "file_path": "src/main.rs", "rank": 40, "score": 33443.9880468194 }, { "content": "fn get_path(req: &str) -> Option<&str> {\n\n let 
first_line = req.lines().next().unwrap_or_default();\n\n if first_line.starts_with(\"GET\") {\n\n return first_line.split_whitespace().skip(1).next();\n\n }\n\n None\n\n}\n\n\n\nasync fn write_chunk(stream: &mut TcpStream, bytes: &[u8]) -> anyhow::Result<()> {\n\n stream.write_all(format!(\"{:X}\\r\\n\", bytes.len()).as_bytes()).await?;\n\n stream.write_all(bytes).await?;\n\n stream.write_all(b\"\\r\\n\").await?;\n\n stream.flush().await?;\n\n Ok(())\n\n}", "file_path": "src/http_flv.rs", "rank": 41, "score": 28627.311703449173 }, { "content": "use byteorder::{BigEndian, ByteOrder};\n\n\n\nuse crate::protocol::rtmp::{RtmpContext, RtmpMessage, ChunkMessageType};\n\n\n\n/// H264编码数据存储或传输的基本单元\n\npub struct Nalu {\n\n inner: Vec<u8>,\n\n pub is_key_frame: bool,\n\n}\n\n\n\nimpl Nalu {\n\n pub const UNIT_TYPE_SPS: u8 = 7;\n\n pub const UNIT_TYPE_PPS: u8 = 8;\n\n\n\n /// RtmpMessage to Nalus\n\n pub fn from_rtmp_message(msg: &RtmpMessage) -> Vec<Nalu> {\n\n if msg.header.message_type != ChunkMessageType::VideoMessage {\n\n return vec![];\n\n }\n\n\n", "file_path": "src/protocol/h264.rs", "rank": 42, "score": 12.378799131203225 }, { "content": "use std::{u32, vec};\n\nuse crate::rtmp_server::{eventbus_map, meta_data_map, video_header_map};\n\nuse crate::util::spawn_and_log_error;\n\nuse smol::channel::Receiver;\n\nuse crate::protocol::rtmp::RtmpMessage;\n\nuse crate::protocol::h264::Nalu;\n\nuse smol::io::AsyncWriteExt;\n\n\n\n/// fps = timescale / duration\n\n#[derive(Clone)]\n\npub struct Track {\n\n pub id: u32,\n\n pub duration: u32,\n\n pub timescale: u32,\n\n pub width: u16,\n\n pub height: u16,\n\n pub volume: u16,\n\n pub dts: u32,\n\n pub pps_list: Vec<Vec<u8>>,\n\n pub sps_list: Vec<Vec<u8>>,\n", "file_path": "src/protocol/fmp4.rs", "rank": 44, "score": 9.90381697485229 }, { "content": "}\n\n\n\nimpl Flags {\n\n pub fn as_byte(&self) -> u8 {\n\n self.depands_on << 4 | self.is_depended_on << 2 | self.has_redundancy as u8\n\n }\n\n\n\n /// in trun box\n\n pub fn as_four_byte(&self) -> [u8; 4] {\n\n [\n\n self.is_leading << 2 | self.depands_on,\n\n self.is_depended_on << 6 | self.has_redundancy << 6 | self.padding_value << 1 | self.is_non_sync,\n\n (self.degrad_prio >> 8) as u8,\n\n self.degrad_prio as u8,\n\n ]\n\n }\n\n}\n\n\n\npub struct Fmp4Encoder {\n\n track: Track,\n", "file_path": "src/protocol/fmp4.rs", "rank": 45, "score": 9.656486918016743 }, { "content": " /// earlier proprietary products; 4-31 are reserved for future\n\n /// implementations; and 32-255 are not allowed (to allow\n\n /// distinguishing RTMP from text-based protocols, which always start\n\n /// with a printable character). A server that does not recognize the\n\n /// client’s requested version SHOULD respond with 3. 
The client MAY\n\n /// choose to degrade to version 3, or to abandon the handshake.\n\n pub version: u8,\n\n}\n\n\n\nimpl Handshake0 {\n\n pub const S0_V3: Handshake0 = Handshake0 { version: 3 };\n\n pub fn to_bytes(&self) -> Vec<u8> {\n\n vec![self.version.to_owned()]\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct Handshake1 {\n\n /// Time (4 bytes): This field contains a timestamp, which SHOULD be\n\n /// used as the epoch for all future chunks sent from this endpoint.\n", "file_path": "src/protocol/rtmp.rs", "rank": 46, "score": 9.56961268678926 }, { "content": "\n\nimpl AsRef<[u8]> for AAC {\n\n fn as_ref(&self) -> &[u8] {\n\n &self.inner\n\n }\n\n}\n\n\n\npub struct ADTS {\n\n // 1 bit; 0: MPEG-4, 1: MPEG-2\n\n pub id: bool,\n\n /// 2 bits; 0-Main Profile, 1-Low Complexity, 2-Scalable Sampling Rate\n\n pub profile: u8,\n\n /// 4 bits; 15 is forbidden\n\n pub sampling_frequency_index: u8,\n\n /// set to 0 when encoding, ignore when decoding\n\n pub private_bit: bool,\n\n /// 3 bits;\n\n pub channel_configuration: u8,\n\n pub copyright_identification_bit: bool,\n\n pub copyright_identification_start: bool,\n", "file_path": "src/protocol/aac.rs", "rank": 47, "score": 9.438951853571922 }, { "content": " );\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct RtmpMessageHeader {\n\n /// chunk stream id\n\n pub csid: u8,\n\n pub timestamp: u32,\n\n pub message_length: u32,\n\n pub message_type_id: u8,\n\n pub message_type: ChunkMessageType,\n\n /// message stream id\n\n /// 0 => 信令,\n\n /// 1 => play 信令| publish 信令 | 音视频数据\n\n pub msid: u32,\n\n}\n\n\n\nimpl RtmpMessageHeader {\n", "file_path": "src/protocol/rtmp.rs", "rank": 48, "score": 9.030265450917256 }, { "content": " // 13 bits;\n\n pub aac_frame_length: u16,\n\n // 2 bits; 表示ADTS帧中有N + 1个AAC原始帧\n\n pub num_of_raw_data_blocks_in_frame: u8,\n\n // 音频数据\n\n pub raw_data: Vec<u8>,\n\n}\n\n\n\nimpl ADTS {\n\n pub const HEADER_LEN: u16 = 7;\n\n /// 12 bits\n\n pub const SYNC_WORD: u16 = 0xFFF;\n\n /// 2 bits\n\n pub const LAYER: u8 = 0;\n\n /// 1 bit\n\n pub const PROTECTION_ABSENT: bool = true;\n\n /// set to 0 when encoding, ignore when decoding,\n\n pub const ORIGINALITY: bool = false;\n\n /// set to 0 when encoding, ignore when decoding\n\n pub const HOME: bool = false;\n", "file_path": "src/protocol/aac.rs", "rank": 49, "score": 8.612732186110906 }, { "content": "use crossbeam_utils::atomic::AtomicCell;\n\nuse dashmap::DashMap;\n\nuse smol::channel::{Receiver, Sender};\n\n\n\npub struct EventBus<E> {\n\n label: String,\n\n incr_val: AtomicCell<u64>,\n\n tx_map: DashMap<u64, Sender<E>>,\n\n}\n\n\n\nimpl<E: 'static + Clone> EventBus<E> {\n\n pub fn with_label(label: String) -> Self {\n\n Self {\n\n label,\n\n incr_val: Default::default(),\n\n tx_map: Default::default(),\n\n }\n\n }\n\n\n\n pub async fn publish(&self, val: E) {\n", "file_path": "src/eventbus.rs", "rank": 50, "score": 8.245093822401628 }, { "content": " is_depended_on: 0,\n\n has_redundancy: 0,\n\n depands_on: if key_frame { 2 } else { 1 },\n\n padding_value: 0,\n\n is_non_sync: if key_frame { 0 } else { 1 },\n\n degrad_prio: 0,\n\n },\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct Flags {\n\n pub is_leading: u8,\n\n pub is_depended_on: u8,\n\n pub has_redundancy: u8,\n\n pub depands_on: u8,\n\n pub padding_value: u8,\n\n pub is_non_sync: u8,\n\n pub degrad_prio: u16,\n", "file_path": "src/protocol/fmp4.rs", "rank": 52, "score": 7.796025591473077 }, { "content": "}\n\n\n\nimpl Handshake2 {\n\n pub const PACKET_LENGTH: u32 = 1536;\n\n pub fn 
to_bytes(&self) -> Vec<u8> {\n\n let mut v = Vec::new();\n\n v.append(self.time.to_be_bytes().to_vec().as_mut());\n\n v.append(self.time2.to_be_bytes().to_vec().as_mut());\n\n v.append(self.random_echo.clone().as_mut());\n\n v\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct RtmpContext {\n\n pub stream: TcpStream,\n\n pub ctx_begin_timestamp: i64,\n\n pub last_timestamp: u32,\n\n pub last_timestamp_delta: u32,\n\n pub last_message_length: u32,\n", "file_path": "src/protocol/rtmp.rs", "rank": 53, "score": 7.7795714548188535 }, { "content": " /// This may be 0, or some arbitrary value. To synchronize multiple\n\n /// chunkstreams, the endpoint may wish to send the current value of\n\n /// the other chunkstream’s timestamp.\n\n pub time: u32,\n\n /// Zero (4 bytes): This field MUST be all 0s.\n\n pub zero: u32,\n\n /// Random data (1528 bytes): This field can contain any arbitrary\n\n /// values. Since each endpoint has to distinguish between the\n\n /// response to the handshake it has initiated and the handshake\n\n /// initiated by its peer,this data SHOULD send something sufficiently\n\n /// random. But there is no need for cryptographically-secure\n\n /// randomness, or even dynamic values.\n\n pub random_data: Vec<u8>,\n\n}\n\n\n\nimpl Handshake1 {\n\n pub const PACKET_LENGTH: u32 = 1536;\n\n pub fn to_bytes(&self) -> Vec<u8> {\n\n let mut v = Vec::new();\n\n v.append(self.time.to_be_bytes().to_vec().as_mut());\n", "file_path": "src/protocol/rtmp.rs", "rank": 54, "score": 7.484636235686056 }, { "content": " pub last_message_type_id: u8,\n\n pub last_message_stream_id: u32,\n\n pub chunk_size: u32,\n\n pub remain_message_length: u32,\n\n pub recv_bytes_num: u32,\n\n pub peer_addr: String,\n\n pub stream_name: String,\n\n pub is_publisher: bool,\n\n}\n\n\n\nimpl RtmpContext {\n\n pub fn new(stream: TcpStream) -> Self {\n\n let peer_addr = stream\n\n .peer_addr()\n\n .map(|a| a.to_string())\n\n .unwrap_or_default();\n\n RtmpContext {\n\n stream,\n\n ctx_begin_timestamp: Local::now().timestamp_millis(),\n\n last_timestamp_delta: 0,\n", "file_path": "src/protocol/rtmp.rs", "rank": 55, "score": 7.414861954338013 }, { "content": " pub async fn peek_exact_from_peer(&mut self, bytes_num: u32) -> anyhow::Result<Vec<u8>> {\n\n let mut data = vec![0u8; bytes_num as usize];\n\n self.stream.peek(&mut data).await?;\n\n Ok(data)\n\n }\n\n\n\n pub async fn write_to_peer(&mut self, bytes: &[u8]) -> anyhow::Result<()> {\n\n self.stream.write_all(bytes).await?;\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl Drop for RtmpContext {\n\n fn drop(&mut self) {\n\n if self.is_publisher {\n\n eventbus_map().remove(&self.stream_name);\n\n log::warn!(\n\n \"[{}][RtmpContext] remove eventbus, stream_name={}\",\n\n self.peer_addr,\n\n self.stream_name\n", "file_path": "src/protocol/rtmp.rs", "rank": 56, "score": 7.3994428048039405 }, { "content": "use std::fmt::{Debug, Formatter};\n\n\n\nuse amf::amf0;\n\nuse amf::amf0::Value;\n\nuse byteorder::{BigEndian, ByteOrder, WriteBytesExt};\n\nuse chrono::Local;\n\nuse num::FromPrimitive;\n\nuse smol::io::{AsyncReadExt, AsyncWriteExt};\n\nuse smol::net::TcpStream;\n\n\n\nuse crate::rtmp_server::eventbus_map;\n\nuse crate::util::bytes_hex_format;\n\nuse std::convert::TryFrom;\n\n\n\n#[derive(Clone, Debug)]\n\npub struct Handshake0 {\n\n /// Version (8 bits): In C0, this field identifies the RTMP version\n\n /// requested by the client. In S0, this field identifies the RTMP\n\n /// version selected by the server. The version defined by this\n\n /// specification is 3. 
Values 0-2 are deprecated values used by\n", "file_path": "src/protocol/rtmp.rs", "rank": 57, "score": 7.360775994432323 }, { "content": "pub struct RtmpMessage {\n\n pub header: RtmpMessageHeader,\n\n pub body: Vec<u8>,\n\n pub chunk_count: u32,\n\n}\n\n\n\nimpl RtmpMessage {\n\n /// 读取完整消息\n\n pub async fn read_from(ctx: &mut RtmpContext) -> anyhow::Result<Self> {\n\n let mut chunk = RtmpMessage::read_chunk_from(ctx).await?;\n\n while ctx.remain_message_length > 0 {\n\n let mut remain_chunk = RtmpMessage::read_chunk_from(ctx).await?;\n\n chunk.body.append(&mut remain_chunk.body);\n\n chunk.chunk_count += 1;\n\n }\n\n\n\n Ok(chunk)\n\n }\n\n\n\n /// 读取一个消息分片\n", "file_path": "src/protocol/rtmp.rs", "rank": 58, "score": 7.2244646252482605 }, { "content": "\n\nimpl AsRef<[u8]> for Nalu {\n\n fn as_ref(&self) -> &[u8] {\n\n self.inner.as_ref()\n\n }\n\n}\n\n\n\n/// # VideoTagHeader\n\n///\n\n/// ## Frame Type\n\n///\n\n/// Type: UB [4]\n\n///\n\n/// Type of video frame. The following values are defined:\n\n/// 1 = key frame (for AVC, a seekable frame)\n\n/// 2 = inter frame (for AVC, a non-seekable frame)\n\n/// 3 = disposable inter frame (H.263 only)\n\n/// 4 = generated key frame (reserved for server use only)\n\n/// 5 = video info/command frame\n\n///\n", "file_path": "src/protocol/h264.rs", "rank": 59, "score": 7.202209146667875 }, { "content": " v.append(self.zero.to_be_bytes().to_vec().as_mut());\n\n v.append(self.random_data.clone().as_mut());\n\n v\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct Handshake2 {\n\n /// Time (4 bytes): This field MUST contain the timestamp sent by the\n\n /// peer in S1 (for C2) or C1 (for S2).\n\n pub time: u32,\n\n /// Time2 (4 bytes): This field MUST contain the timestamp at which the\n\n /// previous packet(s1 or c1) sent by the peer was read.\n\n pub time2: u32,\n\n /// Random echo (1528 bytes): This field MUST contain the random data\n\n /// field sent by the peer in S1 (for C2) or S2 (for C1). 
Either peer\n\n /// can use the time and time2 fields together with the current\n\n /// timestamp as a quick estimate of the bandwidth and/or latency of\n\n /// the connection, but this is unlikely to be useful.\n\n pub random_echo: Vec<u8>,\n", "file_path": "src/protocol/rtmp.rs", "rank": 60, "score": 7.109937246170218 }, { "content": " sn: u32,\n\n}\n\n\n\nimpl Fmp4Encoder {\n\n pub fn new(track: Track) -> Self {\n\n Self {\n\n track,\n\n sn: 0,\n\n }\n\n }\n\n\n\n pub fn init_segment(&self) -> Vec<u8> {\n\n let mut ftyp = ftyp();\n\n let mut movie = moov(&vec![self.track.clone()], Track::DEFAULT_TIMESCALE, self.track.timescale);\n\n let total_len = ftyp.len() + movie.len();\n\n\n\n let mut buffer = Vec::with_capacity(total_len);\n\n buffer.append(&mut ftyp);\n\n buffer.append(&mut movie);\n\n\n", "file_path": "src/protocol/fmp4.rs", "rank": 61, "score": 6.790130796118806 }, { "content": "#[macro_use]\n\nextern crate num_derive;\n\n\n\nmod eventbus;\n\npub mod http_flv;\n\npub mod http_player;\n\npub mod protocol;\n\npub mod rtmp_server;\n\npub mod util;\n\npub mod ws_h264;\n\npub mod ws_fmp4;", "file_path": "src/lib.rs", "rank": 63, "score": 6.459178905055262 }, { "content": " pub fn get_nal_ref_idc(&self) -> u8 {\n\n self.inner[0] >> 5\n\n }\n\n\n\n /// 帧类型\n\n #[allow(unused)]\n\n pub fn get_nal_unit_type(&self) -> u8 {\n\n self.inner[4] & 0x1F\n\n }\n\n\n\n #[allow(unused)]\n\n pub fn nalu_type_desc(&self) -> String {\n\n let priority: String = match self.get_nal_ref_idc() {\n\n 0 => \"DISPOSABLE\".into(),\n\n 1 => \"LOW\".into(),\n\n 2 => \"HIGH\".into(),\n\n 3 => \"HIGHEST\".into(),\n\n _ => \"UNKNOWN\".into(),\n\n };\n\n\n", "file_path": "src/protocol/h264.rs", "rank": 64, "score": 6.382891995484294 }, { "content": "}\n\n\n\nimpl Track {\n\n pub const DEFAULT_TIMESCALE: u32 = 1000_000;\n\n pub const DEFAULT_ID: u32 = 1;\n\n}\n\n\n\nimpl Default for Track {\n\n fn default() -> Self {\n\n Self {\n\n id: Track::DEFAULT_ID,\n\n duration: 0,\n\n timescale: Track::DEFAULT_TIMESCALE,\n\n width: 0,\n\n height: 0,\n\n volume: 0,\n\n dts: 0,\n\n pps_list: vec![],\n\n sps_list: vec![],\n\n }\n", "file_path": "src/protocol/fmp4.rs", "rank": 65, "score": 6.266847406900801 }, { "content": " }\n\n}\n\n\n\n\n\n#[derive(Clone)]\n\npub struct Sample {\n\n pub size: u32,\n\n pub duration: u32,\n\n pub cts: u32,\n\n pub flags: Flags,\n\n}\n\n\n\nimpl Sample {\n\n pub fn new(size: u32, duration: u32, cts: u32, key_frame: bool) -> Self {\n\n Self {\n\n size,\n\n duration,\n\n cts,\n\n flags: Flags {\n\n is_leading: 0,\n", "file_path": "src/protocol/fmp4.rs", "rank": 66, "score": 6.262147995954733 }, { "content": "use amf::amf0::Value;\n\nuse amf::Pair;\n\nuse byteorder::{BigEndian, ByteOrder};\n\nuse chrono::Local;\n\nuse dashmap::DashMap;\n\nuse once_cell::sync::OnceCell;\n\nuse smol::net::{TcpListener, TcpStream};\n\nuse smol::prelude::*;\n\n\n\nuse crate::eventbus::EventBus;\n\nuse crate::protocol::rtmp::{\n\n ChunkMessageType, Handshake0, Handshake1, Handshake2, RtmpContext, RtmpMessage, RtmpMetaData,\n\n};\n\nuse crate::util::{bytes_hex_format, gen_random_bytes, print_hex, spawn_and_log_error};\n\nuse std::convert::TryFrom;\n\nuse crate::protocol::fmp4::save_fmp4_background;\n\n\n", "file_path": "src/rtmp_server.rs", "rank": 67, "score": 5.915533588517919 }, { "content": "\n\n#[derive(Default, Clone)]\n\npub struct RtmpMetaData {\n\n pub width: f64,\n\n pub height: f64,\n\n pub video_codec_id: String,\n\n pub video_data_rate: f64,\n\n pub audio_codec_id: String,\n\n pub audio_data_rate: f64,\n\n 
pub frame_rate: f64,\n\n pub duration: f64,\n\n pub begin_time: i64,\n\n}\n\n\n\nimpl TryFrom<&amf::amf0::Value> for RtmpMetaData {\n\n type Error = anyhow::Error;\n\n\n\n fn try_from(value: &amf::amf0::Value) -> Result<Self, Self::Error> {\n\n let mut meta_data = RtmpMetaData::default();\n\n if let Value::EcmaArray { entries } = value {\n", "file_path": "src/protocol/rtmp.rs", "rank": 69, "score": 5.615114909531811 }, { "content": "use async_tungstenite::tungstenite::handshake::server::{ErrorResponse, Request, Response};\n\nuse async_tungstenite::tungstenite::Message;\n\nuse crossbeam_utils::atomic::AtomicCell;\n\nuse futures::sink::SinkExt;\n\nuse futures::StreamExt;\n\nuse smol::net::{SocketAddr, TcpListener, TcpStream};\n\n\n\nuse crate::protocol::h264::Nalu;\n\nuse crate::rtmp_server::{eventbus_map, video_header_map, audio_header_map};\n\nuse crate::protocol::rtmp::{ChunkMessageType, RtmpMessage};\n\nuse smol::channel::Receiver;\n\nuse smol::stream::{Stream};\n\nuse smol::stream;\n\nuse crate::protocol::aac::{AAC, ADTS};\n\n\n\n#[allow(unused)]\n\npub async fn run_server(addr: String) -> anyhow::Result<()> {\n\n // Create the event loop and TCP listener we'll accept connections on.\n\n let try_socket = TcpListener::bind(&addr).await;\n\n let listener = try_socket.expect(\"Failed to bind\");\n", "file_path": "src/ws_h264.rs", "rank": 70, "score": 5.367635698113553 }, { "content": "///\n\n/// 其他帧 AAC raw data\n\n/// 1) 第1个byte : audioCodeId=10,如果是44KHZ、16bit、双声道,\n\n/// 第一个byte是0xAF。如果实际采样率不是5.5KHZ、11KHZ、22KHZ、44KHZ,\n\n/// 就选一个接近的。\n\n/// 2) 第2个byte : 0x01 表示是raw data\n\n/// 3) 第3byte开始 : 去掉前7个byte的AAC头之后的AAC数据。\n\npub struct AAC {\n\n inner: Vec<u8>,\n\n}\n\n\n\n#[allow(unused)]\n\nimpl AAC {\n\n pub fn from_rtmp_message(msg: &RtmpMessage, header: &RtmpMessage) -> Option<Self> {\n\n if msg.header.message_type != ChunkMessageType::AudioMessage {\n\n return None;\n\n }\n\n\n\n Some(Self {\n\n inner: msg.body.to_owned(),\n", "file_path": "src/protocol/aac.rs", "rank": 71, "score": 5.264838289844711 }, { "content": "use crate::util::spawn_and_log_error;\n\nuse smol::io::{AsyncReadExt, AsyncWriteExt};\n\nuse smol::net::{TcpListener, TcpStream};\n\nuse smol::stream::StreamExt;\n\nuse crate::rtmp_server::{eventbus_map, video_header_map};\n\nuse crate::protocol::flv::{FLV_HEADER_ONLY_VIDEO_WITH_TAG0};\n\nuse crate::protocol::flv::FlvTag;\n\nuse chrono::Local;\n\nuse std::convert::TryFrom;\n\nuse crate::protocol::rtmp::ChunkMessageType;\n\n\n\npub async fn run_server(addr: String) -> anyhow::Result<()> {\n\n // Open up a TCP connection and create a URL.\n\n let listener = TcpListener::bind(addr).await?;\n\n let addr = format!(\"http://{}\", listener.local_addr()?);\n\n log::info!(\"HTTP-FLV Server is listening to {}\", addr);\n\n\n\n // For each incoming TCP connection, spawn a task and call `accept`.\n\n let mut incoming = listener.incoming();\n\n while let Some(stream) = incoming.next().await {\n", "file_path": "src/http_flv.rs", "rank": 72, "score": 5.186353434289055 }, { "content": " }\n\n }\n\n\n\n pub fn to_bytes(&self) -> Vec<u8> {\n\n let mut data = vec![0u8; 7];\n\n data[0] = (ADTS::SYNC_WORD >> 4) as u8;\n\n data[1] = 0xF0 | bool2u8(self.id) << 3 | ADTS::LAYER << 1 | bool2u8(ADTS::PROTECTION_ABSENT);\n\n data[2] = self.profile << 6\n\n | self.sampling_frequency_index << 2\n\n | bool2u8(self.private_bit) << 1\n\n | self.channel_configuration >> 2;\n\n data[3] = self.channel_configuration << 6\n\n | bool2u8(ADTS::ORIGINALITY) << 5\n\n | bool2u8(ADTS::HOME) << 4\n\n | 
bool2u8(self.copyright_identification_bit) << 3\n\n | bool2u8(self.copyright_identification_start) << 2\n\n | (self.aac_frame_length >> 11) as u8;\n\n data[4] = (self.aac_frame_length >> 3) as u8;\n\n data[5] = (self.aac_frame_length as u8) << 5 | (ADTS::ADTS_BUFFER_FULLNESS >> 6) as u8;\n\n data[6] = (ADTS::ADTS_BUFFER_FULLNESS as u8) << 2 | self.num_of_raw_data_blocks_in_frame;\n\n\n\n data.extend_from_slice(&self.raw_data);\n\n data\n\n }\n\n}\n\n\n", "file_path": "src/protocol/aac.rs", "rank": 73, "score": 5.166710210994542 }, { "content": "use async_tungstenite::tungstenite::handshake::server::{ErrorResponse, Request, Response};\n\nuse async_tungstenite::tungstenite::Message;\n\nuse crossbeam_utils::atomic::AtomicCell;\n\nuse futures::sink::SinkExt;\n\nuse futures::StreamExt;\n\nuse smol::net::{SocketAddr, TcpListener, TcpStream};\n\n\n\nuse crate::protocol::h264::Nalu;\n\nuse crate::rtmp_server::{eventbus_map, video_header_map, meta_data_map};\n\nuse crate::protocol::fmp4::{Fmp4Encoder, Track};\n\n\n\n#[allow(unused)]\n\npub async fn run_server(addr: String) -> anyhow::Result<()> {\n\n // Create the event loop and TCP listener we'll accept connections on.\n\n let try_socket = TcpListener::bind(&addr).await;\n\n let listener = try_socket.expect(\"Failed to bind\");\n\n log::info!(\"Websocket Listening on: {}\", addr);\n\n\n\n // Let's spawn the handling of each connection in a separate task.\n\n while let Ok((stream, addr)) = listener.accept().await {\n", "file_path": "src/ws_fmp4.rs", "rank": 74, "score": 5.132411918080512 }, { "content": " buffer\n\n }\n\n\n\n pub fn wrap_frame(&mut self, data: &[u8], key_frame: bool) -> Vec<u8> {\n\n let sample = Sample::new(\n\n data.len() as u32,\n\n self.track.duration,\n\n 0,\n\n key_frame,\n\n );\n\n\n\n let mut buffer = moof(self.sn, self.track.dts, &self.track, &vec![sample]);\n\n buffer.append(&mut mdat(data));\n\n\n\n self.track.dts += self.track.duration;\n\n self.sn += 1;\n\n\n\n buffer\n\n }\n\n}\n\n\n", "file_path": "src/protocol/fmp4.rs", "rank": 75, "score": 5.051176581881183 }, { "content": " let t: String = match self.get_nal_unit_type() {\n\n 1 => \"SLICE\".into(),\n\n 2 => \"DPA\".into(),\n\n 3 => \"DPB\".into(),\n\n 4 => \"DPC\".into(),\n\n 5 => \"IDR\".into(),\n\n 6 => \"SEI\".into(),\n\n 7 => \"SPS\".into(),\n\n 8 => \"PPS\".into(),\n\n 9 => \"AUD\".into(),\n\n 10 => \"EOSEQ\".into(),\n\n 11 => \"EOSTREAM\".into(),\n\n 12 => \"FILL\".into(),\n\n _ => \"UNKNOWN\".into(),\n\n };\n\n\n\n format!(\"{}::{}\", priority, t)\n\n }\n\n\n\n pub fn to_avcc_format(&self) -> Vec<u8> {\n", "file_path": "src/protocol/h264.rs", "rank": 76, "score": 5.023973724450975 }, { "content": "use smol::io::{AsyncWriteExt, AsyncReadExt};\n\nuse smol::net::{TcpListener, TcpStream};\n\nuse smol::stream::StreamExt;\n\n\n\nuse crate::util::spawn_and_log_error;\n\n\n\npub async fn run_server(addr: String, player_html: String) -> anyhow::Result<()> {\n\n // Open up a TCP connection and create a URL.\n\n let listener = TcpListener::bind(addr).await?;\n\n let addr = format!(\"http://{}\", listener.local_addr()?);\n\n log::info!(\"HTTP-Player Server is listening to {}\", addr);\n\n\n\n // For each incoming TCP connection, spawn a task and call `accept`.\n\n let mut incoming = listener.incoming();\n\n while let Some(stream) = incoming.next().await {\n\n let stream = stream?;\n\n spawn_and_log_error(accept(stream, player_html.clone()));\n\n }\n\n Ok(())\n\n}\n", "file_path": "src/http_player.rs", "rank": 77, "score": 4.729959852708208 }, { "content": " 
last_timestamp: 0,\n\n last_message_length: 0,\n\n last_message_type_id: 0,\n\n last_message_stream_id: 0,\n\n chunk_size: 128,\n\n remain_message_length: 0,\n\n recv_bytes_num: 0,\n\n peer_addr,\n\n stream_name: Default::default(),\n\n is_publisher: false,\n\n }\n\n }\n\n\n\n pub async fn read_exact_from_peer(&mut self, bytes_num: u32) -> anyhow::Result<Vec<u8>> {\n\n let mut data = vec![0u8; bytes_num as usize];\n\n AsyncReadExt::read_exact(&mut self.stream, &mut data).await?;\n\n Ok(data)\n\n }\n\n\n\n /// Receives data without removing it from the queue.\n", "file_path": "src/protocol/rtmp.rs", "rank": 78, "score": 4.293037526351767 }, { "content": "use chrono::Local;\n\nuse rand::Rng;\n\nuse std::fmt::Debug;\n\nuse std::future::Future;\n\nuse std::io::Write;\n\n\n", "file_path": "src/util.rs", "rank": 79, "score": 4.242111499761556 }, { "content": " pub fn to_bytes(&self) -> Vec<u8> {\n\n let enable_extend_timestamp_field = self.timestamp >= 0xFFFFFF;\n\n\n\n let mut rs = vec![self.csid];\n\n if enable_extend_timestamp_field {\n\n rs.write_u24::<BigEndian>(0xFFFFFF).unwrap();\n\n } else {\n\n rs.write_u24::<BigEndian>(self.timestamp).unwrap();\n\n }\n\n rs.write_u24::<BigEndian>(self.message_length).unwrap();\n\n rs.write_u8(self.message_type_id).unwrap();\n\n rs.write_u32::<BigEndian>(self.msid).unwrap();\n\n if enable_extend_timestamp_field {\n\n rs.write_u32::<BigEndian>(self.timestamp).unwrap();\n\n }\n\n rs\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n", "file_path": "src/protocol/rtmp.rs", "rank": 80, "score": 4.240276684823751 }, { "content": " /// 11bits; 0x7FF 说明是码率可变的码流\n\n pub const ADTS_BUFFER_FULLNESS: u16 = 0x7FF;\n\n\n\n /// `len` must be less than or equals to 2^13 - 7\n\n pub fn with_data(data: Vec<u8>) -> Self {\n\n if data.len() + ADTS::HEADER_LEN as usize > (2usize << 13) {\n\n unreachable!(\"ADTS len must be less than or equals to 2^13 - 7\");\n\n }\n\n\n\n Self {\n\n id: false,\n\n profile: 1,\n\n sampling_frequency_index: 4,\n\n private_bit: false,\n\n channel_configuration: 1,\n\n copyright_identification_bit: false,\n\n copyright_identification_start: false,\n\n aac_frame_length: data.len() as u16 + ADTS::HEADER_LEN,\n\n num_of_raw_data_blocks_in_frame: 0,\n\n raw_data: data,\n", "file_path": "src/protocol/aac.rs", "rank": 81, "score": 4.225661859705996 }, { "content": "pub mod flv;\n\npub mod rtmp;\n\npub mod h264;\n\npub mod aac;\n\npub mod fmp4;\n", "file_path": "src/protocol/mod.rs", "rank": 82, "score": 4.066858236450748 }, { "content": "use clap::crate_version;\n\nuse clap::Clap;\n\nuse river::{ws_h264, ws_fmp4, util, http_flv, http_player};\n\nuse river::rtmp_server::accept_loop;\n\nuse river::util::spawn_and_log_error;\n\n\n\n\n\n#[derive(Clap, Debug)]\n\n#[clap(version = crate_version ! 
(), author = \"Ninthakeey <[email protected]>\")]\n", "file_path": "src/main.rs", "rank": 83, "score": 3.924020051285481 }, { "content": " pub fn split_chunks_bytes(&self, chunk_size: u32) -> Vec<Vec<u8>> {\n\n let chunk_size = chunk_size as usize;\n\n let mut rs = vec![];\n\n\n\n let mut remain = self.body.clone();\n\n while remain.len() > chunk_size {\n\n let right = remain.split_off(chunk_size);\n\n rs.push(remain);\n\n remain = right;\n\n }\n\n rs.push(remain);\n\n\n\n // 添加type0头部\n\n for item in self.header.to_bytes().iter().rev() {\n\n (&mut rs[0]).insert(0, item.clone());\n\n }\n\n\n\n // 添加type3头部\n\n if rs.len() > 1 {\n\n let type3_fmt = 0xC0 | self.header.csid;\n", "file_path": "src/protocol/rtmp.rs", "rank": 84, "score": 3.8414323290036534 }, { "content": " let origin = self.as_ref();\n\n let mut bytes = vec![0x00, 0x00, 0x00, 0x00];\n\n for i in 4..origin.len() {\n\n // remove prevention byte\n\n // if origin[i - 2] == 0 && origin[i - 1] == 0 && origin[i] == 3 {\n\n // if i < origin.len() && [0u8, 1, 2, 3].contains(&origin[i + 1]) {\n\n // continue;\n\n // }\n\n // }\n\n\n\n bytes.push(origin[i]);\n\n }\n\n let len = (bytes.len() - 4) as u32;\n\n bytes[0] = (len >> 24) as u8;\n\n bytes[1] = (len >> 16) as u8;\n\n bytes[2] = (len >> 8) as u8;\n\n bytes[3] = len as u8;\n\n bytes\n\n }\n\n}\n", "file_path": "src/protocol/h264.rs", "rank": 85, "score": 3.658294471905507 }, { "content": " })\n\n }\n\n\n\n pub fn is_sequence_header(&self) -> bool {\n\n self.inner[1] == 0x00\n\n }\n\n\n\n pub fn is_raw_data(&self) -> bool {\n\n self.inner[1] != 0x00\n\n }\n\n\n\n /// raw_data -> ADTS\n\n pub fn to_adts(&self) -> Option<ADTS> {\n\n if self.is_raw_data() {\n\n Some(ADTS::with_data(self.inner[2..].to_vec()))\n\n } else {\n\n None\n\n }\n\n }\n\n}\n", "file_path": "src/protocol/aac.rs", "rank": 86, "score": 3.5910986803297713 }, { "content": " .collect()\n\n } else {\n\n vec![]\n\n }\n\n }\n\n _ => vec![]\n\n }\n\n }\n\n #[allow(unused)]\n\n pub fn is_video(&self) -> bool {\n\n matches!(self, Mix::Video(_))\n\n }\n\n #[allow(unused)]\n\n pub fn is_audio(&self) -> bool {\n\n matches!(self, Mix::Audio(_))\n\n }\n\n\n\n pub fn is_key_frame(&self) -> bool {\n\n if let Mix::Video(nalu) = self {\n\n nalu.is_key_frame\n", "file_path": "src/ws_h264.rs", "rank": 87, "score": 3.537801481638823 }, { "content": " (base_media_decode_time >> 24) as u8,\n\n (base_media_decode_time >> 16) as u8,\n\n (base_media_decode_time >> 8) as u8,\n\n (base_media_decode_time as u8), // baseMediaDecodeTime\n\n ];\n\n mp4_box(b\"tfdt\", vec![&bytes])\n\n };\n\n\n\n let trun = trun(track, sample_dependency_table.len() as u32 +\n\n 16 + // tfhd\n\n 16 + // tfdt\n\n 8 + // traf header\n\n 16 + // mfhd\n\n 8 + // moof header\n\n 8, samples);\n\n\n\n mp4_box(b\"traf\", vec![&tfhd, &tfdt, &trun, &sample_dependency_table])\n\n}\n\n\n", "file_path": "src/protocol/fmp4.rs", "rank": 88, "score": 3.348685150927076 }, { "content": " 0x00, 0x00, // reserved\n\n 0x00, 0x01, 0x00, 0x00,\n\n 0x00, 0x00, 0x00, 0x00,\n\n 0x00, 0x00, 0x00, 0x00,\n\n 0x00, 0x00, 0x00, 0x00,\n\n 0x00, 0x01, 0x00, 0x00,\n\n 0x00, 0x00, 0x00, 0x00,\n\n 0x00, 0x00, 0x00, 0x00,\n\n 0x00, 0x00, 0x00, 0x00,\n\n 0x40, 0x00, 0x00, 0x00, // transformation: unity matrix\n\n (track.width >> 8) as u8,\n\n track.width as u8,\n\n 0x00, 0x00, // width\n\n (track.height >> 8) as u8,\n\n track.height as u8,\n\n 0x00, 0x00, // height\n\n ];\n\n mp4_box(b\"tkhd\", vec![&bytes])\n\n}\n\n\n", "file_path": "src/protocol/fmp4.rs", "rank": 89, "score": 3.2334523708054754 }, { 
"content": " 0x00, 0x00, 0x00, // reserved\n\n 0x00, 0x00, 0x00, // reserved\n\n 0x00, 0x01, // data_reference_index\n\n 0x00, 0x00, // pre_defined\n\n 0x00, 0x00, // reserved\n\n 0x00, 0x00, 0x00, 0x00,\n\n 0x00, 0x00, 0x00, 0x00,\n\n 0x00, 0x00, 0x00, 0x00, // pre_defined\n\n (width >> 8) as u8,\n\n width as u8, // width\n\n (height >> 8) as u8,\n\n height as u8, // height\n\n 0x00, 0x48, 0x00, 0x00, // horizresolution\n\n 0x00, 0x48, 0x00, 0x00, // vertresolution\n\n 0x00, 0x00, 0x00, 0x00, // reserved\n\n 0x00, 0x01, // frame_count\n\n 0x12,\n\n 0x62, 0x69, 0x6E, 0x65, // binelpro.ru\n\n 0x6C, 0x70, 0x72, 0x6F,\n\n 0x2E, 0x72, 0x75, 0x00,\n", "file_path": "src/protocol/fmp4.rs", "rank": 90, "score": 3.054751553335069 }, { "content": " for item in &mut rs[1..] {\n\n item.insert(0, type3_fmt);\n\n }\n\n }\n\n\n\n rs\n\n }\n\n}\n\n\n\nimpl Debug for RtmpMessage {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {\n\n write!(\n\n f,\n\n \"ChunkMessage {{\\nheader: {:?}\\nmessage type: {}\\nchunk count={}\\nbody:\\n{}}}\",\n\n self.header,\n\n self.message_type_desc(),\n\n self.chunk_count,\n\n bytes_hex_format(&self.body)\n\n )\n\n }\n", "file_path": "src/protocol/rtmp.rs", "rank": 91, "score": 2.416638960457098 }, { "content": " key: \"code\".to_owned(),\n\n value: amf::amf0::Value::String(\"NetStream.Play.Start\".to_owned()),\n\n },\n\n Pair {\n\n key: \"description\".to_owned(),\n\n value: amf::amf0::Value::String(\"Start live\".to_owned()),\n\n },\n\n ],\n\n }\n\n .write_to(&mut response_result)?;\n\n response_result[6] = (response_result.len() - 12) as u8;\n\n ctx.write_to_peer(response_result.as_ref()).await?;\n\n log::info!(\"[peer={}] S->C, Start play:\", ctx.peer_addr);\n\n print_hex(response_result.as_ref());\n\n }\n\n\n\n {\n\n let mut response_result: Vec<u8> = vec![\n\n 0x05, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0x01,\n\n ];\n", "file_path": "src/rtmp_server.rs", "rank": 92, "score": 2.401092558894587 }, { "content": " },\n\n ],\n\n }\n\n .write_to(&mut response_result)?;\n\n response_result[6] = (response_result.len() - 12) as u8;\n\n ctx.write_to_peer(response_result.as_ref()).await?;\n\n log::info!(\"[peer={}] S->C, Start publishing:\", ctx.peer_addr);\n\n print_hex(response_result.as_ref());\n\n\n\n Ok(())\n\n}\n\n\n\nasync fn response_play(ctx: &mut RtmpContext, stream_id: u32) -> anyhow::Result<()> {\n\n {\n\n let rs: Vec<u8> = vec![\n\n 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00,\n\n ];\n\n ctx.write_to_peer(rs.as_ref()).await?;\n\n log::info!(\n\n \"[peer={}] S->C, Stream Begin, streamId={}\",\n", "file_path": "src/rtmp_server.rs", "rank": 93, "score": 2.3648351898629816 }, { "content": " else if acv_packet_type == 1 {\n\n loop {\n\n if read_index >= bytes.len() {\n\n break;\n\n }\n\n let data_len = BigEndian::read_u32(&bytes[read_index..]);\n\n read_index += 4;\n\n let data = &bytes[read_index..(read_index + data_len as usize)];\n\n read_index += data_len as usize;\n\n // println!(\"NALU Type: {}, len={}\", nalu_type_desc(&data[0]), data_len);\n\n // println!(\"len={}, nalu data:\\n{}\", data_len, bytes_hex_format(data));\n\n\n\n let mut nalu_bytes: Vec<u8> = vec![0x00, 0x00, 0x00, 0x01];\n\n nalu_bytes.extend_from_slice(data);\n\n handle_nalu(nalu_bytes);\n\n }\n\n } else {\n\n unreachable!(\"unknown acv packet type\")\n\n };\n\n\n\n fn handle_nalu(nalu_bytes: Vec<u8>) {}\n\n}\n", "file_path": "src/protocol/h264.rs", "rank": 95, "score": 2.306779835397473 }, { "content": " } else {\n\n 
false\n\n }\n\n }\n\n\n\n fn to_bytes(&self) -> Vec<u8> {\n\n match self {\n\n Mix::Video(nalu) => {\n\n let mut bytes = vec![Mix::VIDEO_FLAG];\n\n bytes.extend_from_slice(nalu.as_ref());\n\n bytes\n\n }\n\n Mix::Audio(aac) => {\n\n let mut bytes = vec![Mix::AUDIO_FLAG];\n\n bytes.extend_from_slice(&aac.to_bytes());\n\n bytes\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/ws_h264.rs", "rank": 96, "score": 2.276162232986616 }, { "content": " let mut nalu_bytes: Vec<u8> = vec![0x00, 0x00, 0x00, 0x01];\n\n nalu_bytes.extend_from_slice(data);\n\n nalus.push(Self { inner: nalu_bytes, is_key_frame });\n\n }\n\n let num_of_pps = &bytes[read_index] & 0x1F;\n\n read_index += 1;\n\n for _ in 0..num_of_pps as usize {\n\n let data_len = BigEndian::read_u16(&bytes[read_index..]);\n\n read_index += 2;\n\n let data = &bytes[read_index..(read_index + data_len as usize)];\n\n read_index += data_len as usize;\n\n\n\n let mut nalu_bytes: Vec<u8> = vec![0x00, 0x00, 0x00, 0x01];\n\n nalu_bytes.extend_from_slice(data);\n\n nalus.push(Self { inner: nalu_bytes, is_key_frame });\n\n }\n\n }\n\n // One or more NALUs (Full frames are required)\n\n else if acv_packet_type == 1 {\n\n loop {\n", "file_path": "src/protocol/h264.rs", "rank": 97, "score": 2.198820269585911 }, { "content": " let mut nalu_bytes: Vec<u8> = vec![0x00, 0x00, 0x00, 0x01];\n\n nalu_bytes.extend_from_slice(data);\n\n handle_nalu(nalu_bytes);\n\n }\n\n let num_of_pps = &bytes[read_index] & 0x1F;\n\n read_index += 1;\n\n // println!(\"pps num = {}\", num_of_pps);\n\n for _ in 0..num_of_pps as usize {\n\n let data_len = BigEndian::read_u16(&bytes[read_index..]);\n\n read_index += 2;\n\n let data = &bytes[read_index..(read_index + data_len as usize)];\n\n read_index += data_len as usize;\n\n // println!(\"len={}, pps data:\\n{}\", data_len, bytes_hex_format(data));\n\n\n\n let mut nalu_bytes: Vec<u8> = vec![0x00, 0x00, 0x00, 0x01];\n\n nalu_bytes.extend_from_slice(data);\n\n handle_nalu(nalu_bytes);\n\n }\n\n }\n\n // One or more NALUs (Full frames are required)\n", "file_path": "src/protocol/h264.rs", "rank": 98, "score": 2.1683756356572994 }, { "content": "\n\n for key in dropped_senders.iter() {\n\n self.tx_map.remove(key);\n\n log::info!(\"[EventBus][{}] remove receiver {}\", self.label, key);\n\n }\n\n }\n\n\n\n pub fn register_receiver(&self) -> Receiver<E> {\n\n let (tx, rx) = smol::channel::unbounded();\n\n\n\n let key = self.incr_val.fetch_add(1);\n\n self.tx_map.insert(key, tx);\n\n\n\n log::info!(\"[EventBus][{}] add receiver {}\", self.label, key);\n\n rx\n\n }\n\n}\n", "file_path": "src/eventbus.rs", "rank": 99, "score": 2.1056688648363693 } ]
Rust
src/third_party/alga/alga_transform.rs
dfarnham/nalgebra
79ef862fe9bd5f6d97a864c36274e0eb69468025
use alga::general::{ AbstractGroup, AbstractLoop, AbstractMagma, AbstractMonoid, AbstractQuasigroup, AbstractSemigroup, Identity, Multiplicative, RealField, TwoSidedInverse, }; use alga::linear::{ProjectiveTransformation, Transformation}; use crate::base::allocator::Allocator; use crate::base::dimension::{DimNameAdd, DimNameSum, U1}; use crate::base::{Const, DefaultAllocator, SVector}; use crate::geometry::{Point, SubTCategoryOf, TCategory, TProjective, Transform}; /* * * Algebraic structures. * */ impl<T: RealField + simba::scalar::RealField, C, const D: usize> Identity<Multiplicative> for Transform<T, C, D> where Const<D>: DimNameAdd<U1>, C: TCategory, DefaultAllocator: Allocator<T, DimNameSum<Const<D>, U1>, DimNameSum<Const<D>, U1>>, { #[inline] fn identity() -> Self { Self::identity() } } impl<T: RealField + simba::scalar::RealField, C, const D: usize> TwoSidedInverse<Multiplicative> for Transform<T, C, D> where Const<D>: DimNameAdd<U1>, C: SubTCategoryOf<TProjective>, DefaultAllocator: Allocator<T, DimNameSum<Const<D>, U1>, DimNameSum<Const<D>, U1>>, { #[inline] #[must_use = "Did you mean to use two_sided_inverse_mut()?"] fn two_sided_inverse(&self) -> Self { self.clone().inverse() } #[inline] fn two_sided_inverse_mut(&mut self) { self.inverse_mut() } } impl<T: RealField + simba::scalar::RealField, C, const D: usize> AbstractMagma<Multiplicative> for Transform<T, C, D> where Const<D>: DimNameAdd<U1>, C: TCategory, DefaultAllocator: Allocator<T, DimNameSum<Const<D>, U1>, DimNameSum<Const<D>, U1>>, { #[inline] fn operate(&self, rhs: &Self) -> Self { self * rhs } } macro_rules! impl_multiplicative_structures( ($($marker: ident<$operator: ident>),* $(,)*) => {$( impl<T: RealField + simba::scalar::RealField, C, const D: usize> $marker<$operator> for Transform<T, C, D> where Const<D>: DimNameAdd<U1>, C: TCategory, DefaultAllocator: Allocator<T, DimNameSum<Const<D>, U1>, DimNameSum<Const<D>, U1>> { } )*} ); macro_rules! impl_inversible_multiplicative_structures( ($($marker: ident<$operator: ident>),* $(,)*) => {$( impl<T: RealField + simba::scalar::RealField, C, const D: usize> $marker<$operator> for Transform<T, C, D> where Const<D>: DimNameAdd<U1>, C: SubTCategoryOf<TProjective>, DefaultAllocator: Allocator<T, DimNameSum<Const<D>, U1>, DimNameSum<Const<D>, U1>> { } )*} ); impl_multiplicative_structures!( AbstractSemigroup<Multiplicative>, AbstractMonoid<Multiplicative>, ); impl_inversible_multiplicative_structures!( AbstractQuasigroup<Multiplicative>, AbstractLoop<Multiplicative>, AbstractGroup<Multiplicative> ); /* * * Transformation groups. 
* */ impl<T, C, const D: usize> Transformation<Point<T, D>> for Transform<T, C, D> where Const<D>: DimNameAdd<U1>, T: RealField + simba::scalar::RealField, C: TCategory, DefaultAllocator: Allocator<T, DimNameSum<Const<D>, U1>, DimNameSum<Const<D>, U1>> + Allocator<T, DimNameSum<Const<D>, U1>>, { #[inline] fn transform_point(&self, pt: &Point<T, D>) -> Point<T, D> { self.transform_point(pt) } #[inline] fn transform_vector(&self, v: &SVector<T, D>) -> SVector<T, D> { self.transform_vector(v) } } impl<T, C, const D: usize> ProjectiveTransformation<Point<T, D>> for Transform<T, C, D> where Const<D>: DimNameAdd<U1>, T: RealField + simba::scalar::RealField, C: SubTCategoryOf<TProjective>, DefaultAllocator: Allocator<T, DimNameSum<Const<D>, U1>, DimNameSum<Const<D>, U1>> + Allocator<T, DimNameSum<Const<D>, U1>>, { #[inline] fn inverse_transform_point(&self, pt: &Point<T, D>) -> Point<T, D> { self.inverse_transform_point(pt) } #[inline] fn inverse_transform_vector(&self, v: &SVector<T, D>) -> SVector<T, D> { self.inverse_transform_vector(v) } }
use alga::general::{ AbstractGroup, AbstractLoop, AbstractMagma, AbstractMonoid, AbstractQuasigroup, AbstractSemigroup, Identity, Multiplicative, RealField, TwoSidedInverse, }; use alga::linear::{ProjectiveTransformation, Transformation}; use crate::base::allocator::Allocator; use crate::base::dimension::{DimNameAdd, DimNameSum, U1}; use crate::base::{Const, DefaultAllocator, SVector}; use crate::geometry::{Point, SubTCategoryOf, TCategory, TProjective, Transform}; /* * * Algebraic structures. * */ impl<T: RealField + simba::scalar::RealField, C, const D: usize> Identity<Multiplicative> for Transform<T, C, D> where Const<D>: DimNameAdd<U1>, C: TCategory, DefaultAllocator: Allocator<T, DimNameSum<Const<D>, U1>, DimNameSum<Const<D>, U1>>, { #[inline] fn identity() -> Self { Self::identity() } } impl<T: RealField + simba::scalar::RealField, C, const D: usize> TwoSidedInverse<Multiplicative> for Transform<T, C, D> where Const<D>: DimNameAdd<U1>, C: SubTCategoryOf<TProjective>, DefaultAllocator: Allocator<T, DimNameSum<Const<D>, U1>, DimNameSum<Const<D>, U1>>, { #[inline] #[must_use = "Did you mean to use two_sided_inverse_mut()?"] fn two_sided_inverse(&self) -> Self { self.clone().inverse() } #[inline] fn two_sided_inverse_mut(&mut self) { self.inverse_mut() } } impl<T: RealField + simba::scalar::RealField, C, const D: usize> AbstractMagma<Multiplicative> for Transfo
SVector<T, D> { self.transform_vector(v) } } impl<T, C, const D: usize> ProjectiveTransformation<Point<T, D>> for Transform<T, C, D> where Const<D>: DimNameAdd<U1>, T: RealField + simba::scalar::RealField, C: SubTCategoryOf<TProjective>, DefaultAllocator: Allocator<T, DimNameSum<Const<D>, U1>, DimNameSum<Const<D>, U1>> + Allocator<T, DimNameSum<Const<D>, U1>>, { #[inline] fn inverse_transform_point(&self, pt: &Point<T, D>) -> Point<T, D> { self.inverse_transform_point(pt) } #[inline] fn inverse_transform_vector(&self, v: &SVector<T, D>) -> SVector<T, D> { self.inverse_transform_vector(v) } }
rm<T, C, D> where Const<D>: DimNameAdd<U1>, C: TCategory, DefaultAllocator: Allocator<T, DimNameSum<Const<D>, U1>, DimNameSum<Const<D>, U1>>, { #[inline] fn operate(&self, rhs: &Self) -> Self { self * rhs } } macro_rules! impl_multiplicative_structures( ($($marker: ident<$operator: ident>),* $(,)*) => {$( impl<T: RealField + simba::scalar::RealField, C, const D: usize> $marker<$operator> for Transform<T, C, D> where Const<D>: DimNameAdd<U1>, C: TCategory, DefaultAllocator: Allocator<T, DimNameSum<Const<D>, U1>, DimNameSum<Const<D>, U1>> { } )*} ); macro_rules! impl_inversible_multiplicative_structures( ($($marker: ident<$operator: ident>),* $(,)*) => {$( impl<T: RealField + simba::scalar::RealField, C, const D: usize> $marker<$operator> for Transform<T, C, D> where Const<D>: DimNameAdd<U1>, C: SubTCategoryOf<TProjective>, DefaultAllocator: Allocator<T, DimNameSum<Const<D>, U1>, DimNameSum<Const<D>, U1>> { } )*} ); impl_multiplicative_structures!( AbstractSemigroup<Multiplicative>, AbstractMonoid<Multiplicative>, ); impl_inversible_multiplicative_structures!( AbstractQuasigroup<Multiplicative>, AbstractLoop<Multiplicative>, AbstractGroup<Multiplicative> ); /* * * Transformation groups. * */ impl<T, C, const D: usize> Transformation<Point<T, D>> for Transform<T, C, D> where Const<D>: DimNameAdd<U1>, T: RealField + simba::scalar::RealField, C: TCategory, DefaultAllocator: Allocator<T, DimNameSum<Const<D>, U1>, DimNameSum<Const<D>, U1>> + Allocator<T, DimNameSum<Const<D>, U1>>, { #[inline] fn transform_point(&self, pt: &Point<T, D>) -> Point<T, D> { self.transform_point(pt) } #[inline] fn transform_vector(&self, v: &SVector<T, D>) ->
random
[]
Rust
src/ais/vdm_t14.rs
johann2/nmea-parser
0dd55af67546526399bbc026067e51da8ea3fdc1
/* Copyright 2020 Timo Saarinen Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ use super::*; #[derive(Default, Clone, Debug, PartialEq)] pub struct SafetyRelatedBroadcastMessage { pub own_vessel: bool, pub station: Station, pub mmsi: u32, pub text: String, } pub(crate) fn handle( bv: &BitVec, station: Station, own_vessel: bool, ) -> Result<ParsedMessage, ParseError> { Ok(ParsedMessage::SafetyRelatedBroadcastMessage( SafetyRelatedBroadcastMessage { own_vessel: { own_vessel }, station: { station }, mmsi: { pick_u64(&bv, 8, 30) as u32 }, text: { pick_string(&bv, 40, 161) }, }, )) } #[cfg(test)] mod test { use super::*; #[test] fn test_parse_vdm_type14() { let mut p = NmeaParser::new(); match p.parse_sentence("!AIVDM,1,1,,A,>5?Per18=HB1U:1@E=B0m<L,2*51") { Ok(ps) => { match ps { ParsedMessage::SafetyRelatedBroadcastMessage(srbm) => { assert_eq!(srbm.mmsi, 351809000); assert_eq!(srbm.text, "RCVD YR TEST MSG"); } ParsedMessage::Incomplete => { assert!(false); } _ => { assert!(false); } } } Err(e) => { assert_eq!(e.to_string(), "OK"); } } match p.parse_sentence("!AIVDM,1,1,,A,>3R1p10E3;;R0USCR0HO>0@gN10kGJp,2*7F") { Ok(ps) => { match ps { ParsedMessage::SafetyRelatedBroadcastMessage(srbm) => { assert_eq!(srbm.mmsi, 237008900); assert_eq!(srbm.text, "EP228 IX48 FG3 DK7 PL56."); } ParsedMessage::Incomplete => { assert!(false); } _ => { assert!(false); } } } Err(e) => { assert_eq!(e.to_string(), "OK"); } } match p.parse_sentence("!AIVDM,1,1,,A,>4aDT81@E=@,2*2E") { Ok(ps) => { match ps { ParsedMessage::SafetyRelatedBroadcastMessage(srbm) => { assert_eq!(srbm.mmsi, 311764000); assert_eq!(srbm.text, "TEST"); } ParsedMessage::Incomplete => { assert!(false); } _ => { assert!(false); } } } Err(e) => { assert_eq!(e.to_string(), "OK"); } } } }
/* Copyright 2020 Timo Saarinen Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ use super::*; #[derive(Default, Clone, Debug, PartialEq)] pub struct SafetyRelatedBroadcastMessage { pub own_vessel: bool, pub station: Station, pub mmsi: u32, pub text: String, }
#[cfg(test)] mod test { use super::*; #[test] fn test_parse_vdm_type14() { let mut p = NmeaParser::new(); match p.parse_sentence("!AIVDM,1,1,,A,>5?Per18=HB1U:1@E=B0m<L,2*51") { Ok(ps) => { match ps { ParsedMessage::SafetyRelatedBroadcastMessage(srbm) => { assert_eq!(srbm.mmsi, 351809000); assert_eq!(srbm.text, "RCVD YR TEST MSG"); } ParsedMessage::Incomplete => { assert!(false); } _ => { assert!(false); } } } Err(e) => { assert_eq!(e.to_string(), "OK"); } } match p.parse_sentence("!AIVDM,1,1,,A,>3R1p10E3;;R0USCR0HO>0@gN10kGJp,2*7F") { Ok(ps) => { match ps { ParsedMessage::SafetyRelatedBroadcastMessage(srbm) => { assert_eq!(srbm.mmsi, 237008900); assert_eq!(srbm.text, "EP228 IX48 FG3 DK7 PL56."); } ParsedMessage::Incomplete => { assert!(false); } _ => { assert!(false); } } } Err(e) => { assert_eq!(e.to_string(), "OK"); } } match p.parse_sentence("!AIVDM,1,1,,A,>4aDT81@E=@,2*2E") { Ok(ps) => { match ps { ParsedMessage::SafetyRelatedBroadcastMessage(srbm) => { assert_eq!(srbm.mmsi, 311764000); assert_eq!(srbm.text, "TEST"); } ParsedMessage::Incomplete => { assert!(false); } _ => { assert!(false); } } } Err(e) => { assert_eq!(e.to_string(), "OK"); } } } }
pub(crate) fn handle( bv: &BitVec, station: Station, own_vessel: bool, ) -> Result<ParsedMessage, ParseError> { Ok(ParsedMessage::SafetyRelatedBroadcastMessage( SafetyRelatedBroadcastMessage { own_vessel: { own_vessel }, station: { station }, mmsi: { pick_u64(&bv, 8, 30) as u32 }, text: { pick_string(&bv, 40, 161) }, }, )) }
function_block-full_function
[ { "content": "/// Make key for store\n\nfn make_gsv_key(sentence_type: &str, msg_count: u32, msg_num: u32) -> String {\n\n format!(\"{},{},{}\", sentence_type, msg_count, msg_num)\n\n}\n\n\n\n// -------------------------------------------------------------------------------------------------\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n // fn init() {\n\n // let _ = env_logger::builder().is_test(true).try_init();\n\n // }\n\n\n\n #[test]\n\n fn test_parse_cpgsv() {\n\n let mut p = NmeaParser::new();\n\n\n\n match p\n\n .parse_sentence(\"$GPGSV,3,1,11,03,03,111,00,04,15,270,00,06,01,010,00,13,06,292,00*74\")\n", "file_path": "src/gnss/gsv.rs", "rank": 0, "score": 82784.33748037416 }, { "content": "/// Choose the argument which is Some. If both are Some, choose the first one.\n\nfn choose_some_string(a: &Option<String>, b: &Option<String>) -> Option<String> {\n\n if a.is_some() {\n\n a.clone()\n\n } else {\n\n b.clone()\n\n }\n\n}\n\n\n\n// -------------------------------------------------------------------------------------------------\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_parse_vdm_type24() {\n\n let mut p = NmeaParser::new();\n\n\n\n let s1 = \"!AIVDM,1,1,,A,H42O55i18tMET00000000000000,2*6D\";\n\n match p.parse_sentence(s1) {\n", "file_path": "src/ais/vdm_t24.rs", "rank": 1, "score": 62685.317454636635 }, { "content": "/// Parse hour, minute, second and nano seconds from HHMMSS.SS string.\n\nfn parse_time_with_fractions(hhmmss: &str) -> Result<(u32, u32, u32, u32), ParseError> {\n\n let hour = pick_s2(hhmmss, 0).parse::<u32>()?;\n\n let minute = pick_s2(hhmmss, 2).parse::<u32>()?;\n\n let second = pick_s2(hhmmss, 4).parse::<u32>()?;\n\n let nano = {\n\n let nano_str = hhmmss.get(6..).unwrap_or(\".0\");\n\n if !nano_str.is_empty() {\n\n (nano_str.parse::<f64>()? * 1000000000.0).round() as u32\n\n } else {\n\n 0\n\n }\n\n };\n\n Ok((hour, minute, second, nano))\n\n}\n\n\n\n/// Parse Utc date from YYYY MM DD hh mm ss\n\npub(crate) fn parse_ymdhs(\n\n year: i32,\n\n month: u32,\n\n day: u32,\n\n hour: u32,\n\n min: u32,\n\n sec: u32,\n\n) -> Result<DateTime<Utc>, ParseError> {\n\n parse_valid_utc(year, month, day, hour, min, sec, 0)\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 2, "score": 61725.893301138756 }, { "content": "/// Parse hour, minute and second from HHMMSS string.\n\nfn parse_time(hhmmss: &str) -> Result<(u32, u32, u32), ParseError> {\n\n let hour = pick_s2(hhmmss, 0).parse::<u32>()?;\n\n let minute = pick_s2(hhmmss, 2).parse::<u32>()?;\n\n let second = pick_s2(hhmmss, 4).parse::<u32>()?;\n\n Ok((hour, minute, second))\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 3, "score": 58526.08060090197 }, { "content": "/// Parse day, month and year from YYMMDD string.\n\nfn parse_date(yymmdd: &str) -> Result<(u32, u32, i32), ParseError> {\n\n let day = pick_s2(yymmdd, 0).parse::<u32>()?;\n\n let month = pick_s2(yymmdd, 2).parse::<u32>()?;\n\n let year = pick_s2(yymmdd, 4).parse::<i32>()?;\n\n Ok((day, month, year))\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 4, "score": 50954.51502324956 }, { "content": "/// Read-only access to geographical position in the implementing type.\n\npub trait LatLon {\n\n /// Return the latitude of the position contained by the object. If the position is not\n\n /// available return `None`.\n\n fn latitude(&self) -> Option<f64>;\n\n\n\n /// Return the longitude of the position contained by the object. 
If the position is not\n\n /// available return `None`.\n\n fn longitude(&self) -> Option<f64>;\n\n}\n\n\n\n// -------------------------------------------------------------------------------------------------\n\n\n\n/// NMEA sentence parser which keeps multi-sentence state between `parse_sentence` calls.\n\n/// The parser tries to be as permissible as possible about the field formats because some NMEA\n\n/// encoders don't follow the standards strictly.\n\n#[derive(Clone)]\n\npub struct NmeaParser {\n\n saved_fragments: HashMap<String, String>,\n\n saved_vsds: HashMap<u32, ais::VesselStaticData>,\n\n}\n", "file_path": "src/lib.rs", "rank": 5, "score": 49319.436689429735 }, { "content": "/// Using _opt on Utc. Will catch invalid Date (ex: month > 12).\n\npub fn parse_valid_utc(\n\n year: i32,\n\n month: u32,\n\n day: u32,\n\n hour: u32,\n\n min: u32,\n\n sec: u32,\n\n nano: u32,\n\n) -> Result<DateTime<Utc>, ParseError> {\n\n let opt_utc = Utc\n\n .ymd_opt(year, month, day)\n\n .and_hms_nano_opt(hour, min, sec, nano);\n\n match opt_utc {\n\n chrono::LocalResult::Single(valid_utc) | chrono::LocalResult::Ambiguous(valid_utc, _) => {\n\n Ok(valid_utc)\n\n }\n\n chrono::LocalResult::None => Err(format!(\n\n \"Failed to parse Utc Date from y:{} m:{} d:{} h:{} m:{} s:{}\",\n\n year, month, day, hour, min, sec\n\n )\n\n .into()),\n\n }\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 6, "score": 48110.5224697724 }, { "content": "/*\n\nCopyright 2020 Timo Saarinen\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\nuse super::*;\n\n\n\n/// GSV - satellite information\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct GsvData {\n", "file_path": "src/gnss/gsv.rs", "rank": 7, "score": 107.98861721205677 }, { "content": "/*\n\nCopyright 2020 Timo Saarinen\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\n\n\nuse super::*;\n\n\n\n/// DTM - Datum being used\n\n#[derive(Clone, Debug, PartialEq)]\n", "file_path": "src/gnss/dtm.rs", "rank": 8, "score": 107.55223161320801 }, { "content": "/*\n\nCopyright 2020 Timo Saarinen\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the 
License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\n\n\nuse super::*;\n\n\n\n/// ZDA - Time and date\n\n#[derive(Clone, Debug, PartialEq)]\n", "file_path": "src/gnss/zda.rs", "rank": 9, "score": 107.05760583122449 }, { "content": "/*\n\nCopyright 2020 Timo Saarinen\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\nuse super::*;\n\n\n\n/// GLL - geographic Position - Latitude/Longitude\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct GllData {\n", "file_path": "src/gnss/gll.rs", "rank": 10, "score": 106.77158744367344 }, { "content": "/*\n\nCopyright 2020 Timo Saarinen\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\n\n\nuse super::*;\n\n\n\n/// ALM - GPS Almanac Data\n\n#[derive(Clone, Debug, PartialEq)]\n", "file_path": "src/gnss/alm.rs", "rank": 11, "score": 106.44086470183626 }, { "content": "/*\n\nCopyright 2020 Timo Saarinen\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\n\n\nuse super::*;\n\n\n\n/// MSS - Multiple Data ID\n\n#[derive(Clone, Debug, PartialEq)]\n", "file_path": "src/gnss/mss.rs", "rank": 12, "score": 106.44086470183623 }, { "content": "/*\n\nCopyright 2020 Timo Saarinen\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\n\n\nuse super::*;\n\n\n\n/// STN - MSK Receiver Signal\n\n#[derive(Clone, Debug, PartialEq)]\n", "file_path": "src/gnss/stn.rs", "rank": 13, "score": 106.44086470183626 }, { "content": "/*\n\nCopyright 2020 Timo Saarinen\n\n\n\nLicensed under the Apache 
License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\nuse super::*;\n\n\n\n/// VTG - track made good and speed over ground\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct VtgData {\n", "file_path": "src/gnss/vtg.rs", "rank": 14, "score": 106.17344349061766 }, { "content": "/*\n\nCopyright 2020 Timo Saarinen\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\n\n\nuse super::*;\n\n\n\n// -------------------------------------------------------------------------------------------------\n\n\n", "file_path": "src/ais/vdm_t21.rs", "rank": 15, "score": 106.08386855257415 }, { "content": "/*\n\nCopyright 2020 Timo Saarinen\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\n\n\nuse super::*;\n\n\n\n// -------------------------------------------------------------------------------------------------\n\n\n", "file_path": "src/ais/vdm_t16.rs", "rank": 16, "score": 106.0838685525741 }, { "content": "/*\n\nCopyright 2020 Timo Saarinen\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\n\n\nuse super::*;\n\n\n\n// -------------------------------------------------------------------------------------------------\n\n\n", "file_path": "src/ais/vdm_t4.rs", "rank": 17, "score": 106.08386855257413 }, { "content": "/*\n\nCopyright 2020 Timo Saarinen\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by 
applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\n\n\nuse super::*;\n\n\n\n// -------------------------------------------------------------------------------------------------\n\n\n", "file_path": "src/ais/vdm_t15.rs", "rank": 18, "score": 106.08386855257412 }, { "content": "/*\n\nCopyright 2020 Timo Saarinen\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\n\n\nuse super::*;\n\n\n\n// -------------------------------------------------------------------------------------------------\n\n\n", "file_path": "src/ais/vdm_t22.rs", "rank": 19, "score": 106.08386855257416 }, { "content": "/*\n\nCopyright 2020 Timo Saarinen\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\n\n\nuse super::*;\n\n\n\n// -------------------------------------------------------------------------------------------------\n\n\n", "file_path": "src/ais/vdm_t13.rs", "rank": 20, "score": 106.08386855257415 }, { "content": "/*\n\nCopyright 2020 Timo Saarinen\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\n\n\nuse super::*;\n\n\n\n// -------------------------------------------------------------------------------------------------\n\n\n", "file_path": "src/ais/vdm_t12.rs", "rank": 21, "score": 106.08386855257413 }, { "content": "/*\n\nCopyright 2020-2021 Timo Saarinen\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for 
the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\n\n\nuse super::*;\n\n\n\n// -------------------------------------------------------------------------------------------------\n\n\n", "file_path": "src/ais/vdm_t25.rs", "rank": 22, "score": 106.08386855257413 }, { "content": "/*\n\nCopyright 2020 Timo Saarinen\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\n\n\nuse super::*;\n\n\n\n// -------------------------------------------------------------------------------------------------\n\n\n", "file_path": "src/ais/vdm_t17.rs", "rank": 23, "score": 106.08386855257412 }, { "content": "/*\n\nCopyright 2020-2021 Timo Saarinen\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\n\n\nuse super::*;\n\n\n\n// -------------------------------------------------------------------------------------------------\n\n\n", "file_path": "src/ais/vdm_t26.rs", "rank": 24, "score": 106.08386855257413 }, { "content": "/*\n\nCopyright 2020 Timo Saarinen\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\n\n\nuse super::*;\n\n\n\n// -------------------------------------------------------------------------------------------------\n\n\n", "file_path": "src/ais/vdm_t9.rs", "rank": 25, "score": 106.08386855257413 }, { "content": "/*\n\nCopyright 2020 Timo Saarinen\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\n\n\nuse super::*;\n\n\n\n// 
-------------------------------------------------------------------------------------------------\n\n\n", "file_path": "src/ais/vdm_t23.rs", "rank": 27, "score": 106.08386855257413 }, { "content": "/*\n\nCopyright 2020 Timo Saarinen\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\n\n\nuse super::*;\n\n\n\n// -------------------------------------------------------------------------------------------------\n\n\n", "file_path": "src/ais/vdm_t10.rs", "rank": 28, "score": 106.08386855257413 }, { "content": "/*\n\nCopyright 2020 Timo Saarinen\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\n\n\nuse super::*;\n\n\n\n// -------------------------------------------------------------------------------------------------\n\n\n", "file_path": "src/ais/vdm_t20.rs", "rank": 29, "score": 106.08386855257415 }, { "content": "/*\n\nCopyright 2020 Timo Saarinen\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\n\n\nuse super::*;\n\n\n\n// -------------------------------------------------------------------------------------------------\n\n\n", "file_path": "src/ais/vdm_t6.rs", "rank": 30, "score": 106.08386855257413 }, { "content": "/*\n\nCopyright 2020 Timo Saarinen\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\n\n\nuse super::*;\n\n\n\n/// GGA - time, position, and fix related data\n\n#[derive(Clone, Debug, PartialEq)]\n", "file_path": "src/gnss/gga.rs", "rank": 31, "score": 105.83129192435705 }, { "content": "/*\n\nCopyright 2020 Timo Saarinen, Sebastian 
Urban\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\n\n\nuse super::*;\n\n\n\n/// GNS - GNSS fix data\n\n#[derive(Clone, Debug, PartialEq)]\n", "file_path": "src/gnss/gns.rs", "rank": 32, "score": 105.83129192435702 }, { "content": "/*\n\nCopyright 2020 Timo Saarinen\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\n\n\nuse super::*;\n\n\n\n/// VBW - Dual Ground/Water Speed\n\n#[derive(Clone, Debug, PartialEq)]\n", "file_path": "src/gnss/vbw.rs", "rank": 33, "score": 105.83129192435703 }, { "content": "/*\n\nCopyright 2020 Timo Saarinen\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\nuse super::*;\n\n\n\n/// RMC - position, velocity, and time (Recommended Minimum sentence C)\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct RmcData {\n", "file_path": "src/gnss/rmc.rs", "rank": 34, "score": 104.99730914525179 }, { "content": "/*\n\nCopyright 2020 Timo Saarinen\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\nuse super::*;\n\n/// GSA - GNSS dilution of position (DOP) and active satellites\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct GsaData {\n\n /// Navigation system\n", "file_path": "src/gnss/gsa.rs", "rank": 35, "score": 104.41909145203431 }, { "content": "/*\n\nCopyright 2021 Linus Eing\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n 
http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\n\n\nuse super::*;\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct DptData {\n", "file_path": "src/gnss/dpt.rs", "rank": 36, "score": 103.9487969539297 }, { "content": "/*\n\nCopyright 2021 Linus Eing\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\n\n\nuse super::*;\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct MtwData {\n", "file_path": "src/gnss/mtw.rs", "rank": 37, "score": 103.9487969539297 }, { "content": "/*\n\nCopyright 2021 Linus Eing\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\n\n\nuse super::*;\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct DbsData {\n", "file_path": "src/gnss/dbs.rs", "rank": 38, "score": 103.9487969539297 }, { "content": "/*\n\nCopyright 2020 Timo Saarinen\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\n\n\nuse super::*;\n\n\n\n/// AIS VDM/VDO type 11: UTC/Date Response\n\npub(crate) fn handle(\n", "file_path": "src/ais/vdm_t11.rs", "rank": 39, "score": 103.38894216447719 }, { "content": "/*\n\nCopyright 2020 Timo Saarinen\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\n\n\nuse super::*;\n\n\n\n/// AIS VDM/VDO 
types 1-3: Position Report with SOTDMA/ITDMA\n\npub(crate) fn handle(\n", "file_path": "src/ais/vdm_t1t2t3.rs", "rank": 40, "score": 102.79772557592993 }, { "content": "/*\n\nCopyright 2020 Timo Saarinen\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\nuse super::*;\n\n\n\nuse chrono::Duration;\n\n\n\nconst AIS_CHAR_BITS: usize = 6;\n", "file_path": "src/util.rs", "rank": 41, "score": 102.25712626106231 }, { "content": "/*\n\nCopyright 2020 Timo Saarinen\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\n\n\nuse super::*;\n\n\n\n/// AIS VDM/VDO type 27: Long Range AIS Broadcast message\n\npub(crate) fn handle(\n", "file_path": "src/ais/vdm_t27.rs", "rank": 42, "score": 102.2133322961072 }, { "content": "/*\n\nCopyright 2020 Timo Saarinen\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\nuse super::*;\n\n\n\n/// AIS VDM/VDO type 24: Static data report\n\npub(crate) fn handle(\n\n bv: &BitVec,\n", "file_path": "src/ais/vdm_t24.rs", "rank": 43, "score": 101.63564362284988 }, { "content": "/*\n\nCopyright 2020 Timo Saarinen\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\nuse super::*;\n\n\n\n/// AIVDM type 5: Ship static voyage related data\n\npub(crate) fn handle(\n\n bv: &BitVec,\n", "file_path": "src/ais/vdm_t5.rs", "rank": 44, "score": 101.63564362284991 }, { "content": "/*\n\nCopyright 2021 Linus Eing\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the 
License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\n\n\nuse super::*;\n\n\n\n/// HDT - Heading, true\n\n#[derive(Clone, Debug, PartialEq)]\n", "file_path": "src/gnss/hdt.rs", "rank": 45, "score": 101.13853681942891 }, { "content": "/*\n\nCopyright 2021 Linus Eing\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\n\n\nuse super::*;\n\n\n\n/// MDA - Meteorological Composite\n\n#[derive(Clone, Debug, PartialEq)]\n", "file_path": "src/gnss/mda.rs", "rank": 46, "score": 101.13853681942891 }, { "content": "/*\n\nCopyright 2021 Linus Eing\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\n\n\nuse super::*;\n\n\n\n/// VHW - Water speed and heading\n\n#[derive(Clone, Debug, PartialEq)]\n", "file_path": "src/gnss/vhw.rs", "rank": 47, "score": 100.55761213069258 }, { "content": "/*\n\nCopyright 2021 Linus Eing\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\n\n\nuse super::*;\n\n\n\n/// MWV - Wind speed and angle\n\n#[derive(Clone, Debug, PartialEq)]\n", "file_path": "src/gnss/mwv.rs", "rank": 48, "score": 100.55761213069258 }, { "content": "/*\n\nCopyright 2020 Timo Saarinen\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions 
and\n\nlimitations under the License.\n\n*/\n\n\n\n//! GNSS data structures\n\n\n\npub(crate) mod gga;\n\npub(crate) mod gll;\n", "file_path": "src/gnss/mod.rs", "rank": 49, "score": 100.49358774922072 }, { "content": "/*\n\nCopyright 2020 Timo Saarinen\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\nuse super::*;\n\n\n\n/// AIS VDM/VDO type 19: Extended Class B Equipment Position Report\n\npub(crate) fn handle(\n\n _bv: &BitVec,\n", "file_path": "src/ais/vdm_t19.rs", "rank": 50, "score": 99.9416591148643 }, { "content": "/*\n\nCopyright 2020 Timo Saarinen\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\nuse super::*;\n\n\n\n/// AIS VDM/VDO type 18: Standard Class B CS Position Report\n\npub(crate) fn handle(\n\n bv: &BitVec,\n", "file_path": "src/ais/vdm_t18.rs", "rank": 51, "score": 99.94165911486431 }, { "content": "/*\n\nCopyright 2020 Timo Saarinen\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\n\n\nuse core::fmt;\n\nuse core::num::{ParseIntError, ParseFloatError};\n\nuse alloc::string::String;\n\n\n", "file_path": "src/error.rs", "rank": 52, "score": 99.55099141252825 }, { "content": "/*\n\nCopyright 2020 Timo Saarinen\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\n\n\nuse super::*;\n\n\n\n// Message type 13 is a receipt acknowledgement to senders of previous messages of type 12. 
\n\n// The message layout is identical to a type 7 Binary Acknowledge.\n\n\n", "file_path": "src/ais/vdm_t7.rs", "rank": 53, "score": 99.45347703091213 }, { "content": "/*\n\nCopyright 2020-2021 Timo Saarinen\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\n\n\n//! AIS VDM/VDO data structures\n\n\n\npub(crate) mod vdm_t1t2t3;\n\npub(crate) mod vdm_t4;\n", "file_path": "src/ais/mod.rs", "rank": 54, "score": 98.23727837690214 }, { "content": "/*\n\nCopyright 2021 Timo Saarinen\n\n\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n*/\n\n\n\n//! # NMEA Parser: NMEA parser for Rust\n\n//!\n\n//! This crate aims to cover all AIS sentences and the most important GNSS sentences used with\n\n//! NMEA 0183 standard. The parser supports AIS class A and B types. 
It also identifies GPS,\n", "file_path": "src/lib.rs", "rank": 55, "score": 91.4269142703267 }, { "content": "/// Type 21: Aid-to-Navigation Report\n\n#[derive(Default, Clone, Debug, PartialEq)]\n\npub struct AidToNavigationReport {\n\n /// True if the data is about own vessel, false if about other.\n\n pub own_vessel: bool,\n\n\n\n /// AIS station type.\n\n pub station: Station,\n\n\n\n /// User ID (30 bits)\n\n pub mmsi: u32,\n\n\n\n /// Aid type (5 bits)\n\n pub aid_type: NavAidType,\n\n\n\n /// Name (120 bits)\n\n pub name: String,\n\n\n\n /// Position accuracy.\n\n high_position_accuracy: bool,\n", "file_path": "src/ais/vdm_t21.rs", "rank": 57, "score": 29.04762562145039 }, { "content": "/// Type 13: Safety-Related Acknowledgment\n\n#[derive(Default, Clone, Debug, PartialEq)]\n\npub struct SafetyRelatedAcknowledgement {\n\n /// True if the data is about own vessel, false if about other.\n\n pub own_vessel: bool,\n\n\n\n /// AIS station type.\n\n pub station: Station,\n\n\n\n /// Source MMSI (30 bits)\n\n pub mmsi: u32,\n\n\n\n /// MMSI number 1 (30 bits)\n\n pub mmsi1: u32,\n\n\n\n /// MMSI sequence\n\n pub mmsi1_seq: u8,\n\n\n\n /// MMSI number 2 (30 bits)\n\n pub mmsi2: u32,\n", "file_path": "src/ais/vdm_t13.rs", "rank": 58, "score": 28.65078980835846 }, { "content": "/// Type 26: Multiple Slot Binary Message\n\n#[derive(Default, Clone, Debug, PartialEq)]\n\npub struct MultipleSlotBinaryMessage {\n\n /// True if the data is about own vessel, false if about other.\n\n pub own_vessel: bool,\n\n\n\n /// AIS station type.\n\n pub station: Station,\n\n\n\n /// User ID (30 bits)\n\n pub mmsi: u32,\n\n\n\n /// When 'addressed' flag is on this field contains the parsed destination MMSI.\n\n pub dest_mmsi: Option<u32>,\n\n\n\n /// When 'addressed' flag is off and 'structured' flag on this field contains\n\n /// application ID which consists of 10-bit DAC and 6-bit FID as in message types 6 and 8.\n\n pub app_id: Option<u16>,\n\n\n\n /// Data field of length 0-1004 bits.\n", "file_path": "src/ais/vdm_t26.rs", "rank": 59, "score": 28.372356317801554 }, { "content": "/// Type 25: Single Slot Binary Message\n\n#[derive(Default, Clone, Debug, PartialEq)]\n\npub struct SingleSlotBinaryMessage {\n\n /// True if the data is about own vessel, false if about other.\n\n pub own_vessel: bool,\n\n\n\n /// AIS station type.\n\n pub station: Station,\n\n\n\n /// User ID (30 bits)\n\n pub mmsi: u32,\n\n\n\n /// When 'addressed' flag is on this field contains the parsed destination MMSI.\n\n pub dest_mmsi: Option<u32>,\n\n\n\n /// When 'addressed' flag is off and 'structured' flag on this field contains\n\n /// application ID which consists of 10-bit DAC and 6-bit FID as in message types 6 and 8.\n\n pub app_id: Option<u16>,\n\n\n\n /// Data field of length 0-128 bits.\n", "file_path": "src/ais/vdm_t25.rs", "rank": 60, "score": 28.372356317801554 }, { "content": "/// Type 16: Assignment Mode Command\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct AssignmentModeCommand {\n\n /// True if the data is about own vessel, false if about other.\n\n pub own_vessel: bool,\n\n\n\n /// AIS station type.\n\n pub station: Station,\n\n\n\n // When the message is 96 bits long it is interpreted as an assignment for a single station,\n\n // When the message is 144 bits long it is interpreted as a channel assignled for two stations.\n\n pub assigned_for_single_station: bool,\n\n\n\n /// Source MMSI (30 bits)\n\n pub mmsi: u32,\n\n\n\n /// Destination A MMSI (30 bits)\n\n pub mmsi1: u32,\n\n\n\n /// Offset A\n", 
"file_path": "src/ais/vdm_t16.rs", "rank": 61, "score": 28.360799892472226 }, { "content": "/// Type 12: Addressed Safety-Related Message\n\n#[derive(Default, Clone, Debug, PartialEq)]\n\npub struct AddressedSafetyRelatedMessage {\n\n /// True if the data is about own vessel, false if about other.\n\n pub own_vessel: bool,\n\n\n\n /// AIS station type.\n\n pub station: Station,\n\n\n\n /// Source MMSI (30 bits)\n\n pub source_mmsi: u32,\n\n\n\n /// Sequence number (2 bits)\n\n pub sequence_number: u8,\n\n\n\n /// Destination MMSI (30 bits)\n\n pub destination_mmsi: u32,\n\n\n\n /// Retransmit flag (1 bit)\n\n pub retransmit_flag: bool,\n", "file_path": "src/ais/vdm_t12.rs", "rank": 62, "score": 28.324022835281582 }, { "content": "/// Type 15: Interrogation\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct Interrogation {\n\n /// True if the data is about own vessel, false if about other.\n\n pub own_vessel: bool,\n\n\n\n /// AIS station type.\n\n pub station: Station,\n\n\n\n /// Interrogation case based on data length\n\n pub case: InterrogationCase,\n\n\n\n /// Source MMSI (30 bits)\n\n pub mmsi: u32,\n\n\n\n /// Interrogated MMSI (30 bits)\n\n pub mmsi1: u32,\n\n\n\n /// First message type (6 bits)\n\n pub type1_1: u8,\n", "file_path": "src/ais/vdm_t15.rs", "rank": 63, "score": 28.04725491939309 }, { "content": "/// Type 10: UTC/Date Inquiry\n\n#[derive(Default, Clone, Debug, PartialEq)]\n\npub struct UtcDateInquiry {\n\n /// True if the data is about own vessel, false if about other.\n\n pub own_vessel: bool,\n\n\n\n /// AIS station type.\n\n pub station: Station,\n\n\n\n /// Source MMSI (30 bits)\n\n pub source_mmsi: u32,\n\n\n\n /// Destination MMSI (30 bits)\n\n pub destination_mmsi: u32,\n\n}\n\n\n\n// -------------------------------------------------------------------------------------------------\n\n\n\n/// AIS VDM/VDO type 10: UTC/Date Inquiry\n\npub(crate) fn handle(\n", "file_path": "src/ais/vdm_t10.rs", "rank": 64, "score": 27.99796037885078 }, { "content": "/// Type 6: Binary Addressed Message\n\n#[derive(Default, Clone, Debug, PartialEq)]\n\npub struct BinaryAddressedMessage {\n\n /// True if the data is about own vessel, false if about other.\n\n pub own_vessel: bool,\n\n\n\n /// AIS station type.\n\n pub station: Station,\n\n\n\n /// User ID (30 bits)\n\n pub mmsi: u32,\n\n\n\n /// User ID (2 bits)\n\n pub sequence_number: u8,\n\n\n\n /// User ID (30 bits)\n\n pub destination_mmsi: u32,\n\n\n\n /// Retransmit flag\n\n pub retransmit_flag: bool,\n", "file_path": "src/ais/vdm_t6.rs", "rank": 65, "score": 27.97717884519133 }, { "content": "#[derive(Default, Clone, Debug, PartialEq)]\n\npub struct VesselStaticData {\n\n /// True if the data is about own vessel, false if about other vessel.\n\n pub own_vessel: bool,\n\n\n\n /// Class A or Class B\n\n pub ais_type: AisClass,\n\n\n\n /// User ID (30 bits)\n\n pub mmsi: u32,\n\n\n\n /// AIS version indicator (2 bits)\n\n pub ais_version_indicator: u8,\n\n\n\n /// IMO number (1-999999999; 30 bits).\n\n pub imo_number: Option<u32>,\n\n\n\n /// Call sign (7 ASCII characters)\n\n pub call_sign: Option<String>,\n\n\n", "file_path": "src/ais/mod.rs", "rank": 67, "score": 27.724714420281835 }, { "content": "/// Type 4: Base Station Report\n\n#[derive(Default, Clone, Debug, PartialEq)]\n\npub struct BaseStationReport {\n\n /// True if the data is about own vessel, false if about other.\n\n pub own_vessel: bool,\n\n\n\n /// AIS station type.\n\n pub station: Station,\n\n\n\n /// User ID (30 bits)\n\n pub mmsi: u32,\n\n\n\n /// 
Timestamp\n\n pub timestamp: Option<DateTime<Utc>>,\n\n\n\n /// Position accuracy: true = high (<= 10 m), false = low (> 10 m)\n\n pub high_position_accuracy: bool,\n\n\n\n /// Latitude\n\n pub latitude: Option<f64>,\n", "file_path": "src/ais/vdm_t4.rs", "rank": 68, "score": 26.15136653585458 }, { "content": "\n\n /// Text (936 bits; 1-156 chars)\n\n pub text: String,\n\n}\n\n\n\n// -------------------------------------------------------------------------------------------------\n\n\n\n/// AIS VDM/VDO type 12: Addressed Safety-Related Message\n\npub(crate) fn handle(\n\n bv: &BitVec,\n\n station: Station,\n\n own_vessel: bool,\n\n) -> Result<ParsedMessage, ParseError> {\n\n Ok(ParsedMessage::AddressedSafetyRelatedMessage(\n\n AddressedSafetyRelatedMessage {\n\n own_vessel: { own_vessel },\n\n station: { station },\n\n source_mmsi: { pick_u64(&bv, 8, 30) as u32 },\n\n sequence_number: { pick_u64(&bv, 38, 2) as u8 },\n\n destination_mmsi: { pick_u64(&bv, 40, 30) as u32 },\n", "file_path": "src/ais/vdm_t12.rs", "rank": 69, "score": 26.058208349418813 }, { "content": "/// Type 20: Data Link Management Message\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct DataLinkManagementMessage {\n\n /// True if the data is about own vessel, false if about other.\n\n pub own_vessel: bool,\n\n\n\n /// AIS station type.\n\n pub station: Station,\n\n\n\n /// Interrogation case based on data length\n\n pub case: InterrogationCase,\n\n\n\n /// Source MMSI (30 bits)\n\n pub mmsi: u32,\n\n\n\n /// Offset number 1 (12 bits)\n\n pub offset1: u16,\n\n\n\n /// Reserved offset number (4)\n\n pub number1: u8,\n", "file_path": "src/ais/vdm_t20.rs", "rank": 70, "score": 25.759774593343728 }, { "content": "/// Type 17: DGNSS Broadcast Binary Message.\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct DgnssBroadcastBinaryMessage {\n\n /// True if the data is about own vessel, false if about other.\n\n pub own_vessel: bool,\n\n\n\n /// AIS station type.\n\n pub station: Station,\n\n\n\n /// Source MMSI (30 bits)\n\n pub mmsi: u32,\n\n\n\n /// Latitude (17 bits)\n\n pub latitude: Option<f64>,\n\n\n\n /// Longitude (18 bits)\n\n pub longitude: Option<f64>,\n\n\n\n /// Payload (80-815 bits). 
Note that it appears to be tied to the now obsolete RTCM2 protocol.\n\n pub payload: BitVec,\n", "file_path": "src/ais/vdm_t17.rs", "rank": 71, "score": 25.305866263883964 }, { "content": "\n\n// -------------------------------------------------------------------------------------------------\n\n\n\n/// Types 1, 2, 3 and 18: Position Report Class A, and Long Range AIS Broadcast message\n\n#[derive(Default, Clone, Debug, PartialEq)]\n\npub struct VesselDynamicData {\n\n /// True if the data is about own vessel, false if about other.\n\n pub own_vessel: bool,\n\n\n\n /// AIS station type.\n\n pub station: Station,\n\n\n\n /// Class A or Class B\n\n pub ais_type: AisClass,\n\n\n\n /// User ID (30 bits)\n\n pub mmsi: u32,\n\n\n\n // TODO: timestamp\n\n /// Navigation status\n", "file_path": "src/ais/mod.rs", "rank": 72, "score": 25.15707376719755 }, { "content": "/// Type 22: Channel Management\n\n#[derive(Default, Clone, Debug, PartialEq)]\n\npub struct ChannelManagement {\n\n /// True if the data is about own vessel, false if about other.\n\n pub own_vessel: bool,\n\n\n\n /// AIS station type.\n\n pub station: Station,\n\n\n\n /// User ID (30 bits)\n\n pub mmsi: u32,\n\n\n\n /// Channel A number (12 bits).\n\n pub channel_a: u16,\n\n\n\n /// Channel B number (12 bits).\n\n pub channel_b: u16,\n\n\n\n /// TxRx mode:\n\n /// 0 = TxA/TxB, RxA/RxB (default)\n", "file_path": "src/ais/vdm_t22.rs", "rank": 73, "score": 24.792508661080205 }, { "content": "/// Type 9: Standard SAR Aircraft Position Report\n\n#[derive(Default, Clone, Debug, PartialEq)]\n\npub struct StandardSarAircraftPositionReport {\n\n /// True if the data is about own vessel, false if about other.\n\n pub own_vessel: bool,\n\n\n\n /// AIS station type.\n\n pub station: Station,\n\n\n\n /// User ID (30 bits)\n\n pub mmsi: u32,\n\n\n\n /// Altitude\n\n pub altitude: Option<u16>,\n\n\n\n /// Speed over ground in knots. 
Value 1022 means 1022 knots or more.\n\n pub sog_knots: Option<u16>,\n\n\n\n /// Position accuracy: true = high (<= 10 m), false = low (> 10 m)\n\n pub high_position_accuracy: bool,\n", "file_path": "src/ais/vdm_t9.rs", "rank": 74, "score": 24.776915902555096 }, { "content": "/// Type 23: Group Assignment Command\n\n#[derive(Default, Clone, Debug, PartialEq)]\n\npub struct GroupAssignmentCommand {\n\n /// True if the data is about own vessel, false if about other.\n\n pub own_vessel: bool,\n\n\n\n /// AIS station type.\n\n pub station: Station,\n\n\n\n /// User ID (30 bits)\n\n pub mmsi: u32,\n\n\n\n /// Northeast latitude to 0.1 minutes.\n\n pub ne_lat: Option<f64>,\n\n\n\n /// Northeast longitude to 0.1 minutes.\n\n pub ne_lon: Option<f64>,\n\n\n\n /// Southwest latitude to 0.1 minutes.\n\n pub sw_lat: Option<f64>,\n", "file_path": "src/ais/vdm_t23.rs", "rank": 75, "score": 24.539831932424427 }, { "content": " bv: &BitVec,\n\n station: Station,\n\n own_vessel: bool,\n\n) -> Result<ParsedMessage, ParseError> {\n\n Ok(ParsedMessage::UtcDateInquiry(UtcDateInquiry {\n\n own_vessel: { own_vessel },\n\n station: { station },\n\n source_mmsi: { pick_u64(&bv, 8, 30) as u32 },\n\n destination_mmsi: { pick_u64(&bv, 40, 30) as u32 },\n\n }))\n\n}\n\n\n\n// -------------------------------------------------------------------------------------------------\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_parse_vdm_type10() {\n", "file_path": "src/ais/vdm_t10.rs", "rank": 76, "score": 22.489872625379896 }, { "content": " /// Report interval.\n\n pub interval: StationInterval,\n\n\n\n /// Quiet time specifies how many minutes the affected stations are to remain silent.\n\n /// None = none\n\n /// 1-15 = quiet time in minutes\n\n pub quiet: Option<u8>,\n\n}\n\n\n\n/// Station Type (for message type 23).\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum StationType {\n\n /// All types of mobiles (default)\n\n AllTypes,\n\n\n\n /// Reserved for future use\n\n Reserved1,\n\n\n\n /// All types of Class B mobile stations\n\n AllTypesOfClassBMobile,\n", "file_path": "src/ais/vdm_t23.rs", "rank": 77, "score": 21.756391387487945 }, { "content": "// -------------------------------------------------------------------------------------------------\n\n\n\n/// AIS station based on talker id\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum Station {\n\n BaseStation, // !AB\n\n DependentAisBaseStation, // !AD\n\n MobileStation, // !AI (the most common one)\n\n AidToNavigationStation, // !AN\n\n AisReceivingStation, // !AR\n\n LimitedBaseStation, // !AS\n\n AisTransmittingStation, // !AT\n\n RepeaterStation, // !AX\n\n Other, // !BS, !SA, etc.\n\n}\n\n\n\nimpl Default for Station {\n\n fn default() -> Station {\n\n Station::Other\n\n }\n", "file_path": "src/ais/mod.rs", "rank": 78, "score": 20.993561115016348 }, { "content": " fn longitude(&self) -> Option<f64> {\n\n self.longitude\n\n }\n\n}\n\n\n\n/// Navigation status for VesselDynamicData\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum NavigationStatus {\n\n UnderWayUsingEngine = 0, // 0\n\n AtAnchor = 1, // 1\n\n NotUnderCommand = 2, // 2\n\n RestrictedManoeuverability = 3, // 3\n\n ConstrainedByDraught = 4, // 4\n\n Moored = 5, // 5\n\n Aground = 6, // 6\n\n EngagedInFishing = 7, // 7\n\n UnderWaySailing = 8, // 8\n\n Reserved9 = 9, // 9, may be renamed in the future\n\n Reserved10 = 10, // 10, may be renamed in the future\n\n Reserved11 = 11, // 11, may be renamed in the future\n", "file_path": "src/ais/mod.rs", 
"rank": 79, "score": 20.61062007706056 }, { "content": " pub mothership_mmsi: Option<u32>,\n\n}\n\n\n\n// -------------------------------------------------------------------------------------------------\n\n\n\n/// Ship type derived from combined ship and cargo type field\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum ShipType {\n\n NotAvailable = 0, // 0\n\n Reserved1 = 10, // 1x\n\n WingInGround = 20, // 2x\n\n Fishing = 30, // 30\n\n Towing = 31, // 31\n\n TowingLong = 32, // 32; Towing: length exceeds 200m or breadth exceeds 25m\n\n DredgingOrUnderwaterOps = 33, // 33\n\n DivingOps = 34, // 34\n\n MilitaryOps = 35, // 35\n\n Sailing = 36, // 36\n\n PleasureCraft = 37, // 37\n\n Reserved38 = 38, // 38\n", "file_path": "src/ais/mod.rs", "rank": 80, "score": 20.261449920630618 }, { "content": " _station: Station,\n\n own_vessel: bool,\n\n) -> Result<ParsedMessage, ParseError> {\n\n Ok(ParsedMessage::VesselStaticData(VesselStaticData {\n\n own_vessel,\n\n ais_type: AisClass::ClassB,\n\n mmsi: pick_u64(&bv, 8, 30) as u32,\n\n ais_version_indicator: pick_u64(&bv, 38, 2) as u8,\n\n imo_number: {\n\n let raw = pick_u64(&bv, 40, 30) as u32;\n\n match raw {\n\n 0 => None,\n\n _ => Some(raw),\n\n }\n\n },\n\n call_sign: {\n\n let raw = pick_string(&bv, 70, 7);\n\n match raw.as_str() {\n\n \"\" => None,\n\n _ => Some(raw),\n", "file_path": "src/ais/vdm_t5.rs", "rank": 81, "score": 19.595278043337352 }, { "content": "/// Parse error returned by `NmeaParser::parse_sentence()`. `String` data type is used instead of\n\n/// `static &str` because the error messages are expected to contain context-specific details.\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum ParseError {\n\n /// Unsupported (or unimplemented) sentence type\n\n UnsupportedSentenceType(String),\n\n\n\n /// NMEA checksum doesn't match\n\n CorruptedSentence(String),\n\n\n\n /// The sentence format isn't what expected\n\n InvalidSentence(String),\n\n}\n\n\n\nimpl From<String> for ParseError {\n\n fn from(s: String) -> Self {\n\n ParseError::InvalidSentence(s)\n\n }\n\n}\n\n\n", "file_path": "src/error.rs", "rank": 82, "score": 19.555541336674587 }, { "content": " /// true = 12.5 kHz\n\n pub channel_b_band: bool,\n\n\n\n /// Size of transitional zone (3 bits).\n\n pub zonesize: u8,\n\n}\n\n\n\n// -------------------------------------------------------------------------------------------------\n\n\n\n/// AIS VDM/VDO type 22: Channel Management\n\npub(crate) fn handle(\n\n bv: &BitVec,\n\n station: Station,\n\n own_vessel: bool,\n\n) -> Result<ParsedMessage, ParseError> {\n\n let addressed = pick_u64(&bv, 139, 1) != 0;\n\n Ok(ParsedMessage::ChannelManagement(ChannelManagement {\n\n own_vessel: { own_vessel },\n\n station: { station },\n\n mmsi: { pick_u64(&bv, 8, 30) as u32 },\n", "file_path": "src/ais/vdm_t22.rs", "rank": 83, "score": 19.33718452962026 }, { "content": " /// Communication state\n\n /// Diagnostic information for the radio system.\n\n /// https://www.itu.int/dms_pubrec/itu-r/rec/m/R-REC-M.1371-1-200108-S!!PDF-E.pdf\n\n pub radio_status: Option<u32>,\n\n}\n\n\n\n/// AIS class which is either Class A or Class B\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum AisClass {\n\n /// AIS class not known.\n\n Unknown,\n\n\n\n /// AIS class A.\n\n ClassA, // Message types 1, 2, 3, 5\n\n\n\n /// AIS class B.\n\n ClassB, // Message types 14, 18, 19, 24\n\n}\n\n\n\nimpl Default for AisClass {\n", "file_path": "src/ais/mod.rs", "rank": 84, "score": 19.0044531259395 }, { "content": " pub sw_lon: Option<f64>,\n\n\n\n /// 
MMSI of destination 1 (30 bits).\n\n pub dest1_mmsi: Option<u32>,\n\n\n\n /// MMSI of destination 2 (30 bits).\n\n pub dest2_mmsi: Option<u32>,\n\n\n\n /// Addressed:\n\n /// false = broadcast,\n\n /// true = addressed\n\n pub addressed: bool,\n\n\n\n /// Channel A band:\n\n /// false = default,\n\n /// true = 12.5 kHz\n\n pub channel_a_band: bool,\n\n\n\n /// Channel B band:\n\n /// false = default,\n", "file_path": "src/ais/vdm_t22.rs", "rank": 85, "score": 18.25937159906674 }, { "content": " self.latitude\n\n }\n\n\n\n fn longitude(&self) -> Option<f64> {\n\n self.longitude\n\n }\n\n}\n\n\n\n// -------------------------------------------------------------------------------------------------\n\n\n\n/// AIS VDM/VDO type 4: Base Station Report\n\npub(crate) fn handle(\n\n bv: &BitVec,\n\n station: Station,\n\n own_vessel: bool,\n\n) -> Result<ParsedMessage, ParseError> {\n\n Ok(ParsedMessage::BaseStationReport(BaseStationReport {\n\n own_vessel: { own_vessel },\n\n station: { station },\n\n mmsi: { pick_u64(&bv, 8, 30) as u32 },\n", "file_path": "src/ais/vdm_t4.rs", "rank": 86, "score": 18.162475251758657 }, { "content": " CargoType::Reserved7 => write!(f, \"(reserved)\"),\n\n CargoType::Reserved8 => write!(f, \"(reserved)\"),\n\n CargoType::Reserved9 => write!(f, \"(reserved)\"),\n\n }\n\n }\n\n}\n\n\n\nimpl Default for CargoType {\n\n fn default() -> CargoType {\n\n CargoType::Undefined\n\n }\n\n}\n\n\n\n// -------------------------------------------------------------------------------------------------\n\n\n\n/// EPFD position fix types\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum PositionFixType {\n\n Undefined = 0, // 0\n\n GPS = 1, // 1\n", "file_path": "src/ais/mod.rs", "rank": 87, "score": 18.06741822934964 }, { "content": " 10 => Ok(StationType::Reserved10),\n\n 11 => Ok(StationType::Reserved11),\n\n 12 => Ok(StationType::Reserved12),\n\n 13 => Ok(StationType::Reserved13),\n\n 14 => Ok(StationType::Reserved14),\n\n 15 => Ok(StationType::Reserved15),\n\n _ => Err(format!(\"Station type value out of range: {}\", val)),\n\n }\n\n }\n\n}\n\n\n\n/// Station interval (for message type 23)\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum StationInterval {\n\n /// As given by the autonomous mode\n\n Autonomous,\n\n\n\n /// 10 minutes\n\n Time10min,\n\n\n", "file_path": "src/ais/vdm_t23.rs", "rank": 88, "score": 17.976921521487803 }, { "content": " pub assigned_mode_flag: bool,\n\n}\n\n\n\nimpl LatLon for AidToNavigationReport {\n\n fn latitude(&self) -> Option<f64> {\n\n self.latitude\n\n }\n\n\n\n fn longitude(&self) -> Option<f64> {\n\n self.longitude\n\n }\n\n}\n\n\n\n/// Type of navigation aid\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum NavAidType {\n\n /// Default, type not specified\n\n NotSpecified, // 0\n\n\n\n /// Reference point\n", "file_path": "src/ais/vdm_t21.rs", "rank": 89, "score": 17.834226000610307 }, { "content": "/// The four cases of interrogation, depending on data length mostly.\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum InterrogationCase {\n\n /// One station is interrogated for one message type.\n\n Case1,\n\n\n\n /// One station is interrogated for two message types.\n\n Case2,\n\n\n\n /// Two stations are interrogated for one message type each.\n\n Case3,\n\n\n\n /// One station is interrogated for two message types, and a second for one message type.\n\n Case4,\n\n}\n\n\n\nimpl InterrogationCase {\n\n pub fn new(bv: &BitVec) -> InterrogationCase {\n\n let len = bv.len();\n\n if len >= 160 {\n", "file_path": 
"src/ais/vdm_t15.rs", "rank": 90, "score": 17.57454459037168 }, { "content": " pub data: BitVec,\n\n}\n\n\n\n// -------------------------------------------------------------------------------------------------\n\n\n\n/// AIS VDM/VDO type 25: Single Slot Binary Message\n\n#[allow(clippy::collapsible_if)]\n\npub(crate) fn handle(\n\n bv: &BitVec,\n\n station: Station,\n\n own_vessel: bool,\n\n) -> Result<ParsedMessage, ParseError> {\n\n let addressed = pick_u64(&bv, 38, 1) != 0;\n\n let structured = pick_u64(&bv, 39, 1) != 0;\n\n\n\n Ok(ParsedMessage::SingleSlotBinaryMessage(\n\n SingleSlotBinaryMessage {\n\n own_vessel: { own_vessel },\n\n station: { station },\n\n mmsi: { pick_u64(&bv, 8, 30) as u32 },\n", "file_path": "src/ais/vdm_t25.rs", "rank": 91, "score": 17.52955727718083 }, { "content": " /// Repeat increment (11 bits)\n\n pub increment4: u8,\n\n}\n\n\n\n// -------------------------------------------------------------------------------------------------\n\n\n\n/// AIS VDM/VDO type 20: Data Link Management Message\n\npub(crate) fn handle(\n\n bv: &BitVec,\n\n station: Station,\n\n own_vessel: bool,\n\n) -> Result<ParsedMessage, ParseError> {\n\n let case = InterrogationCase::new(bv);\n\n Ok(ParsedMessage::DataLinkManagementMessage(\n\n DataLinkManagementMessage {\n\n own_vessel,\n\n station,\n\n case,\n\n mmsi: { pick_u64(&bv, 8, 30) as u32 },\n\n offset1: { pick_u64(&bv, 40, 12) as u16 },\n", "file_path": "src/ais/vdm_t20.rs", "rank": 92, "score": 17.228733433791287 }, { "content": "pub(crate) fn handle(\n\n bv: &BitVec,\n\n station: Station,\n\n own_vessel: bool,\n\n) -> Result<ParsedMessage, ParseError> {\n\n Ok(ParsedMessage::SafetyRelatedAcknowledgement(\n\n SafetyRelatedAcknowledgement {\n\n own_vessel: { own_vessel },\n\n station: { station },\n\n mmsi: { pick_u64(&bv, 8, 30) as u32 },\n\n mmsi1: { pick_u64(&bv, 40, 30) as u32 },\n\n mmsi1_seq: { pick_u64(&bv, 70, 2) as u8 },\n\n mmsi2: { pick_u64(&bv, 72, 30) as u32 },\n\n mmsi2_seq: { pick_u64(&bv, 102, 2) as u8 },\n\n mmsi3: { pick_u64(&bv, 104, 30) as u32 },\n\n mmsi3_seq: { pick_u64(&bv, 134, 2) as u8 },\n\n mmsi4: { pick_u64(&bv, 136, 30) as u32 },\n\n mmsi4_seq: { pick_u64(&bv, 166, 2) as u8 },\n\n },\n\n ))\n", "file_path": "src/ais/vdm_t13.rs", "rank": 93, "score": 17.175389861660673 }, { "content": "\n\n/// AIS VDM/VDO type 6: Binary Addressed Message. 
Implementation of the 920-bit data field is\n\n/// unimplemented currently.\n\npub(crate) fn handle(\n\n bv: &BitVec,\n\n station: Station,\n\n own_vessel: bool,\n\n) -> Result<ParsedMessage, ParseError> {\n\n Ok(ParsedMessage::BinaryAddressedMessage(\n\n BinaryAddressedMessage {\n\n own_vessel: { own_vessel },\n\n station: { station },\n\n mmsi: { pick_u64(&bv, 8, 30) as u32 },\n\n sequence_number: { pick_u64(&bv, 38, 2) as u8 },\n\n destination_mmsi: { pick_u64(&bv, 40, 30) as u32 },\n\n retransmit_flag: { pick_u64(&bv, 70, 1) != 0 },\n\n dac: { pick_u64(&bv, 72, 10) as u16 },\n\n fid: { pick_u64(&bv, 82, 6) as u8 }, // TODO: data (depending on DAC and FID\n\n },\n\n ))\n", "file_path": "src/ais/vdm_t6.rs", "rank": 94, "score": 17.15020157451931 }, { "content": "\n\nimpl LatLon for GnsData {\n\n fn latitude(&self) -> Option<f64> {\n\n self.latitude\n\n }\n\n\n\n fn longitude(&self) -> Option<f64> {\n\n self.longitude\n\n }\n\n}\n\n\n\n/// GNS mode indicator\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum GnsModeIndicator {\n\n /// Satellite system not used in position fix, or fix not valid\n\n Invalid,\n\n /// Satellite system used in non-differential mode in position fix\n\n Autonomous,\n\n /// Satellite system used in differential mode in position fix\n\n Differential,\n", "file_path": "src/gnss/gns.rs", "rank": 95, "score": 17.116703343743634 }, { "content": " }\n\n}\n\n\n\n// -------------------------------------------------------------------------------------------------\n\n\n\n/// AIS VDM/VDO type 9: Standard SAR Aircraft Position Report\n\npub(crate) fn handle(\n\n bv: &BitVec,\n\n station: Station,\n\n own_vessel: bool,\n\n) -> Result<ParsedMessage, ParseError> {\n\n Ok(ParsedMessage::StandardSarAircraftPositionReport(\n\n StandardSarAircraftPositionReport {\n\n own_vessel: { own_vessel },\n\n station: { station },\n\n mmsi: { pick_u64(&bv, 8, 30) as u32 },\n\n altitude: {\n\n let raw = pick_u64(&bv, 38, 12) as u16;\n\n if raw != 4095 {\n\n Some(raw)\n", "file_path": "src/ais/vdm_t9.rs", "rank": 96, "score": 16.916567581749312 }, { "content": " ShipType::Cargo => write!(f, \"cargo\"),\n\n ShipType::Tanker => write!(f, \"tanker\"),\n\n ShipType::Other => write!(f, \"other\"),\n\n }\n\n }\n\n}\n\n\n\n// -------------------------------------------------------------------------------------------------\n\n\n\n/// Cargo type derived from combined ship and cargo type field\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum CargoType {\n\n Undefined = 10, // x0\n\n HazardousCategoryA = 11, // x1\n\n HazardousCategoryB = 12, // x2\n\n HazardousCategoryC = 13, // x3\n\n HazardousCategoryD = 14, // x4\n\n Reserved5 = 15, // x5\n\n Reserved6 = 16, // x6\n\n Reserved7 = 17, // x7\n", "file_path": "src/ais/mod.rs", "rank": 97, "score": 16.857212847103224 }, { "content": "}\n\n\n\n// -------------------------------------------------------------------------------------------------\n\n\n\n/// AIS VDM/VDO type 21: Aid-to-Navigation Report\n\npub(crate) fn handle(\n\n bv: &BitVec,\n\n station: Station,\n\n own_vessel: bool,\n\n) -> Result<ParsedMessage, ParseError> {\n\n Ok(ParsedMessage::AidToNavigationReport(\n\n AidToNavigationReport {\n\n own_vessel: { own_vessel },\n\n station: { station },\n\n mmsi: { pick_u64(&bv, 8, 30) as u32 },\n\n aid_type: {\n\n NavAidType::new(pick_u64(&bv, 38, 5) as u8)\n\n .ok()\n\n .unwrap_or(NavAidType::NotSpecified)\n\n },\n", "file_path": "src/ais/vdm_t21.rs", "rank": 98, "score": 16.740572931875185 }, { "content": "}\n\n\n\n// 
-------------------------------------------------------------------------------------------------\n\n\n\n/// AIS VDM/VDO type 17: DGNSS Broadcast Binary Message\n\npub(crate) fn handle(\n\n bv: &BitVec,\n\n station: Station,\n\n own_vessel: bool,\n\n) -> Result<ParsedMessage, ParseError> {\n\n Ok(ParsedMessage::DgnssBroadcastBinaryMessage(\n\n DgnssBroadcastBinaryMessage {\n\n own_vessel: { own_vessel },\n\n station: { station },\n\n mmsi: { pick_u64(&bv, 8, 30) as u32 },\n\n latitude: {\n\n let lat_raw = pick_i64(&bv, 58, 17) as i32;\n\n if lat_raw != 0xd548 {\n\n Some((lat_raw as f64) / 600.0)\n\n } else {\n", "file_path": "src/ais/vdm_t17.rs", "rank": 99, "score": 16.74057293187519 } ]
Rust
sourcemap/src/types.rs
jaspervandenberg/symbolic
c420a47d67a4b17cbdc48de5b5900e8e2e7fa62f
use std::mem;
use std::borrow::Cow;

use sourcemap;

use symbolic_common::Result;

pub struct SourceView<'a> {
    sv: sourcemap::SourceView<'a>,
}

pub struct SourceMapView {
    sm: sourcemap::SourceMap,
}

pub struct TokenMatch<'a> {
    pub src_line: u32,
    pub src_col: u32,
    pub dst_line: u32,
    pub dst_col: u32,
    pub src_id: u32,
    pub name: Option<&'a str>,
    pub src: Option<&'a str>,
    pub function_name: Option<String>,
}

impl<'a> SourceView<'a> {
    pub fn new(source: &'a str) -> SourceView<'a> {
        SourceView {
            sv: sourcemap::SourceView::new(source),
        }
    }

    pub fn from_string(source: String) -> SourceView<'static> {
        SourceView {
            sv: sourcemap::SourceView::from_string(source),
        }
    }

    pub fn from_bytes(source: &'a [u8]) -> SourceView<'a> {
        match String::from_utf8_lossy(source) {
            Cow::Owned(s) => SourceView::from_string(s),
            Cow::Borrowed(s) => SourceView::new(s),
        }
    }

    pub fn as_str(&self) -> &str {
        self.sv.source()
    }

    pub fn get_line(&self, idx: u32) -> Option<&str> {
        self.sv.get_line(idx)
    }

    pub fn line_count(&self) -> usize {
        self.sv.line_count()
    }
}

impl SourceMapView {
    pub fn from_json_slice(buffer: &[u8]) -> Result<SourceMapView> {
        Ok(SourceMapView {
            sm: match sourcemap::decode_slice(buffer)? {
                sourcemap::DecodedMap::Regular(sm) => sm,
                sourcemap::DecodedMap::Index(smi) => smi.flatten()?,
            },
        })
    }

    pub fn lookup_token<'a>(&'a self, line: u32, col: u32) -> Option<TokenMatch<'a>> {
        self.sm
            .lookup_token(line, col)
            .map(|tok| self.make_token_match(tok))
    }

    pub fn get_token<'a>(&'a self, idx: u32) -> Option<TokenMatch<'a>> {
        self.sm.get_token(idx).map(|tok| self.make_token_match(tok))
    }

    pub fn get_token_count(&self) -> u32 {
        self.sm.get_token_count()
    }

    pub fn get_source_view<'a>(&'a self, idx: u32) -> Option<&'a SourceView<'a>> {
        self.sm
            .get_source_view(idx)
            .map(|x| unsafe { mem::transmute(x) })
    }

    pub fn get_source_name(&self, idx: u32) -> Option<&str> {
        self.sm.get_source(idx)
    }

    pub fn get_source_count(&self) -> u32 {
        self.sm.get_source_count()
    }

    pub fn lookup_token_with_function_name<'a, 'b>(
        &'a self,
        line: u32,
        col: u32,
        minified_name: &str,
        source: &SourceView<'b>,
    ) -> Option<TokenMatch<'a>> {
        self.sm.lookup_token(line, col).map(|token| {
            let mut rv = self.make_token_match(token);
            rv.function_name = source
                .sv
                .get_original_function_name(token, minified_name)
                .map(|x| x.to_string());
            rv
        })
    }

    fn make_token_match<'a>(&'a self, tok: sourcemap::Token<'a>) -> TokenMatch<'a> {
        TokenMatch {
            src_line: tok.get_src_line(),
            src_col: tok.get_src_col(),
            dst_line: tok.get_dst_line(),
            dst_col: tok.get_dst_col(),
            src_id: tok.get_src_id(),
            name: tok.get_name(),
            src: tok.get_source(),
            function_name: None,
        }
    }
}
use std::mem;
use std::borrow::Cow;

use sourcemap;

use symbolic_common::Result;

pub struct SourceView<'a> {
    sv: sourcemap::SourceView<'a>,
}

pub struct SourceMapView {
    sm: sourcemap::SourceMap,
}

pub struct TokenMatch<'a> {
    pub src_line: u32,
    pub src_col: u32,
    pub dst_line: u32,
    pub dst_col: u32,
    pub src_id: u32,
    pub name: Option<&'a str>,
    pub src: Option<&'a str>,
        self.sm.get_source_count()
    }

    pub fn lookup_token_with_function_name<'a, 'b>(
        &'a self,
        line: u32,
        col: u32,
        minified_name: &str,
        source: &SourceView<'b>,
    ) -> Option<TokenMatch<'a>> {
        self.sm.lookup_token(line, col).map(|token| {
            let mut rv = self.make_token_match(token);
            rv.function_name = source
                .sv
                .get_original_function_name(token, minified_name)
                .map(|x| x.to_string());
            rv
        })
    }

    fn make_token_match<'a>(&'a self, tok: sourcemap::Token<'a>) -> TokenMatch<'a> {
        TokenMatch {
            src_line: tok.get_src_line(),
            src_col: tok.get_src_col(),
            dst_line: tok.get_dst_line(),
            dst_col: tok.get_dst_col(),
            src_id: tok.get_src_id(),
            name: tok.get_name(),
            src: tok.get_source(),
            function_name: None,
        }
    }
}
    pub function_name: Option<String>,
}

impl<'a> SourceView<'a> {
    pub fn new(source: &'a str) -> SourceView<'a> {
        SourceView {
            sv: sourcemap::SourceView::new(source),
        }
    }

    pub fn from_string(source: String) -> SourceView<'static> {
        SourceView {
            sv: sourcemap::SourceView::from_string(source),
        }
    }

    pub fn from_bytes(source: &'a [u8]) -> SourceView<'a> {
        match String::from_utf8_lossy(source) {
            Cow::Owned(s) => SourceView::from_string(s),
            Cow::Borrowed(s) => SourceView::new(s),
        }
    }

    pub fn as_str(&self) -> &str {
        self.sv.source()
    }

    pub fn get_line(&self, idx: u32) -> Option<&str> {
        self.sv.get_line(idx)
    }

    pub fn line_count(&self) -> usize {
        self.sv.line_count()
    }
}

impl SourceMapView {
    pub fn from_json_slice(buffer: &[u8]) -> Result<SourceMapView> {
        Ok(SourceMapView {
            sm: match sourcemap::decode_slice(buffer)? {
                sourcemap::DecodedMap::Regular(sm) => sm,
                sourcemap::DecodedMap::Index(smi) => smi.flatten()?,
            },
        })
    }

    pub fn lookup_token<'a>(&'a self, line: u32, col: u32) -> Option<TokenMatch<'a>> {
        self.sm
            .lookup_token(line, col)
            .map(|tok| self.make_token_match(tok))
    }

    pub fn get_token<'a>(&'a self, idx: u32) -> Option<TokenMatch<'a>> {
        self.sm.get_token(idx).map(|tok| self.make_token_match(tok))
    }

    pub fn get_token_count(&self) -> u32 {
        self.sm.get_token_count()
    }

    pub fn get_source_view<'a>(&'a self, idx: u32) -> Option<&'a SourceView<'a>> {
        self.sm
            .get_source_view(idx)
            .map(|x| unsafe { mem::transmute(x) })
    }

    pub fn get_source_name(&self, idx: u32) -> Option<&str> {
        self.sm.get_source(idx)
    }

    pub fn get_source_count(&self) -> u32 {
random
[ { "content": "/// Checks whether an ELF binary contains a section.\n\n///\n\n/// This is useful to determine whether the binary contains certain information\n\n/// without loading its section data.\n\npub fn has_elf_section(elf: &elf::Elf, sh_type: u32, name: &str) -> bool {\n\n for header in &elf.section_headers {\n\n if header.sh_type != sh_type {\n\n continue;\n\n }\n\n\n\n if let Some(Ok(section_name)) = elf.shdr_strtab.get(header.sh_name) {\n\n if section_name == name {\n\n return true;\n\n }\n\n }\n\n }\n\n\n\n false\n\n}\n\n\n", "file_path": "debuginfo/src/elf.rs", "rank": 0, "score": 219231.55284865087 }, { "content": "/// Checks whether a Mach object file contains a section.\n\n///\n\n/// Depending on its name, the section will searched in the `\"__TEXT\"` or the\n\n/// `\"__DWARF\"` segment. This is useful to determine whether the object contains\n\n/// certain information without iterating over all section headers and loading\n\n/// their data.\n\npub fn has_mach_section(mach: &mach::MachO, name: &str) -> bool {\n\n find_mach_section(mach, name).is_some()\n\n}\n\n\n", "file_path": "debuginfo/src/mach.rs", "rank": 1, "score": 183850.91901893518 }, { "content": "/// Checks whether a Mach object file contains a segment.\n\n///\n\n/// This is useful to determine whether the object contains certain information\n\n/// without iterating over all section headers and loading their data.\n\npub fn has_mach_segment(mach: &mach::MachO, name: &str) -> bool {\n\n find_mach_segment(mach, name).is_some()\n\n}\n\n\n", "file_path": "debuginfo/src/mach.rs", "rank": 2, "score": 183847.78090124048 }, { "content": "pub fn get_register_name(arch: Arch, register: u8) -> Result<&'static str> {\n\n use symbolic_common::CpuFamily::*;\n\n let index = register as usize;\n\n\n\n Ok(match arch.cpu_family() {\n\n Intel32 => I386[index],\n\n Intel64 => X86_64[index],\n\n Arm64 => ARM64[index],\n\n Arm32 => ARM[index],\n\n _ => return Err(ErrorKind::Format(\"unsupported CPU family\").into()),\n\n })\n\n}\n\n\n\n/// Names for x86 CPU registers by register number\n\nstatic I386: &'static [&'static str] = &[\n\n \"$eax\", \"$ecx\", \"$edx\", \"$ebx\", \"$esp\", \"$ebp\", \"$esi\", \"$edi\", \"$eip\", \"$eflags\", \"$unused1\",\n\n \"$st0\", \"$st1\", \"$st2\", \"$st3\", \"$st4\", \"$st5\", \"$st6\", \"$st7\", \"$unused2\", \"$unused3\",\n\n \"$xmm0\", \"$xmm1\", \"$xmm2\", \"$xmm3\", \"$xmm4\", \"$xmm5\", \"$xmm6\", \"$xmm7\", \"$mm0\", \"$mm1\", \"$mm2\",\n\n \"$mm3\", \"$mm4\", \"$mm5\", \"$mm6\", \"$mm7\", \"$fcw\", \"$fsw\", \"$mxcsr\", \"$es\", \"$cs\", \"$ss\", \"$ds\",\n\n \"$fs\", \"$gs\", \"$unused4\", \"$unused5\", \"$tr\", \"$ldtr\",\n", "file_path": "minidump/src/registers.rs", "rank": 3, "score": 164215.04115749005 }, { "content": "/// Joins unknown paths together.\n\n///\n\n/// This kinda implements some windows/unix path joining semantics but it does\n\n/// not attempt to be perfect. 
It for instance currently does not fully\n\n/// understand windows paths.\n\npub fn common_join_path(base: &str, other: &str) -> String {\n\n // absolute paths\n\n if base == \"\" || is_absolute_windows_path(other) || is_absolute_unix_path(other) {\n\n return other.into();\n\n }\n\n\n\n // other weird cases\n\n if other == \"\" {\n\n return base.into();\n\n }\n\n\n\n let win_abs = is_absolute_windows_path(base);\n\n let unix_abs = is_absolute_unix_path(base);\n\n let win_style = win_abs || (!unix_abs && base.chars().any(|x| x == '\\\\'));\n\n\n\n return if win_style {\n\n format!(\n\n \"{}\\\\{}\",\n\n base.trim_right_matches(&['\\\\', '/'][..]),\n\n other.trim_left_matches(&['\\\\', '/'][..])\n\n )\n\n } else {\n\n format!(\n\n \"{}/{}\",\n\n base.trim_right_matches('/'),\n\n other.trim_left_matches('/')\n\n )\n\n };\n\n}\n\n\n", "file_path": "symcache/src/utils.rs", "rank": 4, "score": 161126.2989675483 }, { "content": "/// Demangles an identifier and falls back to the original symbol.\n\n///\n\n/// This is a shortcut for using ``Name::try_demangle``.\n\n///\n\n/// ```\n\n/// # use symbolic_demangle::*;\n\n/// let rv = demangle(\"_ZN3foo3barE\");\n\n/// assert_eq!(&rv, \"foo::bar\");\n\n/// ```\n\npub fn demangle(ident: &str) -> String {\n\n Name::new(ident).try_demangle(Default::default())\n\n}\n", "file_path": "demangle/src/lib.rs", "rank": 5, "score": 154931.8918546064 }, { "content": "/// Trims a path to a given length.\n\n///\n\n/// This attempts to not completely destroy the path in the process.\n\npub fn shorten_filename<'a>(filename: &'a str, length: usize) -> Cow<'a, str> {\n\n // trivial cases\n\n if filename.len() <= length {\n\n return Cow::Borrowed(filename);\n\n } else if length <= 10 {\n\n if length > 3 {\n\n return Cow::Owned(format!(\"{}...\", &filename[..length - 3]));\n\n }\n\n return Cow::Borrowed(&filename[..length]);\n\n }\n\n\n\n let mut rv = String::new();\n\n let mut last_idx = 0;\n\n let mut piece_iter = filename.match_indices(&['\\\\', '/'][..]);\n\n let mut final_sep = \"/\";\n\n let max_len = length - 4;\n\n\n\n // make sure we get two segments at the start.\n\n loop {\n\n if let Some((idx, sep)) = piece_iter.next() {\n", "file_path": "symcache/src/utils.rs", "rank": 6, "score": 149484.63060941998 }, { "content": "#[derive(Debug, PartialEq, Eq)]\n\nstruct DwarfRow {\n\n address: u64,\n\n file_index: u64,\n\n line: Option<u64>,\n\n}\n\n\n\nimpl<'input> DwarfLineProgram<'input> {\n\n fn parse<'info>(\n\n info: &'info DwarfInfo<'input>,\n\n line_offset: DebugLineOffset,\n\n address_size: u8,\n\n comp_dir: Option<Buf<'input>>,\n\n comp_name: Option<Buf<'input>>,\n\n ) -> Result<Self> {\n\n let program = info.debug_line\n\n .program(line_offset, address_size, comp_dir, comp_name)?;\n\n\n\n let mut sequences = vec![];\n\n let mut sequence_rows: Vec<DwarfRow> = vec![];\n\n let mut prev_address = 0;\n", "file_path": "symcache/src/dwarf.rs", "rank": 7, "score": 115157.31651040197 }, { "content": "#[derive(Debug)]\n\nstruct DwarfSeq {\n\n low_address: u64,\n\n high_address: u64,\n\n rows: Vec<DwarfRow>,\n\n}\n\n\n", "file_path": "symcache/src/dwarf.rs", "rank": 8, "score": 115157.31651040197 }, { "content": "#[repr(C)]\n\nstruct SymbolEntry {\n\n debug_identifier: *const c_char,\n\n symbol_size: usize,\n\n symbol_data: *const u8,\n\n}\n\n\n", "file_path": "minidump/src/processor.rs", "rank": 9, "score": 115157.31651040197 }, { "content": "/// Allows to demangle potentially mangled names. 
Non-mangled names are largely\n\n/// ignored and language detection will not return a language.\n\n///\n\n/// Upon formatting the symbol is automatically demangled (without\n\n/// arguments).\n\npub trait Demangle {\n\n /// Infers the language of a mangled name\n\n ///\n\n /// In case the symbol is not mangled or not one of the supported languages\n\n /// the return value will be `None`. If the language of the symbol was\n\n /// specified explicitly, this is returned instead.\n\n fn detect_language(&self) -> Option<Language>;\n\n\n\n /// Demangles the name with the given options\n\n fn demangle(&self, opts: DemangleOptions) -> Result<Option<String>>;\n\n\n\n /// Tries to demangle the name and falls back to the original name\n\n fn try_demangle(&self, opts: DemangleOptions) -> String;\n\n}\n\n\n\nimpl<'a> Demangle for Name<'a> {\n\n fn detect_language(&self) -> Option<Language> {\n\n if let Some(lang) = self.language() {\n\n return Some(lang);\n\n }\n", "file_path": "demangle/src/lib.rs", "rank": 10, "score": 110226.5020546313 }, { "content": "/// Gives access to the symbol table of an `Object` file\n\npub trait SymbolTable {\n\n /// Returns the symbols of this `Object`\n\n fn symbols(&self) -> Result<Symbols>;\n\n}\n\n\n\nimpl<'data> SymbolTable for Object<'data> {\n\n fn symbols(&self) -> Result<Symbols> {\n\n match self.target {\n\n ObjectTarget::MachOSingle(macho) => Symbols::from_macho(macho),\n\n ObjectTarget::MachOFat(_, ref macho) => Symbols::from_macho(macho),\n\n _ => Err(ErrorKind::Internal(\"symbol table not implemented\").into()),\n\n }\n\n }\n\n}\n", "file_path": "debuginfo/src/symbols.rs", "rank": 11, "score": 108118.21575907718 }, { "content": "/// Provides access to DWARF debugging information in object files\n\npub trait DwarfData {\n\n /// Checks whether this object contains DWARF infos\n\n fn has_dwarf_data(&self) -> bool;\n\n\n\n /// Loads a specific dwarf section if its in the file\n\n fn get_dwarf_section<'input>(\n\n &'input self,\n\n section: DwarfSection,\n\n ) -> Option<DwarfSectionData<'input>>;\n\n}\n\n\n\nimpl<'input> DwarfData for Object<'input> {\n\n fn has_dwarf_data(&self) -> bool {\n\n match self.target {\n\n // We assume an ELF contains debug information if it still contains\n\n // the debug_info section. 
The file utility uses a similar mechanism,\n\n // except that it checks for the \".symtab\" section instead.\n\n ObjectTarget::Elf(ref elf) => has_elf_section(\n\n elf,\n\n elf::section_header::SHT_PROGBITS,\n", "file_path": "debuginfo/src/dwarf.rs", "rank": 12, "score": 108118.21575907718 }, { "content": "pub trait BreakpadData {\n\n fn has_breakpad_data(&self) -> bool;\n\n fn breakpad_records<'input>(&'input self) -> BreakpadRecords<'input>;\n\n}\n\n\n\nimpl<'data> BreakpadData for Object<'data> {\n\n fn has_breakpad_data(&self) -> bool {\n\n self.kind() == ObjectKind::Breakpad\n\n }\n\n\n\n fn breakpad_records<'input>(&'input self) -> BreakpadRecords<'input> {\n\n BreakpadRecords::from_bytes(self.as_bytes())\n\n }\n\n}\n\n\n\nimpl<'data> BreakpadData for FatObject<'data> {\n\n fn has_breakpad_data(&self) -> bool {\n\n self.kind() == ObjectKind::Breakpad\n\n }\n\n\n\n fn breakpad_records<'input>(&'input self) -> BreakpadRecords<'input> {\n\n BreakpadRecords::from_bytes(self.as_bytes())\n\n }\n\n}\n\n\n", "file_path": "debuginfo/src/breakpad.rs", "rank": 13, "score": 108118.21575907718 }, { "content": "#[derive(Debug)]\n\nstruct DwarfLineProgram<'input> {\n\n sequences: Vec<DwarfSeq>,\n\n program_rows: StateMachine<Buf<'input>, IncompleteLineNumberProgram<Buf<'input>>>,\n\n}\n\n\n", "file_path": "symcache/src/dwarf.rs", "rank": 14, "score": 106330.55983004041 }, { "content": "struct SymCacheWriter<W: Write> {\n\n writer: RefCell<(u64, W)>,\n\n header: CacheFileHeader,\n\n symbol_map: HashMap<Vec<u8>, u32>,\n\n symbols: Vec<Seg<u8, u16>>,\n\n files: HashMap<Vec<u8>, Seg<u8, u8>>,\n\n file_record_map: HashMap<FileRecord, u16>,\n\n file_records: Vec<FileRecord>,\n\n func_records: Vec<FuncRecord>,\n\n line_record_bytes: RefCell<u64>,\n\n}\n\n\n\nimpl<W: Write> SymCacheWriter<W> {\n\n pub fn new(writer: W) -> SymCacheWriter<W> {\n\n SymCacheWriter {\n\n writer: RefCell::new((0, writer)),\n\n header: Default::default(),\n\n symbol_map: HashMap::new(),\n\n symbols: vec![],\n\n files: HashMap::new(),\n", "file_path": "symcache/src/writer.rs", "rank": 15, "score": 100915.14792102773 }, { "content": "/// Locates and reads a section in a Mach object file.\n\n///\n\n/// Depending on its name, the segment will be loaded from either the `\"__TEXT\"`\n\n/// or the `\"__DWARF\"` segment.\n\npub fn find_mach_section<'data>(\n\n mach: &mach::MachO<'data>,\n\n name: &str,\n\n) -> Option<MachSection<'data>> {\n\n let segment_name = match name {\n\n \"__eh_frame\" => \"__TEXT\",\n\n _ => \"__DWARF\",\n\n };\n\n\n\n let segment = match find_mach_segment(mach, segment_name) {\n\n Some(segment) => segment,\n\n None => return None,\n\n };\n\n\n\n for section in segment {\n\n if let Ok((header, data)) = section {\n\n if header.name().map(|sec| sec == name).unwrap_or(false) {\n\n return Some(MachSection { header, data });\n\n }\n\n }\n\n }\n\n\n\n None\n\n}\n\n\n", "file_path": "debuginfo/src/mach.rs", "rank": 16, "score": 100724.87511268411 }, { "content": "fn is_maybe_objc(ident: &str) -> bool {\n\n (ident.starts_with(\"-[\") || ident.starts_with(\"+[\")) && ident.ends_with(\"]\")\n\n}\n\n\n", "file_path": "demangle/src/lib.rs", "rank": 17, "score": 97995.0301444637 }, { "content": "fn is_absolute_windows_path(s: &str) -> bool {\n\n // UNC\n\n if s.len() > 2 && &s[..2] == \"\\\\\\\\\" {\n\n return true;\n\n }\n\n\n\n // other paths\n\n let mut char_iter = s.chars();\n\n if_chain! 
{\n\n if let Some(fc) = char_iter.next();\n\n if matches!(fc, 'A'...'Z') || matches!(fc, 'a'...'z');\n\n if let Some(sc) = char_iter.next();\n\n if sc == ':';\n\n if let Some(tc) = char_iter.next();\n\n if tc == '\\\\' || tc == '/';\n\n then {\n\n true\n\n } else {\n\n false\n\n }\n\n }\n\n}\n\n\n", "file_path": "symcache/src/utils.rs", "rank": 18, "score": 97995.0301444637 }, { "content": "fn is_maybe_cpp(ident: &str) -> bool {\n\n ident.starts_with(\"_Z\") || ident.starts_with(\"__Z\")\n\n}\n\n\n", "file_path": "demangle/src/lib.rs", "rank": 19, "score": 97995.0301444637 }, { "content": "fn is_absolute_unix_path(s: &str) -> bool {\n\n let mut char_iter = s.chars();\n\n char_iter.next() == Some('/')\n\n}\n\n\n", "file_path": "symcache/src/utils.rs", "rank": 20, "score": 97995.0301444637 }, { "content": "/// Locates and reads a section in an ELF binary.\n\npub fn find_elf_section<'elf, 'data>(\n\n elf: &'elf elf::Elf,\n\n data: &'data [u8],\n\n sh_type: u32,\n\n name: &str,\n\n) -> Option<ElfSection<'elf, 'data>> {\n\n for header in &elf.section_headers {\n\n if header.sh_type != sh_type {\n\n continue;\n\n }\n\n\n\n if let Some(Ok(section_name)) = elf.shdr_strtab.get(header.sh_name) {\n\n if section_name != name {\n\n continue;\n\n }\n\n\n\n let offset = header.sh_offset as usize;\n\n let size = header.sh_size as usize;\n\n return Some(ElfSection {\n\n header: header,\n\n data: &data[offset..][..size],\n\n });\n\n }\n\n }\n\n\n\n None\n\n}\n\n\n", "file_path": "debuginfo/src/elf.rs", "rank": 21, "score": 96206.25448438588 }, { "content": "/// Locates and reads a segment in a Mach object file.\n\npub fn find_mach_segment<'mach, 'data>(\n\n mach: &'mach mach::MachO<'data>,\n\n name: &str,\n\n) -> Option<MachSegment<'mach, 'data>> {\n\n for segment in &mach.segments {\n\n if segment.name().map(|seg| seg == name).unwrap_or(false) {\n\n return Some(segment);\n\n }\n\n }\n\n\n\n None\n\n}\n\n\n", "file_path": "debuginfo/src/mach.rs", "rank": 22, "score": 96206.25448438588 }, { "content": "fn err(msg: &'static str) -> Error {\n\n Error::from(ErrorKind::BadDwarfData(msg))\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct DwarfInfo<'input> {\n\n pub units: Vec<CompilationUnitHeader<Buf<'input>>>,\n\n pub debug_abbrev: DebugAbbrev<Buf<'input>>,\n\n pub debug_ranges: DebugRanges<Buf<'input>>,\n\n pub debug_line: DebugLine<Buf<'input>>,\n\n pub debug_str: DebugStr<Buf<'input>>,\n\n pub vmaddr: u64,\n\n abbrev_cache: RefCell<LruCache<DebugAbbrevOffset<usize>, Arc<Abbreviations>, FnvBuildHasher>>,\n\n}\n\n\n\nimpl<'input> DwarfInfo<'input> {\n\n pub fn from_object(obj: &'input Object) -> Result<DwarfInfo<'input>> {\n\n macro_rules! 
section {\n\n ($sect:ident, $mandatory:expr) => {{\n\n let sect = match obj.get_dwarf_section(DwarfSection::$sect) {\n", "file_path": "symcache/src/dwarf.rs", "rank": 23, "score": 95843.31533995565 }, { "content": "struct LineDebug<'a>(RefCell<Option<Lines<'a>>>);\n\n\n\nimpl<'a> fmt::Debug for LineDebug<'a> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n f.debug_list()\n\n .entries(self.0.borrow_mut().take().unwrap().filter_map(|x| x.ok()))\n\n .finish()\n\n }\n\n}\n\n\n\nimpl<'a> fmt::Debug for Function<'a> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n f.debug_struct(\"Function\")\n\n .field(\"id\", &self.id())\n\n .field(\"parent_id\", &self.parent_id())\n\n .field(\"symbol\", &self.symbol())\n\n .field(\"addr\", &self.addr())\n\n .field(\"comp_dir\", &self.comp_dir())\n\n .field(\"lang\", &self.lang())\n\n .field(\"lines()\", &LineDebug(RefCell::new(Some(self.lines()))))\n", "file_path": "symcache/src/cache.rs", "rank": 24, "score": 92466.29002159245 }, { "content": "/// Converts an owned raw pointer to characters to an owned `String`.\n\n/// If the pointer is NULL, an empty string `\"\"` is returned.\n\npub fn ptr_to_string(ptr: *mut c_char) -> String {\n\n if ptr.is_null() {\n\n return String::new();\n\n }\n\n\n\n let string = unsafe { CStr::from_ptr(ptr) }\n\n .to_string_lossy()\n\n .into_owned();\n\n\n\n unsafe { string_delete(ptr) };\n\n string\n\n}\n", "file_path": "minidump/src/utils.rs", "rank": 25, "score": 90573.38631711053 }, { "content": "/// Converts an object into a vector of symcache data.\n\npub fn to_vec(obj: &Object) -> Result<Vec<u8>> {\n\n let mut buf = Vec::<u8>::new();\n\n buf.write_all(CacheFileHeader::default().as_bytes())?;\n\n let header = {\n\n let mut writer = SymCacheWriter::new(&mut buf);\n\n writer.write_object(obj)?;\n\n writer.header\n\n };\n\n let header_bytes = header.as_bytes();\n\n (&mut buf[..header_bytes.len()]).copy_from_slice(header_bytes);\n\n Ok(buf)\n\n}\n\n\n", "file_path": "symcache/src/writer.rs", "rank": 26, "score": 89117.630052479 }, { "content": " pub src_col: u32,\n\n pub dst_line: u32,\n\n pub dst_col: u32,\n\n pub src_id: u32,\n\n pub name: SymbolicStr,\n\n pub src: SymbolicStr,\n\n pub function_name: SymbolicStr,\n\n}\n\n\n\n\n\nffi_fn! {\n\n /// Creates a source view from a given path.\n\n ///\n\n /// This shares the underlying memory and does not copy it if that is\n\n /// possible. Will ignore utf-8 decoding errors.\n\n unsafe fn symbolic_sourceview_from_bytes(bytes: *const c_char, len: usize)\n\n -> Result<*mut SymbolicSourceView>\n\n {\n\n let sv = SourceView::from_bytes(\n\n slice::from_raw_parts(bytes as *const _, len));\n", "file_path": "cabi/src/sourcemap.rs", "rank": 30, "score": 87187.7261890605 }, { "content": " line: u32, col: u32)\n\n -> Result<*mut SymbolicTokenMatch>\n\n {\n\n let sm = ssm as *const SourceMapView;\n\n convert_token_match((*sm).lookup_token(line, col))\n\n }\n\n}\n\n\n\nffi_fn! 
{\n\n /// Looks up a token.\n\n unsafe fn symbolic_sourcemapview_lookup_token_with_function_name(\n\n ssm: *const SymbolicSourceMapView, line: u32, col: u32,\n\n minified_name: *const SymbolicStr, ssv: *const SymbolicSourceView)\n\n -> Result<*mut SymbolicTokenMatch>\n\n {\n\n let sm = ssm as *const SourceMapView;\n\n let sv = ssv as *const SourceView<'static>;\n\n convert_token_match((*sm).lookup_token_with_function_name(\n\n line, col, (*minified_name).as_str(), mem::transmute(sv)))\n\n }\n", "file_path": "cabi/src/sourcemap.rs", "rank": 31, "score": 87185.8743684107 }, { "content": "use std::ptr;\n\nuse std::mem;\n\nuse std::slice;\n\nuse std::os::raw::c_char;\n\n\n\nuse symbolic_common::Result;\n\nuse symbolic_sourcemap::{SourceMapView, SourceView, TokenMatch};\n\n\n\nuse core::SymbolicStr;\n\n\n\n/// Represents a source view\n\npub struct SymbolicSourceView;\n\n\n\n/// Represents a sourcemap view\n\npub struct SymbolicSourceMapView;\n\n\n\n/// Represents a single token after lookup.\n\n#[repr(C)]\n\npub struct SymbolicTokenMatch {\n\n pub src_line: u32,\n", "file_path": "cabi/src/sourcemap.rs", "rank": 32, "score": 87185.10221099154 }, { "content": " -> Result<SymbolicStr>\n\n {\n\n let sm = ssm as *const SourceMapView;\n\n Ok(SymbolicStr::new((*sm)\n\n .get_source_name(index)\n\n .unwrap_or(\"\")))\n\n }\n\n}\n\n\n\nffi_fn! {\n\n /// Return the number of sources.\n\n unsafe fn symbolic_sourcemapview_get_source_count(ssm: *const SymbolicSourceMapView)\n\n -> Result<u32>\n\n {\n\n let sm = ssm as *const SourceMapView;\n\n Ok((*sm).get_source_count())\n\n }\n\n}\n\n\n\nffi_fn! {\n", "file_path": "cabi/src/sourcemap.rs", "rank": 35, "score": 87181.36210724387 }, { "content": " {\n\n let sv = ssv as *mut SourceView<'static>;\n\n Ok((*sv).line_count() as u32)\n\n }\n\n}\n\n\n\nffi_fn! {\n\n /// Loads a sourcemap from a JSON byte slice.\n\n unsafe fn symbolic_sourcemapview_from_json_slice(data: *const c_char,\n\n len: usize)\n\n -> Result<*mut SymbolicSourceMapView>\n\n {\n\n let bytes = slice::from_raw_parts(data as *const _, len);\n\n let sm = SourceMapView::from_json_slice(bytes)?;\n\n Ok(Box::into_raw(Box::new(sm)) as *mut SymbolicSourceMapView)\n\n }\n\n}\n\n\n\nffi_fn! {\n\n /// Frees a source map view\n\n unsafe fn symbolic_sourcemapview_free(smv: *const SymbolicSourceMapView) {\n\n if !smv.is_null() {\n\n let sm = smv as *mut SourceMapView;\n\n Box::from_raw(sm);\n\n }\n\n }\n\n}\n\n\n", "file_path": "cabi/src/sourcemap.rs", "rank": 37, "score": 87179.28976070842 }, { "content": "}\n\n\n\nffi_fn! {\n\n /// Return the sourceview for a given source.\n\n unsafe fn symbolic_sourcemapview_get_sourceview(ssm: *const SymbolicSourceMapView,\n\n index: u32)\n\n -> Result<*const SymbolicSourceView>\n\n {\n\n let sm = ssm as *const SourceMapView;\n\n Ok((*sm)\n\n .get_source_view(index)\n\n .map(|x| mem::transmute(x))\n\n .unwrap_or(ptr::null()))\n\n }\n\n}\n\n\n\nffi_fn! {\n\n /// Return the source name for an index.\n\n unsafe fn symbolic_sourcemapview_get_source_name(ssm: *const SymbolicSourceMapView,\n\n index: u32)\n", "file_path": "cabi/src/sourcemap.rs", "rank": 38, "score": 87178.8577499085 }, { "content": "//! Provides sourcemap support.\n\nextern crate sourcemap;\n\nextern crate symbolic_common;\n\n\n\nmod types;\n\n\n\npub use types::*;\n", "file_path": "sourcemap/src/lib.rs", "rank": 40, "score": 87177.01005750953 }, { "content": " Ok(SymbolicStr::new((*sv).as_str()))\n\n }\n\n}\n\n\n\nffi_fn! 
{\n\n /// Returns a specific line.\n\n unsafe fn symbolic_sourceview_get_line(ssv: *const SymbolicSourceView,\n\n idx: u32)\n\n -> Result<SymbolicStr>\n\n {\n\n let sv = ssv as *mut SourceView<'static>;\n\n let line = (*sv).get_line(idx).unwrap_or(\"\");\n\n Ok(SymbolicStr::new(line))\n\n }\n\n}\n\n\n\nffi_fn! {\n\n /// Returns the number of lines.\n\n unsafe fn symbolic_sourceview_get_line_count(ssv: *const SymbolicSourceView)\n\n -> Result<u32>\n", "file_path": "cabi/src/sourcemap.rs", "rank": 41, "score": 87176.58199026564 }, { "content": " /// Returns a specific token.\n\n unsafe fn symbolic_sourcemapview_get_token(ssm: *const SymbolicSourceMapView,\n\n idx: u32)\n\n -> Result<*mut SymbolicTokenMatch>\n\n {\n\n let sm = ssm as *const SourceMapView;\n\n convert_token_match((*sm).get_token(idx))\n\n }\n\n}\n\n\n\nffi_fn! {\n\n /// Returns the number of tokens.\n\n unsafe fn symbolic_sourcemapview_get_tokens(ssm: *const SymbolicSourceMapView)\n\n -> Result<u32>\n\n {\n\n let sm = ssm as *const SourceMapView;\n\n Ok((*sm).get_token_count())\n\n }\n\n}\n\n\n", "file_path": "cabi/src/sourcemap.rs", "rank": 42, "score": 87176.08794683077 }, { "content": " Ok(Box::into_raw(Box::new(sv)) as *mut SymbolicSourceView)\n\n }\n\n}\n\n\n\nffi_fn! {\n\n /// Frees a source view.\n\n unsafe fn symbolic_sourceview_free(ssv: *mut SymbolicSourceView) {\n\n if !ssv.is_null() {\n\n let sv = ssv as *mut SourceView<'static>;\n\n Box::from_raw(sv);\n\n }\n\n }\n\n}\n\n\n\nffi_fn! {\n\n /// Returns the underlying source (borrowed).\n\n unsafe fn symbolic_sourceview_as_str(ssv: *const SymbolicSourceView)\n\n -> Result<SymbolicStr>\n\n {\n\n let sv = ssv as *mut SourceView<'static>;\n", "file_path": "cabi/src/sourcemap.rs", "rank": 44, "score": 87172.276004375 }, { "content": "ffi_fn! {\n\n /// Free a token match\n\n unsafe fn symbolic_token_match_free(stm: *mut SymbolicTokenMatch) {\n\n if !stm.is_null() {\n\n let tm = stm as *mut SymbolicTokenMatch;\n\n (*tm).name.free();\n\n (*tm).src.free();\n\n (*tm).function_name.free();\n\n Box::from_raw(tm);\n\n }\n\n }\n\n}\n", "file_path": "cabi/src/sourcemap.rs", "rank": 45, "score": 87171.2485765224 }, { "content": "/// Gets the virtual memory address of this object's .text (code) section.\n\npub fn get_elf_vmaddr(elf: &elf::Elf) -> Result<u64> {\n\n // For non-PIC executables (e_type == ET_EXEC), the load address is\n\n // the start address of the first PT_LOAD segment. (ELF requires\n\n // the segments to be sorted by load address.) For PIC executables\n\n // and dynamic libraries (e_type == ET_DYN), this address will\n\n // normally be zero.\n\n for phdr in &elf.program_headers {\n\n if phdr.p_type == elf::program_header::PT_LOAD {\n\n return Ok(phdr.p_vaddr);\n\n }\n\n }\n\n\n\n Ok(0)\n\n}\n", "file_path": "debuginfo/src/elf.rs", "rank": 46, "score": 85886.60086017427 }, { "content": "/// Loads the virtual memory address of this object's __TEXT (code) segment.\n\npub fn get_mach_vmaddr(macho: &mach::MachO) -> Result<u64> {\n\n for seg in &macho.segments {\n\n if seg.name()? 
== \"__TEXT\" {\n\n return Ok(seg.vmaddr);\n\n }\n\n }\n\n\n\n Ok(0)\n\n}\n", "file_path": "debuginfo/src/mach.rs", "rank": 47, "score": 84375.48375901757 }, { "content": "/// Resolves the object identifier from Mach object load commands.\n\npub fn get_mach_id(macho: &mach::MachO) -> Option<ObjectId> {\n\n for cmd in &macho.load_commands {\n\n if let mach::load_command::CommandVariant::Uuid(ref uuid_cmd) = cmd.command {\n\n return Uuid::from_bytes(&uuid_cmd.uuid)\n\n .ok()\n\n .map(ObjectId::from_uuid);\n\n }\n\n }\n\n\n\n None\n\n}\n\n\n", "file_path": "debuginfo/src/mach.rs", "rank": 48, "score": 82928.09467682696 }, { "content": "fn try_demangle_objc(ident: &str, _opts: DemangleOptions) -> Result<Option<String>> {\n\n Ok(Some(ident.to_string()))\n\n}\n\n\n", "file_path": "demangle/src/lib.rs", "rank": 49, "score": 81676.07942949109 }, { "content": "fn try_demangle_swift(ident: &str, opts: DemangleOptions) -> Result<Option<String>> {\n\n let mut buf = vec![0 as c_char; 4096];\n\n let sym = match CString::new(ident) {\n\n Ok(sym) => sym,\n\n Err(_) => {\n\n return Err(ErrorKind::Internal(\"embedded null byte\").into());\n\n }\n\n };\n\n\n\n let simplified = match opts.format {\n\n DemangleFormat::Short => if opts.with_arguments {\n\n 1\n\n } else {\n\n 2\n\n },\n\n DemangleFormat::Full => 0,\n\n };\n\n\n\n unsafe {\n\n let rv = symbolic_demangle_swift(sym.as_ptr(), buf.as_mut_ptr(), buf.len(), simplified);\n\n if rv == 0 {\n\n return Ok(None);\n\n }\n\n\n\n let s = CStr::from_ptr(buf.as_ptr()).to_string_lossy();\n\n return Ok(Some(s.to_string()));\n\n }\n\n}\n\n\n", "file_path": "demangle/src/lib.rs", "rank": 50, "score": 81676.07942949109 }, { "content": "fn try_demangle_cpp(ident: &str, opts: DemangleOptions) -> Result<Option<String>> {\n\n let symbol = match Symbol::new(ident) {\n\n Ok(symbol) => symbol,\n\n Err(_) => return Ok(None),\n\n };\n\n\n\n let opts = CppOptions {\n\n no_params: !opts.with_arguments,\n\n };\n\n\n\n Ok(match symbol.demangle(&opts) {\n\n Ok(demangled) => Some(demangled),\n\n Err(_) => None,\n\n })\n\n}\n\n\n", "file_path": "demangle/src/lib.rs", "rank": 51, "score": 81676.07942949109 }, { "content": "fn try_demangle_objcpp(ident: &str, opts: DemangleOptions) -> Result<Option<String>> {\n\n if is_maybe_objc(ident) {\n\n try_demangle_objc(ident, opts)\n\n } else if is_maybe_cpp(ident) {\n\n try_demangle_cpp(ident, opts)\n\n } else {\n\n Ok(None)\n\n }\n\n}\n\n\n", "file_path": "demangle/src/lib.rs", "rank": 52, "score": 81676.07942949109 }, { "content": "fn try_demangle_rust(ident: &str, _opts: DemangleOptions) -> Result<Option<String>> {\n\n if let Ok(dm) = rustc_demangle::try_demangle(ident) {\n\n Ok(Some(format!(\"{:#}\", dm)))\n\n } else {\n\n Err(ErrorKind::BadSymbol(\"Not a valid Rust symbol\".into()).into())\n\n }\n\n}\n\n\n", "file_path": "demangle/src/lib.rs", "rank": 53, "score": 81676.07942949109 }, { "content": " SymbolicStr name;\n", "file_path": "cabi/include/symbolic.h", "rank": 54, "score": 81002.58412549333 }, { "content": "/// Given a writer and object, dumps the object into the writer.\n\n///\n\n/// In case a symcache is to be constructed from memory the `SymCache::from_object`\n\n/// method can be used instead.\n\n///\n\n/// This requires the writer to be seekable.\n\npub fn to_writer<W: Write + Seek>(mut w: W, obj: &Object) -> Result<()> {\n\n w.write_all(CacheFileHeader::default().as_bytes())?;\n\n let header = {\n\n let mut writer = SymCacheWriter::new(&mut w);\n\n writer.write_object(obj)?;\n\n writer.header\n\n };\n\n 
w.seek(SeekFrom::Start(0))?;\n\n w.write_all(header.as_bytes())?;\n\n Ok(())\n\n}\n\n\n", "file_path": "symcache/src/writer.rs", "rank": 55, "score": 80397.97895300397 }, { "content": " def name(self):\n\n \"\"\"File name of the loaded module's debug file\"\"\"\n", "file_path": "py/symbolic/minidump.py", "rank": 56, "score": 79963.34785880073 }, { "content": "/// Tries to obtain the object identifier of an ELF object.\n\n///\n\n/// As opposed to Mach-O, ELF does not specify a unique ID for object files in\n\n/// its header. Compilers and linkers usually add either `SHT_NOTE` sections or\n\n/// `PT_NOTE` program header elements for this purpose.\n\n///\n\n/// If neither of the above are present, this function will hash the first page\n\n/// of the `.text` section (program code) to synthesize a unique ID. This is\n\n/// likely not a valid UUID since was generated off a hash value.\n\n///\n\n/// If all of the above fails, the identifier will be `None`.\n\npub fn get_elf_id(elf: &elf::Elf, data: &[u8]) -> Option<ObjectId> {\n\n // Search for a GNU build identifier node in the program headers or the\n\n // build ID section. If errors occur during this process, fall through\n\n // silently to the next method.\n\n if let Some(identifier) = find_build_id(elf, data) {\n\n return create_elf_id(identifier, elf.little_endian);\n\n }\n\n\n\n // We were not able to locate the build ID, so fall back to hashing the\n\n // first page of the \".text\" (program code) section. This algorithm XORs\n\n // 16-byte chunks directly into a UUID buffer.\n\n if let Some(section) = find_elf_section(elf, data, elf::section_header::SHT_PROGBITS, \".text\") {\n\n let mut hash = [0; UUID_SIZE];\n\n for i in 0..cmp::min(section.data.len(), PAGE_SIZE) {\n\n hash[i % UUID_SIZE] ^= section.data[i];\n\n }\n\n\n\n return create_elf_id(&hash, elf.little_endian);\n\n }\n\n\n\n None\n\n}\n\n\n", "file_path": "debuginfo/src/elf.rs", "rank": 57, "score": 79261.51962471595 }, { "content": " def get_source_name(self, idx):\n\n \"\"\"Returns the name of the source at the given index.\"\"\"\n\n return decode_str(self._methodcall(\n", "file_path": "py/symbolic/sourcemap.py", "rank": 58, "score": 78128.31914687467 }, { "content": "class SourceMapView(RustObject):\n\n \"\"\"Gives access to a source map.\"\"\"\n\n __dealloc_func__ = lib.symbolic_sourcemapview_free\n\n\n\n @classmethod\n\n def from_json_bytes(cls, data):\n\n \"\"\"Constructs a sourcemap from bytes of JSON data.\"\"\"\n\n data = bytes(data)\n\n return cls._from_objptr(rustcall(\n\n lib.symbolic_sourcemapview_from_json_slice, data, len(data)))\n\n\n\n def lookup(self, line, col, minified_function_name=None,\n\n minified_source=None):\n\n \"\"\"Looks up a token from the sourcemap and optionally also\n\n resolves a function name from a stacktrace to the original one.\n\n \"\"\"\n\n if minified_function_name is None or minified_source is None:\n\n rv = self._methodcall(\n\n lib.symbolic_sourcemapview_lookup_token, line, col)\n\n else:\n\n if not isinstance(minified_source, SourceView):\n\n raise TypeError('source view required')\n\n rv = self._methodcall(\n\n lib.symbolic_sourcemapview_lookup_token_with_function_name,\n\n line, col, encode_str(minified_function_name),\n\n minified_source._objptr)\n\n if rv != ffi.NULL:\n\n try:\n\n return SourceMapTokenMatch._from_objptr(rv)\n\n finally:\n\n rustcall(lib.symbolic_token_match_free, rv)\n\n\n\n def get_sourceview(self, idx):\n\n \"\"\"Given a source index returns the source view that created it.\"\"\"\n\n rv = 
self._methodcall(lib.symbolic_sourcemapview_get_sourceview, idx)\n\n if rv != ffi.NULL:\n\n return SourceView._from_objptr(rv, shared=True)\n\n\n\n @property\n\n def source_count(self):\n\n \"\"\"Returns the number of sources.\"\"\"\n\n return self._methodcall(lib.symbolic_sourcemapview_get_source_count)\n\n\n\n def get_source_name(self, idx):\n\n \"\"\"Returns the name of the source at the given index.\"\"\"\n\n return decode_str(self._methodcall(\n\n lib.symbolic_sourcemapview_get_source_name, idx)) or None\n\n\n\n def iter_sources(self):\n\n \"\"\"Iterates over the sources in the file.\"\"\"\n\n for src_id in range_type(self.source_count):\n\n yield src_id, self.get_source_name(src_id)\n\n\n\n def __len__(self):\n\n return self._methodcall(lib.symbolic_sourcemapview_get_tokens)\n\n\n\n def __getitem__(self, idx):\n\n rv = self._methodcall(lib.symbolic_sourcemapview_get_token, idx)\n\n if rv == ffi.NULL:\n\n raise IndexError('Token out of range')\n\n try:\n\n return SourceMapTokenMatch._from_objptr(rv)\n\n finally:\n\n rustcall(lib.symbolic_token_match_free, rv)\n\n\n\n def __iter__(self):\n\n for x in range_type(len(self)):\n", "file_path": "py/symbolic/sourcemap.py", "rank": 59, "score": 77779.31670042977 }, { "content": " SymbolicStr src;\n", "file_path": "cabi/include/symbolic.h", "rank": 60, "score": 77519.45176773981 }, { "content": "SymbolicStr symbolic_sourcemapview_get_source_name(const SymbolicSourceMapView *ssm,\n", "file_path": "cabi/include/symbolic.h", "rank": 61, "score": 76668.0502247011 }, { "content": " uint32_t src_line;\n", "file_path": "cabi/include/symbolic.h", "rank": 62, "score": 76563.21414814236 }, { "content": " uint32_t src_id;\n", "file_path": "cabi/include/symbolic.h", "rank": 63, "score": 76563.21414814236 }, { "content": " uint32_t src_col;\n", "file_path": "cabi/include/symbolic.h", "rank": 64, "score": 76563.21414814236 }, { "content": "def test_unicode_names(get_sourceview, get_sourcemapview):\n\n source = get_sourceview('unicode.js')\n\n index = get_sourcemapview('unicode.min.map')\n\n verify_index(index, {\n\n 'unicode.js': source\n", "file_path": "py/tests/test_sourcemaps.py", "rank": 65, "score": 76324.64537638887 }, { "content": "SymbolicTokenMatch *symbolic_sourcemapview_lookup_token_with_function_name(const SymbolicSourceMapView *ssm,\n\n uint32_t line,\n\n uint32_t col,\n\n const SymbolicStr *minified_name,\n", "file_path": "cabi/include/symbolic.h", "rank": 66, "score": 74937.68202132087 }, { "content": "pub fn assert_demangle(\n\n language: Language,\n\n input: &str,\n\n with_args: Option<&str>,\n\n without_args: Option<&str>,\n\n) {\n\n let name = Name::with_language(input, language);\n\n if let Some(rv) = name.demangle(WITH_ARGS).unwrap() {\n\n assert_eq!(Some(rv.as_str()), with_args);\n\n } else {\n\n assert_eq!(None, with_args);\n\n }\n\n\n\n if let Some(rv) = name.demangle(WITHOUT_ARGS).unwrap() {\n\n assert_eq!(Some(rv.as_str()), without_args);\n\n } else {\n\n assert_eq!(None, without_args);\n\n }\n\n}\n", "file_path": "demangle/tests/utils/mod.rs", "rank": 67, "score": 68420.03218933773 }, { "content": "fn convert_token_match(token: Option<TokenMatch>) -> Result<*mut SymbolicTokenMatch> {\n\n Ok(token.map(|token| {\n\n Box::into_raw(Box::new(SymbolicTokenMatch {\n\n src_line: token.src_line,\n\n src_col: token.src_col,\n\n dst_line: token.dst_line,\n\n dst_col: token.dst_col,\n\n src_id: token.src_id,\n\n name: SymbolicStr::new(token.name.unwrap_or(\"\")),\n\n src: SymbolicStr::new(token.src.unwrap_or(\"\")),\n\n function_name: 
token.function_name\n\n .map(|name| SymbolicStr::from_string(name))\n\n .unwrap_or(Default::default()),\n\n }))\n\n }).unwrap_or(ptr::null_mut()))\n\n}\n\n\n\nffi_fn! {\n\n /// Looks up a token.\n\n unsafe fn symbolic_sourcemapview_lookup_token(ssm: *const SymbolicSourceMapView,\n", "file_path": "cabi/src/sourcemap.rs", "rank": 68, "score": 65563.56449190673 }, { "content": "fn assert_none(input: &str) {\n\n let name = Name::new(input);\n\n assert_eq!(name.detect_language(), None);\n\n}\n\n\n", "file_path": "demangle/tests/detection.rs", "rank": 69, "score": 64901.29020712768 }, { "content": "fn err(msg: &str) -> Box<Error> {\n\n Box::new(io::Error::new(io::ErrorKind::Other, msg))\n\n}\n\n\n", "file_path": "examples/symcache_debug.rs", "rank": 70, "score": 59054.72324954116 }, { "content": "fn assert_language(input: &str, lang: Language) {\n\n let name = Name::new(input);\n\n assert_eq!(name.detect_language(), Some(lang));\n\n}\n\n\n", "file_path": "demangle/tests/detection.rs", "rank": 71, "score": 58109.47207258709 }, { "content": "{\n\n \"version\": 3,\n\n \"file\": \"min.js\",\n\n \"sections\": [\n\n {\n\n \"offset\": {\n\n \"line\": 0,\n\n \"column\": 0\n\n },\n\n \"map\": {\n\n \"version\":3,\n\n \"sources\":[\"file1.js\"],\n\n \"names\":[\"add\",\"a\",\"b\"],\n\n \"mappings\":\"AAAA,QAASA,KAAIC,EAAGC,GACf,YACA,OAAOD,GAAIC\",\n\n \"file\":\"file1.min.js\"\n\n }\n\n },\n\n {\n\n \"offset\": {\n\n \"line\": 1,\n\n \"column\": 0\n\n },\n\n \"map\": {\n\n \"version\":3,\n\n \"sources\":[\"file2.js\"],\n\n \"names\":[\"multiply\",\"a\",\"b\",\"divide\",\"add\",\"c\",\"e\",\"Raven\",\"captureException\"],\n\n \"mappings\":\"AAAA,QAASA,UAASC,EAAGC,GACpB,YACA,OAAOD,GAAIC,EAEZ,QAASC,QAAOF,EAAGC,GAClB,YACA,KACC,MAAOF,UAASI,IAAIH,EAAGC,GAAID,EAAGC,GAAKG,EAClC,MAAOC,GACRC,MAAMC,iBAAiBF\",\n\n \"file\":\"file2.min.js\"\n\n }\n\n }\n\n ]\n\n}\n", "file_path": "py/tests/res/sourcemaps/indexed.sourcemap.js", "rank": 72, "score": 44262.77943422185 }, { "content": "\n\npub use symbolic_proguard as proguard;\n\npub use symbolic_sourcemap as sourcemap;\n\npub use symbolic_demangle as demangle;\n\npub use symbolic_minidump as minidump;\n\npub use symbolic_symcache as symcache;\n\npub use symbolic_debuginfo as debuginfo;\n\npub use symbolic_common as common;\n\n\n\npub use common::{Error, ErrorKind, Result, ResultExt};\n", "file_path": "src/lib.rs", "rank": 73, "score": 43034.95767918271 }, { "content": "//! Symbolic works with symbols and debug info.\n\n//!\n\n//! This library implements various utilities to help Sentry\n\n//! symbolicate stacktraces. It is built to also be used independently\n\n//! of Sentry and in parts.\n\n\n\n#[doc(hidden)]\n\npub extern crate symbolic_common;\n\n#[doc(hidden)]\n\npub extern crate symbolic_debuginfo;\n\n#[doc(hidden)]\n\npub extern crate symbolic_demangle;\n\n#[doc(hidden)]\n\npub extern crate symbolic_minidump;\n\n#[doc(hidden)]\n\npub extern crate symbolic_proguard;\n\n#[doc(hidden)]\n\npub extern crate symbolic_sourcemap;\n\n#[doc(hidden)]\n\npub extern crate symbolic_symcache;\n", "file_path": "src/lib.rs", "rank": 74, "score": 43030.0955556967 }, { "content": "#[test]\n\nfn clang_imported_struct() {\n\n assert_demangle(\n\n Language::Swift,\n\n \"_T08mangling17uses_clang_structySC6CGRectV1r_tF\",\n\n Some(\"uses_clang_struct(r:)\"),\n\n Some(\"uses_clang_struct\"),\n\n );\n\n}\n\n\n\n/// ```\n\n/// func uses_optionals(x: Int?) -> UnicodeScalar? 
{ return nil }\n\n/// ```\n", "file_path": "demangle/tests/swift.rs", "rank": 75, "score": 42267.94844576436 }, { "content": "from symbolic._lowlevel import lib, ffi\n\nfrom symbolic._compat import range_type\n\nfrom symbolic.utils import RustObject, rustcall, decode_str, encode_str, \\\n\n attached_refs\n\n\n\n\n\n__all__ = ['SourceView', 'SourceMapView', 'SourceMapTokenMatch']\n\n\n\n\n\nclass SourceMapTokenMatch(object):\n\n \"\"\"Represents a token matched or looked up from the index.\"\"\"\n\n\n\n def __init__(self, src_line, src_col, dst_line, dst_col,\n\n src_id=None, name=None, src=None, function_name=None):\n\n self.src_line = src_line\n\n self.src_col = src_col\n\n self.dst_line = dst_line\n\n self.dst_col = dst_col\n\n self.src_id = src_id\n\n self.name = name\n\n self.src = src\n\n self.function_name = function_name\n\n\n\n @classmethod\n\n def _from_objptr(cls, tm):\n\n rv = object.__new__(cls)\n\n rv.src_line = tm.src_line\n\n rv.src_col = tm.src_col\n\n rv.dst_line = tm.dst_line\n\n rv.dst_col = tm.dst_col\n\n rv.src_id = tm.src_id\n\n rv.name = decode_str(tm.name) or None\n\n rv.src = decode_str(tm.src) or None\n\n rv.function_name = decode_str(tm.function_name) or None\n\n return rv\n\n\n\n def __eq__(self, other):\n\n if self.__class__ is not other.__class__:\n\n return False\n\n return self.__dict__ == other.__dict__\n\n\n\n def __ne__(self, other):\n\n return not self.__eq__(other)\n\n\n\n def __repr__(self):\n\n return '<SourceMapTokenMatch %s:%d>' % (\n\n self.src,\n\n self.src_line,\n\n )\n\n\n\n\n\nclass SourceView(RustObject):\n\n \"\"\"Gives reasonably efficient access to javascript sourcecode.\"\"\"\n\n __dealloc_func__ = lib.symbolic_sourceview_free\n\n\n\n @classmethod\n\n def from_bytes(cls, data):\n\n \"\"\"Constructs a source view from bytes.\"\"\"\n\n data = bytes(data)\n\n rv = cls._from_objptr(rustcall(lib.symbolic_sourceview_from_bytes,\n\n data, len(data)))\n\n # we need to keep this reference alive or we crash. 
hard.\n\n attached_refs[rv] = data\n\n return rv\n\n\n\n def get_source(self):\n\n return decode_str(self._methodcall(lib.symbolic_sourceview_as_str))\n\n\n\n def __len__(self):\n\n return self._methodcall(lib.symbolic_sourceview_get_line_count)\n\n\n\n def __getitem__(self, idx):\n\n if not isinstance(idx, slice):\n\n if idx >= len(self):\n\n raise IndexError('No such line')\n\n return decode_str(self._methodcall(\n\n lib.symbolic_sourceview_get_line, idx))\n\n\n\n rv = []\n\n for idx in range_type(*idx.indices(len(self))):\n\n try:\n\n rv.append(self[idx])\n\n except IndexError:\n\n pass\n\n return rv\n\n\n\n def __iter__(self):\n\n for x in range_type(len(self)):\n\n yield self[x]\n\n\n\n\n\nclass SourceMapView(RustObject):\n\n \"\"\"Gives access to a source map.\"\"\"\n\n __dealloc_func__ = lib.symbolic_sourcemapview_free\n\n\n\n @classmethod\n\n def from_json_bytes(cls, data):\n\n \"\"\"Constructs a sourcemap from bytes of JSON data.\"\"\"\n\n data = bytes(data)\n\n return cls._from_objptr(rustcall(\n\n lib.symbolic_sourcemapview_from_json_slice, data, len(data)))\n\n\n\n def lookup(self, line, col, minified_function_name=None,\n\n minified_source=None):\n\n \"\"\"Looks up a token from the sourcemap and optionally also\n\n resolves a function name from a stacktrace to the original one.\n\n \"\"\"\n\n if minified_function_name is None or minified_source is None:\n\n rv = self._methodcall(\n\n lib.symbolic_sourcemapview_lookup_token, line, col)\n\n else:\n\n if not isinstance(minified_source, SourceView):\n\n raise TypeError('source view required')\n\n rv = self._methodcall(\n\n lib.symbolic_sourcemapview_lookup_token_with_function_name,\n\n line, col, encode_str(minified_function_name),\n\n minified_source._objptr)\n\n if rv != ffi.NULL:\n\n try:\n\n return SourceMapTokenMatch._from_objptr(rv)\n\n finally:\n\n rustcall(lib.symbolic_token_match_free, rv)\n\n\n\n def get_sourceview(self, idx):\n\n \"\"\"Given a source index returns the source view that created it.\"\"\"\n\n rv = self._methodcall(lib.symbolic_sourcemapview_get_sourceview, idx)\n\n if rv != ffi.NULL:\n\n return SourceView._from_objptr(rv, shared=True)\n\n\n\n @property\n\n def source_count(self):\n\n \"\"\"Returns the number of sources.\"\"\"\n\n return self._methodcall(lib.symbolic_sourcemapview_get_source_count)\n\n\n\n def get_source_name(self, idx):\n\n \"\"\"Returns the name of the source at the given index.\"\"\"\n\n return decode_str(self._methodcall(\n\n lib.symbolic_sourcemapview_get_source_name, idx)) or None\n\n\n\n def iter_sources(self):\n\n \"\"\"Iterates over the sources in the file.\"\"\"\n\n for src_id in range_type(self.source_count):\n\n yield src_id, self.get_source_name(src_id)\n\n\n\n def __len__(self):\n\n return self._methodcall(lib.symbolic_sourcemapview_get_tokens)\n\n\n\n def __getitem__(self, idx):\n\n rv = self._methodcall(lib.symbolic_sourcemapview_get_token, idx)\n\n if rv == ffi.NULL:\n\n raise IndexError('Token out of range')\n\n try:\n\n return SourceMapTokenMatch._from_objptr(rv)\n\n finally:\n\n rustcall(lib.symbolic_token_match_free, rv)\n\n\n\n def __iter__(self):\n\n for x in range_type(len(self)):\n\n yield self[x]\n", "file_path": "py/symbolic/sourcemap.py", "rank": 76, "score": 41876.01716773337 }, { "content": " .unwrap_or(None)\n\n .unwrap_or(\"?\")\n\n }\n\n\n\n /// The demangled function name.\n\n ///\n\n /// This demangles with default settings. 
For further control the symbolic\n\n /// demangle crate can be manually used on the symbol.\n\n pub fn function_name(&self) -> String {\n\n Name::with_language(self.symbol(), self.lang()).try_demangle(Default::default())\n\n }\n\n\n\n /// The language of the function\n\n pub fn lang(&self) -> Language {\n\n Language::from_u32(self.fun.lang as u32).unwrap_or(Language::Unknown)\n\n }\n\n\n\n /// The compilation dir of the function\n\n pub fn comp_dir(&self) -> &str {\n\n self.cache\n", "file_path": "symcache/src/cache.rs", "rank": 77, "score": 41857.32785729921 }, { "content": " pub fn symbol(&self) -> &'a str {\n\n self.symbol.unwrap_or(\"?\")\n\n }\n\n\n\n /// The demangled function name.\n\n ///\n\n /// This demangles with default settings. For further control the symbolic\n\n /// demangle crate can be manually used on the symbol.\n\n pub fn function_name(&self) -> String {\n\n Name::with_language(self.symbol(), self.lang()).try_demangle(Default::default())\n\n }\n\n\n\n /// The filename of the current line.\n\n pub fn filename(&self) -> &'a str {\n\n self.filename\n\n }\n\n\n\n /// The base dir of the current line.\n\n pub fn base_dir(&self) -> &str {\n\n self.base_dir\n", "file_path": "symcache/src/cache.rs", "rank": 78, "score": 41855.44243369222 }, { "content": "use symbolic_common::{Arch, ErrorKind};\n\n\n\nuse core::SymbolicStr;\n\n\n\n/// Mach-O architecture\n\n#[repr(C)]\n\npub struct SymbolicMachoArch {\n\n pub cputype: u32,\n\n pub cpusubtype: u32,\n\n}\n\n\n\n/// ELF architecture\n\n#[repr(C)]\n\npub struct SymbolicElfArch {\n\n pub machine: u16,\n\n}\n\n\n\nffi_fn! {\n\n /// Checks if an architecture is known.\n\n unsafe fn symbolic_arch_is_known(arch: *const SymbolicStr) -> Result<bool> {\n", "file_path": "cabi/src/common.rs", "rank": 79, "score": 41855.40867811374 }, { "content": " Other,\n\n}\n\n\n\nimpl ObjectClass {\n\n pub fn name(&self) -> &'static str {\n\n use ObjectClass::*;\n\n match *self {\n\n None => \"none\",\n\n Relocatable => \"rel\",\n\n Executable => \"exe\",\n\n Library => \"lib\",\n\n Dump => \"dump\",\n\n Debug => \"dbg\",\n\n Other => \"other\",\n\n }\n\n }\n\n\n\n pub fn parse(string: &str) -> Result<ObjectClass> {\n\n use ObjectClass::*;\n\n Ok(match string {\n", "file_path": "common/src/types.rs", "rank": 80, "score": 41854.5398123594 }, { "content": " }\n\n}\n\n\n\n/// OS and CPU information\n\n#[repr(C)]\n\npub struct SymbolicSystemInfo {\n\n pub os_name: SymbolicStr,\n\n pub os_version: SymbolicStr,\n\n pub os_build: SymbolicStr,\n\n pub cpu_family: SymbolicStr,\n\n pub cpu_info: SymbolicStr,\n\n pub cpu_count: u32,\n\n}\n\n\n\n/// State of a crashed process\n\n#[repr(C)]\n\npub struct SymbolicProcessState {\n\n pub requesting_thread: i32,\n\n pub timestamp: u64,\n\n pub crashed: bool,\n", "file_path": "cabi/src/minidump.rs", "rank": 81, "score": 41854.53037556004 }, { "content": " pub instr_addr: u64,\n\n pub line: u32,\n\n pub lang: SymbolicStr,\n\n pub symbol: SymbolicStr,\n\n pub filename: SymbolicStr,\n\n pub base_dir: SymbolicStr,\n\n pub comp_dir: SymbolicStr,\n\n}\n\n\n\n/// Represents a lookup result of one or more items.\n\n#[repr(C)]\n\npub struct SymbolicLookupResult {\n\n pub items: *mut SymbolicLineInfo,\n\n pub len: usize,\n\n}\n\n\n\n/// Represents an instruction info.\n\n#[repr(C)]\n\npub struct SymbolicInstructionInfo {\n\n /// The address of the instruction we want to use as a base.\n", "file_path": "cabi/src/symcache.rs", "rank": 82, "score": 41854.14224536293 }, { "content": "\n\npub use core::*;\n\npub use common::*;\n\npub 
use demangle::*;\n\npub use debuginfo::*;\n\npub use symcache::*;\n\npub use sourcemap::*;\n\npub use proguard::*;\n\npub use minidump::*;\n", "file_path": "cabi/src/lib.rs", "rank": 83, "score": 41853.92861041679 }, { "content": " write!(f, \"{}\", self.as_str())\n\n }\n\n}\n\n\n\n/// Represents the physical object file format.\n\n#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Copy, Clone)]\n\npub enum ObjectKind {\n\n Breakpad,\n\n Elf,\n\n MachO,\n\n}\n\n\n\nimpl ObjectKind {\n\n /// Returns the name of the object kind.\n\n pub fn name(&self) -> &'static str {\n\n use ObjectKind::*;\n\n match *self {\n\n Breakpad => \"breakpad\",\n\n Elf => \"elf\",\n\n MachO => \"macho\",\n", "file_path": "common/src/types.rs", "rank": 84, "score": 41853.57407952674 }, { "content": " pub fn uuid(&self) -> uuid::Uuid {\n\n self.mv.uuid()\n\n }\n\n\n\n /// Returns true if this file has line infos.\n\n pub fn has_line_info(&self) -> bool {\n\n self.mv.has_line_info()\n\n }\n\n\n\n /// Converts a dotted path.\n\n pub fn convert_dotted_path(&self, path: &str, lineno: u32) -> String {\n\n let mut iter = path.splitn(2, ':');\n\n let cls_name = iter.next().unwrap_or(\"\");\n\n let meth_name = iter.next();\n\n if let Some(cls) = self.mv.find_class(cls_name) {\n\n let class_name = cls.class_name();\n\n if let Some(meth_name) = meth_name {\n\n let lineno = if lineno == 0 {\n\n None\n\n } else {\n", "file_path": "proguard/src/types.rs", "rank": 85, "score": 41853.414949738646 }, { "content": "}\n\n\n\n/// Represents the kind of debug information inside an object.\n\n#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Copy, Clone)]\n\npub enum DebugKind {\n\n Dwarf,\n\n Breakpad,\n\n}\n\n\n\nimpl DebugKind {\n\n /// Returns the name of the object kind.\n\n pub fn name(&self) -> &'static str {\n\n use DebugKind::*;\n\n match *self {\n\n Dwarf => \"dwarf\",\n\n Breakpad => \"breakpad\",\n\n }\n\n }\n\n\n\n /// Parses the object kind from its name.\n", "file_path": "common/src/types.rs", "rank": 86, "score": 41853.35493250382 }, { "content": "use std::mem;\n\nuse std::ptr;\n\nuse std::str;\n\nuse std::slice;\n\nuse std::ffi::CStr;\n\nuse std::os::raw::c_char;\n\n\n\nuse uuid::Uuid;\n\n\n\nuse symbolic_common::ErrorKind;\n\n\n\nuse utils::{set_panic_hook, LAST_ERROR};\n\n\n\n/// Represents a string.\n\n#[repr(C)]\n\npub struct SymbolicStr {\n\n pub data: *mut c_char,\n\n pub len: usize,\n\n pub owned: bool,\n\n}\n", "file_path": "cabi/src/core.rs", "rank": 87, "score": 41853.313442436156 }, { "content": "use std::slice;\n\nuse std::os::raw::c_char;\n\nuse std::ffi::CStr;\n\n\n\nuse symbolic_common::ByteView;\n\nuse symbolic_proguard::ProguardMappingView;\n\n\n\nuse core::{SymbolicStr, SymbolicUuid};\n\n\n\n/// Represents a proguard mapping view\n\npub struct SymbolicProguardMappingView;\n\n\n\nffi_fn! 
{\n\n /// Creates a proguard mapping view from a path.\n\n unsafe fn symbolic_proguardmappingview_from_path(path: *const c_char)\n\n -> Result<*mut SymbolicProguardMappingView>\n\n {\n\n let bv = ByteView::from_path(CStr::from_ptr(path).to_str()?)?;\n\n let sv = ProguardMappingView::parse(bv)?;\n\n Ok(Box::into_raw(Box::new(sv)) as *mut SymbolicProguardMappingView)\n", "file_path": "cabi/src/proguard.rs", "rank": 88, "score": 41853.24721640502 }, { "content": "use symbolic_common::{Language, Name};\n\nuse symbolic_demangle::{Demangle, DemangleFormat, DemangleOptions};\n\n\n\nuse core::SymbolicStr;\n\n\n\nunsafe fn get_name(ident: *const SymbolicStr, lang: *const SymbolicStr) -> Name<'static> {\n\n if lang.is_null() {\n\n Name::new((*ident).as_str())\n\n } else {\n\n let lang = Language::parse((*lang).as_str());\n\n Name::with_language((*ident).as_str(), lang)\n\n }\n\n}\n\n\n\nffi_fn! {\n\n /// Demangles a given identifier.\n\n ///\n\n /// This demangles with the default behavior in symbolic. If no language\n\n /// is specified, it will be auto-detected.\n\n unsafe fn symbolic_demangle(\n", "file_path": "cabi/src/demangle.rs", "rank": 89, "score": 41852.975898228775 }, { "content": "use std::borrow::Cow;\n\nuse std::collections::{BTreeMap, HashSet};\n\nuse std::iter::{IntoIterator, Peekable};\n\nuse std::slice;\n\n\n\nuse goblin::mach;\n\nuse regex::Regex;\n\n\n\nuse symbolic_common::{ErrorKind, Name, Result};\n\n\n\nuse object::{Object, ObjectTarget};\n\n\n\nlazy_static! {\n\n static ref HIDDEN_SYMBOL_RE: Regex = Regex::new(\"__?hidden#\\\\d+_\").unwrap();\n\n}\n\n\n\n/// A single symbol\n\n#[derive(Debug)]\n\npub struct Symbol<'data> {\n\n name: Cow<'data, str>,\n", "file_path": "debuginfo/src/symbols.rs", "rank": 90, "score": 41852.85514591568 }, { "content": " \"rust\" => Rust,\n\n \"swift\" => Swift,\n\n _ => Unknown,\n\n }\n\n }\n\n\n\n /// Returns the name of the language\n\n pub fn name(&self) -> &'static str {\n\n use Language::*;\n\n match *self {\n\n Unknown | __Max => \"unknown\",\n\n C => \"c\",\n\n Cpp => \"cpp\",\n\n D => \"d\",\n\n Go => \"go\",\n\n ObjC => \"objc\",\n\n ObjCpp => \"objcpp\",\n\n Rust => \"rust\",\n\n Swift => \"swift\",\n\n }\n", "file_path": "common/src/types.rs", "rank": 91, "score": 41852.666019268894 }, { "content": "use std::ffi::CStr;\n\nuse std::mem;\n\nuse std::os::raw::c_char;\n\nuse std::slice;\n\nuse uuid::Uuid;\n\n\n\nuse symbolic_common::{Arch, ByteView};\n\nuse symbolic_debuginfo::Object;\n\nuse symbolic_minidump::{BreakpadAsciiCfiWriter, CallStack, CodeModule, CodeModuleId, FrameInfoMap,\n\n ProcessState, StackFrame, SystemInfo};\n\n\n\nuse core::{SymbolicStr, SymbolicUuid};\n\nuse debuginfo::SymbolicObject;\n\n\n\n/// Contains stack frame information (CFI) for images\n\npub struct SymbolicFrameInfoMap;\n\n\n\n/// Indicates how well the instruction pointer derived during stack walking is trusted\n\n#[repr(u32)]\n\npub enum SymbolicFrameTrust {\n", "file_path": "cabi/src/minidump.rs", "rank": 92, "score": 41852.46690799042 }, { "content": "use std::cmp::Ordering;\n\nuse std::fmt;\n\nuse std::hash::{Hash, Hasher};\n\nuse std::mem;\n\nuse std::slice;\n\nuse std::marker::PhantomData;\n\n\n\nuse uuid::Uuid;\n\n\n\nuse symbolic_common::{ErrorKind, Result};\n\n\n\n#[repr(C, packed)]\n\n#[derive(Default)]\n\npub struct Seg<T, L = u32> {\n\n pub offset: u32,\n\n pub len: L,\n\n _ty: PhantomData<T>,\n\n}\n\n\n\nimpl<T, L> Seg<T, L> {\n", "file_path": "symcache/src/types.rs", "rank": 93, "score": 41852.09515492411 }, { "content": "use std::ptr;\n\nuse 
std::os::raw::c_char;\n\nuse std::ffi::CStr;\n\n\n\nuse symbolic_common::ByteView;\n\nuse symbolic_debuginfo::{FatObject, Object, ObjectId};\n\n\n\nuse core::{SymbolicStr, SymbolicUuid};\n\n\n\nuse uuid::Uuid;\n\n\n\n/// A potential multi arch object.\n\npub struct SymbolicFatObject;\n\n\n\n/// A single arch object.\n\npub struct SymbolicObject;\n\n\n\n/// Unique identifier for Objects.\n\n#[repr(C)]\n\npub struct SymbolicObjectId {\n", "file_path": "cabi/src/debuginfo.rs", "rank": 94, "score": 41852.04200184651 }, { "content": "use std::mem;\n\nuse std::slice;\n\nuse std::os::raw::c_char;\n\nuse std::ffi::CStr;\n\n\n\nuse symbolic_debuginfo::Object;\n\nuse symbolic_symcache::{SymCache, InstructionInfo, SYMCACHE_LATEST_VERSION};\n\nuse symbolic_common::{ByteView, Arch};\n\n\n\nuse core::{SymbolicStr, SymbolicUuid};\n\nuse debuginfo::SymbolicObject;\n\n\n\n/// Represents a symbolic sym cache.\n\npub struct SymbolicSymCache;\n\n\n\n/// Represents a single symbol after lookup.\n\n#[repr(C)]\n\npub struct SymbolicLineInfo {\n\n pub sym_addr: u64,\n\n pub line_addr: u64,\n", "file_path": "cabi/src/symcache.rs", "rank": 95, "score": 41851.87965698062 }, { "content": " \"arm\" => Arm,\n\n \"arm64\" => Arm64,\n\n \"ppc\" => Ppc,\n\n \"ppc64\" => Ppc64,\n\n _ => Unknown,\n\n }\n\n }\n\n\n\n /// Returns the breakpad name for this Arch\n\n pub fn to_breakpad(&self) -> &'static str {\n\n use CpuFamily::*;\n\n match self.cpu_family() {\n\n Intel32 => \"x86\",\n\n // Use the breakpad symbol constant here\n\n Intel64 => \"x86_64\",\n\n Arm32 => \"arm\",\n\n Arm64 => \"arm64\",\n\n Ppc32 => \"ppc\",\n\n Ppc64 => \"ppc64\",\n\n Unknown => \"unknown\",\n", "file_path": "common/src/types.rs", "rank": 96, "score": 41851.84092278529 }, { "content": "\n\n/// The latest version of the file format.\n\npub const SYMCACHE_LATEST_VERSION: u32 = 1;\n\n\n\n/// Information on a matched source line.\n\npub struct LineInfo<'a> {\n\n cache: &'a SymCache<'a>,\n\n sym_addr: u64,\n\n line_addr: u64,\n\n instr_addr: u64,\n\n line: u32,\n\n lang: Language,\n\n symbol: Option<&'a str>,\n\n filename: &'a str,\n\n base_dir: &'a str,\n\n comp_dir: &'a str,\n\n}\n\n\n\n/// An abstraction around a symbolication cache file.\n\npub struct SymCache<'a> {\n", "file_path": "symcache/src/cache.rs", "rank": 97, "score": 41851.79248205947 }, { "content": " /// Returns the string representation of this symbol\n\n pub fn as_str(&self) -> &str {\n\n self.name().as_ref()\n\n }\n\n}\n\n\n\nimpl<'data> Into<Name<'data>> for Symbol<'data> {\n\n fn into(self) -> Name<'data> {\n\n Name::new(self.name)\n\n }\n\n}\n\n\n\nimpl<'data> Into<Cow<'data, str>> for Symbol<'data> {\n\n fn into(self) -> Cow<'data, str> {\n\n self.name\n\n }\n\n}\n\n\n\nimpl<'data> Into<String> for Symbol<'data> {\n\n fn into(self) -> String {\n\n self.name.into()\n\n }\n\n}\n\n\n\n/// Internal wrapper around certain symbol table implementations\n\n#[derive(Clone, Copy, Debug)]\n", "file_path": "debuginfo/src/symbols.rs", "rank": 98, "score": 41851.7212402808 }, { "content": " _ => Some(lang),\n\n };\n\n\n\n Name {\n\n string: string.into(),\n\n lang: lang_opt,\n\n }\n\n }\n\n\n\n /// The raw, mangled string of the symbol\n\n pub fn as_str(&self) -> &str {\n\n &self.string\n\n }\n\n\n\n /// The language of the mangled symbol\n\n pub fn language(&self) -> Option<Language> {\n\n self.lang\n\n }\n\n}\n\n\n", "file_path": "common/src/types.rs", "rank": 99, "score": 12.881278567720774 } ]
Rust
arch/x86/cpu/idt.rs
mvdnes/element76
87e0206b022bda177dd1ca8fb6e2989429c2c8bf
/* * This file contains code for the Interrupt Descriptor Table * * See: http://www.jamesmolloy.co.uk/tutorial_html/4.-The%20GDT%20and%20IDT.html */ use core::marker::Copy; use core::clone::Clone; const IDT_COUNT: usize = 256; static mut IDT_ENTRIES: [IDTEntry; IDT_COUNT] = [IDTEntry { base_low: 0, selector: 0, zero: 0, flags: 0, base_high: 0 }; IDT_COUNT]; static mut IDT_PTR: IDTPointer = IDTPointer { limit: 0, base: 0 }; #[repr(packed)] struct IDTEntry { base_low: u16, selector: u16, zero: u8, flags: u8, base_high: u16 } impl Copy for IDTEntry {} impl Clone for IDTEntry { fn clone(&self) -> Self { *self } } #[repr(packed)] struct IDTPointer { limit: u16, base: usize } pub fn init_idt() { unsafe { IDT_PTR.limit = (::core::mem::size_of::<IDTEntry>() * IDT_COUNT - 1) as u16; IDT_PTR.base = &IDT_ENTRIES as *const [IDTEntry; IDT_COUNT] as usize; idt_set_gate( 0, isr0 as usize, 0x08, 0x8E); idt_set_gate( 1, isr1 as usize, 0x08, 0x8E); idt_set_gate( 2, isr2 as usize, 0x08, 0x8E); idt_set_gate( 3, isr3 as usize, 0x08, 0x8E); idt_set_gate( 4, isr4 as usize, 0x08, 0x8E); idt_set_gate( 5, isr5 as usize, 0x08, 0x8E); idt_set_gate( 6, isr6 as usize, 0x08, 0x8E); idt_set_gate( 7, isr7 as usize, 0x08, 0x8E); idt_set_gate( 8, isr8 as usize, 0x08, 0x8E); idt_set_gate( 9, isr9 as usize, 0x08, 0x8E); idt_set_gate(10, isr10 as usize, 0x08, 0x8E); idt_set_gate(11, isr11 as usize, 0x08, 0x8E); idt_set_gate(12, isr12 as usize, 0x08, 0x8E); idt_set_gate(13, isr13 as usize, 0x08, 0x8E); idt_set_gate(14, isr14 as usize, 0x08, 0x8E); idt_set_gate(15, isr15 as usize, 0x08, 0x8E); idt_set_gate(16, isr16 as usize, 0x08, 0x8E); idt_set_gate(17, isr17 as usize, 0x08, 0x8E); idt_set_gate(18, isr18 as usize, 0x08, 0x8E); idt_set_gate(19, isr19 as usize, 0x08, 0x8E); idt_set_gate(20, isr20 as usize, 0x08, 0x8E); idt_set_gate(21, isr21 as usize, 0x08, 0x8E); idt_set_gate(22, isr22 as usize, 0x08, 0x8E); idt_set_gate(23, isr23 as usize, 0x08, 0x8E); idt_set_gate(24, isr24 as usize, 0x08, 0x8E); idt_set_gate(25, isr25 as usize, 0x08, 0x8E); idt_set_gate(26, isr26 as usize, 0x08, 0x8E); idt_set_gate(27, isr27 as usize, 0x08, 0x8E); idt_set_gate(28, isr28 as usize, 0x08, 0x8E); idt_set_gate(29, isr29 as usize, 0x08, 0x8E); idt_set_gate(30, isr30 as usize, 0x08, 0x8E); idt_set_gate(31, isr31 as usize, 0x08, 0x8E); idt_set_gate(32, irq0 as usize, 0x08, 0x8E); idt_set_gate(33, irq1 as usize, 0x08, 0x8E); idt_set_gate(34, irq2 as usize, 0x08, 0x8E); idt_set_gate(35, irq3 as usize, 0x08, 0x8E); idt_set_gate(36, irq4 as usize, 0x08, 0x8E); idt_set_gate(37, irq5 as usize, 0x08, 0x8E); idt_set_gate(38, irq6 as usize, 0x08, 0x8E); idt_set_gate(39, irq7 as usize, 0x08, 0x8E); idt_set_gate(40, irq8 as usize, 0x08, 0x8E); idt_set_gate(41, irq9 as usize, 0x08, 0x8E); idt_set_gate(42, irq10 as usize, 0x08, 0x8E); idt_set_gate(43, irq11 as usize, 0x08, 0x8E); idt_set_gate(44, irq12 as usize, 0x08, 0x8E); idt_set_gate(45, irq13 as usize, 0x08, 0x8E); idt_set_gate(46, irq14 as usize, 0x08, 0x8E); idt_set_gate(47, irq15 as usize, 0x08, 0x8E); idt_flush(&IDT_PTR as *const IDTPointer as u32); } } unsafe fn idt_set_gate(n: usize, base: usize, sel: u16, flags: u8) { IDT_ENTRIES[n].base_low = (base & 0xFFFF) as u16; IDT_ENTRIES[n].base_high = ((base >> 16) & 0xFFFF) as u16; IDT_ENTRIES[n].selector = sel; IDT_ENTRIES[n].zero = 0; IDT_ENTRIES[n].flags = (flags & 0b11100000) | 0b01110; } extern { fn idt_flush(pointer: u32); fn isr0 (); fn isr1 (); fn isr2 (); fn isr3 (); fn isr4 (); fn isr5 (); fn isr6 (); fn isr7 (); fn isr8 (); fn isr9 (); fn 
isr10(); fn isr11(); fn isr12(); fn isr13(); fn isr14(); fn isr15(); fn isr16(); fn isr17(); fn isr18(); fn isr19(); fn isr20(); fn isr21(); fn isr22(); fn isr23(); fn isr24(); fn isr25(); fn isr26(); fn isr27(); fn isr28(); fn isr29(); fn isr30(); fn isr31(); fn irq0 (); fn irq1 (); fn irq2 (); fn irq3 (); fn irq4 (); fn irq5 (); fn irq6 (); fn irq7 (); fn irq8 (); fn irq9 (); fn irq10(); fn irq11(); fn irq12(); fn irq13(); fn irq14(); fn irq15(); }
/* * This file contains code for the Interrupt Descriptor Table * * See: http://www.jamesmolloy.co.uk/tutorial_html/4.-The%20GDT%20and%20IDT.html */ use core::marker::Copy; use core::clone::Clone; const IDT_COUNT: usize = 256; static mut IDT_ENTRIES: [IDTEntry; IDT_COUNT] = [IDTEntry { base_low: 0, selector: 0, zero: 0, flags: 0, base_high: 0 }; IDT_COUNT]; static mut IDT_PTR: IDTPointer = IDTPointer { limit: 0, base: 0 }; #[repr(packed)] struct IDTEntry { base_low: u16, selector: u16, zero: u8, flags: u8, base_high: u16 } impl Copy for IDTEntry {} impl Clone for IDTEntry { fn clone(&self) -> Self { *self } } #[repr(packed)] struct IDTPointer { limit: u16, base: usize } pub fn init_idt() { unsafe { IDT_PTR.limit = (::core::mem::size_of::<IDTEntry>() * IDT_COUNT - 1) as u16; IDT_PTR.base = &IDT_ENTRIES as *const [IDTEntry; IDT_COUNT] as usize; idt_set_gate( 0, isr0 as usize, 0x08, 0x8E); idt_set_gate( 1, isr1 as usize, 0x08, 0x8E); idt_set_gate( 2, isr2 as usize, 0x08, 0x8E); idt_set_gate( 3, isr3 as usize, 0x08, 0x8E); idt_set_gate( 4, isr4 as usize, 0x08, 0x8E); idt_set_gate( 5, isr5 as usize, 0x08, 0x8E); idt_set_gate( 6, isr6 as usize, 0x08, 0x8E); idt_set_gate( 7, isr7 as usize, 0x08, 0x8E); idt_set_gate( 8, isr8 as usize, 0x08, 0x8E); idt_set_gate( 9, isr9 as usize, 0x08, 0x8E); idt_set_gate(10, isr10 as usize, 0x08, 0x8E); idt_set_gate(11, isr11 as usize, 0x08, 0x8E); idt_set_gate(12, isr12 as usize, 0x08, 0x8E); idt_set_gate(13, isr13 as usize, 0x08, 0x8E); idt_set_gate(14, isr14 as usize, 0x08, 0x8E); idt_set_gate(15, isr15 as usize, 0x08, 0x8E); idt_set_gate(16, isr16 as usize, 0x08, 0x8E); idt_set_gate(17, isr17 as usize, 0x08, 0x8E); idt_set_gate(18, isr18 as usize, 0x08, 0x8E); idt_set_gate(19, isr19 as usize, 0x0
, 0x08, 0x8E); idt_set_gate(44, irq12 as usize, 0x08, 0x8E); idt_set_gate(45, irq13 as usize, 0x08, 0x8E); idt_set_gate(46, irq14 as usize, 0x08, 0x8E); idt_set_gate(47, irq15 as usize, 0x08, 0x8E); idt_flush(&IDT_PTR as *const IDTPointer as u32); } } unsafe fn idt_set_gate(n: usize, base: usize, sel: u16, flags: u8) { IDT_ENTRIES[n].base_low = (base & 0xFFFF) as u16; IDT_ENTRIES[n].base_high = ((base >> 16) & 0xFFFF) as u16; IDT_ENTRIES[n].selector = sel; IDT_ENTRIES[n].zero = 0; IDT_ENTRIES[n].flags = (flags & 0b11100000) | 0b01110; } extern { fn idt_flush(pointer: u32); fn isr0 (); fn isr1 (); fn isr2 (); fn isr3 (); fn isr4 (); fn isr5 (); fn isr6 (); fn isr7 (); fn isr8 (); fn isr9 (); fn isr10(); fn isr11(); fn isr12(); fn isr13(); fn isr14(); fn isr15(); fn isr16(); fn isr17(); fn isr18(); fn isr19(); fn isr20(); fn isr21(); fn isr22(); fn isr23(); fn isr24(); fn isr25(); fn isr26(); fn isr27(); fn isr28(); fn isr29(); fn isr30(); fn isr31(); fn irq0 (); fn irq1 (); fn irq2 (); fn irq3 (); fn irq4 (); fn irq5 (); fn irq6 (); fn irq7 (); fn irq8 (); fn irq9 (); fn irq10(); fn irq11(); fn irq12(); fn irq13(); fn irq14(); fn irq15(); }
8, 0x8E); idt_set_gate(20, isr20 as usize, 0x08, 0x8E); idt_set_gate(21, isr21 as usize, 0x08, 0x8E); idt_set_gate(22, isr22 as usize, 0x08, 0x8E); idt_set_gate(23, isr23 as usize, 0x08, 0x8E); idt_set_gate(24, isr24 as usize, 0x08, 0x8E); idt_set_gate(25, isr25 as usize, 0x08, 0x8E); idt_set_gate(26, isr26 as usize, 0x08, 0x8E); idt_set_gate(27, isr27 as usize, 0x08, 0x8E); idt_set_gate(28, isr28 as usize, 0x08, 0x8E); idt_set_gate(29, isr29 as usize, 0x08, 0x8E); idt_set_gate(30, isr30 as usize, 0x08, 0x8E); idt_set_gate(31, isr31 as usize, 0x08, 0x8E); idt_set_gate(32, irq0 as usize, 0x08, 0x8E); idt_set_gate(33, irq1 as usize, 0x08, 0x8E); idt_set_gate(34, irq2 as usize, 0x08, 0x8E); idt_set_gate(35, irq3 as usize, 0x08, 0x8E); idt_set_gate(36, irq4 as usize, 0x08, 0x8E); idt_set_gate(37, irq5 as usize, 0x08, 0x8E); idt_set_gate(38, irq6 as usize, 0x08, 0x8E); idt_set_gate(39, irq7 as usize, 0x08, 0x8E); idt_set_gate(40, irq8 as usize, 0x08, 0x8E); idt_set_gate(41, irq9 as usize, 0x08, 0x8E); idt_set_gate(42, irq10 as usize, 0x08, 0x8E); idt_set_gate(43, irq11 as usize
random
[ { "content": "fn parse_keycode(code: u8) -> KeyboardKey\n\n{\n\n\tmatch code\n\n\t{\n\n\t\t1 => KeyboardKey::Escape,\n\n\t\t2 => KeyboardKey::Printable('1', '!'),\n\n\t\t3 => KeyboardKey::Printable('2', '@'),\n\n\t\t4 => KeyboardKey::Printable('3', '#'),\n\n\t\t5 => KeyboardKey::Printable('4', '$'),\n\n\t\t6 => KeyboardKey::Printable('5', '%'),\n\n\t\t7 => KeyboardKey::Printable('6', '^'),\n\n\t\t8 => KeyboardKey::Printable('7', '&'),\n\n\t\t9 => KeyboardKey::Printable('8', '*'),\n\n\t\t10 => KeyboardKey::Printable('9', '('),\n\n\t\t11 => KeyboardKey::Printable('0', ')'),\n\n\t\t12 => KeyboardKey::Printable('-', '_'),\n\n\t\t13 => KeyboardKey::Printable('=', '+'),\n\n\t\t14 => KeyboardKey::Backspace,\n\n\t\t15 => KeyboardKey::Tab,\n\n\t\t16 => KeyboardKey::Printable('q', 'Q'),\n", "file_path": "kernel/keyboard.rs", "rank": 0, "score": 99366.95718499557 }, { "content": "pub fn handle_interrupt(interrupt_number: u32, error_code: u32)\n\n{\n\n\tmatch interrupt_number\n\n\t{\n\n\t\t0x20 => timer::handle_irq(),\n\n\t\t0x21 => keyboard::keyboard_irq(),\n\n\t\t_ => unknown_irq(interrupt_number, error_code),\n\n\t};\n\n}\n\n\n", "file_path": "kernel/interrupts/mod.rs", "rank": 1, "score": 92100.70417915254 }, { "content": "pub fn remap_pic(offset: u8)\n\n{\n\n\tunsafe\n\n\t{\n\n\t\t// Initialize\n\n\t\tio::outport(PIC1, ICW1);\n\n\t\tio::io_wait();\n\n\t\tio::outport(PIC2, ICW1);\n\n\t\tio::io_wait();\n\n\n\n\t\t// Set offset\n\n\t\tio::outport(PIC1_DATA, offset);\n\n\t\tio::io_wait();\n\n\t\tio::outport(PIC2_DATA, offset + 8);\n\n\t\tio::io_wait();\n\n\n\n\t\t// Connect Master to slave\n\n\t\tio::outport(PIC1_DATA, 4);\n\n\t\tio::io_wait();\n\n\t\tio::outport(PIC2_DATA, 2);\n", "file_path": "arch/x86/cpu/pic.rs", "rank": 2, "score": 88887.87730029189 }, { "content": "pub fn acknowledge_irq(irq: u8)\n\n{\n\n\tif irq >= 8\n\n\t{\n\n\t\tunsafe { io::outport(PIC2, IRQ_ACK); }\n\n\t}\n\n\tunsafe { io::outport(PIC1, IRQ_ACK); }\n\n}\n", "file_path": "arch/x86/cpu/pic.rs", "rank": 3, "score": 88887.87730029189 }, { "content": "pub fn handle_irq()\n\n{\n\n\tlet mut printer = StdioWriter { xpos: 0, ypos: 10, fg: Color::White, bg: Color::Black };\n\n\tlet mytick = unsafe\n\n\t{\n\n\t\tTICK = (TICK + 1) % 50;\n\n\t\tTICK\n\n\t};\n\n\tif mytick % 25 == 0\n\n\t{\n\n\t\tprinter.print_screen(if mytick < 25 { \"tick\" } else { \"tock\" });\n\n\t}\n\n}\n", "file_path": "kernel/interrupts/timer.rs", "rank": 4, "score": 82034.8816536862 }, { "content": "pub fn keyboard_irq()\n\n{\n\n\tlet mut printer = unsafe { IRQPRINTER };\n\n\tmatch kernel::keyboard::get_key()\n\n\t{\n\n\t\tKeyboardAction::KeyUp(KeyboardKey::Escape) => { platform::cpu::request_int3(); },\n\n\t\tKeyboardAction::KeyUp(KeyboardKey::Shift) => unsafe { SHIFT -= 1; },\n\n\t\tKeyboardAction::KeyDown(key) => match key\n\n\t\t{\n\n\t\t\tKeyboardKey::Printable(c, d) => { printer.print_char(if unsafe {SHIFT == 0} {c} else {d}); },\n\n\t\t\tKeyboardKey::Backspace => { printer.backspace(); },\n\n\t\t\tKeyboardKey::Return => { printer.crlf(); },\n\n\t\t\tKeyboardKey::Shift => unsafe { SHIFT += 1; },\n\n\t\t\tKeyboardKey::Tab => { printer.tab(); },\n\n\t\t\tKeyboardKey::Unknown(c) => { printer.print_hex(c as u32, 8); printer.print_char(' '); },\n\n\t\t\t_ => {},\n\n\t\t},\n\n\t\t_ => {},\n\n\t};\n\n\tunsafe { IRQPRINTER = printer; };\n\n}\n", "file_path": "kernel/interrupts/keyboard.rs", "rank": 5, "score": 82034.8816536862 }, { "content": "pub fn 
enable_interrupts()\n\n{\n\n\tpic::enable_irq(0);\n\n\tpic::enable_irq(1);\n\n\tunsafe\n\n\t{\n\n\t\tllvm_asm!(\"sti\");\n\n\t}\n\n}\n", "file_path": "arch/x86/cpu/mod.rs", "rank": 7, "score": 79589.51191292069 }, { "content": "pub fn putc(xpos: u32, ypos: u32, value: u8)\n\n{\n\n\tif xpos >= COLS || ypos >= ROWS { return }\n\n\tunsafe\n\n\t{\n\n\t\t*((0xb8000 + ypos * COLS * 2 + xpos * 2) as *mut u8) = value;\n\n\t}\n\n}\n\n\n", "file_path": "arch/x86/vga/mod.rs", "rank": 8, "score": 75255.88086840865 }, { "content": "fn unknown_irq(interrupt_number: u32, error_code: u32)\n\n{\n\n\tlet mut printer = StdioWriter::new();\n\n\tprinter.fg = Color::White;\n\n\tprinter.bg = Color::Black;\n\n\tprinter.go_to(10, 5);\n\n\tprinter.print_screen(\"Interrupt received\");\n\n\tprinter.fg = Color::Black;\n\n\tprinter.bg = Color::White;\n\n\tprinter.go_to(10, 6);\n\n\tprinter.print_hex(interrupt_number, 32);\n\n\tprinter.go_to(10, 7);\n\n\tprinter.print_bin(error_code, 32);\n\n}\n", "file_path": "kernel/interrupts/mod.rs", "rank": 9, "score": 65726.98761670127 }, { "content": "#[no_mangle]\n\npub fn entry() -> !\n\n{\n\n platform::cpu::setup();\n\n platform::mmu::setup();\n\n platform::cpu::enable_interrupts();\n\n main();\n\n loop { platform::cpu::idle(); }\n\n}\n\n\n", "file_path": "kernel/main.rs", "rank": 10, "score": 64658.020689282945 }, { "content": "pub fn idle()\n\n{\n\n\tunsafe\n\n\t{\n\n\t\tllvm_asm!(\"hlt\");\n\n\t}\n\n}\n\n\n", "file_path": "arch/x86/cpu/mod.rs", "rank": 11, "score": 61052.38093054874 }, { "content": "pub fn setup()\n\n{\n\n features::enable_sse();\n\n\tgdt::init_gdt();\n\n\tpic::remap_pic(IRQ_OFFSET);\n\n\tidt::init_idt();\n\n\ttimer::set_interval(50);\n\n}\n\n\n\n#[no_mangle]\n\npub extern \"C\" fn isr_handler(args: &InterruptArguments, _fpu_sse_data: [u8; 512])\n\n{\n\n\tkernel::interrupts::handle_interrupt(args.interrupt_number, args.error_code);\n\n\n\n\t// Ack IRQ\n\n\tif args.interrupt_number >= (IRQ_OFFSET as u32)\n\n\t{\n\n\t\tpic::acknowledge_irq(args.interrupt_number as u8 - IRQ_OFFSET);\n\n\t}\n\n}\n\n\n", "file_path": "arch/x86/cpu/mod.rs", "rank": 12, "score": 61052.38093054874 }, { "content": "pub fn setup()\n\n{\n\n\tunsafe\n\n\t{\n\n\t\tPLACEMENT_ADDRESS = end;\n\n\t}\n\n}\n", "file_path": "arch/x86/mmu/mod.rs", "rank": 13, "score": 61052.38093054874 }, { "content": "pub fn halt() -> !\n\n{\n\n\tloop\n\n\t{\n\n\t\tunsafe\n\n\t\t{\n\n\t\t\tllvm_asm!(\"cli\");\n\n\t\t\tllvm_asm!(\"hlt\");\n\n\t\t}\n\n\t}\n\n}\n\n\n", "file_path": "arch/x86/cpu/mod.rs", "rank": 14, "score": 61052.38093054874 }, { "content": "pub fn enable_sse() {\n\n unsafe {\n\n llvm_asm!(\"mov %cr0, %eax\n\n andw $$0xFFFB, %ax\n\n orw $$0x2, %ax\n\n mov %eax, %cr0\n\n mov %cr4, %eax\n\n orw $$(3 << 9), %ax\n\n mov %eax, %cr4\"\n\n :\n\n :\n\n : \"eax\", \"cr0\", \"cr4\"\n\n : \"volatile\");\n\n }\n\n}\n\n\n", "file_path": "arch/x86/cpu/features.rs", "rank": 15, "score": 59472.70191564098 }, { "content": "pub fn io_wait()\n\n{\n\n\tunsafe { outport(0x80, 0); };\n\n}\n", "file_path": "arch/x86/io/mod.rs", "rank": 16, "score": 59472.70191564098 }, { "content": "pub fn request_int3()\n\n{\n\n\tunsafe\n\n\t{\n\n\t\tllvm_asm!(\"int $$0x03\");\n\n\t}\n\n\tpic::disable_irq(0);\n\n}\n\n\n", "file_path": "arch/x86/cpu/mod.rs", "rank": 17, "score": 59472.70191564098 }, { "content": "pub fn init_gdt()\n\n{\n\n\tunsafe\n\n\t{\n\n\t\tGDT_PTR.limit = (::core::mem::size_of::<GDTEntry>() * GDT_COUNT - 1) as u16;\n\n\t\tGDT_PTR.base = &GDT_ENTRIES as *const [GDTEntry; GDT_COUNT] as 
usize;\n\n\n\n\t\tgdt_set_gate(0, 0, 0, 0, 0);\n\n\t\tgdt_set_gate(1, 0, 0xFFFFFFFF, 0x9A, 0xCF);\n\n\t\tgdt_set_gate(2, 0, 0xFFFFFFFF, 0x92, 0xCF);\n\n\t\tgdt_set_gate(3, 0, 0xFFFFFFFF, 0xFA, 0xCF);\n\n\t\tgdt_set_gate(4, 0, 0xFFFFFFFF, 0xF2, 0xCF);\n\n\n\n\t\tgdt_flush(&GDT_PTR as *const GDTPointer as u32);\n\n\t};\n\n}\n\n\n\nunsafe fn gdt_set_gate(n: usize, base: usize, limit: usize, access: u8, gran: u8)\n\n{\n\n\tGDT_ENTRIES[n].base_low = (base & 0xFFFF) as u16;\n", "file_path": "arch/x86/cpu/gdt.rs", "rank": 18, "score": 59472.70191564098 }, { "content": "pub fn get_key() -> KeyboardAction\n\n{\n\n\tmatch keyboard::get_key()\n\n\t{\n\n\t\tArchKeyboardAction::Down(code) => KeyboardAction::KeyDown(parse_keycode(code)),\n\n\t\tArchKeyboardAction::Up(code) => KeyboardAction::KeyUp(parse_keycode(code)),\n\n\t}\n\n}\n\n\n", "file_path": "kernel/keyboard.rs", "rank": 19, "score": 57663.38164390582 }, { "content": "#[panic_handler]\n\n#[no_mangle]\n\npub fn panic(info: &PanicInfo) -> !\n\n{\n\n let mut printer = StdioWriter::new();\n\n printer.bg = Color::Black;\n\n printer.fg = Color::Red;\n\n\n\n printer.print_screen(\"RUST FAIL\");\n\n printer.crlf();\n\n\n\n match info.message() {\n\n Some(args) => { let _ = printer.write_fmt(*args); },\n\n None => { printer.print_screen(\"<No message provided>\"); },\n\n };\n\n\n\n match info.location() {\n\n Some(location) => {\n\n printer.crlf();\n\n printer.print_screen(location.file());\n\n printer.print_char(':');\n\n printer.print_dec(location.line());\n\n printer.print_char(':');\n\n printer.print_dec(location.column());\n\n },\n\n None => {},\n\n }\n\n\n\n platform::cpu::halt();\n\n}\n", "file_path": "kernel/main.rs", "rank": 20, "score": 56120.49640737324 }, { "content": "pub fn get_key() -> ArchKeyboardAction\n\n{\n\n\tlet raw = unsafe { platform::io::inport(0x60) };\n\n\tlet key = raw & 0x7F;\n\n\tmatch raw & 0x80\n\n\t{\n\n\t\t0 => ArchKeyboardAction::Down(key),\n\n\t\t_ => ArchKeyboardAction::Up(key),\n\n\t}\n\n}\n", "file_path": "arch/x86/keyboard/mod.rs", "rank": 21, "score": 53620.29281442608 }, { "content": "pub fn enable_irq(irq: u32)\n\n{\n\n\tlet (port, line) = if irq < 8\n\n\t{\n\n\t\t(PIC1_DATA, irq)\n\n\t}\n\n\telse\n\n\t{\n\n\t\t(PIC2_DATA, irq - 8)\n\n\t};\n\n\n\n\tunsafe\n\n\t{\n\n\t\tlet value = io::inport(port) & !(1 << line);\n\n\t\tio::outport(port, value);\n\n\t}\n\n}\n\n\n", "file_path": "arch/x86/cpu/pic.rs", "rank": 22, "score": 53322.37476925264 }, { "content": "pub fn disable_irq(irq: u32)\n\n{\n\n\tlet (port, line) = if irq < 8\n\n\t{\n\n\t\t(PIC1_DATA, irq)\n\n\t}\n\n\telse\n\n\t{\n\n\t\t(PIC2_DATA, irq - 8)\n\n\t};\n\n\n\n\tunsafe\n\n\t{\n\n\t\tlet value = io::inport(port) | (1 << line);\n\n\t\tio::outport(port, value);\n\n\t}\n\n}\n\n\n", "file_path": "arch/x86/cpu/pic.rs", "rank": 23, "score": 53322.37476925264 }, { "content": "pub fn set_interval(frequency: u32)\n\n{\n\n\tlet divisor = 1193180 / frequency;\n\n\tlet l = divisor as u8;\n\n\tlet h = (divisor >> 8) as u8;\n\n\tunsafe\n\n\t{\n\n\t\tio::outport(TIMER_COMMAND, 0x36); // 0x36 tells timer to repeat\n\n\t\tio::outport(TIMER_CHANNEL0, l);\n\n\t\tio::outport(TIMER_CHANNEL0, h);\n\n\t}\n\n}\n", "file_path": "arch/x86/cpu/timer.rs", "rank": 24, "score": 53322.37476925264 }, { "content": "pub fn move_cursor(xpos: u32, ypos: u32)\n\n{\n\n\tif xpos >= COLS || ypos >= COLS { return };\n\n\tlet pos = ypos * COLS + xpos;\n\n\tunsafe\n\n\t{\n\n\t\tio::outport(0x3D4, 14);\n\n\t\tio::outport(0x3D5, (pos >> 8) as u8);\n\n\t\tio::outport(0x3D4, 
15);\n\n\t\tio::outport(0x3D5, pos as u8);\n\n\t}\n\n}\n", "file_path": "arch/x86/vga/mod.rs", "rank": 25, "score": 48428.96140537846 }, { "content": "pub fn setbg(xpos: u32, ypos: u32, value: Color)\n\n{\n\n\tif xpos >= COLS || ypos >= ROWS { return }\n\n\tunsafe\n\n\t{\n\n\t\tlet ptr = (0xb8000 + ypos * COLS * 2 + xpos * 2 + 1) as *mut u8;\n\n\t\t*ptr = (*ptr & 0x0F) | (((value as u8) << 4) & 0x70);\n\n\t}\n\n}\n\n\n", "file_path": "arch/x86/vga/mod.rs", "rank": 26, "score": 45426.51972115107 }, { "content": "pub fn setfg(xpos: u32, ypos: u32, value: Color)\n\n{\n\n\tif xpos >= COLS || ypos >= ROWS { return }\n\n\tunsafe\n\n\t{\n\n\t\tlet ptr = (0xb8000 + ypos * COLS * 2 + xpos * 2 + 1) as *mut u8;\n\n\t\t*ptr = (*ptr & 0xF0) | (value as u8 & 0x0F);\n\n\t}\n\n}\n\n\n", "file_path": "arch/x86/vga/mod.rs", "rank": 27, "score": 45426.51972115107 }, { "content": "#[repr(packed)]\n\nstruct GDTEntry\n\n{\n\n\tlimit_low: u16,\n\n\tbase_low: u16,\n\n\tbase_middle: u8,\n\n\taccess: u8,\n\n\tgranularity: u8,\n\n\tbase_high: u8\n\n}\n\n\n\nimpl Copy for GDTEntry {}\n\nimpl Clone for GDTEntry { fn clone(&self) -> Self { *self } }\n\n\n", "file_path": "arch/x86/cpu/gdt.rs", "rank": 29, "score": 42736.86951472715 }, { "content": "#[repr(packed)]\n\nstruct GDTPointer\n\n{\n\n\tlimit: u16,\n\n\tbase: usize\n\n}\n\n\n", "file_path": "arch/x86/cpu/gdt.rs", "rank": 30, "score": 42736.86951472715 }, { "content": "fn main()\n\n{\n\n let mut printer = StdioWriter::new();\n\n printer.bg = Color::Red;\n\n printer.fg = Color::Yellow;\n\n printer.clear_screen();\n\n printer.fg = Color::White;\n\n printer.go_to(3, 3);\n\n printer.print_screen(\"Hello, World!\");\n\n}\n\n\n", "file_path": "kernel/main.rs", "rank": 32, "score": 32981.87372059596 }, { "content": "use crate::kernel::stdio::StdioWriter;\n\nuse crate::platform::vga::Color;\n\n\n\nstatic mut TICK: u32 = 48;\n\n\n", "file_path": "kernel/interrupts/timer.rs", "rank": 33, "score": 24098.632916509112 }, { "content": "use crate::kernel::stdio::StdioWriter;\n\nuse crate::kernel::keyboard::*;\n\nuse crate::platform::vga::Color;\n\nuse crate::{kernel, platform};\n\n\n\nstatic mut SHIFT: u32 = 0;\n\nstatic mut IRQPRINTER: StdioWriter = StdioWriter{ xpos: 0, ypos: 4, fg: Color::Yellow, bg: Color::LightRed };\n\n\n", "file_path": "kernel/interrupts/keyboard.rs", "rank": 34, "score": 24098.473390631207 }, { "content": "use crate::kernel::stdio::StdioWriter;\n\nuse crate::platform::vga::Color;\n\n\n\nmod timer;\n\nmod keyboard;\n\n\n", "file_path": "kernel/interrupts/mod.rs", "rank": 35, "score": 24095.176874475368 }, { "content": "/*\n\n * This file contains code for the Global Descriptor Table\n\n *\n\n * See: http://www.jamesmolloy.co.uk/tutorial_html/4.-The%20GDT%20and%20IDT.html\n\n */\n\n\n\nuse core::marker::Copy;\n\nuse core::clone::Clone;\n\n\n\nconst GDT_COUNT: usize = 5;\n\nstatic mut GDT_ENTRIES: [GDTEntry; GDT_COUNT] = [GDTEntry { limit_low: 0, base_low: 0, base_middle: 0, access: 0, granularity: 0, base_high: 0 }; GDT_COUNT];\n\nstatic mut GDT_PTR: GDTPointer = GDTPointer { limit: 0, base: 0 };\n\n\n\n#[repr(packed)]\n", "file_path": "arch/x86/cpu/gdt.rs", "rank": 39, "score": 14.910095045401786 }, { "content": "use crate::platform;\n\nuse core::marker::Copy;\n\nuse core::clone::Clone;\n\n\n\npub enum ArchKeyboardAction\n\n{\n\n\tDown(u8),\n\n\tUp(u8)\n\n}\n\n\n\nimpl Copy for ArchKeyboardAction {}\n\nimpl Clone for ArchKeyboardAction { fn clone(&self) -> Self { *self } }\n\n\n", "file_path": "arch/x86/keyboard/mod.rs", "rank": 40, "score": 
13.30506595725446 }, { "content": "use crate::kernel;\n\nuse core::marker::Copy;\n\nuse core::clone::Clone;\n\n\n\nmod gdt;\n\nmod idt;\n\nmod pic;\n\nmod timer;\n\nmod features;\n\n\n\nstatic IRQ_OFFSET: u8 = 0x20;\n\n\n\n#[repr(C)]\n\npub struct InterruptArguments {\n\n\t_ds: u32, _edi: u32, _esi: u32, _ebp: u32, _esp: u32, _ebx: u32, _edx: u32, _ecx: u32, _eax: u32,\n\n\tinterrupt_number: u32,\n\n\terror_code: u32,\n\n\t_eip: u32, _cs: u32, _eflags: u32, _useresp: u32, _ss: u32,\n\n}\n\n\n\nimpl Copy for InterruptArguments {}\n\nimpl Clone for InterruptArguments { fn clone(&self) -> Self { *self } }\n\n\n", "file_path": "arch/x86/cpu/mod.rs", "rank": 41, "score": 11.384195976906687 }, { "content": "use core::marker::Copy;\n\nuse core::clone::Clone;\n\nuse crate::platform::io;\n\n\n\npub enum Color {\n\n Black = 0,\n\n Blue = 1,\n\n Green = 2,\n\n Cyan = 3,\n\n Red = 4,\n\n Pink = 5,\n\n Brown = 6,\n\n LightGray = 7,\n\n DarkGray = 8,\n\n LightBlue = 9,\n\n LightGreen = 10,\n\n LightCyan = 11,\n\n LightRed = 12,\n\n LightPink = 13,\n\n Yellow = 14,\n\n White = 15,\n\n}\n\n\n\nimpl Copy for Color {}\n\nimpl Clone for Color { fn clone(&self) -> Self { *self } }\n\n\n\npub static ROWS: u32 = 25;\n\npub static COLS: u32 = 80;\n\n\n", "file_path": "arch/x86/vga/mod.rs", "rank": 42, "score": 11.14001582899611 }, { "content": "use crate::platform::vga::{Color, COLS, ROWS};\n\nuse crate::platform::vga;\n\n\n\npub struct StdioWriter\n\n{\n\n\tpub xpos: u32,\n\n\tpub ypos: u32,\n\n\tpub fg: Color,\n\n\tpub bg: Color\n\n}\n\n\n\nimpl Copy for StdioWriter {}\n\nimpl Clone for StdioWriter { fn clone(&self) -> Self { *self } }\n\n\n\nimpl StdioWriter\n\n{\n\n\tpub fn new() -> StdioWriter\n\n\t{\n\n\t\tStdioWriter\n\n\t\t{\n", "file_path": "kernel/stdio.rs", "rank": 43, "score": 9.762495389201915 }, { "content": "use crate::platform::io;\n\n\n\nstatic PIC1: u16 = 0x20;\n\nstatic PIC1_DATA: u16 = 0x21;\n\nstatic PIC2: u16 = 0xA0;\n\nstatic PIC2_DATA: u16 = 0xA1;\n\n\n\nstatic IRQ_ACK: u8 = 0x20;\n\nstatic ICW1: u8 = 0x11;\n\nstatic ICW4: u8 = 0x01;\n\n\n", "file_path": "arch/x86/cpu/pic.rs", "rank": 44, "score": 8.527698509599432 }, { "content": "pub unsafe fn outport(address: u16, value: u8)\n\n{\n\n\tllvm_asm!(\"out %al, %dx\" :: \"{al}\"(value), \"{dx}\"(address));\n\n}\n\n\n\npub unsafe fn inport(address: u16) -> u8\n\n{\n\n\tlet result;\n\n\tllvm_asm!(\"in %dx, %al\" : \"={al}\"(result) : \"{dx}\"(address));\n\n\tresult\n\n}\n\n\n", "file_path": "arch/x86/io/mod.rs", "rank": 45, "score": 7.573014235679849 }, { "content": "\tGDT_ENTRIES[n].base_middle = ((base >> 16) & 0xFF) as u8;\n\n\tGDT_ENTRIES[n].base_high = ((base >> 24) & 0xFF) as u8;\n\n\n\n\tGDT_ENTRIES[n].limit_low = (limit & 0xFFFF) as u16;\n\n\tGDT_ENTRIES[n].granularity = ((limit >> 16) & 0x0F) as u8;\n\n\n\n\tGDT_ENTRIES[n].granularity |= gran & 0xF0;\n\n\tGDT_ENTRIES[n].access = access;\n\n}\n\n\n\nextern\n\n{\n\n\tfn gdt_flush(pointer: u32);\n\n}\n", "file_path": "arch/x86/cpu/gdt.rs", "rank": 46, "score": 7.544562739603623 }, { "content": "use crate::platform::io;\n\n\n\nstatic TIMER_COMMAND: u16 = 0x43;\n\nstatic TIMER_CHANNEL0: u16 = 0x40;\n\n\n", "file_path": "arch/x86/cpu/timer.rs", "rank": 47, "score": 6.6503686584295885 }, { "content": "\n\n\tpub fn go_to(&mut self, x: u32, y: u32)\n\n\t{\n\n\t\tself.move_coords(x, y);\n\n\t\tself.set_cursor();\n\n\t}\n\n\n\n\tpub fn backspace(&mut self)\n\n\t{\n\n\t\tself.go_left();\n\n\t\tself.raw_print_char(' ' as u8);\n\n\t\tself.set_cursor();\n\n\t}\n\n\n\n\tpub fn tab(&mut 
self)\n\n\t{\n\n\t\tlet x = self.xpos;\n\n\t\tfor _ in 0 .. 4 - (x % 4)\n\n\t\t{\n\n\t\t\tself.raw_print_char(' ' as u8);\n", "file_path": "kernel/stdio.rs", "rank": 48, "score": 6.3106059859701995 }, { "content": "\n\n\t\tlet mut fac = 1;\n\n\t\tlet mut nv = v;\n\n\t\twhile fac <= v { fac *= 10; }\n\n\t\tfac /= 10;\n\n\t\twhile fac > 0\n\n\t\t{\n\n\t\t\tlet n = nv / fac;\n\n\t\t\tlet c = n as u8 + '0' as u8;\n\n\t\t\tself.raw_print_char(c);\n\n\t\t\tself.go_right();\n\n\t\t\tnv -= n * fac;\n\n\t\t\tfac /= 10;\n\n\t\t}\n\n\t\tself.set_cursor();\n\n\t}\n\n\n\n\tpub fn print_bin(&mut self, v: u32, sz: u32)\n\n\t{\n\n\t\tself.print_screen(\"0b\");\n", "file_path": "kernel/stdio.rs", "rank": 50, "score": 5.193862389946688 }, { "content": "\n\n\t\tlet mut i = (sz - 1) as i32;\n\n\t\twhile i >= 0\n\n\t\t{\n\n\t\t\tlet c = match (v >> (i as u32)) & 0x1\n\n\t\t\t{\n\n\t\t\t\t0 => '0',\n\n\t\t\t\t_ => '1',\n\n\t\t\t} as u8;\n\n\t\t\tself.raw_print_char(c);\n\n\t\t\tself.go_right();\n\n\t\t\ti -= 1;\n\n\t\t}\n\n\t\tself.set_cursor();\n\n\t}\n\n\n\n\tpub fn print_hex(&mut self, v: u32, sz: u32)\n\n\t{\n\n\t\tself.print_screen(\"0x\");\n\n\n", "file_path": "kernel/stdio.rs", "rank": 51, "score": 4.864423485211584 }, { "content": "\t\tlet mut i = (sz - 4) as i32;\n\n\t\twhile i >= 0\n\n\t\t{\n\n\t\t\tlet c = match (v >> (i as u32)) & 0xF\n\n\t\t\t{\n\n\t\t\t\tc if c <= 9 => c + '0' as u32,\n\n\t\t\t\tc => c - 10 + 'A' as u32,\n\n\t\t\t} as u8;\n\n\t\t\tself.raw_print_char(c);\n\n\t\t\tself.go_right();\n\n\t\t\ti -= 4;\n\n\t\t}\n\n\t\tself.set_cursor();\n\n\t}\n\n\n\n\tpub fn print_char(&mut self, value: char)\n\n\t{\n\n\t\tself.raw_print_char(value as u8);\n\n\t\tself.go_right();\n\n\t\tself.set_cursor();\n", "file_path": "kernel/stdio.rs", "rank": 52, "score": 4.858350727954506 }, { "content": "use crate::platform::keyboard;\n\nuse crate::platform::keyboard::ArchKeyboardAction;\n\n\n\npub enum KeyboardKey\n\n{\n\n\tPrintable(char, char),\n\n\tReturn,\n\n\tBackspace,\n\n\tShift,\n\n\tEscape,\n\n\tTab,\n\n\tUnknown(u8)\n\n}\n\n\n\npub enum KeyboardAction\n\n{\n\n\tKeyUp(KeyboardKey),\n\n\tKeyDown(KeyboardKey),\n\n}\n\n\n", "file_path": "kernel/keyboard.rs", "rank": 53, "score": 4.744842587724941 }, { "content": "extern\n\n{\n\n\tstatic end: u32;\n\n}\n\n\n\nstatic mut PLACEMENT_ADDRESS: u32 = 0;\n\n\n", "file_path": "arch/x86/mmu/mod.rs", "rank": 54, "score": 4.689675175372255 }, { "content": "#![no_std]\n\n#![feature(llvm_asm)]\n\n#![feature(panic_info_message)]\n\n\n\n#[path = \"arch/x86/\"]\n\npub mod platform {\n\n\tpub mod vga;\n\n\tpub mod cpu;\n\n\tpub mod mmu;\n\n\tmod io;\n\n\tpub mod keyboard;\n\n}\n\n\n\npub mod kernel {\n\n\tpub mod main;\n\n\tpub mod interrupts;\n\n\tmod stdio;\n\n\tmod keyboard;\n\n}\n\n\n\n#[no_mangle]\n\npub extern \"C\" fn _Unwind_Resume() -> ! 
{\n\n loop {}\n\n}\n", "file_path": "kernel_x86.rs", "rank": 55, "score": 3.667065665733343 }, { "content": "\t}\n\n\n\n\tfn raw_print_char(&self, value: u8)\n\n\t{\n\n\t\tvga::putc(self.xpos, self.ypos, value);\n\n\t\tvga::setfg(self.xpos, self.ypos, self.fg);\n\n\t\tvga::setbg(self.xpos, self.ypos, self.bg);\n\n\t}\n\n\n\n\tpub fn print_screen(&mut self, value: &str)\n\n\t{\n\n\t\tfor c in value.bytes()\n\n\t\t{\n\n\t\t\tself.raw_print_char(c);\n\n\t\t\tself.go_right();\n\n\t\t}\n\n\t\tself.set_cursor();\n\n\t}\n\n}\n\n\n", "file_path": "kernel/stdio.rs", "rank": 56, "score": 3.4524054426307003 }, { "content": "\t\tlet mut newx = x;\n\n\t\tlet mut newy = y;\n\n\t\tif newx >= COLS { newx = 0; newy += 1; }\n\n\t\tif newy >= ROWS { newy = 0; }\n\n\t\tself.xpos = newx;\n\n\t\tself.ypos = newy;\n\n\t}\n\n\n\n\tfn set_cursor(&self)\n\n\t{\n\n\t\tvga::move_cursor(self.xpos, self.ypos);\n\n\t}\n\n\n\n\tpub fn print_dec(&mut self, v: u32)\n\n\t{\n\n\t\tif v == 0\n\n\t\t{\n\n\t\t\tself.print_char('0');\n\n\t\t\treturn;\n\n\t\t}\n", "file_path": "kernel/stdio.rs", "rank": 57, "score": 3.4360579027468474 }, { "content": "\t\t\tself.go_right();\n\n\t\t}\n\n\t\tself.set_cursor();\n\n\t}\n\n\n\n\tpub fn crlf(&mut self)\n\n\t{\n\n\t\tself.xpos = 0;\n\n\t\tself.ypos = if self.ypos == ROWS - 1 { 0 } else { self.ypos + 1 };\n\n\t\tself.set_cursor();\n\n\t}\n\n\n\n\tfn go_right(&mut self)\n\n\t{\n\n\t\tif self.xpos == COLS - 1\n\n\t\t{\n\n\t\t\tself.xpos = 0;\n\n\t\t\tself.ypos = (self.ypos + ROWS + 1) % ROWS;\n\n\t\t}\n\n\t\telse\n", "file_path": "kernel/stdio.rs", "rank": 58, "score": 3.3555590014539503 }, { "content": "impl ::core::fmt::Write for StdioWriter\n\n{\n\n\tfn write_str(&mut self, s: &str) -> ::core::fmt::Result\n\n\t{\n\n\t\tfor b in s.bytes()\n\n\t\t{\n\n\t\t\tself.raw_print_char(b);\n\n\t\t\tself.go_right();\n\n\t\t}\n\n\t\tself.set_cursor();\n\n\t\tOk(())\n\n\t}\n\n}\n", "file_path": "kernel/stdio.rs", "rank": 59, "score": 3.007514316051438 }, { "content": "\t\t\txpos: 0,\n\n\t\t\typos: 0,\n\n\t\t\tfg: Color::White,\n\n\t\t\tbg: Color::Black\n\n\t\t}\n\n\t}\n\n\n\n\tpub fn clear_screen(&mut self)\n\n\t{\n\n\t\tfor y in 0u32 .. ROWS\n\n\t\t{\n\n\t\t\tfor x in 0u32 .. COLS\n\n\t\t\t{\n\n\t\t\t\tvga::putc(x, y, 0);\n\n\t\t\t\tvga::setfg(x, y, self.fg);\n\n\t\t\t\tvga::setbg(x, y, self.bg);\n\n\t\t\t}\n\n\t\t}\n\n\t\tself.go_to(0, 0);\n\n\t}\n", "file_path": "kernel/stdio.rs", "rank": 60, "score": 2.718646454592699 }, { "content": "use crate::platform::vga::Color;\n\nuse crate::kernel::stdio::StdioWriter;\n\nuse crate::platform;\n\nuse core::fmt::Write;\n\nuse core::panic::PanicInfo;\n\n\n\n#[no_mangle]\n", "file_path": "kernel/main.rs", "rank": 61, "score": 2.6855153544447545 }, { "content": "\t\t{\n\n\t\t\tself.xpos += 1;\n\n\t\t}\n\n\t}\n\n\n\n\tfn go_left(&mut self)\n\n\t{\n\n\t\tif self.xpos == 0\n\n\t\t{\n\n\t\t\tself.xpos = COLS - 1;\n\n\t\t\tself.ypos = (self.ypos + ROWS - 1) % ROWS;\n\n\t\t}\n\n\t\telse\n\n\t\t{\n\n\t\t\tself.xpos -= 1;\n\n\t\t}\n\n\t}\n\n\n\n\tfn move_coords(&mut self, x: u32, y: u32)\n\n\t{\n", "file_path": "kernel/stdio.rs", "rank": 63, "score": 2.182354480267917 }, { "content": "\t\tio::io_wait();\n\n\n\n\t\t// Finalize\n\n\t\tio::outport(PIC1_DATA, ICW4);\n\n\t\tio::io_wait();\n\n\t\tio::outport(PIC2_DATA, ICW4);\n\n\t\tio::io_wait();\n\n\n\n\t\t// Disable all interrupts\n\n\t\tio::outport(PIC1_DATA, 0xFF);\n\n\t\tio::outport(PIC2_DATA, 0xFF);\n\n\t}\n\n}\n\n\n", "file_path": "arch/x86/cpu/pic.rs", "rank": 64, "score": 1.308671920967262 } ]
Rust
src/bdd.rs
tangentstorm/bex
c2dea80e284de3fd577fda892577edc829c07bf7
use std::collections::HashMap; use std::collections::HashSet; use std::cell::RefCell; extern crate num_cpus; use bincode; use base::{Base}; use io; use reg::Reg; use {vhl, vhl::{HiLo, Walkable}}; use nid::{NID,O,I}; use vid::{VID,VidOrdering,topmost_of3}; mod bdd_sols; mod bdd_swarm; use self::bdd_swarm::*; pub type BDDHashMap<K,V> = vhl::VHLHashMap<K,V>; #[derive(Debug, PartialEq, Eq, Hash, Serialize, Deserialize, Clone, Copy)] pub struct ITE {i:NID, t:NID, e:NID} impl ITE { pub fn new (i:NID, t:NID, e:NID)-> ITE { ITE { i, t, e } } pub fn top_vid(&self)->VID { let (i,t,e) = (self.i.vid(), self.t.vid(), self.e.vid()); topmost_of3(i,t,e) }} #[derive(Debug, PartialEq, Eq, Clone, Copy)] pub enum Norm { Nid(NID), Ite(ITE), Not(ITE)} impl ITE { pub fn norm(f0:NID, g0:NID, h0:NID)->Norm { let mut f = f0; let mut g = g0; let mut h = h0; loop { if f.is_const() { return Norm::Nid(if f==I { g } else { h }) } if g==h { return Norm::Nid(g) } if g==f { if h.is_const() { return Norm::Nid(if h==I { I } else { f }) } else { g=I }} else if g.is_const() && h.is_const() { return if g==I { Norm::Nid(f) } else { Norm::Nid(!f) }} else { let nf = !f; if g==nf { g=O } else if h==nf { h=I } else if h==f { h=O } else { let (fv, fi) = (f.vid(), f.idx()); macro_rules! cmp { ($x0:expr,$x1:expr) => { { let x0=$x0; ((x0.is_above(&fv)) || ((x0==fv) && ($x1<fi))) }}} if g.is_const() && cmp!(h.vid(),h.idx()) { if g==I { g = f; f = h; h = g; g = I; } else { f = !h; g = O; h = nf; }} else if h.is_const() && cmp!(g.vid(),g.idx()) { if h==I { f = !g; g = nf; h = I; } else { h = f; f = g; g = h; h = O; }} else { let ng = !g; if (h==ng) && cmp!(g.vid(), g.idx()) { h=f; f=g; g=h; h=nf; } else if f.is_inv() { f=g; g=h; h=f; f=nf; } else if g.is_inv() { return match ITE::norm(f,ng,!h) { Norm::Nid(nid) => Norm::Nid(!nid), Norm::Not(ite) => Norm::Ite(ite), Norm::Ite(ite) => Norm::Not(ite)}} else { return Norm::Ite(ITE::new(f,g,h)) }}}}}} } #[derive(Debug, Serialize, Deserialize, Clone)] pub struct BddState { hilos: vhl::HiLoCache, xmemo: BDDHashMap<ITE, NID> } thread_local!{ pub static COUNT_XMEMO_TEST: RefCell<u64> = RefCell::new(0); pub static COUNT_XMEMO_FAIL: RefCell<u64> = RefCell::new(0); } impl BddState { fn new()->BddState { BddState { hilos: vhl::HiLoCache::new(), xmemo: BDDHashMap::default() }} #[inline] fn tup(&self, n:NID)-> (NID, NID) { if n.is_const() { if n==I { (I, O) } else { (O, I) } } else if n.is_vid() { if n.is_inv() { (O, I) } else { (I, O) }} else { let hilo = self.hilos.get_hilo(n); (hilo.hi, hilo.lo) }} #[inline] fn simple_node(&mut self, v:VID, hilo:HiLo)->NID { match self.get_simple_node(v, hilo) { Some(n) => n, None => { self.hilos.insert(v, hilo) }}} #[inline] fn get_memo(&self, ite:&ITE) -> Option<NID> { if ite.i.is_vid() { debug_assert!(!ite.i.is_inv()); let hilo = if ite.i.is_inv() { HiLo::new(ite.e,ite.t) } else { HiLo::new(ite.t,ite.e) }; self.get_simple_node(ite.i.vid(), hilo) } else { COUNT_XMEMO_TEST.with(|c| *c.borrow_mut() += 1 ); let test = self.xmemo.get(&ite).copied(); if test == None { COUNT_XMEMO_FAIL.with(|c| *c.borrow_mut() += 1 ); } test }} #[inline] fn get_simple_node(&self, v:VID, hl:HiLo)-> Option<NID> { self.hilos.get_node(v, hl) }} #[derive(Debug, Serialize, Deserialize)] pub struct BDDBase { pub tags: HashMap<String, NID>, swarm: BddSwarm} impl BDDBase { #[inline] fn tup(&self, n:NID)->(NID,NID) { self.swarm.tup(n) } pub fn load(path:&str)->::std::io::Result<BDDBase> { let s = io::get(path)?; Ok(bincode::deserialize(&s).unwrap()) } pub fn gt(&mut self, x:NID, y:NID)->NID { 
self.ite(x, !y, O) } pub fn lt(&mut self, x:NID, y:NID)->NID { self.ite(x, O, y) } #[inline] pub fn ite(&mut self, f:NID, g:NID, h:NID)->NID { self.swarm.ite(f,g,h) } pub fn swap(&mut self, n:NID, x:VID, y:VID)-> NID { if x.is_below(&y) { return self.swap(n,y,x) } /* x ____ x'____ : \ : \ y __ y __ => y'__ y'__ : \ : \ : \ : \ ll lh hl hh ll hl lh hh */ let (xlo, xhi) = (self.when_lo(x,n), self.when_hi(x,n)); let (xlo_ylo, xlo_yhi) = (self.when_lo(y,xlo), self.when_hi(y,xlo)); let (xhi_ylo, xhi_yhi) = (self.when_lo(y,xhi), self.when_hi(y,xhi)); let lo = self.ite(NID::from_vid(x), xlo_ylo, xhi_ylo); let hi = self.ite(NID::from_vid(y), xlo_yhi, xhi_yhi); self.ite(NID::from_vid(x), lo, hi) } pub fn node_count(&self, n:NID)->usize { let mut c = 0; self.walk(n, &mut |_,_,_,_| c+=1); c } fn tt_aux(&mut self, res:&mut Vec<u8>, n:NID, i:usize, level:u32) { if level == 0 { match n { O => {} I => { res[i] = 1; } x => panic!("expected a leaf nid, got {}", x) }} else { let v = VID::var(level-1); let lo = self.when_lo(v,n); self.tt_aux(res, lo, i*2, level-1); let hi = self.when_hi(v,n); self.tt_aux(res, hi, i*2+1, level-1); }} pub fn tt(&mut self, n0:NID, num_vars:u32)->Vec<u8> { if !n0.vid().is_var() { todo!("tt only works for actual variables. got {:?}", n0); } if num_vars > 16 { panic!("refusing to generate a truth table of 2^{} bytes", num_vars) } if num_vars == 0 { panic!("num_vars should be > 0")} let mut res = vec![0;(1 << num_vars) as usize]; self.tt_aux(&mut res, n0, 0, num_vars); res } } impl Base for BDDBase { fn new()->BDDBase { BDDBase{swarm: BddSwarm::new(), tags:HashMap::new()}} fn when_hi(&mut self, x:VID, y:NID)->NID { let yv = y.vid(); match x.cmp_depth(&yv) { VidOrdering::Level => self.tup(y).0, VidOrdering::Above => y, VidOrdering::Below => { let (yt, ye) = self.tup(y); let (th,el) = (self.when_hi(x,yt), self.when_hi(x,ye)); self.ite(NID::from_vid(yv), th, el) }}} fn when_lo(&mut self, x:VID, y:NID)->NID { let yv = y.vid(); match x.cmp_depth(&yv) { VidOrdering::Level => self.tup(y).1, VidOrdering::Above => y, VidOrdering::Below => { let (yt, ye) = self.tup(y); let (th,el) = (self.when_lo(x,yt), self.when_lo(x,ye)); self.ite(NID::from_vid(yv), th, el) }}} fn def(&mut self, _s:String, _i:VID)->NID { todo!("BDDBase::def()") } fn tag(&mut self, n:NID, s:String)->NID { self.tags.insert(s, n); n } fn get(&self, s:&str)->Option<NID> { Some(*self.tags.get(s)?) } fn and(&mut self, x:NID, y:NID)->NID { self.ite(x, y, O) } fn xor(&mut self, x:NID, y:NID)->NID { self.ite(x, !y, y) } fn or(&mut self, x:NID, y:NID)->NID { self.ite(x, I, y) } fn sub(&mut self, v:VID, n:NID, ctx:NID)->NID { if ctx.might_depend_on(v) { let (zt,ze) = self.tup(ctx); let zv = ctx.vid(); if v==zv { self.ite(n, zt, ze) } else { let th = self.sub(v, n, zt); let el = self.sub(v, n, ze); self.ite(NID::from_vid(zv), th, el) }} else { ctx }} fn save(&self, path:&str)->::std::io::Result<()> { let s = bincode::serialize(&self).unwrap(); io::put(path, &s) } fn dot(&self, n:NID, wr: &mut dyn std::fmt::Write) { macro_rules! 
w { ($x:expr $(,$xs:expr)*) => { writeln!(wr, $x $(,$xs)*).unwrap(); }} w!("digraph bdd {{"); w!("subgraph head {{ h1[shape=plaintext; label=\"BDD\"] }}"); w!(" I[label=⊤; shape=square];"); w!(" O[label=⊥; shape=square];"); w!("node[shape=circle];"); self.walk(n, &mut |n,_,_,_| w!(" \"{}\"[label=\"{}\"];", n, n.vid())); w!("edge[style=solid];"); self.walk(n, &mut |n,_,t,_| w!(" \"{}\"->\"{}\";", n, t)); w!("edge[style=dashed];"); self.walk(n, &mut |n,_,_,e| w!(" \"{}\"->\"{}\";", n, e)); w!("}}"); } fn solution_set(&self, n: NID, nvars: usize)->hashbrown::HashSet<Reg> { self.solutions_pad(n, nvars).collect() }} include!("test-bdd.rs");
use std::collections::HashMap; use std::collections::HashSet; use std::cell::RefCell; extern crate num_cpus; use bincode; use base::{Base}; use io; use reg::Reg; use {vhl, vhl::{HiLo, Walkable}}; use nid::{NID,O,I}; use vid::{VID,VidOrdering,topmost_of3}; mod bdd_sols; mod bdd_swarm; use self::bdd_swarm::*; pub type BDDHashMap<K,V> = vhl::VHLHashMap<K,V>; #[derive(Debug, PartialEq, Eq, Hash, Serialize, Deserialize, Clone, Copy)] pub struct ITE {i:NID, t:NID, e:NID} impl ITE { pub fn new (i:NID, t:NID, e:NID)-> ITE { ITE { i, t, e } } pub fn top_vid(&self)->VID { let (i,t,e) = (self.i.vid(), self.t.vid(), self.e.vid()); topmost_of3(i,t,e) }} #[derive(Debug, PartialEq, Eq, Clone, Copy)] pub enum Norm { Nid(NID), Ite(ITE), Not(ITE)} impl ITE { pub fn norm(f0:NID, g0:NID, h0:NID)->Norm { let mut f = f0; let mut g = g0; let mut h = h0; loop { if f.is_const() { return Norm::Nid(if f==I { g } else { h }) } if g==h { return Norm::Nid(g) } if g==f { if h.is_const() { return Norm::Nid(if h==I { I } else { f }) } else { g=I }} else if g.is_const() && h.is_const() { return if g==I { Norm::Nid(f) } else { Norm::Nid(!f) }} else { let nf = !f; if g==nf { g=O } else if h==nf { h=I } else if h==f { h=O } else { let (fv, fi) = (f.vid(), f.idx()); macro_rules! cmp { ($x0:expr,$x1:expr) => { { let x0=$x0; ((x0.is_above(&fv)) || ((x0==fv) && ($x1<fi))) }}} if g.is_const() && cmp!(h.vid(),h.idx()) { if g==I { g = f; f = h; h = g; g = I; } else { f = !h; g = O; h = nf; }} else if h.is_const() && cmp!(g.vid(),g.idx()) { if h==I { f = !g; g = nf; h = I; } else { h = f; f = g; g = h; h = O; }} else { let ng = !g; if (h==ng) && cmp!(g.vid(), g.idx()) { h=f; f=g; g=h; h=nf; } else if f.is_inv() { f=g; g=h; h=f; f=nf; } else if g.is_inv() { return match ITE::norm(f,ng,!h) { Norm::Nid(nid) => Norm::Nid(!nid), Norm::Not(ite) => Norm::Ite(ite), Norm::Ite(ite) => Norm::Not(ite)}} else { return Norm::Ite(ITE::new(f,g,h)) }}}}}} } #[derive(Debug, Serialize, Deserialize, Clone)] pub struct BddState { hilos: vhl::HiLoCache, xmemo: BDDHashMap<ITE, NID> } thread_local!{ pub static COUNT_XMEMO_TEST: RefCell<u64> = RefCell::new(0); pub static COUNT_XMEMO_FAIL: RefCell<u64> = RefCell::new(0); } impl BddState { fn new()->BddState { BddState { hilos: vhl::HiLoCache::new(), xmemo: BDDHashMap::default() }} #[inline] fn tup(&self, n:NID)-> (NID, NID) { if n.is_const() { if n==I { (I, O) } else { (O, I) } } else if n.is_vid() { if n.is_inv() { (O, I) } else { (I, O) }} else { let hilo = self.hilos.get_hilo(n); (hilo.hi, hilo.lo) }} #[inline] fn simple_node(&mut self, v:VID, hilo:HiLo)->NID { match self.get_simple_node(v, hilo) { Some(n) => n, None => { self.hilos.insert(v, hilo) }}} #[inline] fn get_memo(&self, ite:&ITE) -> Option<NID> { if ite.i.is_vid() { debug_assert!(!ite.i.is_inv()); let hilo = if ite.i.is_inv() { HiLo::new(ite.e,ite.t) } else { HiLo::new(ite.t,ite.e) }; self.get_simple_node(ite.i.vid(), hilo) } else { COUNT_XMEMO_TEST.with(|c| *c.borrow_mut() += 1 ); let test = self.xmemo.get(&ite).copied(); if test == None { COUNT_XMEMO_FAIL.with(|c| *c.borrow_mut() += 1 ); } test }} #[inline] fn get_simple_node(&self, v:VID, hl:HiLo)-> Option<NID> { self.hilos.get_node(v, hl) }} #[derive(Debug, Serialize, Deserialize)] pub struct BDDBase { pub tags: HashMap<String, NID>, swarm: BddSwarm} impl BDDBase { #[inline] fn tup(&self, n:NID)->(NID,NID) { self.swarm.tup(n) } pub fn load(path:&str)->::std::io::Result<BDDBase> { let s = io::get(path)?; Ok(bincode::deserialize(&s).unwrap()) } pub fn gt(&mut self, x:NID, y:NID)->NID { 
self.ite(x, !y, O) } pub fn lt(&mut self, x:NID, y:NID)->NID { self.ite(x, O, y) } #[inline] pub fn ite(&mut self, f:NID, g:NID, h:NID)->NID { self.swarm.ite(f,g,h) } pub fn swap(&mut self, n:NID, x:VID, y:VID)-> NID { if x.is_below(&y) { return self.swap(n,y,x) } /* x ____ x'____ : \ : \ y __ y __ => y'__ y'__ : \ : \ : \ : \ ll lh hl hh ll hl lh hh */ let (xlo, xhi) = (self.when_lo(x,n), self.when_hi(x,n)); let (xlo_ylo, xlo_yhi) = (self.when_lo(y,xlo), self.when_hi(y,xlo)); let (xhi_ylo, xhi_yhi) = (self.when_lo(y,xhi), self.when_hi(y,xhi)); let lo = self.ite(NID::from_vid(x), xlo_ylo, xhi_ylo); let hi = self.ite(NID::from_vid(y), xlo_yhi, xhi_yhi); self.ite(NID::from_vid(x), lo, hi) } pub fn node_count(&self, n:NID)->usize { let mut c = 0; self.walk(n, &mut |_,_,_,_| c+=1); c } fn tt_aux(&mut self, res:&mut Vec<u8>, n:NID, i:usize, level:u32) { if level == 0 { match n { O => {} I => { res[i] = 1; } x => panic!("expected a leaf nid, got {}", x) }} else { let v = VID::var(level-1); let lo = self.when_lo(v,n); self.tt_aux(res, lo, i*2, level-1); let hi = self.when_hi(v,n); self.tt_aux(res, hi, i*2+1, level-1); }}
} impl Base for BDDBase { fn new()->BDDBase { BDDBase{swarm: BddSwarm::new(), tags:HashMap::new()}} fn when_hi(&mut self, x:VID, y:NID)->NID { let yv = y.vid(); match x.cmp_depth(&yv) { VidOrdering::Level => self.tup(y).0, VidOrdering::Above => y, VidOrdering::Below => { let (yt, ye) = self.tup(y); let (th,el) = (self.when_hi(x,yt), self.when_hi(x,ye)); self.ite(NID::from_vid(yv), th, el) }}} fn when_lo(&mut self, x:VID, y:NID)->NID { let yv = y.vid(); match x.cmp_depth(&yv) { VidOrdering::Level => self.tup(y).1, VidOrdering::Above => y, VidOrdering::Below => { let (yt, ye) = self.tup(y); let (th,el) = (self.when_lo(x,yt), self.when_lo(x,ye)); self.ite(NID::from_vid(yv), th, el) }}} fn def(&mut self, _s:String, _i:VID)->NID { todo!("BDDBase::def()") } fn tag(&mut self, n:NID, s:String)->NID { self.tags.insert(s, n); n } fn get(&self, s:&str)->Option<NID> { Some(*self.tags.get(s)?) } fn and(&mut self, x:NID, y:NID)->NID { self.ite(x, y, O) } fn xor(&mut self, x:NID, y:NID)->NID { self.ite(x, !y, y) } fn or(&mut self, x:NID, y:NID)->NID { self.ite(x, I, y) } fn sub(&mut self, v:VID, n:NID, ctx:NID)->NID { if ctx.might_depend_on(v) { let (zt,ze) = self.tup(ctx); let zv = ctx.vid(); if v==zv { self.ite(n, zt, ze) } else { let th = self.sub(v, n, zt); let el = self.sub(v, n, ze); self.ite(NID::from_vid(zv), th, el) }} else { ctx }} fn save(&self, path:&str)->::std::io::Result<()> { let s = bincode::serialize(&self).unwrap(); io::put(path, &s) } fn dot(&self, n:NID, wr: &mut dyn std::fmt::Write) { macro_rules! w { ($x:expr $(,$xs:expr)*) => { writeln!(wr, $x $(,$xs)*).unwrap(); }} w!("digraph bdd {{"); w!("subgraph head {{ h1[shape=plaintext; label=\"BDD\"] }}"); w!(" I[label=⊤; shape=square];"); w!(" O[label=⊥; shape=square];"); w!("node[shape=circle];"); self.walk(n, &mut |n,_,_,_| w!(" \"{}\"[label=\"{}\"];", n, n.vid())); w!("edge[style=solid];"); self.walk(n, &mut |n,_,t,_| w!(" \"{}\"->\"{}\";", n, t)); w!("edge[style=dashed];"); self.walk(n, &mut |n,_,_,e| w!(" \"{}\"->\"{}\";", n, e)); w!("}}"); } fn solution_set(&self, n: NID, nvars: usize)->hashbrown::HashSet<Reg> { self.solutions_pad(n, nvars).collect() }} include!("test-bdd.rs");
pub fn tt(&mut self, n0:NID, num_vars:u32)->Vec<u8> { if !n0.vid().is_var() { todo!("tt only works for actual variables. got {:?}", n0); } if num_vars > 16 { panic!("refusing to generate a truth table of 2^{} bytes", num_vars) } if num_vars == 0 { panic!("num_vars should be > 0")} let mut res = vec![0;(1 << num_vars) as usize]; self.tt_aux(&mut res, n0, 0, num_vars); res }
function_block-full_function
[ { "content": "#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)]\n\nstruct XHiLo { pub hi: XID, pub lo: XID }\n\nimpl std::ops::Not for XHiLo { type Output = XHiLo; fn not(self)->XHiLo { XHiLo { hi:!self.hi, lo:!self.lo }}}\n\nimpl XHiLo { fn as_tup(&self)->(XID,XID) { (self.hi, self.lo) }}\n\n\n\n#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)]\n\npub struct XVHL { pub v: VID, pub hi: XID, pub lo: XID }\n\nimpl XVHL {\n\n fn hilo(&self)->XHiLo { XHiLo { hi:self.hi, lo:self.lo } }\n\n fn is_var(&self)->bool { self.v.is_var() && self.hi == XID_I && self.lo == XID_O }}\n\nimpl std::ops::Not for XVHL { type Output = XVHL; fn not(self)->XVHL { XVHL { v:self.v, hi:!self.hi, lo:!self.lo }}}\n\n\n\n/// Dummy value to stick into vhls[0]\n\nconst XVHL_O:XVHL = XVHL{ v: NOV, hi:XID_O, lo:XID_O };\n\n\n\n/// Dummy value to use when allocating a new node\n\nconst XVHL_NEW:XVHL = XVHL{ v: VID::top(), hi:XID_O, lo:XID_O };\n\n\n\n/// index + refcount\n\n#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)]\n\npub struct IxRc { ix:XID, irc: usize, erc: usize }\n", "file_path": "src/swap.rs", "rank": 0, "score": 225053.19332480722 }, { "content": "/// convenience trait that allows us to mix vids and nids\n\n/// freely when constructing expressions.\n\npub trait ToNID { fn to_nid(&self)->NID; }\n\nimpl ToNID for NID { fn to_nid(&self)->NID { *self }}\n\nimpl ToNID for VID { fn to_nid(&self)->NID { NID::from_vid(*self) }}\n\n\n", "file_path": "src/ops.rs", "rank": 1, "score": 192758.3534147304 }, { "content": "pub fn gbase_tag(n:NID, s:String)->NID {\n\n GBASE.with(|gb| { gb.borrow_mut().tag(n,s) })}\n\n\n", "file_path": "src/int.rs", "rank": 2, "score": 172345.8644525339 }, { "content": "fn swarm_vhl_norm(state: &Arc<BddState>, ite:ITE)->R {\n\n let ITE{i:vv,t:hi,e:lo} = ite; let v = vv.vid();\n\n if let Some(n) = state.get_simple_node(v, HiLo{hi,lo}) { R::Nid(n) }\n\n else { R::Vhl{ v, hi, lo, invert:false } }}\n\n\n", "file_path": "src/bdd/bdd_swarm.rs", "rank": 3, "score": 160463.0775553417 }, { "content": "pub fn and(x:NID, y:NID)->Option<NID> {\n\n if x == O || y == O { Some(O) }\n\n else if x == I || x == y { Some(y) }\n\n else if y == I { Some(x) }\n\n else if x == !y { Some(O) }\n\n else { None }}\n\n\n", "file_path": "src/simp.rs", "rank": 4, "score": 150570.5472697749 }, { "content": "pub fn or(x:NID, y:NID)->Option<NID> {\n\n if x == O { Some(y) }\n\n else if y == O { Some(x) }\n\n else if x == I || y == I { Some(I) }\n\n else if x == y { Some(x) }\n\n else if x == !y { Some(I) }\n\n else { None }}\n", "file_path": "src/simp.rs", "rank": 5, "score": 150570.5472697749 }, { "content": "pub fn xor(x:NID, y:NID)->Option<NID> {\n\n if x == y { Some(O) }\n\n else if x == O { Some(y) }\n\n else if x == I { Some(!y) }\n\n else if y == O { Some(x) }\n\n else if y == I { Some(!x) }\n\n else if x == !y { Some(I) }\n\n else { None }}\n\n\n", "file_path": "src/simp.rs", "rank": 6, "score": 147231.0432697882 }, { "content": "/// attempt to parse the file at the specified path as a binary Vec<T>.\n\npub fn get<T:Sized+Clone>(path:&str) -> ::std::io::Result<Vec<T>> {\n\n let mut f = File::open(path)?;\n\n let mut uv:Vec<u8> = Vec::new();\n\n f.read_to_end(&mut uv).expect(\"couldn't read file\");\n\n let s:&[T] = unsafe { u8s_to_slice(&uv.as_slice())};\n\n Ok(s.to_vec()) }\n\n\n\n\f\n", "file_path": "src/io.rs", "rank": 7, "score": 146842.2275997582 }, { "content": "fn swarm_ite_norm(state: &Arc<BddState>, ite:ITE)->R {\n\n let ITE { i, t, e } = ite;\n\n let (vi, vt, ve) = (i.vid(), t.vid(), e.vid());\n\n let v 
= ite.top_vid();\n\n match state.get_memo(&ite) {\n\n Some(n) => R::Nid(n),\n\n None => {\n\n let (hi_i, lo_i) = if v == vi {state.tup(i)} else {(i,i)};\n\n let (hi_t, lo_t) = if v == vt {state.tup(t)} else {(t,t)};\n\n let (hi_e, lo_e) = if v == ve {state.tup(e)} else {(e,e)};\n\n // now construct and normalize the queries for the hi/lo branches:\n\n let hi = ITE::norm(hi_i, hi_t, hi_e);\n\n let lo = ITE::norm(lo_i, lo_t, lo_e);\n\n // if they're both simple nids, we're guaranteed to have a vhl, so check cache\n\n if let (Norm::Nid(hn), Norm::Nid(ln)) = (hi,lo) {\n\n match ITE::norm(NID::from_vid(v), hn, ln) {\n\n // first, it might normalize to a nid directly:\n\n Norm::Nid(n) => { R::Nid(n) }\n\n // otherwise, the normalized triple might already be in cache:\n\n Norm::Ite(ite) => swarm_vhl_norm(state, ite),\n\n Norm::Not(ite) => !swarm_vhl_norm(state, ite)}}\n\n // otherwise at least one side is not a simple nid yet, and we have to defer\n\n else { R::Wip{ v, hi, lo, invert:false } }}}}\n\n\n\n\f\n", "file_path": "src/bdd/bdd_swarm.rs", "rank": 8, "score": 145532.62557788307 }, { "content": "/// construct the expression `x AND y`\n\npub fn and<X:ToNID,Y:ToNID>(x:X,y:Y)->Ops { rpn(&[x.to_nid(), y.to_nid(), AND]) }\n\n\n", "file_path": "src/ops.rs", "rank": 9, "score": 141498.6565038344 }, { "content": "pub trait HiLoBase {\n\n fn get_hilo(&self, n:NID)->Option<HiLo>;\n\n}\n\n\n\n\f\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct HiLoCache {\n\n /// variable-agnostic hi/lo pairs for individual bdd nodes.\n\n hilos: Vec<HiLo>,\n\n /// reverse map for hilos.\n\n index: VHLHashMap<HiLo, IDX>,\n\n /// variable-specific memoization. These record (v,hilo) lookups.\n\n /// There shouldn't be any need for this, but an undiagnosed\n\n /// bug prevents me from removing it.\n\n vindex: VHLHashMap<(VID,HiLo), IDX>}\n\n\n\n// TODO: remove vindex. There's no reason to store (x1,y,z) separately from (y,z).\n\n// !! Previously, in test_nano_bdd, I wind up with a node branching on x2\n\n// to another node also branching on x2.\n\n// As of 2020-07-10, the new problem is just that test_multi_bdd\n", "file_path": "src/vhl.rs", "rank": 10, "score": 140028.6162825978 }, { "content": "pub fn tiny(b: &mut Bencher) {\n\n use bex::int::{X4,X8};\n\n b.iter(|| {\n\n find_factors!(BDD, X4, X8, 210, vec![(14,15)]); }); }\n\n\n", "file_path": "benches/bench-solve.rs", "rank": 11, "score": 139424.70073606333 }, { "content": "pub fn small(b: &mut Bencher) {\n\n use bex::int::{X8,X16};\n\n b.iter(|| {\n\n let expected = vec![(1,210), (2,105), ( 3,70), ( 5,42),\n\n (6, 35), (7, 30), (10,21), (14,15)];\n\n find_factors!(BDD, X8, X16, 210, expected);\n\n GBASE.with(|gb| gb.replace(bex::ast::ASTBase::empty()));\n\n }); }\n\n\n\nbenchmark_group!(both, tiny, small);\n\nbenchmark_main!(both);\n", "file_path": "benches/bench-solve.rs", "rank": 12, "score": 139424.70073606333 }, { "content": "pub fn no_var(x:NID)->bool { vid(x)==NOVAR }\n", "file_path": "src/nid.rs", "rank": 13, "score": 138278.79507335825 }, { "content": "/// Code run by each thread in the swarm. 
Isolated as a function without channels for testing.\n\nfn swarm_ite(state: &Arc<BddState>, ite0:ITE)->R {\n\n let ITE { i, t, e } = ite0;\n\n match ITE::norm(i,t,e) {\n\n Norm::Nid(n) => R::Nid(n),\n\n Norm::Ite(ite) => swarm_ite_norm(state, ite),\n\n Norm::Not(ite) => !swarm_ite_norm(state, ite) }}\n\n\n", "file_path": "src/bdd/bdd_swarm.rs", "rank": 14, "score": 136271.56024357583 }, { "content": "/// construct the expression `x VEL y` (\"x or y\")\n\npub fn vel<X:ToNID,Y:ToNID>(x:X,y:Y)->Ops { rpn(&[x.to_nid(), y.to_nid(), VEL]) }\n\n\n", "file_path": "src/ops.rs", "rank": 15, "score": 135699.5891048857 }, { "content": "/// construct the expression `x IMP y` (\"x implies y\")\n\npub fn imp<X:ToNID,Y:ToNID>(x:X,y:Y)->Ops { rpn(&[x.to_nid(), y.to_nid(), IMP]) }\n\n\n\n#[test] fn test_flip_and() {\n\n assert_eq!(AND.tbl().unwrap() & 0b1111, 0b0001 );\n\n assert_eq!(AND.fun_flip_inputs(1).tbl().unwrap() & 0b1111, 0b0010 );\n\n assert_eq!(AND.fun_flip_inputs(2).tbl().unwrap() & 0b1111, 0b0100 );\n\n assert_eq!(AND.fun_flip_inputs(3).tbl().unwrap() & 0b1111, 0b1000 );}\n\n\n\n#[test] fn test_flip_vel() {\n\n assert_eq!(VEL.tbl().unwrap() & 0b1111, 0b0111 );\n\n assert_eq!(VEL.fun_flip_inputs(1).tbl().unwrap() & 0b1111, 0b1011 );\n\n assert_eq!(VEL.fun_flip_inputs(2).tbl().unwrap() & 0b1111, 0b1101 );\n\n assert_eq!(VEL.fun_flip_inputs(3).tbl().unwrap() & 0b1111, 0b1110 );}\n\n\n\n#[test] fn test_flip_xor() {\n\n assert_eq!(XOR.tbl().unwrap() & 0b1111, 0b0110 );\n\n assert_eq!(XOR.fun_flip_inputs(1).tbl().unwrap() & 0b1111, 0b1001 );\n\n assert_eq!(XOR.fun_flip_inputs(2).tbl().unwrap() & 0b1111, 0b1001 );\n\n assert_eq!(XOR.fun_flip_inputs(3).tbl().unwrap() & 0b1111, 0b0110 );}\n\n\n\n#[test] fn test_norm() {\n\n assert_eq!(AND.tbl().unwrap() & 0b1111, 0b0001 );\n\n let ops = Ops::RPN(vec![NID::var(0), !NID::var(1), AND]);\n\n let mut rpn:Vec<NID> = ops.norm().to_rpn().cloned().collect();\n\n let f = rpn.pop().unwrap();\n\n assert_eq!(2, f.arity().unwrap());\n\n assert_eq!(f.tbl().unwrap() & 0b1111, 0b0100);\n\n assert_eq!(rpn, vec![NID::var(0), NID::var(1)]);}\n", "file_path": "src/ops.rs", "rank": 16, "score": 135699.5891048857 }, { "content": "/// construct the expression `x XOR y`\n\npub fn xor<X:ToNID,Y:ToNID>(x:X,y:Y)->Ops { rpn(&[x.to_nid(), y.to_nid(), XOR]) }\n\n\n", "file_path": "src/ops.rs", "rank": 17, "score": 135699.5891048857 }, { "content": "/// load a hashmap\n\npub fn get_map(path:&str) -> ::std::io::Result<HashMap<String,usize>> {\n\n let mut m = HashMap::new();\n\n let f = File::open(path)?; let r = BufReader::new(&f);\n\n for line in r.lines() {\n\n let line = line.unwrap();\n\n let v:Vec<&str> = line.split(',').collect();\n\n m.insert(v[0].to_string(), v[1].parse::<usize>().unwrap()); }\n\n Ok(m)}\n", "file_path": "src/io.rs", "rank": 18, "score": 134974.79448231834 }, { "content": "fn swap(data: &mut Vec<NID>) {\n\n let p = data.len()-1;\n\n if p > 0 { data.swap(p-1,p) }}\n\n\n", "file_path": "examples/shell/bex-shell.rs", "rank": 19, "score": 134012.0251999425 }, { "content": "/// write the vector, as bytes, to a file at the specified path.\n\npub fn put<T:Sized>(path:&str, v:&[T]) -> ::std::io::Result<()> {\n\n let mut f = File::create(path)?;\n\n f.write_all( unsafe{ slice_to_u8s(v) }) }\n\n\n", "file_path": "src/io.rs", "rank": 20, "score": 133540.10484590102 }, { "content": "#[derive(Clone, Debug)]\n\nstruct XVHLRow { hm: HashMap<XHiLo, IxRc> }\n\nimpl XVHLRow {\n\n fn new()->Self {XVHLRow{ hm: HashMap::new() }}\n\n /// build a reverse index, mapping of xids to 
hilo pairs\n\n fn xid_map(&self)->HashMap<XID,XHiLo> { self.hm.iter().map(|(hl,ixrc)|(ixrc.ix,*hl)).collect() }}\n\n\n\n/// The scaffold itself contains the master list of records (vhls) and the per-row index\n\n#[derive(Clone)]\n\npub struct XVHLScaffold {\n\n vids: Vec<VID>,\n\n vhls: Vec<XVHL>,\n\n rows: HashMap<VID, XVHLRow>,\n\n /// tracks whether all workers have completed their work\n\n complete: HashMap<VID,WID>,\n\n /// tracks rows that are locked during the distributed regroup() operation\n\n locked: HashSet<VID>,\n\n /// tracks refcount changes that are pending for locked rows (\"deferred refcount delta\")\n\n drcd: HashMap<VID,HashMap<XID, i64>> }\n\n\n\n// snapshot used for debugging\n", "file_path": "src/swap.rs", "rank": 21, "score": 133015.1951780985 }, { "content": "/// return a nid that is not tied to a variable\n\npub fn ixn(ix:IDX)->NID { nvi(NOVAR, ix) }\n\n\n\nuse vid;\n\n\n", "file_path": "src/nid.rs", "rank": 22, "score": 131977.7145296609 }, { "content": "/// map a nid from the source to a (usually virtual) variable in the destination\n\npub fn convert_nid(sn:SrcNid)->DstNid {\n\n let SrcNid{ n } = sn;\n\n let r = if nid::is_const(n) { n }\n\n else {\n\n let r0 = if n.is_vid() { NID::var(nid::vid(n) as u32) } // TODO: probably want\n\n else if nid::no_var(n) { NID::vir(nid::idx(n) as u32) }\n\n else { todo!(\"convert_nid({:?})\", n) };\n\n if nid::is_inv(n) { !r0 } else { r0 }};\n\n DstNid{ n: r } }\n\n\n", "file_path": "src/solve.rs", "rank": 23, "score": 128188.0565780414 }, { "content": "fn hs<T: Eq+Hash>(xs: Vec<T>)->HashSet<T> { <HashSet<T>>::from_iter(xs) }\n\n\n\n// basic test suite\n\n\n\n#[test] fn test_base() {\n\n let mut base = BDDBase::new();\n\n let (v1, v2, v3) = (NID::var(1), NID::var(2), NID::var(3));\n\n assert_eq!((I,O), base.tup(I));\n\n assert_eq!((O,I), base.tup(O));\n\n assert_eq!((I,O), base.tup(v1));\n\n assert_eq!((I,O), base.tup(v2));\n\n assert_eq!((I,O), base.tup(v3));\n\n assert_eq!(I, base.when_hi(VID::var(3),v3));\n\n assert_eq!(O, base.when_lo(VID::var(3),v3))}\n\n\n\n#[test] fn test_and() {\n\n let mut base = BDDBase::new();\n\n let (v1, v2) = (NID::var(1), NID::var(2));\n\n let a = base.and(v1, v2);\n\n assert_eq!(O, base.when_lo(VID::var(1),a));\n", "file_path": "src/test-bdd.rs", "rank": 24, "score": 125567.3710722917 }, { "content": "/// This is the core algorithm for solving by substitution. We are given a (presumably empty)\n\n/// destination (the `SubSolver`), a source ASTBase (`src0`), and a source nid (`sn`),\n\n/// pointing to a node inside the ASTBase.\n\n///\n\n/// The source nids we encounter are indices into the ASTBase. We begin by sorting/rewriting\n\n/// the ASTBase in terms of \"cost\", so that a node at index k is only dependent on nodes\n\n/// with indices < k. We also filter out any nodes that are not actually used (for example,\n\n/// there may be nodes in the middle of the AST that are expensive to calculate on their own,\n\n/// but get canceled out later on (perhaps by XORing with itself, or ANDing with 0) -- there's\n\n/// no point including these at all as we work backwards.\n\n///\n\n/// After this sorting and filtering, we map each nid in the AST to a `VID::vir` with\n\n/// the corresponding index. We then initialize `dst` with the highest vid (the one\n\n/// corresponding to the topmost/highest cost node in the AST).\n\n///\n\n/// We then replace each VID in turn with its definition. 
The definition of each intermediate\n\n/// node is always in terms of either other AST nodes (mapped to `VID::vir` in the destination,\n\n/// or actual input variables (`VID::var`), which are added to the destination directly).\n\n///\n\n/// The dependency ordering ensures that we never re-introduce a node after substitution,\n\n/// so the number of substitution steps is equal to the number of AST nodes.\n\n///\n\n/// Of course, the cost of each substitution is likely to increase as the destination\n\n/// becomes more and more detailed. Depending on the implementation, this cost may even\n\n/// grow exponentially. However, the hope is that by working \"backward\" from the final\n\n/// result, we will have access to the maximal number of constraints, and there\n\n/// will be opportunities to streamline and cancel out even more nodes. The hope is that\n\n/// no matter how slow this process is, it will be less slow that trying to fully solve\n\n/// each intermediate node by working \"forward\".\n\npub fn solve<S:SubSolver>(dst:&mut S, src0:&RawASTBase, sn:NID)->DstNid {\n\n // AST nids don't contain VIR nodes (they \"are\" vir nodes).\n\n // If it's already a const or a VID::var, though, there's nothing to do.\n\n if sn.is_lit() { DstNid{n:sn} }\n\n else {\n\n dst.init(sn.vid());\n\n // renumber and garbage collect, leaving only the AST nodes reachable from sn\n\n let (src, top) = sort_by_cost(&src0, SrcNid{n:sn});\n\n\n\n // step is just a number that counts downward.\n\n let mut step:usize = nid::idx(top.n);\n\n\n\n // !! These lines were a kludge to allow storing the step number in the dst,\n\n // with the idea of persisting the destination to disk to resume later.\n\n // The current solvers are so slow that I'm not actually using them for\n\n // anything but testing, though, so I don't need this yet.\n\n // TODO: re-enable the ability to save and load the destination mid-run.\n\n // let step_node = dst.get(&\"step\".to_string()).unwrap_or_else(||NID::var(0));\n\n // let mut step:usize = step_node.vid().var_ix();\n\n\n", "file_path": "src/solve.rs", "rank": 25, "score": 124143.42929891622 }, { "content": "pub trait Walkable {\n\n\n\n /// walk nodes in graph for nid n recursively, without revisiting shared nodes\n\n fn step<F>(&self, n:NID, f:&mut F, seen:&mut HashSet<NID>, topdown:bool)\n\n where F: FnMut(NID,VID,NID,NID);\n\n\n\n fn walk<F>(&self, n:NID, f:&mut F) where F: FnMut(NID,VID,NID,NID) {\n\n let mut seen = HashSet::new();\n\n self.step(n, f, &mut seen, true)}\n\n\n\n /// same as walk, but visit children before firing the function.\n\n /// note that this walks from \"left to right\" (\"lo' to \"hi\")\n\n /// and bottom to top, starting from the leftmost node.\n\n /// if you want the bottommost nodes to come first, use self.as_heap(n)\n\n fn walk_up<F>(&self, n:NID, f:&mut F) where F: FnMut(NID,VID,NID,NID) {\n\n let mut seen = HashSet::new();\n\n self.step(n, f, &mut seen, false)}\n\n\n\n /// this is meant for walking nodes ordered by variables from bottom to top.\n\n /// it's deprecated because the whole thing ought to be replaced by a nice iterator\n\n /// (also, it's not clear to me why the derived Ord for VHL doesn't require Reverse() here)\n\n #[deprecated]\n\n fn as_heap(&self, n:NID)->BinaryHeap<(VHL, NID)> {\n\n let mut result = BinaryHeap::new();\n\n self.walk_up(n, &mut |nid, v, hi, lo| result.push((VHL{ v, hi, lo }, nid)));\n\n result }}\n\n\n\n\f\n", "file_path": "src/vhl.rs", "rank": 26, "score": 120172.2077842464 }, { "content": "type R = wip::RMsg<Norm>;\n\n\n\n// 
Q::Cache() message could potentially be huge to print, so don't.\n\nimpl std::fmt::Debug for Q {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n Q::Ite(ite) => { write!(f, \"Q::Ite({:?})\", ite) }\n\n Q::Cache(_) => { write!(f, \"Q::Cache(...)\") }\n\n Q::Halt => { write!(f, \"Q::Halt\")} } }}\n\n\n\n// ----------------------------------------------------------------\n\n\n", "file_path": "src/bdd/bdd_swarm.rs", "rank": 27, "score": 120168.28222866332 }, { "content": "/// save a hashmap\n\npub fn put_map<S:BuildHasher>(path:&str, m:&HashMap<String,usize,S>) -> ::std::io::Result<()> {\n\n let mut f = File::create(path)?;\n\n for (k,v) in m.iter() { writeln!(&mut f, \"{},{}\", k, v)? }\n\n Ok(())}\n\n\n", "file_path": "src/io.rs", "rank": 28, "score": 118609.0926302403 }, { "content": "#[test]\n\nfn test_reg_mut() {\n\n let mut reg = Reg::new(66);\n\n assert_eq!(reg.data.len(), 2);\n\n assert_eq!(reg.data[0], 0);\n\n assert_eq!(reg.get(0), false);\n\n reg.put(0, true);\n\n assert_eq!(reg.data[0], 1); // bit '0' is the least significant bit\n\n assert_eq!(reg.data[1], 0);\n\n assert_eq!(reg.get(0), true);\n\n assert_eq!(reg.get(1), false);\n\n // now\n\n assert_eq!(reg.as_usize(), 1, \"{:?}=1\", reg);\n\n reg.put(1, true);\n\n assert_eq!(reg.data[0], 3);\n\n assert_eq!(reg.get(1), true); }\n\n\f\n\n#[test] fn test_reg_inc_hitop() {\n\n let mut reg = Reg::new(2);\n\n assert_eq!(0, reg.as_usize());\n\n assert_eq!(Some(0), reg.increment(), \"00 -> 01\");\n", "file_path": "src/reg.rs", "rank": 29, "score": 116130.46088597663 }, { "content": "/// constructor for rpn\n\npub fn rpn(xs:&[NID])->Ops { Ops::RPN(xs.to_vec()) }\n\n\n\n/// x0 & x1\n\npub const AND:NID = NID::fun(2,0b0001);\n\n\n\n/// x0 ^ x1\n\npub const XOR:NID = NID::fun(2,0b0110);\n\n\n\n/// x0 | x1 (vel is the latin word for 'inclusive or', and the origin of the \"∨\" symbol in logic)\n\npub const VEL:NID = NID::fun(2,0b0111);\n\n\n\n/// !(x0 | x1)\n\npub const NOR:NID = NID::fun(2,0b1000);\n\n\n\n/// x0 implies x1 (x0 <= x1)\n\npub const IMP:NID = NID::fun(2,0b1011);\n\n\n", "file_path": "src/ops.rs", "rank": 30, "score": 115818.05946905947 }, { "content": "#[cfg(test)]\n\nfn check_swap(old:&str, new:&str) {\n\n let mut xsd = XSDebug::new(\"abcdvw\");\n\n let (v, x) = (xsd.vid('v'), xsd.xid(old));\n\n xsd.xs.swap(v);\n\n assert_eq!(xsd.fmt(x), new.to_string(), \"unexpected results after swap.\")}\n\n\n\n#[test] fn test_swap() {\n\n check_swap(\"abv? cdv? w?\", \"acw? bdw? v? \");\n\n check_swap(\"abv? acv? w?\", \"abcw? v? \");\n\n check_swap(\"a abv? w?\", \"aabw? v? \");\n\n check_swap(\"abv? b w?\", \"abw? bv? \"); }\n\n\n\n#[test] fn test_tbl() {\n\n let mut xsd = XSDebug::new(\"abcd\");\n\n let x = xsd.xid(\"a 1 b? 
0 c?\");\n\n let o = XID_O; let i = XID_I;\n\n assert_eq!(xsd.xs.tbl(x, None), vec![o,i,i,i, o,o,o,o]);\n\n let a = xsd.xid(\"a\");\n\n assert_eq!(xsd.xs.tbl(x, Some(VID::var(0))), vec![a,i,o,o]);\n\n let y = xsd.xid(\"a 1 b?\");\n", "file_path": "src/test-swap.rs", "rank": 31, "score": 113633.20412826972 }, { "content": "/// This is the loop run by each thread in the swarm.\n\nfn swarm_loop(tx:RTx, rx:QRx, state:Arc<BddState>) {\n\n use swarm::{QID, Worker};\n\n let mut w:BddWorker = BddWorker{ state:Some(state) };\n\n for (oqid, q) in rx.iter() {\n\n let sqid:QID = match q {\n\n Q::Cache(_) => QID::STEP(0),\n\n Q::Ite(_) => QID::STEP(oqid.unwrap()),\n\n Q::Halt => QID::STEP(0)};\n\n if let Some(r) = w.work_step(&sqid, q.clone()) {\n\n if tx.send((oqid.unwrap_or(0), r)).is_err() { panic!(\"error sending result!\") }}\n\n if let Q::Halt = q { break }}}\n", "file_path": "src/bdd/bdd_swarm.rs", "rank": 32, "score": 106997.42049910373 }, { "content": "/// The real challenge: factor the 64-bit product of the first 15 primes.\n\npub fn main() {\n\n find_factors!(SwapSolver, X32, X64, K as usize, factors()); }\n", "file_path": "examples/solve/bdd-solve.rs", "rank": 33, "score": 105346.6098926141 }, { "content": "type Mod = (usize,XID,XID);\n\n\n", "file_path": "src/swap.rs", "rank": 34, "score": 102256.15883090803 }, { "content": "pub trait CursorPlan : HiLoBase {\n\n /// is the given (leaf) node a solution, given the current inversion state?\n\n fn includes_leaf(&self, n:NID)->bool { n == nid::I }\n\n fn includes_lo(&self, n:NID)->bool { n != nid::O }\n\n}\n\n\n\n\n\npub struct Cursor {\n\n pub nvars: usize, // number of input variables in context\n\n pub node: NID, // the current node.\n\n pub scope: Reg, // the current variable assignments\n\n pub nstack: Vec<NID>, // the path of nodes we have traversed\n\n pub istack: Vec<bool>, // the stack of node inversion states\n\n pub invert: bool, // whether to invert the results\n\n}\n\n\n\nimpl Cursor {\n\n\n\n pub fn new(nvars:usize, node:NID)->Self {\n\n Cursor {\n", "file_path": "src/cur.rs", "rank": 35, "score": 101388.3845908646 }, { "content": "pub fn gbase_i()->BaseBit { BaseBit{base:gbase_ref(), n:nid::I} }\n\n\n\n\f\n\n// --- lifted u32 type -----------------------------------------\n\n\n\n// TODO: implement iterators on the bits to simplify all these loops!!\n\n\n", "file_path": "src/int.rs", "rank": 36, "score": 98802.91711910043 }, { "content": "pub fn gbase_ref()->BaseRef {\n\n GBASE.with(|gb| gb.clone()) }\n\n\n", "file_path": "src/int.rs", "rank": 37, "score": 98346.50260296674 }, { "content": "/// This function renumbers the NIDs so that nodes with higher IDs \"cost\" more.\n\n/// Sorting your AST this way dramatically reduces the cost of converting to\n\n/// another form. (For example, the test_tiny benchmark drops from 5282 steps to 111 for BDDBase)\n\npub fn sort_by_cost(src:&RawASTBase, top:SrcNid)->(RawASTBase,SrcNid) {\n\n let (mut src0,kept0) = src.repack(vec![top.n]);\n\n src0.tag(kept0[0], \"-top-\".to_string());\n\n // m:mask (which input vars are required?); c:cost (in steps before we can calculate)\n\n let (_m0,c0) = src0.masks_and_costs(default_bitmask);\n\n let p = apl::gradeup(&c0); // p[new idx] = old idx\n\n let ast = src0.permute(&p);\n\n let n = ast.get(\"-top-\").expect(\"what? 
I just put it there.\");\n\n (ast,SrcNid{n}) }\n\n\n\n\f\n", "file_path": "src/solve.rs", "rank": 38, "score": 98272.73721473981 }, { "content": "pub fn gbase_o()->BaseBit { BaseBit{base:gbase_ref(), n:nid::O} }\n", "file_path": "src/int.rs", "rank": 39, "score": 96436.81376277123 }, { "content": "#[derive(Debug, Clone, Copy)]\n\nenum XWIP1 { XID(XID), NEW(i64) }\n\n\n\n// 0: swap the rows. (lift row u above row d)\n\n// u was independent before, so we leave it alone except for gc.\n\n// (but we might wind up using it later, so we do the gc step last.)\n\n// 1: for each node n in row d:\n\n// - if n.rc=0, delete from hashmap and yield | Delete(n.nid)\n\n// - if either leg points to row u:\n\n// decref the old node(s) on row u\n\n// add new node(s) on w with rc=1 | Create() { or incref if duplicates? }\n\n// incref the hi/lo nodes.\n\n// move n to row u, copying n.rc, and yield | Update(n.nid, v,h,l)\n\n// - else, leave n alone.\n\n// 2: for n in row u:\n\n// if n.rc==0, Del(n.nid) and DecRef(n.hi, n.lo)\n\n\n\n\n", "file_path": "src/swap.rs", "rank": 40, "score": 96095.72057304575 }, { "content": "/// Map the indices in `ys` to the corresponding values from `xs`.\n\npub fn at<'a,T:Clone>(xs:&'a[T], ys:&'a[usize]) -> Vec<T> {\n\n ys.iter().map(|&i| xs[i].clone()).collect() }\n", "file_path": "src/apl.rs", "rank": 41, "score": 93546.10383385254 }, { "content": "pub fn gbase_var(v:u32)->BaseBit {\n\n GBASE.with(|gb| { BaseBit{base:gb.clone(), n:NID::var(v) }}) }\n\n\n", "file_path": "src/int.rs", "rank": 42, "score": 92309.29045912821 }, { "content": "/// replace node in destination with its definition form source\n\nfn refine_one(dst: &mut dyn SubSolver, v:VID, src:&RawASTBase, d:DstNid)->DstNid {\n\n // println!(\"refine_one({:?})\", d)\n\n let ctx = d.n;\n\n let ops = src.get_ops(nid::ixn(v.vir_ix() as u32));\n\n let cn = |x0:&NID|->NID { if x0.is_fun() { *x0 } else { convert_nid(SrcNid{n:*x0}).n }};\n\n let def:Ops = Ops::RPN( ops.to_rpn().map(cn).collect() );\n\n DstNid{n: dst.subst(ctx, v, &def) }}\n\n\n\n\f\n", "file_path": "src/solve.rs", "rank": 43, "score": 92252.60981967795 }, { "content": "fn repl(base:&mut ASTBase) {\n\n let mut scope = HashMap::new();\n\n let mut data: Vec<NID> = Vec::new();\n\n let mut bdds = BDDBase::new();\n\n let mut anfs = ANFBase::new();\n\n\n\n 'main: loop {\n\n print!(\"[ \"); for x in &data { print!(\"{} \", *x); } println!(\"]\");\n\n let line = readln();\n\n for word in line.split_whitespace() {\n\n match word {\n\n // bdd commands\n\n \"i\"|\"I\" => data.push(nid::I),\n\n \"o\"|\"O\" => data.push(nid::O),\n\n \"~\"|\"not\" => { let x = pop(&mut data); data.push(!x) }\n\n \"and\" => { let (x,y)=pop2(&mut data); data.push(base.and(x,y)) }\n\n \"xor\" => { let (x,y)=pop2(&mut data); data.push(base.xor(x,y)) }\n\n \"or\" => { let (x,y)=pop2(&mut data); data.push(base.or(x,y)) }\n\n //\"lt\" => { let (x,y)=pop2(&mut data); data.push(base.lt(x,y)) }\n\n // \"gt\" => { let (x,y)=pop2(&mut data); data.push(base.gt(x,y)) }\n", "file_path": "examples/shell/bex-shell.rs", "rank": 44, "score": 91894.83156223853 }, { "content": "pub fn botmost(x:VID, y:VID)->VID { if x.is_below(&y) { x } else { y }}\n", "file_path": "src/vid.rs", "rank": 45, "score": 91342.64558398878 }, { "content": "pub fn topmost(x:VID, y:VID)->VID { if x.is_above(&y) { x } else { y }}\n", "file_path": "src/vid.rs", "rank": 46, "score": 91342.64558398878 }, { "content": "#[derive(Debug, Clone, Copy)]\n\nenum XWIP0 { XID(XID), HL(XID,XID) }\n\n\n\n/// in the second wip step, the hilo pairs are 
all resolved to existing\n\n/// xids or mapped to a new one\n", "file_path": "src/swap.rs", "rank": 47, "score": 90820.64139289173 }, { "content": "#[derive(Default)]\n\nstruct BddWorker { state:Option<Arc<BddState>> }\n\nimpl swarm::Worker<Q,R> for BddWorker {\n\n fn work_step(&mut self, _sqid:&swarm::QID, q:Q)->Option<R> {\n\n match q {\n\n Q::Cache(s) => { self.state = Some(s); None }\n\n Q::Ite(ite) => { Some(swarm_ite(self.state.as_ref().unwrap(), ite)) }\n\n Q::Halt => {\n\n let tests = COUNT_XMEMO_TEST.with(|c| c.replace(0));\n\n let fails = COUNT_XMEMO_FAIL.with(|c| c.replace(0));\n\n Some(R::MemoStats{ tests, fails }) } }}}\n\n\n\n/// Sender for Q\n\npub type QTx = Sender<(Option<QID>, Q)>;\n\n/// Receiver for Q\n\npub type QRx = Receiver<(Option<QID>, Q)>;\n\n/// Sender for R\n\npub type RTx = Sender<(QID, R)>;\n\n/// Receiver for R\n\npub type RRx = Receiver<(QID, R)>;\n\n\n", "file_path": "src/bdd/bdd_swarm.rs", "rank": 48, "score": 90814.89670421148 }, { "content": "type VarMaskFn = fn(&RawASTBase,vid::VID)->u64;\n\n\n\n/// An ASTBase that does not use extra simplification rules.\n\nimpl RawASTBase {\n\n\n\n pub fn empty()->RawASTBase { RawASTBase{ bits:vec![], tags:HashMap::new(), hash:HashMap::new() }}\n\n pub fn len(&self)->usize { self.bits.len() }\n\n pub fn is_empty(&self)->bool { self.bits.is_empty() }\n\n\n\n fn nid(&mut self, ops:Ops)->NID {\n\n match self.hash.get(&ops) {\n\n Some(&n) => n,\n\n None => {\n\n let nid = nid::ixn(self.bits.len() as u32);\n\n self.bits.push(ops.clone());\n\n self.hash.insert(ops, nid);\n\n nid }}}\n\n\n\n pub fn load(path:&str)->::std::io::Result<RawASTBase> {\n\n let s = io::get(path)?;\n", "file_path": "src/ast.rs", "rank": 49, "score": 90564.66805080898 }, { "content": "/// (OLD) Variable ID: uniquely identifies an input variable in the BDD.\n\n/// This name is private to the nid module since vid::VID supercedes it.\n\ntype VID = usize;\n\n\n\n/// Index into a (usually VID-specific) vector.\n\npub type IDX = u32;\n\n\n\n/// A NID represents a node in a Base. 
Essentially, this acts like a tuple\n\n/// containing a VID and IDX, but for performance reasons, it is packed into a u64.\n\n/// See below for helper functions that manipulate and analyze the packed bits.\n\n#[derive(PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, Serialize, Deserialize)]\n\npub struct NID { n: u64 }\n\n\n\n/// Just a constructor so I can add extra temp fields in development without breaking code.\n\nconst fn new (n:u64)->NID { NID{n} }\n\n\n\n\f\n\n// -- bits in the nid ---\n\n\n\n/// Single-bit mask representing that a NID is inverted.\n\nconst INV:u64 = 1<<63; // is inverted?\n\n\n", "file_path": "src/nid.rs", "rank": 50, "score": 90103.1537427481 }, { "content": "/// Return the unique items of `xs` (in order of appearance),\n\n/// and a mapping of those items to their indices.\n\npub fn group<T>(xs: &[T]) -> (Vec<&T>, HashMap<&T,Vec<usize>>)\n\nwhere T: std::hash::Hash, T: std::cmp::Eq {\n\n let mut map:HashMap<&T,Vec<usize>> = HashMap::new();\n\n let mut nub = vec![]; // unique xs, in the order in which they appeared\n\n for (i,k) in xs.iter().enumerate() {\n\n let kxs = map.entry(k).or_insert_with(Vec::new);\n\n nub.push(k); kxs.push(i) }\n\n (nub, map) }\n\n\n", "file_path": "src/apl.rs", "rank": 51, "score": 87647.99825341442 }, { "content": "pub fn gbase_def(s:String, i:VID)->BaseBit {\n\n GBASE.with(|gb| { let vn=gb.borrow_mut().def(s,i); BaseBit{base:gb.clone(), n:vn }}) }\n\n\n", "file_path": "src/int.rs", "rank": 52, "score": 87036.01873784314 }, { "content": "pub fn topmost_of3(x:VID, y:VID, z:VID)->VID { topmost(x, topmost(y, z)) }\n\n\n\n\f\n\nimpl VID {\n\n pub const fn top()->VID { VID { v:T }}\n\n pub const fn nov()->VID { VID { v:NoV }}\n\n pub const fn var(i:u32)->VID { VID { v: Var(i) }}\n\n pub const fn vir(i:u32)->VID { VID { v: Vir(i) }}\n\n pub fn is_top(&self)->bool { VID{ v:T } == *self }\n\n pub fn is_nov(&self)->bool { if let VID{ v:NoV } = self { true } else { false } }\n\n pub fn is_var(&self)->bool { if let VID{ v:Var(_) } = self { true } else { false } }\n\n pub fn is_vir(&self)->bool { if let VID{ v:Vir(_) } = self { true } else { false } }\n\n\n\n pub fn is_above(&self, other:&VID)->bool { self.cmp_depth(&other) == VidOrdering::Above }\n\n pub fn is_below(&self, other:&VID)->bool { self.cmp_depth(&other) == VidOrdering::Below }\n\n pub fn shift_up(&self)->VID {\n\n match self.v {\n\n NoV => panic!(\"VID::nov().shift_up() is undefined\"),\n\n T => panic!(\"VID::top().shift_up() is undefined\"), //VID::var(0),\n\n // these two might panic on over/underflow:\n", "file_path": "src/vid.rs", "rank": 53, "score": 85379.9789092757 }, { "content": "/// Calculate a permutation vector that sorts array `xs`.\n\npub fn gradeup<T>(xs: &[T]) -> Vec<usize>\n\nwhere T: std::cmp::Ord {\n\n let mut ixs:Vec<(usize,&T)> = xs.iter().enumerate().collect();\n\n ixs.sort_by_key(|ix|ix.1); ixs.iter().map(|ix|ix.0).collect()}\n\n\n", "file_path": "src/apl.rs", "rank": 54, "score": 82045.52601769118 }, { "content": "fn pop<T>(data: &mut Vec<T>)->T {\n\n data.pop().expect(\"underflow\")}\n\n\n", "file_path": "examples/shell/bex-shell.rs", "rank": 55, "score": 79669.80694883641 }, { "content": "type BDD = bdd::BDDBase;\n\n\n", "file_path": "benches/bench-solve.rs", "rank": 56, "score": 76008.71583563075 }, { "content": "fn pop2<T>(data: &mut Vec<T>)->(T,T){\n\n let y=pop(data); let x=pop(data); (x,y) }\n\n\n\n/*fn pop3<T>(data: &mut Vec<T>)->(T,T,T){\n\n let (y,z)=pop2(data); let x=pop(data); (x,y,z) }*/\n\n\n\n\f\n\n// forth-like REPL for the BDD (main 
loop)\n\n\n\n// fn to_io(b:bool)->NID { if b {Op::I} else {Op::O} }\n\n// enum Item { Vid(VID), Nid(NID), Int(u32) }\n\n\n", "file_path": "examples/shell/bex-shell.rs", "rank": 57, "score": 75868.04657091072 }, { "content": "fn vid_to_old(v:vid::VID)->VID {\n\n if v.is_nov() { NOVAR }\n\n else if v.is_top() { TOP }\n\n else if v.is_var() { v.var_ix() | (RVAR>>32) as VID }\n\n else if v.is_vir() { v.vir_ix() as VID }\n\n else { panic!(\"unknown vid::VID {:?}?\", v) }}\n\n\n", "file_path": "src/nid.rs", "rank": 58, "score": 74144.95214076019 }, { "content": "fn fun_tbl(f:NID)->Vec<XID> {\n\n assert!(f.is_fun(), \"can't convert non-fun nid to table\");\n\n let ar = f.arity().unwrap();\n\n let ft = f.tbl().unwrap();\n\n let mut tbl = vec![XID_O;(1<<ar) as usize];\n\n let end = (1<<ar)-1;\n\n for i in 0..=end { if ft & (1<<i) != 0 { tbl[end-i as usize] = XID_I; }}\n\n tbl }\n\n\n\nimpl SubSolver for SwapSolver {\n\n\n\n fn init(&mut self, v: VID)->NID {\n\n self.dst = XVHLScaffold::new(); self.dst.push(v);\n\n self.rv = v;\n\n self.dx = self.dst.add_ref(XVHL{ v, hi:XID_I, lo:XID_O}, 0, 1);\n\n self.dx.to_nid() }\n\n\n\n fn subst(&mut self, ctx: NID, v: VID, ops: &Ops)->NID {\n\n let Ops::RPN(mut rpn) = ops.norm();\n\n println!(\"@:sub {:>4} -> {:>24} -> {:>20}\",\n", "file_path": "src/swap.rs", "rank": 59, "score": 74144.95214076019 }, { "content": "fn old_to_vid(o:VID)->vid::VID {\n\n if o == TOP { vid::VID::top() }\n\n else if o == NOVAR { vid::VID::nov() }\n\n else if o & (RVAR>>32) as VID > 0 { vid::VID::var((o & !(RVAR>>32) as VID) as u32) }\n\n else { vid::VID::vir(o as u32) }}\n\n\n", "file_path": "src/nid.rs", "rank": 60, "score": 74144.95214076019 }, { "content": "#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, Hash, Serialize, Deserialize)]\n\nenum VidEnum {\n\n // How I (eventually) want the ordering, to be (once biggest vars go on top:)\n\n T, // Special meta-constant on which I and O branch.\n\n NoV, // Special case for AST nodes not tied to a variable\n\n Var(u32), // Real Vars go in the middle, with biggest u32 on top.\n\n Vir(u32), // Virtual are \"biggest\", so go to the top.\n\n}\n\n\n\n#[derive(Eq, PartialEq)]\n\npub enum VidOrdering {\n\n Above,\n\n Level,\n\n Below }\n\n\n\nuse self::VidEnum::*;\n\n\n\n\f\n\n#[derive(Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]\n\npub struct VID { v:VidEnum }\n\npub const NOV:VID = VID::nov();\n\npub const TOP:VID = VID::top();\n\n\n", "file_path": "src/vid.rs", "rank": 61, "score": 71258.70249702017 }, { "content": "/// helper for 'fun' (function table) nids\n\n/// u32 x contains the bits to permute.\n\n/// pv is a permutation vector (the bytes 0..=31 in some order)\n\n// b=pv[i] means to grab bit b from x and move to position i in the result.\n\nfn permute_bits(x:u32, pv:&[u8])->u32 {\n\n let mut r:u32 = 0;\n\n for (i,b) in pv.iter().enumerate() { r |= ((x & (1<<b)) >> b) << i; }\n\n r }\n\n\f\n\n\n\n// TODO: add n.is_vid() to replace current is_var()\n\n// TODO: is_var() should only be true for vars, not both virs and vars.\n\n// TODO: probably also need is_nov() for consistency.\n\n\n\nimpl NID {\n\n pub fn var(v:u32)->Self { Self::from_vid(vid::VID::var(v)) }\n\n pub fn vir(v:u32)->Self { Self::from_vid(vid::VID::vir(v)) }\n\n pub fn from_var(v:vid::VID)->Self { NID::var(v.var_ix() as u32)}\n\n pub fn from_vir(v:vid::VID)->Self { NID::vir(v.vir_ix() as u32)}\n\n pub fn from_vid(v:vid::VID)->Self { nv(vid_to_old(v)) }\n\n pub fn from_vid_idx(v:vid::VID, i:IDX)->Self { nvi(vid_to_old(v), i) }\n\n pub fn 
vid(&self)->vid::VID { old_to_vid(vid(*self)) }\n\n pub fn is_const(&self)->bool { is_const(*self) }\n\n pub fn is_vid(&self)->bool { is_vid(*self)}\n", "file_path": "src/nid.rs", "rank": 62, "score": 70265.38227498624 }, { "content": "fn cmp_depth_idx(x:u32, y:&u32)->VidOrdering {\n\n match x.cmp(y) {\n\n Ordering::Less => VidOrdering::Below,\n\n Ordering::Equal => VidOrdering::Level,\n\n Ordering::Greater => VidOrdering::Above }}\n\n\n\nimpl VID {\n\n pub fn cmp_depth(&self, other: &Self) -> VidOrdering {\n\n use self::VidOrdering::*;\n\n match self.v {\n\n T => if other.v == T { Level } else { Below },\n\n NoV => match other.v {\n\n T => Above,\n\n NoV => Level,\n\n _ => Below },\n\n Var(x) => match other.v {\n\n Vir(_) => Below,\n\n Var(y) => cmp_depth_idx(x,&y),\n\n NoV|T => Above },\n\n Vir(x) => match other.v {\n\n Var(_) => Above,\n\n Vir(y) => cmp_depth_idx(x,&y),\n\n NoV|T => Above }}}}\n\n\n", "file_path": "src/vid.rs", "rank": 63, "score": 68439.20197922217 }, { "content": "#[derive(Debug)]\n\nenum Q {\n\n Init{ vu:VID, ru: XVHLRow },\n\n Step{ vd:VID, rd: XVHLRow },\n\n Stop,\n\n DRcD( HashMap<XID,i64> ),\n\n Xids( Vec<XID> )}\n\n\n", "file_path": "src/swap.rs", "rank": 64, "score": 63117.255826281915 }, { "content": "#[derive(Debug)]\n\nenum R {\n\n DRcD{ vu:VID },\n\n Alloc{ needed:usize },\n\n PutRD{ vu:VID, vd:VID, rd: XVHLRow, dnew:Vec<Mod>, umov:Vec<Mod>, dels:Vec<XID>, refs:HashMap<XID, i64> },\n\n PutRU{ vu:VID, ru: XVHLRow }}\n\n\n\n// -- graphviz ----------------------------------------------------------\n\n\n\nimpl GraphViz for XVHLScaffold {\n\n fn write_dot(&self, _:NID, wr: &mut dyn std::fmt::Write) {\n\n // TODO: show only the given nid, instead of the whole scaffold\n\n // assert_eq!(o, NID::o(), \"can't visualize individual nids yet. pass O for now\");\n\n macro_rules! 
w { ($x:expr $(,$xs:expr)*) => { writeln!(wr, $x $(,$xs)*).unwrap(); }}\n\n w!(\"digraph XVHL {{\");\n\n w!(\"subgraph head {{ h1[shape=plaintext; label=\\\"XVHL\\\"] }}\");\n\n w!(\" {{rank=same XO XI}}\");\n\n w!(\" XO[label=⊥; shape=square];\");\n\n w!(\" XI[label=⊤; shape=square];\");\n\n w!(\"node[shape=circle];\");\n\n for ev in self.vids.iter().rev() {\n", "file_path": "src/swap.rs", "rank": 65, "score": 63117.255826281915 }, { "content": "/// A simple RPN debugger to make testing easier.\n\nstruct XSDebug {\n\n /** scaffold */ xs: XVHLScaffold,\n\n /** vid->char */ vc: HashMap<VID,char>, // used in fmt for branch vars\n\n /** char->vid */ cv: HashMap<char,VID>, // used in run to map iden->vid\n\n /** data stack */ ds: Vec<XID>}\n\n\n\nimpl XSDebug {\n\n pub fn new(vars:&str)->Self {\n\n let mut this = XSDebug {\n\n xs: XVHLScaffold::new(), ds: vec![],\n\n vc:HashMap::new(), cv: HashMap::new() };\n\n for (i, c) in vars.chars().enumerate() { this.var(i, c) }\n\n this }\n\n fn var(&mut self, i:usize, c:char) {\n\n let v = VID::var(i as u32); self.xs.push(v); self.xs.add_ref(XVHL{v, hi:XID_I, lo:XID_O}, 0, 1);\n\n self.name_var(v, c); }\n\n fn vids(&self)->String { self.xs.vids.iter().map(|v| *self.vc.get(v).unwrap()).collect() }\n\n fn name_var(&mut self, v:VID, c:char) { self.vc.insert(v, c); self.cv.insert(c, v); }\n\n fn pop(&mut self)->XID { self.ds.pop().expect(\"stack underflow\") }\n\n fn xid(&mut self, s:&str)->XID { self.run(s); self.pop() }\n", "file_path": "src/swap.rs", "rank": 66, "score": 61467.176892992655 }, { "content": "struct SwapWorker {\n\n /// the upward-moving variable (only used for tracing)\n\n vu:VID,\n\n /// the downward-moving variable (only used for tracing)\n\n vd:VID,\n\n /// row u is the row that moves upward.\n\n ru:XVHLRow,\n\n /// row d is the row that moves downward.\n\n rd:XVHLRow,\n\n /// external reference counts to change\n\n refs: HashMap<XID, i64>,\n\n /// track any nodes we've deleted (so scaffold can recycle them)\n\n dels: Vec<XID>,\n\n /// xids we've recycled ourselves\n\n mods: Vec<XID>,\n\n // reverse map for row u (so we can see if a branch from d points to row u)\n\n ru_map:HashMap<XID,XHiLo>,\n\n // reverse map for row d (to detect when we need a new ref from a umov to an existing node on row d)\n\n rd_map:HashMap<XID,XHiLo>,\n\n /// wip for nodes moving to row u.\n", "file_path": "src/swap.rs", "rank": 67, "score": 61463.2110498548 }, { "content": "fn plan_regroup(vids:&Vec<VID>, groups:&Vec<HashSet<VID>>)->HashMap<VID,usize> {\n\n // vids are arranged from bottom to top\n\n let mut plan = HashMap::new();\n\n\n\n // if only one group, there's nothing to do:\n\n if groups.len() == 1 && groups[0].len() == vids.len() { return plan }\n\n\n\n // TODO: check for complete partition (set(vids)==set(U/groups)\n\n let mut sum = 0; for x in groups.iter() { sum+= x.len() }\n\n assert_eq!(vids.len(), sum, \"vids and groups had different total size\");\n\n\n\n // map each variable to its group number:\n\n let mut dest:HashMap<VID,usize> = HashMap::new();\n\n for (i, g) in groups.iter().enumerate() {\n\n for &v in g { dest.insert(v, i); }}\n\n\n\n // start position of each group:\n\n let mut start:Vec<usize> = groups.iter().scan(0, |a,x| {\n\n *a+=x.len(); Some(*a)}).collect();\n\n start.insert(0, 0);\n", "file_path": "src/swap.rs", "rank": 68, "score": 61338.814925056824 }, { "content": "pub trait Worker<Q,R>:Send+Sync+Default where R:Debug {\n\n\n\n fn new(_wid:WID)->Self { Self::default() }\n\n\n\n /// Generic worker lifecycle 
implementation.\n\n /// Hopefully, you won't need to override this.\n\n /// The worker receives a stream of Option(Q) structs (queries),\n\n /// and returns an R (result) for each one.\n\n fn work_loop(&mut self, wid:WID, rx:&Receiver<Option<QMsg<Q>>>, tx:&Sender<RMsg<R>>) {\n\n // any phase can send a message if it wants:\n\n macro_rules! work_phase {\n\n [$qid:expr, $x:expr] => {\n\n let (qid, r) = ($qid, $x);\n\n // println!(\"\\x1b[32mSENDING WORK_PHASE msg: qid:{:?} for wid: {:?} -> r:{:?}\\x1b[0m\", &qid, wid, &r);\n\n if tx.send(RMsg{ wid, qid, r }).is_err() { self.on_work_send_err($qid) }}}\n\n // and now the actual worker lifecycle:\n\n work_phase![QID::INIT, self.work_init(wid)];\n\n let mut stream = rx.iter();\n\n while let Some(Some(QMsg{qid, q})) = stream.next() {\n\n if let QID::STEP(_) = qid { work_phase![qid.clone(), self.work_step(&qid, q)]; }\n", "file_path": "src/swarm.rs", "rank": 69, "score": 60786.62559452205 }, { "content": "#[test]\n\nfn ast_and(){\n\n let mut b = ASTBase::empty();\n\n let x0 = NID::var(0); let x1 = NID::var(1);\n\n let x01 = b.and(x0,x1);\n\n let x10 = b.and(x1,x0);\n\n assert_eq!(x01, x10, \"expect $0 & $1 == $1 & $0\"); }\n", "file_path": "src/ast.rs", "rank": 70, "score": 59457.05527605703 }, { "content": "def test_ite_scaffold(label, before, after):\n\n RUST_TESTS.append({\n\n 'label': label,\n\n 'setup': '\\n'.join(rust_scaffold_setup(**before)),\n", "file_path": "doc/scaffold_graph_tests.py", "rank": 71, "score": 58161.78129885775 }, { "content": "/// Functions common to all expression databases.\n\npub trait Base {\n\n /// Create a new instance of the `Base`.\n\n fn new()->Self where Self:Sized; // Sized so we can use trait objects.\n\n\n\n /// Return the value of node `n` when `v=1`.\n\n fn when_hi(&mut self, v:VID, n:NID)->NID;\n\n /// Return the value of node `n` when `v=0`.\n\n fn when_lo(&mut self, v:VID, n:NID)->NID;\n\n\n\n /// Return a `NID` representing the logical AND of `x` and `y`.\n\n fn and(&mut self, x:NID, y:NID)->NID;\n\n\n\n /// Return a `NID` representing the logical XOR of `x` and `y`.\n\n fn xor(&mut self, x:NID, y:NID)->NID;\n\n\n\n /// Return a `NID` representing the logical OR of `x` and `y`.\n\n fn or(&mut self, x:NID, y:NID)->NID;\n\n\n\n /// Assign a name to variable `v`, and return its `NID`.\n\n fn def(&mut self, s:String, v:VID)->NID;\n", "file_path": "src/base.rs", "rank": 72, "score": 56227.10925642206 }, { "content": "fn main() {\n\n let mut base = ASTBase::empty();\n\n let args = ::std::env::args().skip(1);\n\n if args.count() == 0 { repl(&mut base) }\n\n else { for arg in ::std::env::args().skip(1) { match arg.as_str() {\n\n // \"norms\" => { gen_norms(); },\n\n \"repl\" => { repl(&mut base); },\n\n _ => repl(&mut base) }}}}\n", "file_path": "examples/shell/bex-shell.rs", "rank": 73, "score": 56169.14431229862 }, { "content": "// TBit : for use outside the Base, by types such as X32, below.\n\npub trait TBit\n\n : Sized + Clone\n\n + std::ops::Not<Output=Self>\n\n + std::ops::BitAnd<Self,Output=Self>\n\n + std::ops::BitXor<Self,Output=Self> { }\n\n\n", "file_path": "src/int.rs", "rank": 74, "score": 54668.32498902366 }, { "content": "/// trait for visualization using GraphViz\n\npub trait GraphViz {\n\n fn write_dot(&self, n:NID, wr: &mut dyn std::fmt::Write);\n\n\n\n /// render to graphviz *.dot file\n\n fn save_dot(&self, n:NID, path:&str) {\n\n let mut s = String::new(); self.write_dot(n, &mut s);\n\n let mut txt = File::create(path).expect(\"couldn't create dot file\");\n\n 
txt.write_all(s.as_bytes()).expect(\"failed to write text to dot file\"); }\n\n\n\n /// call save_dot, use graphviz to convert to svg, and open result in firefox\n\n fn show_named(&self, n:NID, s:&str) {\n\n self.save_dot(n, format!(\"{}.dot\", s).as_str());\n\n let out = Command::new(\"dot\").args(&[\"-Tsvg\",format!(\"{}.dot\",s).as_str()])\n\n .output().expect(\"failed to run 'dot' command\");\n\n let mut svg = File::create(format!(\"{}.svg\",s).as_str()).expect(\"couldn't create svg\");\n\n svg.write_all(&out.stdout).expect(\"couldn't write svg\");\n\n Command::new(\"firefox\").args(&[format!(\"{}.svg\",s).as_str()])\n\n .spawn().expect(\"failed to launch firefox\"); }\n\n\n\n fn show(&self, n:NID) { self.show_named(n, \"+bdd\") }\n", "file_path": "src/base.rs", "rank": 75, "score": 54664.29934179021 }, { "content": "/// protocol used by solve.rs. These allow the base to prepare itself for different steps\n\n/// in a substitution solver.\n\npub trait SubSolver {\n\n /// Initialize the solver by constructing the node corresponding to the final\n\n /// virtual variable in the expression. Return its nid.\n\n fn init(&mut self, top: VID)->NID { NID::from_vid(top) }\n\n /// tell the implementation to perform a substitution step.\n\n /// context NIDs are passed in and out so the implementation\n\n /// itself doesn't have to remember it.\n\n fn subst(&mut self, ctx:NID, vid:VID, ops:&Ops)->NID;\n\n /// fetch a solution, (if one exists)\n\n fn get_one(&self, ctx:NID, nvars:usize)->Option<Reg> {\n\n println!(\"Warning: default SubSolver::get_one() calls get_all(). Override this!\");\n\n self.get_all(ctx, nvars).iter().next().cloned() }\n\n /// fetch all solutions\n\n fn get_all(&self, ctx:NID, nvars:usize)->HashSet<Reg>;\n\n // a status message for the progress report\n\n fn status(&self)->String { \"\".to_string() }\n\n /// Dump the current internal state for inspection by some external process.\n\n /// Generally this means writing to a graphviz (*.dot) file.\n\n /// The step number, status note, and a copy of the arguments to the\n\n /// previous subst(), and the result are provided, in case the dump format\n", "file_path": "src/solve.rs", "rank": 76, "score": 54664.06020951232 }, { "content": "#[derive(PartialEq, Debug)]\n\nenum ROW { U, D }\n\n\n", "file_path": "src/swap.rs", "rank": 77, "score": 54401.95319722312 }, { "content": "#[cfg(test)]\n\nfn check_sub(vids:&str, dst_s:&str, v:char, src_s:&str, goal:&str) {\n\n\n\n let mut dst = XSDebug::new(\"\");\n\n let mut src = XSDebug::new(\"\");\n\n let mut expected_order = \"\";\n\n\n\n // global map of all variables for this test\n\n let mut cv:HashMap<char,usize> = HashMap::new();\n\n let mut phase = 0;\n\n for (i,c) in vids.char_indices() {\n\n if c == '|' { phase += 1 }\n\n else { match phase {\n\n 0 => { cv.insert(c, i); },\n\n 1 => dst.var(*cv.get(&c).expect(\"bad entry in dst vars\"), c),\n\n 2 => src.var(*cv.get(&c).expect(\"bad entry in src vars\"), c),\n\n 3 => {\n\n let mut parts = vids.split('|');\n\n expected_order = (if c=='=' { parts.next() } else { parts.last() }).unwrap();\n\n break },\n\n _ => panic!(\"too many '|' chars encountered!\") }}}\n", "file_path": "src/test-swap.rs", "rank": 78, "score": 54182.21311678314 }, { "content": "fn readln()->String {\n\n let mut buf = String::new();\n\n print!(\"> \");\n\n io::stdout().flush() .expect(\"couldn't flush stdout.\");\n\n io::stdin().read_line(&mut buf) .expect(\"failed to read line.\");\n\n buf}\n\n\n", "file_path": "examples/shell/bex-shell.rs", "rank": 79, "score": 
52104.47642261869 }, { "content": "def add_ite(g,n, v, hi, lo):\n\n if hi!=\"*\": g.edge(n+v, n+hi, style='solid', color=edge_color(v))\n", "file_path": "doc/scaffold_graph_tests.py", "rank": 80, "score": 50017.25242957145 }, { "content": "def ite_scaffold(label, before, after):\n\n test_ite_scaffold(label, before, after)\n", "file_path": "doc/scaffold_graph_tests.py", "rank": 81, "score": 50017.25242957145 }, { "content": "pub trait Progress<S:SubSolver> {\n\n fn on_start(&self, ctx:&DstNid) { println!(\"INITIAL ctx: {:?}\", ctx) }\n\n fn on_step(&mut self, src:&RawASTBase, dest: &mut S, step:usize, millis:u128, oldtop:DstNid, newtop:DstNid);\n\n fn on_done(&self, src:&RawASTBase, dest: &mut S, newtop:DstNid); }\n\n\n\npub struct ProgressReport<'a> {\n\n pub millis: u128,\n\n pub save_dot: bool,\n\n pub save_dest: bool,\n\n pub prefix: &'a str }\n\n\n\n/// these are wrappers so the type system can help us keep the src and dest nids separate\n\n#[derive(Clone, Copy, Debug, PartialEq)] pub struct SrcNid { pub n: NID }\n\n#[derive(Clone, Copy, Debug, PartialEq)] pub struct DstNid { pub n: NID }\n\n\n\n\f\n\nimpl<'a, S:SubSolver> Progress<S> for ProgressReport<'a> {\n\n fn on_step(&mut self, src:&RawASTBase, dest: &mut S, step:usize, millis:u128, oldtop:DstNid, newtop:DstNid) {\n\n self.millis += millis;\n\n let DstNid{ n: new } = newtop;\n", "file_path": "src/solve.rs", "rank": 82, "score": 49694.811857526234 }, { "content": "enum BexErr { NegVar, NegVir }\n\nimpl std::convert::From<BexErr> for PyErr {\n\n fn from(err: BexErr) -> PyErr {\n\n match err {\n\n BexErr::NegVar => PyException::new_err(\"var(i) expects i >= 0\"),\n\n BexErr::NegVir => PyException::new_err(\"vir(i) expects i >= 0\") }}}\n\n\n\n#[pymethods]\n\nimpl PyNID {\n\n #[staticmethod]\n\n fn var(i:i32)->PyResult<Self> { if i<0 { Err(BexErr::NegVar.into()) } else { Ok(PyNID{ nid:NID::var(i as u32)}) }}\n\n #[staticmethod]\n\n fn vir(i:i32)->PyResult<Self> { if i<0 { Err(BexErr::NegVir.into()) } else { Ok(PyNID{ nid:NID::vir(i as u32)}) }}}\n\n\n\n#[pyproto]\n\nimpl PyObjectProtocol for PyNID {\n\n fn __str__(&self) -> String { self.nid.to_string() }\n\n fn __repr__(&self) -> String { format!(\"<NID({:?})>\", self.nid).to_string() }}\n\n\n\n#[pyproto]\n", "file_path": "py/src/lib.rs", "rank": 83, "score": 49356.43381276814 }, { "content": "def draw_ite_scaffold(label, before, after):\n\n d = graphviz.Digraph()\n\n d.attr(label=f\"diagram {len(RUST_TESTS)-1}. 
{label}\")\n\n with d.subgraph(name=\"cluster_before\") as g:\n\n draw_scaffold(g,'b', 'before', 'zdua', **before)\n\n with d.subgraph(name=\"cluster_after\") as g:\n\n g.attr(label='after', pencolor='blue')\n\n draw_scaffold(g,'a', 'after', 'zuda', **after)\n\n # print(d.source)\n", "file_path": "doc/scaffold_graph_tests.py", "rank": 84, "score": 48022.94901614521 }, { "content": "#[allow(clippy::unreadable_literal)]\n\nfn factors()->Vec<(u64,u64)> {\n\n vec![\n\n (429400657, 143197215), (429319798, 143224185), (429192489, 143266669),\n\n (429092163, 143300166), (428929966, 143354354), (428759840, 143411235),\n\n (428350132, 143548405), (427995090, 143667485), (427896131, 143700711),\n\n (427891905, 143702130), (427713143, 143762190), (427662391, 143779251),\n\n (427545970, 143818402), (427435437, 143855593), (426797553, 144070597),\n\n (426655128, 144118690), (426531359, 144160510), (426328023, 144229267),\n\n (426239813, 144259115), (426222854, 144264855), (426079933, 144313246),\n\n (425565809, 144487590), (425100439, 144645765), (424961733, 144692977),\n\n (424871196, 144723810), (424832565, 144736970), (424808137, 144745293),\n\n (424765183, 144759930), (424752566, 144764230), (424604622, 144814670),\n\n (424302378, 144917826), (423948201, 145038894), (423417603, 145220647),\n\n (423238297, 145282170), (423045486, 145348385), (423013691, 145359310),\n\n (422631649, 145490709), (422600194, 145501538), (422138067, 145660823),\n\n (421954460, 145724205), (421628699, 145836795), (421510316, 145877754),\n\n (421470549, 145891518), (421182463, 145991307), (421139876, 146006070),\n\n (420837796, 146110874), (420612792, 146189035), (420288901, 146301694),\n\n (420261695, 146311165), (419962543, 146415387), (419803797, 146470753),\n\n (419699049, 146507309), (419647728, 146525226), (419138301, 146703315),\n", "file_path": "examples/solve/bdd-solve.rs", "rank": 85, "score": 45672.81894608001 }, { "content": "pub trait BInt<T:TBit> : Sized {\n\n /// the number of bits\n\n fn n() -> u32;\n\n fn i(&self) -> T;\n\n fn o(&self) -> T;\n\n fn zero() -> Self;\n\n fn new(&self, u:usize) -> Self;\n\n fn get(&self, i:u32) -> T;\n\n fn set(&mut self, i:u32, v:T);\n\n fn rotate_right(&self, y:u32) -> Self {\n\n let mut res = Self::zero();\n\n for i in 0..Self::n() { res.set(i, self.get((i+y) % Self::n())) }\n\n res}\n\n\n\n // TODO: this doesn't actually wrap! 
(should it??)\n\n fn wrapping_add(&self, y:Self) -> Self {\n\n let mut res = Self::zero(); let mut carry = self.o();\n\n for i in 0..Self::n() {\n\n let (a,b,c) = (self.get(i), y.get(i), carry);\n\n res.set(i, a.clone() ^ b.clone() ^ c.clone());\n", "file_path": "src/int.rs", "rank": 86, "score": 45556.3617099589 }, { "content": "#[pymodule]\n\nfn bex(py:Python, m:&PyModule)->PyResult<()> {\n\n m.add_class::<PyVID>()?;\n\n m.add_class::<PyNID>()?;\n\n m.add_class::<PyAST>()?;\n\n m.setattr(\"O\", PyNID{nid:O}.into_py(py))?;\n\n m.setattr(\"I\", PyNID{nid:I}.into_py(py))?;\n\n\n\n #[pyfn(m, \"var\")] fn var(_py:Python, i:i32)->PyResult<PyNID> { PyNID::var(i) }\n\n #[pyfn(m, \"vir\")] fn vir(_py:Python, i:i32)->PyResult<PyNID> { PyNID::vir(i) }\n\n\n\n Ok(())}\n", "file_path": "py/src/lib.rs", "rank": 87, "score": 42083.087766783676 }, { "content": "// TODO: how can i merge with mj() below?\n\nfn bitmaj<T:TBit>(x:T, y:T, z:T) -> T {\n\n (x.clone()&y.clone()) ^ (x&z.clone()) ^ (y&z) }\n\n\n\n\n\n// BaseBit implementation (u32 references into a Base)\n\npub type BaseRef = Rc<RefCell<ASTBase>>;\n\n\f\n\n// -- basebit --\n\n#[derive(Clone)]\n\npub struct BaseBit {pub base:BaseRef, pub n:NID}\n\n\n\nimpl BaseBit {\n\n /// perform an arbitrary operation using the base\n\n fn op<F:FnMut(&mut ASTBase)->NID>(&self, mut op:F)->BaseBit {\n\n let r = op(&mut self.base.borrow_mut());\n\n BaseBit{base:self.base.clone(), n:r} }}\n\n\n\nimpl std::cmp::PartialEq for BaseBit {\n\n fn eq(&self, other:&Self)->bool {\n\n self.base.as_ptr() == other.base.as_ptr() && self.n==other.n }}\n", "file_path": "src/int.rs", "rank": 88, "score": 40771.953506574544 }, { "content": "fn default_bitmask(_src:&RawASTBase, v:VID) -> u64 { v.bitmask() }\n\n\n", "file_path": "src/solve.rs", "rank": 89, "score": 39860.64662998408 }, { "content": "/// binary io for hashmap<String,NID> and typed vectors\n\nuse std::fs::File;\n\nuse std::io::BufReader;\n\nuse std::io::prelude::*;\n\nuse std::{collections::HashMap, hash::BuildHasher};\n\n\n\n\f\n\n// these functions treat typed slices as raw bytes, making them easier to read/write\n\n// https://stackoverflow.com/questions/28127165/how-to-convert-struct-to-u8\n\n\n\n// adapted from the above, to deal with a slice:\n\nunsafe fn slice_to_u8s<T: Sized>(p: &[T]) -> &[u8] {\n\n ::std::slice::from_raw_parts(\n\n (p.as_ptr()) as *const u8,\n\n ::std::mem::size_of::<T>() * p.len()) }\n\n\n\nunsafe fn u8s_to_slice<T: Sized>(p: &[u8]) -> &[T] {\n\n ::std::slice::from_raw_parts(\n\n (p.as_ptr()) as *const T,\n\n p.len() / ::std::mem::size_of::<T>()) }\n\n\n\n\f\n\n/// write the vector, as bytes, to a file at the specified path.\n", "file_path": "src/io.rs", "rank": 90, "score": 35721.171373986814 }, { "content": "///! 
(Var, Hi, Lo) triples\n\nuse std::collections::BinaryHeap;\n\nuse std::collections::HashSet;\n\nuse nid::{NID, IDX};\n\nuse vid::VID;\n\n\n\npub type VHLHashMap<K,V> = hashbrown::hash_map::HashMap<K,V>;\n\n\n\n\f\n\n/// Simple Hi/Lo pair stored internally when representing nodes.\n\n/// All nodes with the same branching variable go in the same array, so there's\n\n/// no point duplicating it.\n\n#[derive(PartialEq, Eq, Hash, Clone, Copy, Debug, Serialize, Deserialize)]\n\npub struct HiLo {pub hi:NID, pub lo:NID}\n\n\n\nimpl HiLo {\n\n /// constructor\n\n pub fn new(hi:NID, lo:NID)->HiLo { HiLo { hi, lo } }\n\n\n\n /// apply the not() operator to both branches\n", "file_path": "src/vhl.rs", "rank": 91, "score": 35716.102562863525 }, { "content": " #[inline] pub fn invert(self)-> HiLo { HiLo{ hi: !self.hi, lo: !self.lo }}\n\n\n\n pub fn get_part(&self, which:HiLoPart)->NID {\n\n if which == HiLoPart::HiPart { self.hi } else { self.lo }} }\n\n\n\nimpl std::ops::Not for HiLo {\n\n type Output = HiLo;\n\n fn not(self)-> HiLo {HiLo { hi:!self.hi, lo: !self.lo }}}\n\n\n\n\f\n\n/// VHL (for when we really do need the variable)\n\n#[derive(PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, Debug, Serialize, Deserialize)]\n\npub struct VHL {pub v:VID, pub hi:NID, pub lo:NID}\n\n\n\nimpl VHL {\n\n pub fn new(v: VID, hi:NID, lo:NID)->VHL { VHL{ v, hi, lo } }\n\n pub fn hilo(&self)->HiLo { HiLo{ hi:self.hi, lo: self.lo } }}\n\n\n\nimpl std::ops::Not for VHL {\n\n type Output = VHL;\n", "file_path": "src/vhl.rs", "rank": 92, "score": 35712.60813677849 }, { "content": " fn not(self)->VHL { VHL { v:self.v, hi:!self.hi, lo: !self.lo }}}\n\n\n\n\f\n\n/// Enum for referring to the parts of a HiLo (for WIP).\n\n#[derive(PartialEq,Debug,Copy,Clone)]\n\npub enum HiLoPart { HiPart, LoPart }\n\n\n\n/// a deconstructed VHL (for WIP)\n\n#[derive(PartialEq,Debug,Copy,Clone)]\n\npub struct VHLParts{\n\n pub v:VID,\n\n pub hi:Option<NID>,\n\n pub lo:Option<NID>,\n\n pub invert:bool}\n\n\n\nimpl VHLParts {\n\n pub fn hilo(&self)->Option<HiLo> {\n\n if let (Some(hi), Some(lo)) = (self.hi, self.lo) { Some(HiLo{hi,lo}) }\n\n else { None }}}\n\n\n\n\f\n", "file_path": "src/vhl.rs", "rank": 93, "score": 35708.64148487781 }, { "content": "// and test_nano_bdd start taking minutes to run.\n\n// I can't currently think of a reason vindex[(vX,hilo)] shouldn't behave\n\n// exactly the same as vindex[(vY,hilo)] and thus == index[hilo], but I'm\n\n// obviously missing something. 
:/\n\n// It could be a bug in replace(), but that's a simple function.\n\n// More likely, it's something to do with the recent/stable dichotomy in BddSwarm,\n\n// or simply the fact that each worker has its own recent state and they're getting\n\n// out of sync.\n\n\f\n\n\n\nimpl HiLoCache {\n\n\n\n pub fn new()->Self {\n\n HiLoCache {\n\n hilos: vec![],\n\n index: VHLHashMap::default(),\n\n vindex: VHLHashMap::default()}}\n\n\n\n // TODO: ->Option<HiLo>, and then impl HiLoBase\n\n #[inline] pub fn get_hilo(&self, n:NID)->HiLo {\n", "file_path": "src/vhl.rs", "rank": 94, "score": 35707.59463091512 }, { "content": " assert!(!n.is_lit());\n\n let res = self.hilos[n.idx()];\n\n if n.is_inv() { res.invert() } else { res }}\n\n\n\n #[inline] pub fn get_node(&self, v:VID, hl0:HiLo)-> Option<NID> {\n\n let inv = hl0.lo.is_inv();\n\n let hl1 = if inv { hl0.invert() } else { hl0 };\n\n let to_nid = |&ix| NID::from_vid_idx(v, ix);\n\n let res = self.vindex.get(&(v, hl1)).map(to_nid);\n\n // let res = if res.is_none() { self.index.get(&hl1).map(to_nid) } else { res };\n\n if inv { res.map(|nid| !nid ) } else { res }}\n\n\n\n #[inline] pub fn insert(&mut self, v:VID, hl0:HiLo)->NID {\n\n let inv = hl0.lo.is_inv();\n\n let hilo = if inv { hl0.invert() } else { hl0 };\n\n let ix:IDX =\n\n if let Some(&ix) = self.index.get(&hilo) { ix }\n\n else {\n\n let ix = self.hilos.len() as IDX;\n\n self.hilos.push(hilo);\n\n self.index.insert(hilo, ix);\n\n self.vindex.insert((v,hilo), ix);\n\n ix };\n\n let res = NID::from_vid_idx(v, ix);\n\n if inv { !res } else { res } }}\n\n\n\nimpl Default for HiLoCache {\n\n fn default() -> Self { Self::new() }}\n", "file_path": "src/vhl.rs", "rank": 95, "score": 35698.52071205617 }, { "content": "\n\n pub fn send(&mut self, wid:WID, q:Q) {\n\n let qid = QID::STEP(self.nq); self.nq+=1;\n\n if self.get_worker(wid).send(Some(QMsg{ qid, q })).is_err() {\n\n panic!(\"couldn't send message to worker {:?}\", wid) }}\n\n\n\n /// pass in the swarm dispatch loop\n\n pub fn run<F,V>(&mut self, mut on_msg:F)->Option<V> where V:Debug, F:FnMut(WID, &QID, Option<R>)->SwarmCmd<Q,V> {\n\n let mut res = None;\n\n loop {\n\n let RMsg { wid, qid, r } = self.rx.recv().expect(\"failed to read RMsg from queue!\");\n\n // println!(\"Received RMSG:: wid:{:?}, qid:{:?}, r:{:?}\", wid, qid, &r );\n\n let cmd = on_msg(wid, &qid, r);\n\n // println!(\"-> cmd: {:?}\", cmd);\n\n match cmd {\n\n SwarmCmd::Pass => {},\n\n SwarmCmd::Halt => break,\n\n SwarmCmd::Kill(w) => { self.kill(w); if self.whs.is_empty() { break }},\n\n SwarmCmd::Send(q) => self.send(wid, q),\n\n SwarmCmd::Batch(wqs) => for (wid, q) in wqs { self.send(wid, q) },\n\n SwarmCmd::Panic(msg) => panic!(\"{}\", msg),\n\n SwarmCmd::Return(v) => { res = Some(v); break } }}\n\n while let Some(&w) = self.whs.keys().take(1).next() { self.kill(w); }\n\n while !self.threads.is_empty() { self.threads.pop().unwrap().join().unwrap() }\n\n res}}\n", "file_path": "src/swarm.rs", "rank": 96, "score": 35482.11567380967 }, { "content": " /// phantom reference to the Worker class. In practice, the workers are owned\n\n /// by their threads, so we don't actually touch them directly.\n\n _w: PhantomData<W>,\n\n /// query queue. 
query will be given to next available worker\n\n qq: VecDeque<(QID, Q)>,\n\n /// handles to the actual threads\n\n threads: Vec<thread::JoinHandle<()>> }\n\n\n\nimpl<Q,R,W> Swarm<Q,R,W> where Q:'static+Send+Debug, R:'static+Send+Debug, W:Default+Worker<Q, R> {\n\n\n\n pub fn new(num_workers:usize)->Self {\n\n let (me, rx) = channel();\n\n let n = if num_workers==0 { num_cpus::get() } else { num_workers };\n\n let mut this = Self { nq: 0, me, rx, whs:HashMap::new(), nw:0, qq:VecDeque::new(), _w:PhantomData, threads:vec![]};\n\n for _ in 0..n { this.spawn(); }\n\n this }\n\n\n\n fn spawn(&mut self)->WID {\n\n let wid = WID{ n: self.nw }; self.nw+=1;\n\n let me2 = self.me.clone();\n", "file_path": "src/swarm.rs", "rank": 97, "score": 35477.47150721613 }, { "content": "use std::{collections::VecDeque, marker::PhantomData, sync::mpsc::{Sender, Receiver, channel}, thread};\n\nuse std::fmt::Debug;\n\nuse hashbrown::HashMap;\n\n\n\n/// query id\n\n#[derive(Debug, Clone)]\n\npub enum QID { INIT, STEP(usize), DONE }\n\n\n\npub struct QMsg<Q> { qid:QID, q: Q }\n\n#[derive(Debug)]\n\npub struct RMsg<R> { wid: WID, qid:QID, r:Option<R> }\n\n\n\n/// worker id\n\n#[derive(Debug,PartialEq,Eq,Hash,Clone,Copy)]\n\npub struct WID { n:usize }\n\n\n", "file_path": "src/swarm.rs", "rank": 98, "score": 35475.755615696755 }, { "content": " else { panic!(\"Worker {:?} got unexpected qid instead of STEP: {:?}\", wid, qid)}}\n\n work_phase![QID::DONE, self.work_done()]; }\n\n\n\n /// What to do if a message send fails. By default, just print to stdout.\n\n fn on_work_send_err(&mut self, qid:QID) {\n\n println!(\"failed to send response for qid:{:?}\", qid); }\n\n\n\n /// Override this to implement your worker's query-handling logic.\n\n fn work_step(&mut self, _qid:&QID, _q:Q)->Option<R> { None }\n\n\n\n /// Override this if you need to send a message to the swarm before the worker starts.\n\n fn work_init(&mut self, _wid:WID)->Option<R> { None }\n\n\n\n /// Override this if you need to send a message to the swarm after the work loop finishes.\n\n fn work_done(&mut self)->Option<R> { None }}\n\n\n\n#[derive(Debug)]\n\npub enum SwarmCmd<Q:Debug,V:Debug> {\n\n Pass,\n\n Halt,\n", "file_path": "src/swarm.rs", "rank": 99, "score": 35474.64686057958 } ]
Rust
crates/tools/src/setup.rs
RetricSu/godwoken
88df30ffad824b3b3b2980d67ece79ca31ab0b47
use crate::deploy_genesis::deploy_genesis;
use crate::deploy_scripts::deploy_scripts;
use crate::generate_config::generate_config;
use crate::prepare_scripts::{self, prepare_scripts, ScriptsBuildMode};
use crate::utils;
use ckb_types::{
    core::ScriptHashType, packed as ckb_packed, prelude::Builder as CKBBuilder,
    prelude::Pack as CKBPack, prelude::Unpack as CKBUnpack,
};
use gw_types::prelude::Entity as GwEntity;
use rand::Rng;
use serde::Serialize;
use serde_json::json;
use std::fs;
use std::{
    collections::HashMap,
    path::{Path, PathBuf},
    thread, time,
};

pub const TRANSFER_CAPACITY: &str = "200000";
const MIN_WALLET_CAPACITY: f64 = 100000.0f64;

#[derive(Debug)]
pub struct NodeWalletInfo {
    pub testnet_address: String,
    pub lock_hash: String,
    pub lock_arg: String,
    pub block_assembler_code_hash: String,
}

#[allow(clippy::too_many_arguments)]
pub fn setup(
    ckb_rpc_url: &str,
    indexer_url: &str,
    mode: ScriptsBuildMode,
    scripts_path: &Path,
    privkey_path: &Path,
    nodes_count: u8,
    server_url: &str,
    output_dir: &Path,
) {
    let prepare_scripts_result = utils::make_path(output_dir, vec!["scripts-deploy.json"]);
    prepare_scripts(
        mode,
        scripts_path,
        Path::new(prepare_scripts::REPOS_DIR_PATH),
        Path::new(prepare_scripts::SCRIPTS_DIR_PATH),
        &prepare_scripts_result,
    )
    .expect("prepare scripts");

    let scripts_deployment_result =
        utils::make_path(output_dir, vec!["scripts-deploy-result.json"]);
    deploy_scripts(
        privkey_path,
        ckb_rpc_url,
        &prepare_scripts_result,
        &scripts_deployment_result,
    )
    .expect("deploy scripts");

    let poa_config_path = utils::make_path(output_dir, vec!["poa-config.json"]);
    let rollup_config_path = utils::make_path(output_dir, vec!["rollup-config.json"]);
    let capacity = TRANSFER_CAPACITY.parse().expect("get capacity");
    prepare_nodes_configs(
        privkey_path,
        capacity,
        nodes_count,
        output_dir,
        &poa_config_path,
        &rollup_config_path,
    );

    let genesis_deploy_result = utils::make_path(output_dir, vec!["genesis-deploy-result.json"]);
    deploy_genesis(
        privkey_path,
        ckb_rpc_url,
        &scripts_deployment_result,
        &rollup_config_path,
        &poa_config_path,
        None,
        &genesis_deploy_result,
        false,
    )
    .expect("deploy genesis");

    (0..nodes_count).for_each(|index| {
        let node_name = format!("node{}", index + 1);
        let privkey_path = utils::make_path(output_dir, vec![&node_name, &"pk".to_owned()]);
        let output_file_path =
            utils::make_path(output_dir, vec![node_name, "config.toml".to_owned()]);
        generate_config(
            &genesis_deploy_result,
            &scripts_deployment_result,
            privkey_path.as_ref(),
            ckb_rpc_url.to_owned(),
            indexer_url.to_owned(),
            output_file_path.as_ref(),
            None,
            &prepare_scripts_result,
            server_url.to_string(),
        )
        .expect("generate_config");
    });

    log::info!("Finish");
}

fn prepare_nodes_configs(
    payer_privkey: &Path,
    capacity: u32,
    nodes_count: u8,
    output_dir: &Path,
    poa_config_path: &Path,
    rollup_config_path: &Path,
) {
    let nodes_privkeys = prepare_privkeys(output_dir, nodes_count);
    let nodes_info = check_wallets_info(nodes_privkeys, capacity, payer_privkey);
    generate_poa_config(&nodes_info, poa_config_path);
    generate_rollup_config(rollup_config_path);
}

fn prepare_privkeys(output_dir: &Path, nodes_count: u8) -> HashMap<String, PathBuf> {
    (0..nodes_count)
        .map(|index| {
            let node_name = format!("node{}", (index + 1).to_string());
            let node_dir = utils::make_path(output_dir, vec![&node_name]);
            fs::create_dir_all(&node_dir).expect("create node dir");
            let privkey_file = utils::make_path(&node_dir, vec!["pk"]);
            let privkey = fs::read_to_string(&privkey_file)
                .map(|s| s.trim().into())
                .unwrap_or_else(|_| Vec::new());
            if !privkey.starts_with(b"0x")
                || privkey.len() != 66
                || hex::decode(&privkey[2..]).is_err()
            {
                log::info!("Generate privkey file...");
                generate_privkey_file(&privkey_file);
            }
            (node_name, privkey_file)
        })
        .collect()
}

fn check_wallets_info(
    nodes_privkeys: HashMap<String, PathBuf>,
    capacity: u32,
    payer_privkey_path: &Path,
) -> HashMap<String, NodeWalletInfo> {
    nodes_privkeys
        .into_iter()
        .map(|(node, privkey)| {
            let wallet_info = get_wallet_info(&privkey);
            let mut current_capacity = query_wallet_capacity(&wallet_info.testnet_address);
            log::info!("{}'s wallet capacity: {}", node, current_capacity);
            if current_capacity < MIN_WALLET_CAPACITY {
                log::info!("Start to transfer ckb, and it will take 30 seconds...");
                transfer_ckb(&wallet_info, payer_privkey_path, capacity);
                thread::sleep(time::Duration::from_secs(30));
                current_capacity = query_wallet_capacity(&wallet_info.testnet_address);
                assert!(
                    current_capacity >= MIN_WALLET_CAPACITY,
                    "wallet haven't received ckb, please try again"
                );
                log::info!("{}'s wallet capacity: {}", node, current_capacity);
            }
            (node, wallet_info)
        })
        .collect()
}

fn generate_poa_config(nodes_info: &HashMap<String, NodeWalletInfo>, poa_config_path: &Path) {
    let identities: Vec<&str> = nodes_info
        .iter()
        .map(|(_, node)| node.lock_hash.as_str())
        .collect();
    let poa_config = json!({
        "poa_setup" : {
            "identity_size": 32,
            "round_interval_uses_seconds": true,
            "identities": identities,
            "aggregator_change_threshold": identities.len(),
            "round_intervals": 24,
            "subblocks_per_round": 1
        }
    });
    generate_json_file(&poa_config, poa_config_path);
}

fn generate_rollup_config(rollup_config_path: &Path) {
    let burn_lock_script = ckb_packed::Script::new_builder()
        .code_hash(CKBPack::pack(&[0u8; 32]))
        .hash_type(ScriptHashType::Data.into())
        .build();
    let burn_lock_script_hash: [u8; 32] = burn_lock_script.calc_script_hash().unpack();
    let rollup_config = json!({
        "l1_sudt_script_type_hash": "0x0000000000000000000000000000000000000000000000000000000000000000",
        "burn_lock_hash": format!("0x{}", hex::encode(burn_lock_script_hash)),
        "required_staking_capacity": 10000000000u64,
        "challenge_maturity_blocks": 5,
        "finality_blocks": 20,
        "reward_burn_rate": 50,
        "compatible_chain_id": 0,
        "allowed_eoa_type_hashes": []
    });
    generate_json_file(&rollup_config, rollup_config_path);
    log::info!("Finish");
}

fn generate_privkey_file(privkey_file_path: &Path) {
    let key = rand::thread_rng().gen::<[u8; 32]>();
    let privkey = format!("0x{}", hex::encode(key));
    fs::write(&privkey_file_path, &privkey).expect("create pk file");
}

pub fn get_wallet_info(privkey_path: &Path) -> NodeWalletInfo {
    let (stdout, stderr) = utils::run_in_output_mode(
        "ckb-cli",
        vec![
            "util",
            "key-info",
            "--privkey-path",
            &privkey_path.display().to_string(),
        ],
    )
    .expect("get key info");
    NodeWalletInfo {
        testnet_address: look_after_in_line(&stdout, "testnet:"),
        lock_hash: look_after_in_line(&stdout, "lock_hash:"),
        lock_arg: look_after_in_line(&stdout, "lock_arg:"),
        block_assembler_code_hash: look_after_in_line(&stderr, "code_hash ="),
    }
}

fn query_wallet_capacity(address: &str) -> f64 {
    let (stdout, _) = utils::run_in_output_mode(
        "ckb-cli",
        vec!["wallet", "get-capacity", "--address", address],
    )
    .expect("query wallet capacity");
    look_after_in_line(&stdout, "total:")
        .split(' ')
        .collect::<Vec<&str>>()[0]
        .parse::<f64>()
        .expect("parse capacity")
}

fn transfer_ckb(node_wallet: &NodeWalletInfo, payer_privkey_path: &Path, capacity: u32) {
    utils::run(
        "ckb-cli",
        vec![
            "wallet",
            "transfer",
            "--to-address",
            &node_wallet.testnet_address,
            "--capacity",
            &capacity.to_string(),
            "--tx-fee",
            "1",
            "--privkey-path",
            &payer_privkey_path.display().to_string(),
        ],
    )
    .expect("transfer ckb");
}

fn look_after_in_line(text: &str, key: &str) -> String {
    text.split(key).collect::<Vec<&str>>()[1]
        .split('\n')
        .collect::<Vec<&str>>()[0]
        .trim_matches(&['"', ' '][..])
        .to_owned()
}

fn generate_json_file<T>(value: &T, json_file_path: &Path)
where
    T: Serialize,
{
    let output_content = serde_json::to_string_pretty(value).expect("serde json to string pretty");
    let output_dir = json_file_path.parent().expect("get output dir");
    fs::create_dir_all(&output_dir).expect("create output dir");
    fs::write(json_file_path, output_content.as_bytes()).expect("generate json file");
}
use crate::deploy_genesis::deploy_genesis;
use crate::deploy_scripts::deploy_scripts;
use crate::generate_config::generate_config;
use crate::prepare_scripts::{self, prepare_scripts, ScriptsBuildMode};
use crate::utils;
use ckb_types::{
    core::ScriptHashType, packed as ckb_packed, prelude::Builder as CKBBuilder,
    prelude::Pack as CKBPack, prelude::Unpack as CKBUnpack,
};
use gw_types::prelude::Entity as GwEntity;
use rand::Rng;
use serde::Serialize;
use serde_json::json;
use std::fs;
use std::{
    collections::HashMap,
    path::{Path, PathBuf},
    thread, time,
};

pub const TRANSFER_CAPACITY: &str = "200000";
const MIN_WALLET_CAPACITY: f64 = 100000.0f64;

#[derive(Debug)]
pub struct NodeWalletInfo {
    pub testnet_address: String,
    pub lock_hash: String,
    pub lock_arg: String,
    pub block_assembler_code_hash: String,
}

#[allow(clippy::too_many_arguments)]
pub fn setup(
    ckb_rpc_url: &str,
    indexer_url: &str,
    mode: ScriptsBuildMode,
    scripts_path: &Path,
    privkey_path: &Path,
    nodes_count: u8,
    server_url: &str,
    output_dir: &Path,
) {
    let prepare_scripts_result = utils::make_path(output_dir, vec!["scripts-deploy.json"]);
    prepare_scripts(
        mode,
        scripts_path,
        Path::new(prepare_scripts::REPOS_DIR_PATH),
        Path::new(prepare_scripts::SCRIPTS_DIR_PATH),
        &prepare_scripts_result,
    )
    .expect("prepare scripts");

    let scripts_deployment_result =
        utils::make_path(output_dir, vec!["scripts-deploy-result.json"]);
    deploy_scripts(
        privkey_path,
        ckb_rpc_url,
        &prepare_scripts_result,
        &scripts_deployment_result,
    )
    .expect("deploy scripts");

    let poa_config_path = utils::make_path(output_dir, vec!["poa-config.json"]);
    let rollup_config_path = utils::make_path(output_dir, vec!["rollup-config.json"]);
    let capacity = TRANSFER_CAPACITY.parse().expect("get capacity");
    prepare_nodes_configs(
        privkey_path,
        capacity,
        nodes_count,
        output_dir,
        &poa_config_path,
        &rollup_config_path,
    );

    let genesis_deploy_result = utils::make_path(output_dir, vec!["genesis-deploy-result.json"]);
    deploy_genesis(
        privkey_path,
        ckb_rpc_url,
        &scripts_deployment_result,
        &rollup_config_path,
        &poa_config_path,
        None,
        &genesis_deploy_result,
        false,
    )
    .expect("deploy genesis");

    (0..nodes_count).for_each(|index| {
        let node_name = format!("node{}", index + 1);
        let privkey_path = utils::make_path(output_dir, vec![&node_name, &"pk".to_owned()]);
        let output_file_path =
            utils::make_path(output_dir, vec![node_name, "config.toml".to_owned()]);
        generate_config(
            &genesis_deploy_result,
            &scripts_deployment_result,
            privkey_path.as_ref(),
            ckb_rpc_url.to_owned(),
            indexer_url.to_owned(),
            output_file_path.as_ref(),
            None,
            &prepare_scripts_result,
            server_url.to_string(),
        )
        .expect("generate_config");
    });

    log::info!("Finish");
}

fn prepare_nodes_configs(
    payer_privkey: &Path,
    capacity: u32,
    nodes_count: u8,
    output_dir: &Path,
    poa_config_path: &Path,
    rollup_config_path: &Path,
) {
    let nodes_privkeys = prepare_privkeys(output_dir, nodes_count);
    let nodes_info = check_wallets_info(nodes_privkeys, capacity, payer_privkey);
    generate_poa_config(&nodes_info, poa_config_path);
    generate_rollup_config(rollup_config_path);
}

fn prepare_privkeys(output_dir: &Path, nodes_count: u8) -> HashMap<String, PathBuf> {
    (0..nodes_count)
        .map(|index| {
            let node_name = format!("node{}", (index + 1).to_string());
            let node_dir = utils::make_path(output_dir, vec![&node_name]);
            fs::create_dir_all(&node_dir).expect("create node dir");
            let privkey_file = u
)
}

fn transfer_ckb(node_wallet: &NodeWalletInfo, payer_privkey_path: &Path, capacity: u32) {
    utils::run(
        "ckb-cli",
        vec![
            "wallet",
            "transfer",
            "--to-address",
            &node_wallet.testnet_address,
            "--capacity",
            &capacity.to_string(),
            "--tx-fee",
            "1",
            "--privkey-path",
            &payer_privkey_path.display().to_string(),
        ],
    )
    .expect("transfer ckb");
}

fn look_after_in_line(text: &str, key: &str) -> String {
    text.split(key).collect::<Vec<&str>>()[1]
        .split('\n')
        .collect::<Vec<&str>>()[0]
        .trim_matches(&['"', ' '][..])
        .to_owned()
}

fn generate_json_file<T>(value: &T, json_file_path: &Path)
where
    T: Serialize,
{
    let output_content = serde_json::to_string_pretty(value).expect("serde json to string pretty");
    let output_dir = json_file_path.parent().expect("get output dir");
    fs::create_dir_all(&output_dir).expect("create output dir");
    fs::write(json_file_path, output_content.as_bytes()).expect("generate json file");
}
tils::make_path(&node_dir, vec!["pk"]);
            let privkey = fs::read_to_string(&privkey_file)
                .map(|s| s.trim().into())
                .unwrap_or_else(|_| Vec::new());
            if !privkey.starts_with(b"0x")
                || privkey.len() != 66
                || hex::decode(&privkey[2..]).is_err()
            {
                log::info!("Generate privkey file...");
                generate_privkey_file(&privkey_file);
            }
            (node_name, privkey_file)
        })
        .collect()
}

fn check_wallets_info(
    nodes_privkeys: HashMap<String, PathBuf>,
    capacity: u32,
    payer_privkey_path: &Path,
) -> HashMap<String, NodeWalletInfo> {
    nodes_privkeys
        .into_iter()
        .map(|(node, privkey)| {
            let wallet_info = get_wallet_info(&privkey);
            let mut current_capacity = query_wallet_capacity(&wallet_info.testnet_address);
            log::info!("{}'s wallet capacity: {}", node, current_capacity);
            if current_capacity < MIN_WALLET_CAPACITY {
                log::info!("Start to transfer ckb, and it will take 30 seconds...");
                transfer_ckb(&wallet_info, payer_privkey_path, capacity);
                thread::sleep(time::Duration::from_secs(30));
                current_capacity = query_wallet_capacity(&wallet_info.testnet_address);
                assert!(
                    current_capacity >= MIN_WALLET_CAPACITY,
                    "wallet haven't received ckb, please try again"
                );
                log::info!("{}'s wallet capacity: {}", node, current_capacity);
            }
            (node, wallet_info)
        })
        .collect()
}

fn generate_poa_config(nodes_info: &HashMap<String, NodeWalletInfo>, poa_config_path: &Path) {
    let identities: Vec<&str> = nodes_info
        .iter()
        .map(|(_, node)| node.lock_hash.as_str())
        .collect();
    let poa_config = json!({
        "poa_setup" : {
            "identity_size": 32,
            "round_interval_uses_seconds": true,
            "identities": identities,
            "aggregator_change_threshold": identities.len(),
            "round_intervals": 24,
            "subblocks_per_round": 1
        }
    });
    generate_json_file(&poa_config, poa_config_path);
}

fn generate_rollup_config(rollup_config_path: &Path) {
    let burn_lock_script = ckb_packed::Script::new_builder()
        .code_hash(CKBPack::pack(&[0u8; 32]))
        .hash_type(ScriptHashType::Data.into())
        .build();
    let burn_lock_script_hash: [u8; 32] = burn_lock_script.calc_script_hash().unpack();
    let rollup_config = json!({
        "l1_sudt_script_type_hash": "0x0000000000000000000000000000000000000000000000000000000000000000",
        "burn_lock_hash": format!("0x{}", hex::encode(burn_lock_script_hash)),
        "required_staking_capacity": 10000000000u64,
        "challenge_maturity_blocks": 5,
        "finality_blocks": 20,
        "reward_burn_rate": 50,
        "compatible_chain_id": 0,
        "allowed_eoa_type_hashes": []
    });
    generate_json_file(&rollup_config, rollup_config_path);
    log::info!("Finish");
}

fn generate_privkey_file(privkey_file_path: &Path) {
    let key = rand::thread_rng().gen::<[u8; 32]>();
    let privkey = format!("0x{}", hex::encode(key));
    fs::write(&privkey_file_path, &privkey).expect("create pk file");
}

pub fn get_wallet_info(privkey_path: &Path) -> NodeWalletInfo {
    let (stdout, stderr) = utils::run_in_output_mode(
        "ckb-cli",
        vec![
            "util",
            "key-info",
            "--privkey-path",
            &privkey_path.display().to_string(),
        ],
    )
    .expect("get key info");
    NodeWalletInfo {
        testnet_address: look_after_in_line(&stdout, "testnet:"),
        lock_hash: look_after_in_line(&stdout, "lock_hash:"),
        lock_arg: look_after_in_line(&stdout, "lock_arg:"),
        block_assembler_code_hash: look_after_in_line(&stderr, "code_hash ="),
    }
}

fn query_wallet_capacity(address: &str) -> f64 {
    let (stdout, _) = utils::run_in_output_mode(
        "ckb-cli",
        vec!["wallet", "get-capacity", "--address", address],
    )
    .expect("query wallet capacity");
    look_after_in_line(&stdout, "total:")
        .split(' ')
        .collect::<Vec<&str>>()[0]
        .parse::<f64>()
        .expect("parse capacity"
random
[ { "content": "pub fn hex(raw: &[u8]) -> Result<String> {\n\n Ok(format!(\"0x{}\", faster_hex::hex_string(raw)?))\n\n}\n", "file_path": "crates/web3-indexer/src/helper.rs", "rank": 4, "score": 274876.3472011481 }, { "content": "fn prepare_scripts_in_copy_mode(prebuild_image: &PathBuf, scripts_dir: &Path) {\n\n log::info!(\"Copy scritps from prebuild image...\");\n\n let dummy = \"dummy\";\n\n utils::run(\n\n \"docker\",\n\n vec![\n\n \"create\",\n\n \"-ti\",\n\n \"--name\",\n\n dummy,\n\n &prebuild_image.display().to_string(),\n\n \"bash\",\n\n ],\n\n )\n\n .expect(\"docker create container\");\n\n let src_path_container = format!(\"{}:/scripts/.\", dummy);\n\n utils::run(\n\n \"docker\",\n\n vec![\n\n \"cp\",\n\n &src_path_container,\n\n &scripts_dir.display().to_string(),\n\n ],\n\n )\n\n .expect(\"docker cp files\");\n\n utils::run(\"docker\", vec![\"rm\", \"-f\", dummy]).expect(\"docker rm container\");\n\n}\n\n\n", "file_path": "crates/tools/src/prepare_scripts.rs", "rank": 6, "score": 268912.0181115135 }, { "content": "pub fn account_id_to_eth_address(account_script_hash: H256, id: u32) -> [u8; 20] {\n\n let mut data = [0u8; 20];\n\n data[..16].copy_from_slice(&account_script_hash.as_slice()[0..16]);\n\n data[16..20].copy_from_slice(&id.to_le_bytes()[..]);\n\n data\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub enum GwLog {\n\n SudtTransfer {\n\n sudt_id: u32,\n\n from_address: [u8; 20],\n\n to_address: [u8; 20],\n\n amount: u128,\n\n },\n\n SudtPayFee {\n\n sudt_id: u32,\n\n from_address: [u8; 20],\n\n block_producer_address: [u8; 20],\n\n amount: u128,\n", "file_path": "crates/web3-indexer/src/helper.rs", "rank": 7, "score": 266873.52961551433 }, { "content": "pub fn run_in_output_mode<I, S>(bin: &str, args: I) -> Result<(String, String), String>\n\nwhere\n\n I: IntoIterator<Item = S> + std::fmt::Debug,\n\n S: AsRef<OsStr>,\n\n{\n\n log::debug!(\"[Execute]: {} {:?}\", bin, args);\n\n let init_output = Command::new(bin.to_owned())\n\n .env(\"RUST_BACKTRACE\", \"full\")\n\n .args(args)\n\n .output()\n\n .expect(\"Run command failed\");\n\n\n\n if !init_output.status.success() {\n\n Err(format!(\n\n \"{}\",\n\n String::from_utf8_lossy(init_output.stderr.as_slice())\n\n ))\n\n } else {\n\n let stdout = String::from_utf8_lossy(init_output.stdout.as_slice()).to_string();\n\n let stderr = String::from_utf8_lossy(init_output.stderr.as_slice()).to_string();\n\n log::debug!(\"stdout: {}\", stdout);\n\n log::debug!(\"stderr: {}\", stderr);\n\n Ok((stdout, stderr))\n\n }\n\n}\n\n\n", "file_path": "crates/tools/src/utils.rs", "rank": 8, "score": 260231.7873898583 }, { "content": "pub fn get_balance(godwoken_rpc_url: &str, account: &str, sudt_id: u32) -> Result<(), String> {\n\n let mut godwoken_rpc_client = GodwokenRpcClient::new(godwoken_rpc_url);\n\n let short_address = parse_account_short_address(&mut godwoken_rpc_client, account)?;\n\n let addr = JsonBytes::from_bytes(short_address);\n\n let balance = godwoken_rpc_client.get_balance(addr, sudt_id)?;\n\n log::info!(\"Balance: {}\", balance);\n\n\n\n Ok(())\n\n}\n", "file_path": "crates/tools/src/get_balance.rs", "rank": 9, "score": 259743.5535452877 }, { "content": "pub fn read_privkey(privkey_path: &Path) -> Result<H256, String> {\n\n let privkey_string = fs::read_to_string(privkey_path)\n\n .map_err(|err| err.to_string())?\n\n .split_whitespace()\n\n .next()\n\n .map(ToOwned::to_owned)\n\n .ok_or_else(|| \"Privkey file is empty\".to_string())?;\n\n let privkey = H256::from_str(&privkey_string.trim()[2..]).map_err(|err| err.to_string())?;\n\n 
Ok(privkey)\n\n}\n", "file_path": "crates/tools/src/account.rs", "rank": 10, "score": 257902.77413810216 }, { "content": "fn build_godwoken_scripts(repos_dir: &Path, repo_name: &str) {\n\n let repo_dir = utils::make_path(repos_dir, vec![repo_name])\n\n .display()\n\n .to_string();\n\n let target_dir = format!(\"{}/c\", repo_dir);\n\n utils::run(\"make\", vec![\"-C\", &target_dir]).expect(\"run make\");\n\n utils::run_in_dir(\n\n \"capsule\",\n\n vec![\"build\", \"--release\", \"--debug-output\"],\n\n &repo_dir,\n\n )\n\n .expect(\"run capsule build\");\n\n}\n\n\n", "file_path": "crates/tools/src/prepare_scripts.rs", "rank": 11, "score": 256596.79946835147 }, { "content": "fn build_clerkb(repos_dir: &Path, repo_name: &str) {\n\n let target_dir = utils::make_path(repos_dir, vec![repo_name])\n\n .display()\n\n .to_string();\n\n utils::run(\"yarn\", vec![\"--cwd\", &target_dir]).expect(\"run yarn\");\n\n utils::run(\"make\", vec![\"-C\", &target_dir, \"all-via-docker\"]).expect(\"run make\");\n\n}\n\n\n", "file_path": "crates/tools/src/prepare_scripts.rs", "rank": 12, "score": 247814.46492950886 }, { "content": "fn check_scripts(target_dir: &Path, scripts_info: &HashMap<String, ScriptsInfo>) {\n\n scripts_info.iter().for_each(|(_, v)| {\n\n let target_path = v.target_script_path(target_dir);\n\n assert!(\n\n target_path.exists(),\n\n \"script does not exist: {:?}\",\n\n target_path\n\n );\n\n });\n\n}\n\n\n", "file_path": "crates/tools/src/prepare_scripts.rs", "rank": 13, "score": 246045.3598519048 }, { "content": "fn build_godwoken_polyjuice(repos_dir: &Path, repo_name: &str) {\n\n let target_dir = utils::make_path(repos_dir, vec![repo_name])\n\n .display()\n\n .to_string();\n\n utils::run(\"make\", vec![\"-C\", &target_dir, \"all-via-docker\"]).expect(\"run make\");\n\n}\n\n\n", "file_path": "crates/tools/src/prepare_scripts.rs", "rank": 14, "score": 242981.22404522845 }, { "content": "pub fn serialize_poa_setup(setup: &PoASetup) -> Bytes {\n\n let mut buffer = BytesMut::new();\n\n if setup.round_interval_uses_seconds {\n\n buffer.extend_from_slice(&[1]);\n\n } else {\n\n buffer.extend_from_slice(&[0]);\n\n }\n\n if setup.identities.len() > 255 {\n\n panic!(\"Too many identities!\");\n\n }\n\n buffer.extend_from_slice(&[\n\n setup.identity_size,\n\n setup.identities.len() as u8,\n\n setup.aggregator_change_threshold,\n\n ]);\n\n buffer.extend_from_slice(&setup.round_intervals.to_le_bytes()[..]);\n\n buffer.extend_from_slice(&setup.subblocks_per_round.to_le_bytes()[..]);\n\n for identity in &setup.identities {\n\n if identity.len() < setup.identity_size as usize {\n\n panic!(\"Invalid identity!\");\n", "file_path": "crates/tools/src/deploy_genesis.rs", "rank": 17, "score": 232576.62288524513 }, { "content": "pub fn build_sudt_key(key_flag: u32, short_address: &[u8]) -> Vec<u8> {\n\n let mut key = Vec::with_capacity(short_address.len() + 8);\n\n key.extend(&key_flag.to_le_bytes());\n\n key.extend(&(short_address.len() as u32).to_le_bytes());\n\n key.extend(short_address);\n\n key\n\n}\n\n\n", "file_path": "crates/common/src/state.rs", "rank": 18, "score": 230539.98934392442 }, { "content": "fn parse_capacity(capacity: &str) -> Result<u64, String> {\n\n let human_capacity = HumanCapacity::from_str(capacity)?;\n\n Ok(human_capacity.into())\n\n}\n", "file_path": "crates/tools/src/withdraw.rs", "rank": 19, "score": 229159.3087661165 }, { "content": "pub fn make_path<P: AsRef<Path>>(parent_dir_path: &Path, paths: Vec<P>) -> PathBuf {\n\n let mut target = PathBuf::from(parent_dir_path);\n\n for p in 
paths {\n\n target.push(p);\n\n }\n\n target\n\n}\n\n\n", "file_path": "crates/tools/src/utils.rs", "rank": 20, "score": 226838.9164496936 }, { "content": "// Read config.toml\n\npub fn read_config<P: AsRef<Path>>(path: P) -> Result<Config, String> {\n\n let content = fs::read(&path).map_err(|err| err.to_string())?;\n\n let config = toml::from_slice(&content).map_err(|err| err.to_string())?;\n\n Ok(config)\n\n}\n\n\n", "file_path": "crates/tools/src/utils.rs", "rank": 21, "score": 224816.21964996247 }, { "content": "pub fn build_account_key(id: u32, key: &[u8]) -> H256 {\n\n let mut raw_key = [0u8; 32];\n\n let mut hasher = new_blake2b();\n\n hasher.update(&id.to_le_bytes());\n\n hasher.update(&[GW_ACCOUNT_KV_TYPE]);\n\n hasher.update(key);\n\n hasher.finalize(&mut raw_key);\n\n raw_key.into()\n\n}\n\n\n", "file_path": "crates/common/src/state.rs", "rank": 22, "score": 223055.64238608518 }, { "content": "pub fn setup_chain(rollup_type_script: Script) -> Chain {\n\n let mut account_lock_manage = AccountLockManage::default();\n\n let rollup_config = RollupConfig::new_builder()\n\n .allowed_eoa_type_hashes(vec![ALWAYS_SUCCESS_CODE_HASH.clone()].pack())\n\n .finality_blocks(6.pack())\n\n .build();\n\n account_lock_manage.register_lock_algorithm(\n\n ALWAYS_SUCCESS_CODE_HASH.clone().into(),\n\n Box::new(AlwaysSuccess),\n\n );\n\n setup_chain_with_account_lock_manage(rollup_type_script, rollup_config, account_lock_manage)\n\n}\n\n\n", "file_path": "crates/tests/src/testing_tool/chain.rs", "rank": 23, "score": 221511.38572100317 }, { "content": "/// NOTE: the length `20` is a hard-coded value, may be `16` for some LockAlgorithm.\n\npub fn to_short_address(script_hash: &H256) -> &[u8] {\n\n &script_hash.as_slice()[0..20]\n\n}\n\n\n\npub struct PrepareWithdrawalRecord {\n\n pub withdrawal_lock_hash: H256,\n\n pub amount: u128,\n\n pub block_number: u64,\n\n}\n\n\n", "file_path": "crates/common/src/state.rs", "rank": 24, "score": 220003.0705574443 }, { "content": "pub fn build_account_field_key(id: u32, type_: u8) -> H256 {\n\n let mut key: [u8; 32] = H256::zero().into();\n\n key[..size_of::<u32>()].copy_from_slice(&id.to_le_bytes());\n\n key[size_of::<u32>()] = type_;\n\n key.into()\n\n}\n\n\n", "file_path": "crates/common/src/state.rs", "rank": 25, "score": 219193.1352240413 }, { "content": "pub fn build_short_script_hash_to_script_hash_key(short_script_hash: &[u8]) -> H256 {\n\n let mut key: [u8; 32] = H256::zero().into();\n\n let mut hasher = new_blake2b();\n\n hasher.update(&GW_NON_ACCOUNT_PLACEHOLDER);\n\n hasher.update(&[GW_SHORT_SCRIPT_HASH_TO_SCRIPT_HASH_TYPE]);\n\n let len = short_script_hash.len() as u32;\n\n hasher.update(&len.to_le_bytes());\n\n hasher.update(&short_script_hash);\n\n hasher.finalize(&mut key);\n\n key.into()\n\n}\n\n\n", "file_path": "crates/common/src/state.rs", "rank": 26, "score": 218513.67682403332 }, { "content": "pub fn run_in_dir<I, S>(bin: &str, args: I, target_dir: &str) -> Result<()>\n\nwhere\n\n I: IntoIterator<Item = S> + std::fmt::Debug,\n\n S: AsRef<OsStr>,\n\n{\n\n let working_dir = env::current_dir().expect(\"get working dir\");\n\n env::set_current_dir(&target_dir).expect(\"set target dir\");\n\n let result = run(bin, args);\n\n env::set_current_dir(&working_dir).expect(\"set working dir\");\n\n result\n\n}\n\n\n", "file_path": "crates/tools/src/utils.rs", "rank": 27, "score": 218111.18000598255 }, { "content": "pub fn build_script_hash_to_account_id_key(script_hash: &[u8]) -> H256 {\n\n let mut key: [u8; 32] = H256::zero().into();\n\n let mut hasher = 
new_blake2b();\n\n hasher.update(&GW_NON_ACCOUNT_PLACEHOLDER);\n\n hasher.update(&[GW_SCRIPT_HASH_TO_ID_TYPE]);\n\n hasher.update(script_hash);\n\n hasher.finalize(&mut key);\n\n key.into()\n\n}\n\n\n", "file_path": "crates/common/src/state.rs", "rank": 28, "score": 217704.874517861 }, { "content": "fn insert_to_script_column(db: &Store, block_number: u64, tx_index: u32, key: &[u8], value: &[u8]) {\n\n insert_to_state_db(db, COLUMN_SCRIPT, block_number, tx_index, key, value);\n\n}\n\n\n", "file_path": "crates/store/src/tests/transaction_clear_block_state.rs", "rank": 30, "score": 208628.41556479497 }, { "content": "pub fn eth_sign(msg: &H256, privkey: H256) -> Result<[u8; 65], String> {\n\n let mut signature = sign_message(msg, privkey)?;\n\n let v = &mut signature[64];\n\n if *v >= 27 {\n\n *v -= 27;\n\n }\n\n Ok(signature)\n\n}\n\n\n", "file_path": "crates/tools/src/account.rs", "rank": 31, "score": 205062.2026278049 }, { "content": "fn run_git_checkout(repo_dir: &str, commit: &str) -> Result<()> {\n\n utils::run(\"git\", vec![\"-C\", repo_dir, \"fetch\"])?;\n\n utils::run(\"git\", vec![\"-C\", repo_dir, \"checkout\", commit])?;\n\n utils::run(\n\n \"git\",\n\n vec![\"-C\", &repo_dir, \"submodule\", \"update\", \"--recursive\"],\n\n )\n\n}\n", "file_path": "crates/tools/src/prepare_scripts.rs", "rank": 32, "score": 204721.22253768193 }, { "content": "pub fn init_genesis(\n\n store: &Store,\n\n config: &GenesisConfig,\n\n genesis_committed_info: L2BlockCommittedInfo,\n\n secp_data: Bytes,\n\n) -> Result<()> {\n\n let rollup_script_hash: H256 = {\n\n let rollup_script_hash: [u8; 32] = config.rollup_type_hash.clone().into();\n\n rollup_script_hash.into()\n\n };\n\n if store.has_genesis()? {\n\n let chain_id = store.get_chain_id()?;\n\n if chain_id == rollup_script_hash {\n\n return Ok(());\n\n } else {\n\n panic!(\n\n \"The store is already initialized by rollup_type_hash: 0x{}!\",\n\n hex::encode(chain_id.as_slice())\n\n );\n\n }\n", "file_path": "crates/generator/src/genesis.rs", "rank": 33, "score": 200746.26898184727 }, { "content": "fn run_pull_code(mut repo_url: Url, is_recursive: bool, repos_dir: &Path, repo_name: &str) {\n\n let commit = repo_url\n\n .fragment()\n\n .expect(\"valid branch, tag, or commit\")\n\n .to_owned();\n\n repo_url.set_fragment(None);\n\n let target_dir = utils::make_path(repos_dir, vec![repo_name]);\n\n if target_dir.exists() {\n\n if run_git_checkout(&target_dir.display().to_string(), &commit).is_ok() {\n\n return;\n\n }\n\n log::info!(\"Run git checkout failed, the repo will re-init...\");\n\n fs::remove_dir_all(&target_dir).expect(\"clean repo dir\");\n\n }\n\n fs::create_dir_all(&target_dir).expect(\"create repo dir\");\n\n run_git_clone(repo_url, is_recursive, &target_dir.display().to_string())\n\n .expect(\"run git clone\");\n\n run_git_checkout(&target_dir.display().to_string(), &commit).expect(\"run git checkout\");\n\n}\n\n\n", "file_path": "crates/tools/src/prepare_scripts.rs", "rank": 34, "score": 198321.80763958616 }, { "content": "pub fn deploy_scripts(\n\n privkey_path: &Path,\n\n ckb_rpc_url: &str,\n\n input_path: &Path,\n\n output_path: &Path,\n\n) -> Result<(), String> {\n\n if let Err(err) = run_cmd(vec![\"--version\"]) {\n\n return Err(format!(\n\n \"Please install ckb-cli (cargo install ckb-cli) first: {}\",\n\n err\n\n ));\n\n }\n\n\n\n let input = fs::read_to_string(input_path).map_err(|err| err.to_string())?;\n\n let deployment_index: DeploymentIndex =\n\n serde_json::from_str(input.as_str()).map_err(|err| err.to_string())?;\n\n\n\n let 
mut rpc_client = HttpRpcClient::new(ckb_rpc_url.to_string());\n\n let network_type = get_network_type(&mut rpc_client)?;\n\n let target_lock = packed::Script::from(deployment_index.lock);\n", "file_path": "crates/tools/src/deploy_scripts.rs", "rank": 35, "score": 196783.77476368006 }, { "content": "pub fn prepare_scripts(\n\n mode: ScriptsBuildMode,\n\n input_path: &Path,\n\n repos_dir: &Path,\n\n scripts_dir: &Path,\n\n output_path: &Path,\n\n) -> Result<()> {\n\n let scripts_build_config = read_script_build_config(input_path);\n\n match mode {\n\n ScriptsBuildMode::Build => {\n\n prepare_scripts_in_build_mode(&scripts_build_config, repos_dir, scripts_dir);\n\n }\n\n ScriptsBuildMode::Copy => {\n\n prepare_scripts_in_copy_mode(&scripts_build_config.prebuild_image, scripts_dir);\n\n }\n\n }\n\n check_scripts(&scripts_dir, &scripts_build_config.scripts);\n\n generate_script_deploy_config(scripts_dir, &scripts_build_config.scripts, output_path)\n\n}\n\n\n", "file_path": "crates/tools/src/prepare_scripts.rs", "rank": 36, "score": 196783.77476368006 }, { "content": "/// build genesis from store\n\n/// This function initialize db to genesis state\n\npub fn build_genesis_from_store(\n\n db: StoreTransaction,\n\n config: &GenesisConfig,\n\n secp_data: Bytes,\n\n) -> Result<(StoreTransaction, GenesisWithGlobalState)> {\n\n let rollup_context = RollupContext {\n\n rollup_script_hash: {\n\n let rollup_script_hash: [u8; 32] = config.rollup_type_hash.clone().into();\n\n rollup_script_hash.into()\n\n },\n\n rollup_config: config.rollup_config.clone().into(),\n\n };\n\n // initialize store\n\n db.set_account_smt_root(H256::zero())?;\n\n db.set_block_smt_root(H256::zero())?;\n\n db.set_reverted_block_smt_root(H256::zero())?;\n\n db.set_account_count(0)?;\n\n let state_db =\n\n StateDBTransaction::from_checkpoint(&db, CheckPoint::from_genesis(), StateDBMode::Genesis)?;\n\n let mut tree = state_db.account_state_tree()?;\n", "file_path": "crates/generator/src/genesis.rs", "rank": 37, "score": 196782.93793621124 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn deploy_genesis(\n\n privkey_path: &Path,\n\n ckb_rpc_url: &str,\n\n deployment_result_path: &Path,\n\n user_rollup_config_path: &Path,\n\n poa_config_path: &Path,\n\n timestamp: Option<u64>,\n\n output_path: &Path,\n\n skip_config_check: bool,\n\n) -> Result<(), String> {\n\n let deployment_result_string =\n\n std::fs::read_to_string(deployment_result_path).map_err(|err| err.to_string())?;\n\n let deployment_result: ScriptsDeploymentResult =\n\n serde_json::from_str(&deployment_result_string).map_err(|err| err.to_string())?;\n\n let user_rollup_config_string =\n\n std::fs::read_to_string(user_rollup_config_path).map_err(|err| err.to_string())?;\n\n let user_rollup_config: UserRollupConfig =\n\n serde_json::from_str(&user_rollup_config_string).map_err(|err| err.to_string())?;\n\n let poa_config_string =\n\n std::fs::read_to_string(poa_config_path).map_err(|err| err.to_string())?;\n", "file_path": "crates/tools/src/deploy_genesis.rs", "rank": 38, "score": 196776.52932949195 }, { "content": "fn run_git_clone(repo_url: Url, is_recursive: bool, path: &str) -> Result<()> {\n\n let mut args = vec![\"clone\", repo_url.as_str(), path];\n\n if is_recursive {\n\n args.push(\"--recursive\");\n\n }\n\n utils::run(\"git\", args)\n\n}\n\n\n", "file_path": "crates/tools/src/prepare_scripts.rs", "rank": 39, "score": 187620.4422452919 }, { "content": "pub fn run_cmd<I, S>(args: I) -> Result<String, String>\n\nwhere\n\n I: IntoIterator<Item = S> + 
std::fmt::Debug,\n\n S: AsRef<OsStr>,\n\n{\n\n let bin = \"ckb-cli\";\n\n log::debug!(\"[Execute]: {} {:?}\", bin, args);\n\n let init_output = Command::new(bin.to_owned())\n\n .env(\"RUST_BACKTRACE\", \"full\")\n\n .args(args)\n\n .output()\n\n .expect(\"Run command failed\");\n\n\n\n if !init_output.status.success() {\n\n Err(format!(\n\n \"{}\",\n\n String::from_utf8_lossy(init_output.stderr.as_slice())\n\n ))\n\n } else {\n\n let stdout = String::from_utf8_lossy(init_output.stdout.as_slice()).to_string();\n\n log::debug!(\"stdout: {}\", stdout);\n\n Ok(stdout)\n\n }\n\n}\n\n\n", "file_path": "crates/tools/src/utils.rs", "rank": 40, "score": 185732.909199065 }, { "content": "pub fn deploy_program(\n\n privkey_path: &Path,\n\n rpc_client: &mut HttpRpcClient,\n\n binary_path: &Path,\n\n target_lock: &packed::Script,\n\n target_address: &Address,\n\n) -> Result<DeployItem, String> {\n\n log::info!(\"deploy binary {:?}\", binary_path);\n\n let file_size = fs::metadata(binary_path)\n\n .map_err(|err| err.to_string())?\n\n .len();\n\n let min_output_capacity = {\n\n let data_capacity = Capacity::bytes(file_size as usize).map_err(|err| err.to_string())?;\n\n let type_script = packed::Script::new_builder()\n\n .code_hash(TYPE_ID_CODE_HASH.pack())\n\n .hash_type(ScriptHashType::Type.into())\n\n .args(Bytes::from(vec![0u8; 32]).pack())\n\n .build();\n\n let output = packed::CellOutput::new_builder()\n\n .lock(target_lock.clone())\n", "file_path": "crates/tools/src/deploy_scripts.rs", "rank": 41, "score": 183703.59980448143 }, { "content": "fn insert_to_leaf_column(db: &Store, block_number: u64, tx_index: u32, key: &[u8], value: &[u8]) {\n\n insert_to_state_db(\n\n db,\n\n COLUMN_ACCOUNT_SMT_LEAF,\n\n block_number,\n\n tx_index,\n\n key,\n\n value,\n\n );\n\n}\n\n\n", "file_path": "crates/store/src/tests/transaction_clear_block_state.rs", "rank": 43, "score": 181096.45124293756 }, { "content": "fn insert_to_branch_column(db: &Store, block_number: u64, tx_index: u32, key: &[u8], value: &[u8]) {\n\n insert_to_state_db(\n\n db,\n\n COLUMN_ACCOUNT_SMT_BRANCH,\n\n block_number,\n\n tx_index,\n\n key,\n\n value,\n\n );\n\n}\n\n\n", "file_path": "crates/store/src/tests/transaction_clear_block_state.rs", "rank": 44, "score": 181096.45124293756 }, { "content": "pub fn privkey_to_l2_script_hash(\n\n privkey: &H256,\n\n rollup_type_hash: &H256,\n\n deployment_result: &ScriptsDeploymentResult,\n\n) -> Result<H256, String> {\n\n let eth_address = privkey_to_eth_address(privkey)?;\n\n\n\n let code_hash = Byte32::from_slice(\n\n deployment_result\n\n .eth_account_lock\n\n .script_type_hash\n\n .as_bytes(),\n\n )\n\n .map_err(|err| err.to_string())?;\n\n\n\n let mut args_vec = rollup_type_hash.as_bytes().to_vec();\n\n args_vec.append(&mut eth_address.to_vec());\n\n let args = GwPack::pack(&GwBytes::from(args_vec));\n\n\n\n let script = Script::new_builder()\n\n .code_hash(code_hash)\n\n .hash_type(ScriptHashType::Type.into())\n\n .args(args)\n\n .build();\n\n\n\n let script_hash = CkbHasher::new().update(script.as_slice()).finalize();\n\n\n\n Ok(script_hash)\n\n}\n\n\n", "file_path": "crates/tools/src/account.rs", "rank": 45, "score": 179802.18300769513 }, { "content": "pub fn get_secp_data(\n\n rpc_client: &mut HttpRpcClient,\n\n) -> Result<(Bytes, gw_jsonrpc_types::blockchain::CellDep), String> {\n\n let mut cell_dep = None;\n\n rpc_client\n\n .get_block_by_number(0)?\n\n .expect(\"get CKB genesis block\")\n\n .transactions\n\n .iter()\n\n .for_each(|tx| {\n\n tx.inner\n\n .outputs_data\n\n .iter()\n\n 
.enumerate()\n\n .for_each(|(output_index, data)| {\n\n let data_hash = ckb_types::packed::CellOutput::calc_data_hash(data.as_bytes());\n\n if data_hash.as_slice() == CODE_HASH_SECP256K1_DATA.as_bytes() {\n\n let out_point = gw_jsonrpc_types::blockchain::OutPoint {\n\n tx_hash: tx.hash.clone(),\n\n index: (output_index as u32).into(),\n", "file_path": "crates/tools/src/deploy_genesis.rs", "rank": 46, "score": 179796.86204099096 }, { "content": "fn parse_sudt_log_data(data: &[u8]) -> ([u8; 20], [u8; 20], u128) {\n\n assert_eq!(data[0], 20);\n\n let mut from_address = [0u8; 20];\n\n from_address.copy_from_slice(&data[1..21]);\n\n\n\n let mut to_address = [0u8; 20];\n\n to_address.copy_from_slice(&data[21..41]);\n\n\n\n let mut u128_bytes = [0u8; 16];\n\n u128_bytes.copy_from_slice(&data[41..57]);\n\n let amount = u128::from_le_bytes(u128_bytes);\n\n (from_address, to_address, amount)\n\n}\n\n\n", "file_path": "crates/web3-indexer/src/helper.rs", "rank": 47, "score": 177723.5649151982 }, { "content": "// Get max mature block number\n\npub fn get_max_mature_number(rpc_client: &mut HttpRpcClient) -> Result<u64, String> {\n\n let tip_epoch = rpc_client\n\n .get_tip_header()\n\n .map(|header| EpochNumberWithFraction::from_full_value(header.inner.epoch.0))?;\n\n let tip_epoch_number = tip_epoch.number();\n\n if tip_epoch_number < 4 {\n\n // No cellbase live cell is mature\n\n Ok(0)\n\n } else {\n\n let max_mature_epoch = rpc_client\n\n .get_epoch_by_number(tip_epoch_number - 4)?\n\n .ok_or_else(|| \"Can not get epoch less than current epoch number\".to_string())?;\n\n let start_number = max_mature_epoch.start_number;\n\n let length = max_mature_epoch.length;\n\n Ok(calc_max_mature_number(\n\n tip_epoch,\n\n Some((start_number, length)),\n\n CELLBASE_MATURITY,\n\n ))\n\n }\n\n}\n\n\n", "file_path": "crates/tools/src/deploy_genesis.rs", "rank": 48, "score": 177543.56432312343 }, { "content": "pub fn build_data_hash_key(data_hash: &[u8]) -> H256 {\n\n let mut key: [u8; 32] = H256::zero().into();\n\n let mut hasher = new_blake2b();\n\n hasher.update(&GW_NON_ACCOUNT_PLACEHOLDER);\n\n hasher.update(&[GW_DATA_HASH_TYPE]);\n\n hasher.update(data_hash);\n\n hasher.finalize(&mut key);\n\n key.into()\n\n}\n\n\n", "file_path": "crates/common/src/state.rs", "rank": 49, "score": 176300.2851777135 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn generate_config(\n\n genesis_path: &Path,\n\n scripts_results_path: &Path,\n\n privkey_path: &Path,\n\n ckb_url: String,\n\n indexer_url: String,\n\n output_path: &Path,\n\n database_url: Option<&str>,\n\n scripts_config_path: &Path,\n\n server_url: String,\n\n) -> Result<()> {\n\n let genesis: GenesisDeploymentResult = {\n\n let content = fs::read(genesis_path)?;\n\n serde_json::from_slice(&content)?\n\n };\n\n let scripts_results: ScriptsDeploymentResult = {\n\n let content = fs::read(scripts_results_path)?;\n\n serde_json::from_slice(&content)?\n\n };\n\n let scripts_built: ScriptsBuilt = {\n", "file_path": "crates/tools/src/generate_config.rs", "rank": 50, "score": 175113.21221693145 }, { "content": "pub fn build_l2_sudt_script(rollup_context: &RollupContext, l1_sudt_script_hash: &H256) -> Script {\n\n let args = {\n\n let mut args = Vec::with_capacity(64);\n\n args.extend(rollup_context.rollup_script_hash.as_slice());\n\n args.extend(l1_sudt_script_hash.as_slice());\n\n Bytes::from(args)\n\n };\n\n Script::new_builder()\n\n .args(args.pack())\n\n .code_hash(\n\n rollup_context\n\n .rollup_config\n\n .l2_sudt_validator_script_type_hash(),\n\n )\n\n 
.hash_type(ScriptHashType::Type.into())\n\n .build()\n\n}\n", "file_path": "crates/generator/src/sudt.rs", "rank": 51, "score": 172638.37120628275 }, { "content": "pub fn setup_chain_with_account_lock_manage(\n\n rollup_type_script: Script,\n\n rollup_config: RollupConfig,\n\n account_lock_manage: AccountLockManage,\n\n) -> Chain {\n\n let store = Store::open_tmp().unwrap();\n\n let rollup_script_hash = rollup_type_script.hash();\n\n let genesis_config = GenesisConfig {\n\n timestamp: 0,\n\n meta_contract_validator_type_hash: Default::default(),\n\n rollup_config: rollup_config.clone().into(),\n\n rollup_type_hash: rollup_script_hash.into(),\n\n secp_data_dep: Default::default(),\n\n };\n\n let genesis_committed_info = L2BlockCommittedInfo::default();\n\n let backend_manage = build_backend_manage(&rollup_config);\n\n let rollup_context = RollupContext {\n\n rollup_script_hash: rollup_script_hash.into(),\n\n rollup_config: rollup_config.clone(),\n\n };\n", "file_path": "crates/tests/src/testing_tool/chain.rs", "rank": 52, "score": 169506.81961040094 }, { "content": "fn delete_from_branch_column(db: &Store, block_number: u64, tx_index: u32, key: &[u8]) {\n\n delete_from_state_db(db, COLUMN_ACCOUNT_SMT_BRANCH, block_number, tx_index, key);\n\n}\n\n\n", "file_path": "crates/store/src/tests/transaction_clear_block_state.rs", "rank": 53, "score": 169116.15827807368 }, { "content": "fn read_script_build_config<P: AsRef<Path>>(input_path: P) -> ScriptsBuildConfig {\n\n let input = fs::read_to_string(input_path).expect(\"read config file\");\n\n let mut scripts_build_config: ScriptsBuildConfig =\n\n serde_json::from_str(&input).expect(\"parse scripts build config\");\n\n let default_build_config: ScriptsBuildConfig = ScriptsBuildConfig::default();\n\n default_build_config\n\n .scripts\n\n .iter()\n\n .for_each(\n\n |(key, default_value)| match scripts_build_config.scripts.get(key) {\n\n Some(value) => {\n\n if PathBuf::default() == value.source {\n\n let mut new = value.to_owned();\n\n new.source.clone_from(&default_value.source);\n\n scripts_build_config.scripts.insert(key.to_owned(), new);\n\n }\n\n }\n\n None => {\n\n scripts_build_config\n\n .scripts\n\n .insert(key.to_owned(), default_value.to_owned());\n\n }\n\n },\n\n );\n\n scripts_build_config\n\n}\n\n\n", "file_path": "crates/tools/src/prepare_scripts.rs", "rank": 54, "score": 167870.97401819084 }, { "content": "pub fn run<I, S>(bin: &str, args: I) -> Result<()>\n\nwhere\n\n I: IntoIterator<Item = S> + std::fmt::Debug,\n\n S: AsRef<OsStr>,\n\n{\n\n log::debug!(\"[Execute]: {} {:?}\", bin, args);\n\n let status = Command::new(bin.to_owned())\n\n .env(\"RUST_BACKTRACE\", \"full\")\n\n .args(args)\n\n .status()\n\n .expect(\"run command\");\n\n if !status.success() {\n\n Err(anyhow::anyhow!(\n\n \"Exited with status code: {:?}\",\n\n status.code()\n\n ))\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "crates/tools/src/utils.rs", "rank": 55, "score": 165527.27834183752 }, { "content": "fn prepare_scripts_in_build_mode(\n\n scripts_build_config: &ScriptsBuildConfig,\n\n repos_dir: &Path,\n\n target_dir: &Path,\n\n) {\n\n log::info!(\"Build scripts...\");\n\n run_pull_code(\n\n scripts_build_config.repos.godwoken_scripts.clone(),\n\n true,\n\n repos_dir,\n\n GODWOKEN_SCRIPTS,\n\n );\n\n run_pull_code(\n\n scripts_build_config.repos.godwoken_polyjuice.clone(),\n\n true,\n\n repos_dir,\n\n GODWOKEN_POLYJUICE,\n\n );\n\n run_pull_code(\n\n scripts_build_config.repos.clerkb.clone(),\n\n true,\n\n repos_dir,\n\n CLERKB,\n\n );\n\n 
build_godwoken_scripts(repos_dir, GODWOKEN_SCRIPTS);\n\n build_godwoken_polyjuice(repos_dir, GODWOKEN_POLYJUICE);\n\n build_clerkb(repos_dir, CLERKB);\n\n collect_scripts_to_target(repos_dir, target_dir, &scripts_build_config.scripts);\n\n}\n\n\n", "file_path": "crates/tools/src/prepare_scripts.rs", "rank": 56, "score": 165216.80028443952 }, { "content": "// Calculate compacted account root\n\npub fn calculate_state_checkpoint(root: &H256, count: u32) -> H256 {\n\n let mut hash = [0u8; 32];\n\n let mut hasher = new_blake2b();\n\n hasher.update(root.as_slice());\n\n hasher.update(&count.to_le_bytes());\n\n hasher.finalize(&mut hash);\n\n hash.into()\n\n}\n\n\n", "file_path": "crates/common/src/merkle_utils.rs", "rank": 57, "score": 163722.01073343924 }, { "content": "pub fn privkey_to_eth_address(privkey: &H256) -> Result<CKBBytes, String> {\n\n let privkey = secp256k1::SecretKey::from_slice(privkey.as_bytes())\n\n .map_err(|err| format!(\"Invalid secp256k1 secret key format, error: {}\", err))?;\n\n let pubkey = secp256k1::PublicKey::from_secret_key(&SECP256K1, &privkey);\n\n let pubkey_hash = {\n\n let mut hasher = Keccak256::new();\n\n hasher.update(&pubkey.serialize_uncompressed()[1..]);\n\n let buf = hasher.finalize();\n\n let mut pubkey_hash = [0u8; 20];\n\n pubkey_hash.copy_from_slice(&buf[12..]);\n\n pubkey_hash\n\n };\n\n let s = CKBBytes::from(pubkey_hash.to_vec());\n\n Ok(s)\n\n}\n\n\n", "file_path": "crates/tools/src/account.rs", "rank": 58, "score": 163677.49088704816 }, { "content": "pub fn is_mature(number: u64, tx_index: u64, max_mature_number: u64) -> bool {\n\n // Not cellbase cell\n\n tx_index > 0\n\n // Live cells in genesis are all mature\n\n || number == 0\n\n || number <= max_mature_number\n\n}\n\n\n", "file_path": "crates/tools/src/deploy_genesis.rs", "rank": 59, "score": 161537.82917557398 }, { "content": "pub fn l2_script_hash_to_short_address(script_hash: &H256) -> GwBytes {\n\n let short_address = &script_hash.as_bytes()[..20];\n\n\n\n GwBytes::from(short_address.to_vec())\n\n}\n\n\n", "file_path": "crates/tools/src/account.rs", "rank": 62, "score": 159767.99657693325 }, { "content": "fn delete_from_state_db(db: &Store, col: Col, block_number: u64, tx_index: u32, key: &[u8]) {\n\n let store_txn = db.begin_transaction();\n\n let state_db_txn = StateDBTransaction::from_checkpoint(\n\n &store_txn,\n\n CheckPoint::new(block_number, SubState::Tx(tx_index)),\n\n StateDBMode::Write(WriteContext::new(0)),\n\n )\n\n .unwrap();\n\n state_db_txn.delete(col, key).unwrap();\n\n state_db_txn.commit().unwrap();\n\n}\n\n\n", "file_path": "crates/store/src/tests/transaction_clear_block_state.rs", "rank": 63, "score": 158205.6933447555 }, { "content": "/// Build genesis block\n\npub fn build_genesis(config: &GenesisConfig, secp_data: Bytes) -> Result<GenesisWithGlobalState> {\n\n let store = Store::open_tmp()?;\n\n let db = store.begin_transaction();\n\n build_genesis_from_store(db, config, secp_data)\n\n .map(|(_db, genesis_with_state)| genesis_with_state)\n\n}\n\n\n\npub struct GenesisWithGlobalState {\n\n pub genesis: L2Block,\n\n pub global_state: GlobalState,\n\n}\n\n\n", "file_path": "crates/generator/src/genesis.rs", "rank": 64, "score": 158134.06058266398 }, { "content": "fn sign_message(msg: &H256, privkey_data: H256) -> Result<[u8; 65], String> {\n\n let privkey = Privkey::from(privkey_data);\n\n let signature = privkey\n\n .sign_recoverable(msg)\n\n .map_err(|err| err.to_string())?;\n\n let mut inner = [0u8; 65];\n\n inner.copy_from_slice(&signature.serialize());\n\n 
Ok(inner)\n\n}\n\n\n", "file_path": "crates/tools/src/account.rs", "rank": 65, "score": 156855.37431576653 }, { "content": "pub fn serialize_poa_data(data: &PoAData) -> Bytes {\n\n let mut buffer = BytesMut::new();\n\n buffer.extend_from_slice(&data.round_initial_subtime.to_le_bytes()[..]);\n\n buffer.extend_from_slice(&data.subblock_subtime.to_le_bytes()[..]);\n\n buffer.extend_from_slice(&data.subblock_index.to_le_bytes()[..]);\n\n buffer.extend_from_slice(&data.aggregator_index.to_le_bytes()[..]);\n\n buffer.freeze()\n\n}\n\n\n\n#[derive(Clone, Serialize, Deserialize, PartialEq, Eq, Debug, Default)]\n\npub struct GenesisDeploymentResult {\n\n pub tx_hash: H256,\n\n pub timestamp: u64,\n\n pub rollup_type_hash: H256,\n\n pub rollup_type_script: ckb_jsonrpc_types::Script,\n\n pub rollup_config: gw_jsonrpc_types::godwoken::RollupConfig,\n\n pub rollup_config_cell_dep: ckb_jsonrpc_types::CellDep,\n\n pub layer2_genesis_hash: H256,\n\n pub genesis_config: GenesisConfig,\n\n}\n\n\n", "file_path": "crates/tools/src/deploy_genesis.rs", "rank": 66, "score": 151763.13688166824 }, { "content": "pub fn get_network_type(rpc_client: &mut HttpRpcClient) -> Result<NetworkType, String> {\n\n let chain_info = rpc_client.get_blockchain_info()?;\n\n NetworkType::from_raw_str(chain_info.chain.as_str())\n\n .ok_or_else(|| format!(\"Unexpected network type: {}\", chain_info.chain))\n\n}\n\n\n", "file_path": "crates/tools/src/utils.rs", "rank": 67, "score": 149592.30620433632 }, { "content": "fn fit_output_capacity(output: ckb_packed::CellOutput, data_size: usize) -> ckb_packed::CellOutput {\n\n let data_capacity = Capacity::bytes(data_size).expect(\"data capacity\");\n\n let capacity = output\n\n .occupied_capacity(data_capacity)\n\n .expect(\"occupied_capacity\");\n\n output\n\n .as_builder()\n\n .capacity(CKBPack::pack(&capacity.as_u64()))\n\n .build()\n\n}\n\n\n", "file_path": "crates/tools/src/deploy_genesis.rs", "rank": 68, "score": 149390.30610722347 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn deploy(\n\n godwoken_rpc_url: &str,\n\n config_path: &Path,\n\n deployment_results_path: &Path,\n\n privkey_path: &Path,\n\n creator_account_id: u32,\n\n gas_limit: u64,\n\n gas_price: u128,\n\n data: &str,\n\n value: u128,\n\n) -> Result<(), String> {\n\n let data = GwBytes::from(hex::decode(data[2..].as_bytes()).map_err(|err| err.to_string())?);\n\n\n\n let deployment_result_string =\n\n std::fs::read_to_string(deployment_results_path).map_err(|err| err.to_string())?;\n\n let deployment_result: ScriptsDeploymentResult =\n\n serde_json::from_str(&deployment_result_string).map_err(|err| err.to_string())?;\n\n\n\n let config = read_config(config_path)?;\n\n let rollup_type_hash = &config.genesis.rollup_type_hash;\n", "file_path": "crates/tools/src/polyjuice.rs", "rank": 69, "score": 148368.2732572966 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn withdraw(\n\n godwoken_rpc_url: &str,\n\n privkey_path: &Path,\n\n capacity: &str,\n\n amount: &str,\n\n sudt_script_hash: &str,\n\n owner_ckb_address: &str,\n\n config_path: &Path,\n\n deployment_results_path: &Path,\n\n) -> Result<(), String> {\n\n let sudt_script_hash =\n\n H256::from_str(&sudt_script_hash.trim()[2..]).map_err(|err| err.to_string())?;\n\n let capacity = parse_capacity(capacity)?;\n\n let amount: u128 = amount.parse().expect(\"sUDT amount format error\");\n\n\n\n let deployment_result_string =\n\n fs::read_to_string(deployment_results_path).map_err(|err| err.to_string())?;\n\n let deployment_result: 
ScriptsDeploymentResult =\n\n serde_json::from_str(&deployment_result_string).map_err(|err| err.to_string())?;\n\n\n", "file_path": "crates/tools/src/withdraw.rs", "rank": 70, "score": 148368.2732572966 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn transfer(\n\n godwoken_rpc_url: &str,\n\n privkey_path: &Path,\n\n to: &str,\n\n sudt_id: u32,\n\n amount: &str,\n\n fee: &str,\n\n config_path: &Path,\n\n deployment_results_path: &Path,\n\n) -> Result<(), String> {\n\n let amount: u128 = amount.parse().expect(\"sUDT amount format error\");\n\n let fee: u128 = fee.parse().expect(\"fee format error\");\n\n\n\n let deployment_result_string =\n\n std::fs::read_to_string(deployment_results_path).map_err(|err| err.to_string())?;\n\n let deployment_result: ScriptsDeploymentResult =\n\n serde_json::from_str(&deployment_result_string).map_err(|err| err.to_string())?;\n\n\n\n let mut godwoken_rpc_client = GodwokenRpcClient::new(godwoken_rpc_url);\n\n\n", "file_path": "crates/tools/src/transfer.rs", "rank": 71, "score": 148368.2732572966 }, { "content": "pub fn parse_log(item: &LogItem) -> Result<GwLog> {\n\n let service_flag: u8 = item.service_flag().into();\n\n let raw_data = item.data().raw_data();\n\n let data = raw_data.as_ref();\n\n match service_flag {\n\n GW_LOG_SUDT_TRANSFER => {\n\n let sudt_id: u32 = item.account_id().unpack();\n\n if data.len() != (1 + 20 + 20 + 16) {\n\n return Err(anyhow!(\"Invalid data length: {}\", data.len()));\n\n }\n\n let (from_address, to_address, amount) = parse_sudt_log_data(data);\n\n Ok(GwLog::SudtTransfer {\n\n sudt_id,\n\n from_address,\n\n to_address,\n\n amount,\n\n })\n\n }\n\n GW_LOG_SUDT_PAY_FEE => {\n\n let sudt_id: u32 = item.account_id().unpack();\n", "file_path": "crates/web3-indexer/src/helper.rs", "rank": 72, "score": 145412.7296550411 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn polyjuice_call(\n\n godwoken_rpc_url: &str,\n\n gas_limit: u64,\n\n gas_price: u128,\n\n data: &str,\n\n value: u128,\n\n to_address: &str,\n\n from: &str,\n\n) -> Result<(), String> {\n\n let data = GwBytes::from(hex::decode(data[2..].as_bytes()).map_err(|err| err.to_string())?);\n\n\n\n let mut godwoken_rpc_client = GodwokenRpcClient::new(godwoken_rpc_url);\n\n\n\n let to_address_str = to_address;\n\n let to_address =\n\n GwBytes::from(hex::decode(to_address_str[2..].as_bytes()).map_err(|err| err.to_string())?);\n\n\n\n let from_address = parse_account_short_address(&mut godwoken_rpc_client, &from)?;\n\n let from_id = short_address_to_account_id(&mut godwoken_rpc_client, &from_address)?;\n\n let from_id = from_id.expect(\"from account not found!\");\n", "file_path": "crates/tools/src/polyjuice.rs", "rank": 73, "score": 145400.44863023126 }, { "content": "pub fn revert(\n\n rollup_context: &RollupContext,\n\n block_producer_config: &BlockProducerConfig,\n\n custodian_cells: Vec<CellInfo>,\n\n) -> Result<Option<RevertedDeposits>> {\n\n if custodian_cells.is_empty() {\n\n return Ok(None);\n\n }\n\n\n\n let mut custodian_inputs = vec![];\n\n let mut custodian_witness = vec![];\n\n let mut deposit_outputs = vec![];\n\n\n\n let rollup_type_hash = rollup_context.rollup_script_hash.as_slice().iter();\n\n for revert_custodian in custodian_cells.into_iter() {\n\n let deposit_lock = {\n\n let args: Bytes = revert_custodian.output.lock().args().unpack();\n\n let custodian_lock_args = CustodianLockArgs::from_slice(&args.slice(32..))?;\n\n\n\n let deposit_lock_args = custodian_lock_args.deposit_lock_args();\n", "file_path": 
"crates/block-producer/src/deposit.rs", "rank": 74, "score": 145400.44863023126 }, { "content": "pub fn revert(\n\n rollup_context: &RollupContext,\n\n block_producer_config: &BlockProducerConfig,\n\n withdrawal_cells: Vec<CellInfo>,\n\n) -> Result<Option<RevertedWithdrawals>> {\n\n if withdrawal_cells.is_empty() {\n\n return Ok(None);\n\n }\n\n\n\n let mut withdrawal_inputs = vec![];\n\n let mut withdrawal_witness = vec![];\n\n let mut custodian_outputs = vec![];\n\n\n\n let timestamp = SystemTime::now()\n\n .duration_since(UNIX_EPOCH)\n\n .expect(\"unexpected timestamp\")\n\n .as_millis() as u64;\n\n\n\n // We use timestamp plus idx and rollup_type_hash to create different custodian lock\n\n // hash for every reverted withdrawal input. Withdrawal lock use custodian lock hash to\n", "file_path": "crates/block-producer/src/withdrawal.rs", "rank": 75, "score": 145400.44863023126 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn send_transaction(\n\n godwoken_rpc_url: &str,\n\n config_path: &Path,\n\n deployment_results_path: &Path,\n\n privkey_path: &Path,\n\n creator_account_id: u32,\n\n gas_limit: u64,\n\n gas_price: u128,\n\n data: &str,\n\n value: u128,\n\n to_address: &str,\n\n) -> Result<(), String> {\n\n let data = GwBytes::from(hex::decode(data[2..].as_bytes()).map_err(|err| err.to_string())?);\n\n\n\n let deployment_result_string =\n\n std::fs::read_to_string(deployment_results_path).map_err(|err| err.to_string())?;\n\n let deployment_result: ScriptsDeploymentResult =\n\n serde_json::from_str(&deployment_result_string).map_err(|err| err.to_string())?;\n\n\n\n let config = read_config(config_path)?;\n", "file_path": "crates/tools/src/polyjuice.rs", "rank": 76, "score": 145400.44863023126 }, { "content": "pub fn wait_for_tx(\n\n rpc_client: &mut HttpRpcClient,\n\n tx_hash: &H256,\n\n timeout_secs: u64,\n\n) -> Result<TransactionView, String> {\n\n let retry_timeout = Duration::from_secs(timeout_secs);\n\n let start_time = Instant::now();\n\n while start_time.elapsed() < retry_timeout {\n\n std::thread::sleep(Duration::from_secs(2));\n\n match rpc_client.get_transaction(tx_hash.clone())? 
{\n\n Some(tx_with_status) if tx_with_status.tx_status.status == Status::Pending => {\n\n log::info!(\"tx pending\");\n\n }\n\n Some(tx_with_status) if tx_with_status.tx_status.status == Status::Proposed => {\n\n log::info!(\"tx proposed\");\n\n }\n\n Some(tx_with_status) if tx_with_status.tx_status.status == Status::Committed => {\n\n log::info!(\"tx commited\");\n\n return Ok(tx_with_status.transaction);\n\n }\n\n _ => {\n\n log::error!(\"error\")\n\n }\n\n }\n\n }\n\n Err(format!(\"Timeout: {:?}\", retry_timeout))\n\n}\n\n\n", "file_path": "crates/tools/src/utils.rs", "rank": 77, "score": 145400.44863023126 }, { "content": "pub fn to_eth_eoa_address(\n\n godwoken_rpc_url: &str,\n\n godwoken_short_address: &str,\n\n) -> Result<(), String> {\n\n if godwoken_short_address.len() != 42 || !godwoken_short_address.starts_with(\"0x\") {\n\n return Err(\"godwoken short address format error!\".to_owned());\n\n }\n\n\n\n let mut godwoken_rpc_client = GodwokenRpcClient::new(godwoken_rpc_url);\n\n\n\n let short_address = GwBytes::from(\n\n hex::decode(godwoken_short_address[2..].as_bytes()).map_err(|err| err.to_string())?,\n\n );\n\n\n\n let script_hash = godwoken_rpc_client\n\n .get_script_hash_by_short_address(JsonBytes::from_bytes(short_address))?;\n\n\n\n let script = match script_hash {\n\n Some(h) => godwoken_rpc_client.get_script(h)?,\n\n None => return Err(\"script hash not found!\".to_owned()),\n", "file_path": "crates/tools/src/address.rs", "rank": 78, "score": 142627.29340156956 }, { "content": "pub fn to_godwoken_short_address(\n\n eth_eoa_address: &str,\n\n config_path: &Path,\n\n deployment_results_path: &Path,\n\n) -> Result<(), String> {\n\n if eth_eoa_address.len() != 42 || !eth_eoa_address.starts_with(\"0x\") {\n\n return Err(\"eth eoa address format error!\".to_owned());\n\n }\n\n\n\n let eth_eoa_addr =\n\n GwBytes::from(hex::decode(eth_eoa_address[2..].as_bytes()).map_err(|err| err.to_string())?);\n\n\n\n let config = read_config(&config_path)?;\n\n let rollup_type_hash = &config.genesis.rollup_type_hash;\n\n\n\n let deployment_result_string =\n\n std::fs::read_to_string(deployment_results_path).map_err(|err| err.to_string())?;\n\n let deployment_result: ScriptsDeploymentResult =\n\n serde_json::from_str(&deployment_result_string).map_err(|err| err.to_string())?;\n\n\n", "file_path": "crates/tools/src/address.rs", "rank": 79, "score": 142627.29340156956 }, { "content": "/// NOTE: Caller should rollback db, only update reverted_block_smt in L1ActionContext::Revert\n\npub fn build_revert_context(\n\n db: &StoreTransaction,\n\n reverted_blocks: &[L2Block],\n\n) -> Result<RevertContext> {\n\n // Build main chain block proof\n\n let reverted_blocks = reverted_blocks.iter();\n\n let reverted_raw_blocks: Vec<RawL2Block> = reverted_blocks.map(|rb| rb.raw()).collect();\n\n let (_, block_proof) = build_block_proof(db, &reverted_raw_blocks)?;\n\n log::debug!(\"build main chain block proof\");\n\n\n\n // Build reverted block proof\n\n let (post_reverted_block_root, reverted_block_proof) = {\n\n let mut smt = db.reverted_block_smt()?;\n\n let to_key = |b: &RawL2Block| H256::from(b.hash());\n\n let to_leave = |b: &RawL2Block| (to_key(b), H256::one());\n\n\n\n let keys: Vec<H256> = reverted_raw_blocks.iter().map(to_key).collect();\n\n for key in keys.iter() {\n\n smt.update(key.to_owned(), H256::one())?;\n\n }\n", "file_path": "crates/chain/src/challenge.rs", "rank": 80, "score": 142627.29340156956 }, { "content": "pub fn wait_for_l2_tx(\n\n godwoken_rpc_client: &mut GodwokenRpcClient,\n\n 
tx_hash: &H256,\n\n timeout_secs: u64,\n\n) -> Result<Option<TxReceipt>, String> {\n\n let retry_timeout = Duration::from_secs(timeout_secs);\n\n let start_time = Instant::now();\n\n while start_time.elapsed() < retry_timeout {\n\n std::thread::sleep(Duration::from_secs(2));\n\n\n\n let receipt = godwoken_rpc_client.get_transaction_receipt(tx_hash)?;\n\n\n\n match receipt {\n\n Some(_) => {\n\n log::info!(\"tx committed\");\n\n return Ok(receipt);\n\n }\n\n None => {\n\n log::info!(\"waiting for {} secs.\", start_time.elapsed().as_secs());\n\n }\n\n }\n\n }\n\n Err(format!(\"Timeout: {:?}\", retry_timeout))\n\n}\n", "file_path": "crates/tools/src/utils.rs", "rank": 81, "score": 142627.29340156956 }, { "content": "pub fn build_verify_context(\n\n generator: Arc<Generator>,\n\n db: &StoreTransaction,\n\n target: &ChallengeTarget,\n\n) -> Result<VerifyContext> {\n\n let challenge_type = target.target_type().try_into();\n\n let block_hash: [u8; 32] = target.block_hash().unpack();\n\n let target_index = target.target_index().unpack();\n\n\n\n match challenge_type.map_err(|_| anyhow!(\"invalid challenge type\"))? {\n\n ChallengeTargetType::TxExecution => {\n\n build_verify_transaction_witness(generator, db, block_hash.into(), target_index)\n\n }\n\n ChallengeTargetType::TxSignature => {\n\n build_verify_transaction_signature_witness(db, block_hash.into(), target_index)\n\n }\n\n ChallengeTargetType::Withdrawal => {\n\n build_verify_withdrawal_witness(db, block_hash.into(), target_index)\n\n }\n\n }\n", "file_path": "crates/chain/src/challenge.rs", "rank": 82, "score": 142627.29340156956 }, { "content": "pub fn privkey_to_short_address(\n\n privkey: &H256,\n\n rollup_type_hash: &H256,\n\n deployment_result: &ScriptsDeploymentResult,\n\n) -> Result<GwBytes, String> {\n\n let script_hash = privkey_to_l2_script_hash(privkey, rollup_type_hash, deployment_result)?;\n\n\n\n let short_address = l2_script_hash_to_short_address(&script_hash);\n\n Ok(short_address)\n\n}\n\n\n", "file_path": "crates/tools/src/account.rs", "rank": 83, "score": 142627.29340156956 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn deposit_ckb(\n\n privkey_path: &Path,\n\n deployment_results_path: &Path,\n\n config_path: &Path,\n\n capacity: &str,\n\n fee: &str,\n\n ckb_rpc_url: &str,\n\n eth_address: Option<&str>,\n\n godwoken_rpc_url: &str,\n\n) -> Result<(), String> {\n\n let deployment_result_string =\n\n std::fs::read_to_string(deployment_results_path).map_err(|err| err.to_string())?;\n\n let deployment_result: ScriptsDeploymentResult =\n\n serde_json::from_str(&deployment_result_string).map_err(|err| err.to_string())?;\n\n\n\n let config = read_config(&config_path)?;\n\n\n\n let privkey = read_privkey(privkey_path)?;\n\n\n\n // Using private key to calculate eth address when eth_address not provided.\n", "file_path": "crates/tools/src/deposit_ckb.rs", "rank": 84, "score": 142627.29340156956 }, { "content": "pub fn build_challenge_context(\n\n db: &StoreTransaction,\n\n target: ChallengeTarget,\n\n) -> Result<ChallengeContext> {\n\n let block_hash: H256 = target.block_hash().unpack();\n\n let block = {\n\n let opt_ = db.get_block(&block_hash)?;\n\n opt_.ok_or_else(|| anyhow!(\"bad block {} not found\", hex::encode(block_hash.as_slice())))?\n\n };\n\n\n\n let block_smt = db.block_smt()?;\n\n let block_proof = block_smt\n\n .merkle_proof(vec![block.smt_key().into()])?\n\n .compile(vec![(block.smt_key().into(), block.hash().into())])?;\n\n\n\n let witness = ChallengeWitness::new_builder()\n\n 
.raw_l2block(block.raw())\n\n .block_proof(block_proof.0.pack())\n\n .build();\n\n\n", "file_path": "crates/chain/src/challenge.rs", "rank": 85, "score": 142627.29340156956 }, { "content": "pub fn construct_block(\n\n chain: &Chain,\n\n mem_pool: &MemPool,\n\n deposit_requests: Vec<DepositRequest>,\n\n) -> anyhow::Result<ProduceBlockResult> {\n\n let block_producer_id = 0u32;\n\n let timestamp = 0;\n\n let stake_cell_owner_lock_hash = H256::zero();\n\n let max_withdrawal_capacity = std::u128::MAX;\n\n let db = chain.store().begin_transaction();\n\n let generator = chain.generator();\n\n let parent_block = chain.store().get_tip_block().unwrap();\n\n let rollup_config_hash = chain.rollup_config_hash().clone().into();\n\n let mut txs = Vec::new();\n\n let mut withdrawal_requests = Vec::new();\n\n let mut available_custodians = AvailableCustodians::default();\n\n for (_, entry) in mem_pool.pending() {\n\n // notice we either choice txs or withdrawals from an entry to avoid nonce conflict\n\n if !entry.txs.is_empty() {\n\n txs.extend(entry.txs.iter().cloned());\n", "file_path": "crates/tests/src/testing_tool/chain.rs", "rank": 86, "score": 140030.26228346134 }, { "content": "// address: 0x... / id: 1\n\npub fn parse_account_short_address(\n\n godwoken: &mut GodwokenRpcClient,\n\n account: &str,\n\n) -> Result<GwBytes, String> {\n\n // if match short address\n\n if account.starts_with(\"0x\") && account.len() == 42 {\n\n let r = GwBytes::from(hex::decode(account[2..].as_bytes()).map_err(|err| err.to_string())?);\n\n return Ok(r);\n\n }\n\n\n\n // if match id\n\n let account_id: u32 = match account.parse() {\n\n Ok(a) => a,\n\n Err(_) => return Err(\"account id parse error!\".to_owned()),\n\n };\n\n let script_hash = godwoken.get_script_hash(account_id)?;\n\n let short_address = GwBytes::from((&script_hash.as_bytes()[..20]).to_vec());\n\n Ok(short_address)\n\n}\n\n\n", "file_path": "crates/tools/src/account.rs", "rank": 87, "score": 140030.26228346134 }, { "content": "pub fn generate_transaction_message_to_sign(\n\n raw_l2transaction: &RawL2Transaction,\n\n rollup_type_hash: &H256,\n\n sender_script_hash: &H256,\n\n receiver_script_hash: &H256,\n\n) -> H256 {\n\n let raw_data = raw_l2transaction.as_slice();\n\n let rollup_type_hash_data = rollup_type_hash.as_bytes();\n\n\n\n let digest = CkbHasher::new()\n\n .update(rollup_type_hash_data)\n\n .update(sender_script_hash.as_bytes())\n\n .update(receiver_script_hash.as_bytes())\n\n .update(raw_data)\n\n .finalize();\n\n\n\n let message = EthHasher::new()\n\n .update(\"\\x19Ethereum Signed Message:\\n32\")\n\n .update(digest.as_bytes())\n\n .finalize();\n\n\n\n message\n\n}\n", "file_path": "crates/tools/src/transfer.rs", "rank": 88, "score": 140030.26228346134 }, { "content": "pub fn short_address_to_account_id(\n\n godwoken_rpc_client: &mut GodwokenRpcClient,\n\n short_address: &GwBytes,\n\n) -> Result<Option<u32>, String> {\n\n let bytes = JsonBytes::from_bytes(short_address.clone());\n\n let script_hash = match godwoken_rpc_client.get_script_hash_by_short_address(bytes)? 
{\n\n Some(h) => h,\n\n None => {\n\n return Err(format!(\n\n \"script hash by short address: 0x{} not found\",\n\n hex::encode(short_address.to_vec()),\n\n ))\n\n }\n\n };\n\n let account_id = godwoken_rpc_client.get_account_id_by_script_hash(script_hash)?;\n\n\n\n Ok(account_id)\n\n}\n\n\n", "file_path": "crates/tools/src/account.rs", "rank": 89, "score": 140030.26228346134 }, { "content": "pub fn create_creator_account(\n\n godwoken_rpc_url: &str,\n\n privkey_path: &Path,\n\n sudt_id: u32,\n\n fee_amount: &str,\n\n config_path: &Path,\n\n deployment_results_path: &Path,\n\n) -> Result<(), String> {\n\n let fee: u128 = fee_amount.parse().expect(\"fee format error\");\n\n\n\n let deployment_result_string =\n\n std::fs::read_to_string(deployment_results_path).map_err(|err| err.to_string())?;\n\n let deployment_result: ScriptsDeploymentResult =\n\n serde_json::from_str(&deployment_result_string).map_err(|err| err.to_string())?;\n\n\n\n let mut godwoken_rpc_client = GodwokenRpcClient::new(godwoken_rpc_url);\n\n\n\n let config = read_config(config_path)?;\n\n let rollup_type_hash = &config.genesis.rollup_type_hash;\n\n\n", "file_path": "crates/tools/src/create_creator_account.rs", "rank": 90, "score": 137593.09312282284 }, { "content": "pub fn apply_block_result(\n\n chain: &mut Chain,\n\n rollup_cell: CellOutput,\n\n block_result: ProduceBlockResult,\n\n deposit_requests: Vec<DepositRequest>,\n\n deposit_asset_scripts: HashSet<Script>,\n\n) {\n\n let l2block = block_result.block.clone();\n\n let transaction = build_sync_tx(rollup_cell, block_result);\n\n let l2block_committed_info = L2BlockCommittedInfo::default();\n\n\n\n let update = L1Action {\n\n context: L1ActionContext::SubmitBlock {\n\n l2block,\n\n deposit_requests,\n\n deposit_asset_scripts,\n\n },\n\n transaction,\n\n l2block_committed_info,\n\n };\n\n let param = SyncParam {\n\n updates: vec![update],\n\n reverts: Default::default(),\n\n };\n\n chain.sync(param).unwrap();\n\n assert_eq!(chain.last_sync_event().is_success(), true);\n\n}\n\n\n", "file_path": "crates/tests/src/testing_tool/chain.rs", "rank": 91, "score": 137593.09312282284 }, { "content": "pub fn build_output(\n\n rollup_context: &RollupContext,\n\n prev_global_state: GlobalState,\n\n challenge_cell: &CellInfo,\n\n burn_lock: Script,\n\n owner_lock: Script,\n\n context: VerifyContext,\n\n) -> Result<CancelChallengeOutput> {\n\n match context.verify_witness {\n\n VerifyWitness::Withdrawal(witness) => {\n\n let verifier_lock = context.sender_script;\n\n\n\n let verifier_witness = {\n\n let signature = witness.withdrawal_request().signature();\n\n WitnessArgs::new_builder()\n\n .lock(Some(signature).pack())\n\n .build()\n\n };\n\n\n\n let cancel: CancelChallenge<VerifyWithdrawalWitness> = CancelChallenge::new(\n", "file_path": "crates/block-producer/src/challenger/cancel_challenge.rs", "rank": 92, "score": 137593.09312282284 }, { "content": "pub fn build_sync_tx(\n\n rollup_cell: CellOutput,\n\n produce_block_result: ProduceBlockResult,\n\n) -> Transaction {\n\n let ProduceBlockResult {\n\n block,\n\n global_state,\n\n unused_transactions,\n\n unused_withdrawal_requests,\n\n l2tx_offchain_used_cycles: _,\n\n } = produce_block_result;\n\n assert!(unused_transactions.is_empty());\n\n assert!(unused_withdrawal_requests.is_empty());\n\n let rollup_action = {\n\n let submit_block = RollupSubmitBlock::new_builder().block(block).build();\n\n RollupAction::new_builder()\n\n .set(RollupActionUnion::RollupSubmitBlock(submit_block))\n\n .build()\n\n };\n\n let witness = 
WitnessArgs::new_builder()\n", "file_path": "crates/tests/src/testing_tool/chain.rs", "rank": 93, "score": 137593.09312282284 }, { "content": "fn calculate_type_id(first_cell_input: &ckb_packed::CellInput, first_output_index: u64) -> Bytes {\n\n let mut blake2b = new_blake2b();\n\n blake2b.update(first_cell_input.as_slice());\n\n blake2b.update(&first_output_index.to_le_bytes());\n\n let mut ret = [0; 32];\n\n blake2b.finalize(&mut ret);\n\n Bytes::from(ret.to_vec())\n\n}\n\n\n", "file_path": "crates/tools/src/deploy_genesis.rs", "rank": 94, "score": 136988.37363853768 }, { "content": "pub fn new_blake2b() -> Blake2b {\n\n Blake2bBuilder::new(32)\n\n .personal(CKB_PERSONALIZATION)\n\n .build()\n\n}\n", "file_path": "crates/hash/src/blake2b.rs", "rank": 95, "score": 136820.98932940004 }, { "content": "// block_number(8 bytes) | tx_index(4 bytes) | col (1 byte) | key (n bytes)\n\nstruct BlockStateRecordKey(Vec<u8>);\n\n\n\nimpl BlockStateRecordKey {\n\n fn new(block_number: u64, tx_index: u32, col: Col, key: &[u8]) -> Self {\n\n let mut record_key = Vec::new();\n\n record_key.resize(13 + key.len(), 0);\n\n record_key[..8].copy_from_slice(&block_number.to_be_bytes());\n\n record_key[8..12].copy_from_slice(&tx_index.to_be_bytes());\n\n record_key[12] = col;\n\n record_key[13..].copy_from_slice(key);\n\n BlockStateRecordKey(record_key)\n\n }\n\n\n\n fn state_key(&self) -> &[u8] {\n\n &self.0[13..]\n\n }\n\n\n\n fn from_vec(record_key: Vec<u8>) -> Self {\n\n BlockStateRecordKey(record_key)\n\n }\n", "file_path": "crates/store/src/transaction.rs", "rank": 96, "score": 135534.94873183218 }, { "content": "pub trait Pack<T: Entity> {\n\n fn pack(&self) -> T;\n\n}\n\n\n", "file_path": "crates/types/src/prelude.rs", "rank": 97, "score": 134936.04271439626 }, { "content": "#[derive(Clone, Serialize, Deserialize, PartialEq, Eq, Debug)]\n\nstruct ScriptsInfo {\n\n #[serde(default)]\n\n source: PathBuf,\n\n\n\n #[serde(default)]\n\n always_success: bool,\n\n}\n\n\n\nimpl ScriptsInfo {\n\n fn source_script_path(&self, repos_dir: &Path) -> PathBuf {\n\n utils::make_path(repos_dir, vec![self.source.as_path()])\n\n }\n\n\n\n fn target_script_path(&self, target_root_dir: &Path) -> PathBuf {\n\n let script_name = self.source.file_name().expect(\"get script name\");\n\n let repo_name = self\n\n .source\n\n .components()\n\n .next()\n\n .expect(\"get repo name\")\n\n .as_os_str();\n\n utils::make_path(target_root_dir, vec![repo_name, script_name])\n\n }\n\n}\n\n\n", "file_path": "crates/tools/src/prepare_scripts.rs", "rank": 98, "score": 133932.38444349356 }, { "content": "pub fn store_data<Mac: SupportMachine>(machine: &mut Mac, data: &[u8]) -> Result<u64, VMError> {\n\n let addr = machine.registers()[A0].to_u64();\n\n let size_addr = machine.registers()[A1].clone();\n\n let data_len = data.len() as u64;\n\n let offset = cmp::min(data_len, machine.registers()[A2].to_u64());\n\n\n\n let size = machine.memory_mut().load64(&size_addr)?.to_u64();\n\n let full_size = data_len - offset;\n\n let real_size = cmp::min(size, full_size);\n\n machine\n\n .memory_mut()\n\n .store64(&size_addr, &Mac::REG::from_u64(full_size))?;\n\n machine\n\n .memory_mut()\n\n .store_bytes(addr, &data[offset as usize..(offset + real_size) as usize])?;\n\n Ok(real_size)\n\n}\n\n\n\nimpl<'a, S: State, C: ChainStore, Mac: SupportMachine> Syscalls<Mac> for L2Syscalls<'a, S, C> {\n\n fn initialize(&mut self, _machine: &mut Mac) -> Result<(), VMError> {\n", "file_path": "crates/generator/src/syscalls/mod.rs", "rank": 99, "score": 
133495.93938369898 } ]
Rust
src/rule_finder.rs
ciphergoth/rerast
2abd359242dee27ab09919259154274cca612bb5
use std::marker; use syntax::ast::NodeId; use syntax::symbol::Symbol; use syntax::ext::quote::rt::Span; use std::vec::Vec; use rustc::hir::{self, intravisit}; use rustc::ty::{self, TyCtxt}; use definitions::RerastDefinitions; use rule_matcher::{Matchable, OperatorPrecedence}; use rules::{Rule, Rules}; use errors::ErrorWithSpan; use super::DeclaredNamesFinder; pub(crate) struct RuleFinder<'a, 'gcx: 'a> { tcx: TyCtxt<'a, 'gcx, 'gcx>, rerast_definitions: RerastDefinitions<'gcx>, rules_mod_symbol: Symbol, rules: Rules<'gcx>, body_id: Option<hir::BodyId>, in_rules_module: bool, errors: Vec<ErrorWithSpan>, } impl<'a, 'gcx> RuleFinder<'a, 'gcx> { pub(crate) fn find_rules( tcx: TyCtxt<'a, 'gcx, 'gcx>, rerast_definitions: RerastDefinitions<'gcx>, krate: &'gcx hir::Crate, ) -> Result<Rules<'gcx>, Vec<ErrorWithSpan>> { let mut rule_finder = RuleFinder { tcx, rerast_definitions, rules_mod_symbol: Symbol::intern(super::RULES_MOD_NAME), rules: Rules::new(), body_id: None, in_rules_module: false, errors: Vec::new(), }; intravisit::walk_crate(&mut rule_finder, krate); if rule_finder.errors.is_empty() { Ok(rule_finder.rules) } else { Err(rule_finder.errors) } } fn maybe_add_rule( &mut self, arg_ty: ty::Ty<'gcx>, arms: &'gcx [hir::Arm], body_id: hir::BodyId, arg_ty_span: Span, ) -> Result<(), Vec<ErrorWithSpan>> { if self.maybe_add_typed_rule::<hir::Expr>(arg_ty, arms, body_id)? || self.maybe_add_typed_rule::<hir::Pat>(arg_ty, arms, body_id)? || self.maybe_add_typed_rule::<hir::TraitRef>(arg_ty, arms, body_id)? || self.maybe_add_typed_rule::<hir::Ty>(arg_ty, arms, body_id)? { Ok(()) } else { Err(vec![ ErrorWithSpan::new("Unexpected code found in rule function", arg_ty_span), ]) } } fn maybe_add_typed_rule<T: 'gcx + StartMatch>( &mut self, arg_ty: ty::Ty<'gcx>, arms: &'gcx [hir::Arm], body_id: hir::BodyId, ) -> Result<bool, Vec<ErrorWithSpan>> { fn get_arm(arms: &[hir::Arm], arm_name: Symbol) -> Option<&hir::Block> { for arm in arms { if let hir::PatKind::Path(hir::QPath::Resolved(None, ref path)) = arm.pats[0].node { if let Some(segment) = path.segments.last() { if segment.name == arm_name { if let hir::Expr_::ExprBlock(ref block) = arm.body.node { return Some(block); } } } } } None } if arg_ty != T::replace_marker_type(&self.rerast_definitions) { return Ok(false); } if let (Some(search_block), Some(replace_block)) = ( get_arm(arms, self.rerast_definitions.search_symbol), get_arm(arms, self.rerast_definitions.replace_symbol), ) { let search = T::extract_root(search_block)?; let replace = T::extract_root(replace_block)?; let placeholder_ids = self.tcx .hir .body(body_id) .arguments .iter() .map(|arg| arg.pat.id) .collect(); let rule = Rule { search, replace, body_id, declared_name_node_ids: DeclaredNamesFinder::find(self.tcx, search), placeholder_ids, }; rule.validate(self.tcx)?; T::add_rule(rule, &mut self.rules); } else { panic!("Missing search/replace pattern"); } Ok(true) } } impl<'a, 'gcx, 'tcx> intravisit::Visitor<'gcx> for RuleFinder<'a, 'gcx> { fn nested_visit_map<'this>(&'this mut self) -> intravisit::NestedVisitorMap<'this, 'gcx> { intravisit::NestedVisitorMap::All(&self.tcx.hir) } fn visit_item(&mut self, item: &'gcx hir::Item) { use hir::Item_::*; if let ItemMod(_) = item.node { if item.name == self.rules_mod_symbol { self.in_rules_module = true; intravisit::walk_item(self, item); self.in_rules_module = false; return; } else if !self.in_rules_module { return; } } intravisit::walk_item(self, item); } fn visit_expr(&mut self, expr: &'gcx hir::Expr) { if !self.in_rules_module { return; } use 
hir::Expr_::*; if let ExprMatch(ref match_expr, ref arms, _) = expr.node { if let ExprMethodCall(ref _name, ref _tys, ref args) = match_expr.node { if let Some(body_id) = self.body_id { let type_tables = self.tcx .typeck_tables_of(self.tcx.hir.body_owner_def_id(body_id)); let arg0 = &args[0]; let arg_ty = type_tables.node_id_to_type(self.tcx.hir.node_to_hir_id(arg0.id)); if let Err(errors) = self.maybe_add_rule(arg_ty, arms, body_id, arg0.span) { self.errors.extend(errors); } return; } } } intravisit::walk_expr(self, expr) } fn visit_body(&mut self, body: &'gcx hir::Body) { if !self.in_rules_module { return; } let old_body_id = self.body_id; self.body_id = Some(body.id()); intravisit::walk_body(self, body); self.body_id = old_body_id; } } pub(crate) trait StartMatch: Matchable { fn span(&self) -> Span; fn walk<'gcx, V: intravisit::Visitor<'gcx>>(visitor: &mut V, node: &'gcx Self); fn needs_parenthesis(_parent: Option<&Self>, _child: &Self) -> bool { false } fn extract_root(block: &hir::Block) -> Result<&Self, ErrorWithSpan>; fn add_rule<'gcx>(rule: Rule<'gcx, Self>, rules: &mut Rules<'gcx>) where Self: marker::Sized; fn replace_marker_type<'gcx>(rerast_definitions: &RerastDefinitions<'gcx>) -> ty::Ty<'gcx>; fn bindings_can_match_patterns() -> bool { false } fn node_id(&self) -> NodeId; } impl StartMatch for hir::Expr { fn span(&self) -> Span { self.span } fn walk<'gcx, V: intravisit::Visitor<'gcx>>(visitor: &mut V, node: &'gcx Self) { intravisit::walk_expr(visitor, node); } fn needs_parenthesis(parent: Option<&Self>, child: &Self) -> bool { OperatorPrecedence::needs_parenthesis(parent, child) } fn extract_root(block: &hir::Block) -> Result<&Self, ErrorWithSpan> { if block.stmts.len() == 1 && block.expr.is_none() { if let hir::Stmt_::StmtSemi(ref addr_expr, _) = block.stmts[0].node { if let hir::Expr_::ExprAddrOf(_, ref expr) = addr_expr.node { return Ok(&**expr); } } } Err(ErrorWithSpan::new( "replace! macro didn't produce expected structure", block.span, )) } fn add_rule<'gcx>(rule: Rule<'gcx, Self>, rules: &mut Rules<'gcx>) { rules.expr_rules.push(rule); } fn replace_marker_type<'gcx>(rerast_definitions: &RerastDefinitions<'gcx>) -> ty::Ty<'gcx> { rerast_definitions.expr_rule_marker } fn node_id(&self) -> NodeId { self.id } } impl StartMatch for hir::Ty { fn span(&self) -> Span { self.span } fn walk<'gcx, V: intravisit::Visitor<'gcx>>(visitor: &mut V, node: &'gcx Self) { intravisit::walk_ty(visitor, node); } fn extract_root(block: &hir::Block) -> Result<&Self, ErrorWithSpan> { if block.stmts.len() == 1 && block.expr.is_none() { if let hir::Stmt_::StmtDecl(ref decl, _) = block.stmts[0].node { if let hir::Decl_::DeclLocal(ref local) = decl.node { if let Some(ref ref_ty) = local.ty { if let hir::Ty_::TyRptr(_, ref mut_ty) = ref_ty.node { return Ok(&*mut_ty.ty); } } } } } Err(ErrorWithSpan::new( "replace_type! 
macro didn't produce expected structure", block.span, )) } fn add_rule<'gcx>(rule: Rule<'gcx, Self>, rules: &mut Rules<'gcx>) { rules.type_rules.push(rule); } fn replace_marker_type<'gcx>(rerast_definitions: &RerastDefinitions<'gcx>) -> ty::Ty<'gcx> { rerast_definitions.type_rule_marker } fn node_id(&self) -> NodeId { self.id } } impl StartMatch for hir::TraitRef { fn span(&self) -> Span { self.path.span } fn walk<'gcx, V: intravisit::Visitor<'gcx>>(visitor: &mut V, node: &'gcx Self) { intravisit::walk_trait_ref(visitor, node); } fn extract_root(block: &hir::Block) -> Result<&Self, ErrorWithSpan> { let ty = <hir::Ty as StartMatch>::extract_root(block)?; if let hir::Ty_::TyTraitObject(ref bounds, _) = ty.node { if bounds.len() == 1 { return Ok(&bounds[0].trait_ref); } else { return Err(ErrorWithSpan::new( "replace_trait_ref! requires exactly one trait", ty.span, )); } } else { return Err(ErrorWithSpan::new( "replace_trait_ref! requires a trait", ty.span, )); } } fn add_rule<'gcx>(rule: Rule<'gcx, Self>, rules: &mut Rules<'gcx>) { rules.trait_ref_rules.push(rule); } fn replace_marker_type<'gcx>(rerast_definitions: &RerastDefinitions<'gcx>) -> ty::Ty<'gcx> { rerast_definitions.trait_ref_rule_marker } fn node_id(&self) -> NodeId { self.ref_id } } impl StartMatch for hir::Pat { fn span(&self) -> Span { self.span } fn walk<'gcx, V: intravisit::Visitor<'gcx>>(visitor: &mut V, node: &'gcx Self) { intravisit::walk_pat(visitor, node); } fn extract_root(block: &hir::Block) -> Result<&Self, ErrorWithSpan> { if block.stmts.len() == 1 && block.expr.is_none() { if let hir::Stmt_::StmtSemi(ref expr, _) = block.stmts[0].node { if let hir::Expr_::ExprMatch(_, ref arms, _) = expr.node { if let hir::PatKind::TupleStruct(_, ref patterns, _) = arms[0].pats[0].node { return Ok(&patterns[0]); } } } } Err(ErrorWithSpan::new( "replace_pattern! macro didn't produce expected structure", block.span, )) } fn add_rule<'gcx>(rule: Rule<'gcx, Self>, rules: &mut Rules<'gcx>) { rules.pattern_rules.push(rule); } fn replace_marker_type<'gcx>(rerast_definitions: &RerastDefinitions<'gcx>) -> ty::Ty<'gcx> { rerast_definitions.pattern_rule_marker } fn bindings_can_match_patterns() -> bool { true } fn node_id(&self) -> NodeId { self.id } }
use std::marker; use syntax::ast::NodeId; use syntax::symbol::Symbol; use syntax::ext::quote::rt::Span; use std::vec::Vec; use rustc::hir::{self, intravisit}; use rustc::ty::{self, TyCtxt}; use definitions::RerastDefinitions; use rule_matcher::{Matchable, OperatorPrecedence}; use rules::{Rule, Rules}; use errors::ErrorWithSpan; use super::DeclaredNamesFinder; pub(crate) struct RuleFinder<'a, 'gcx: 'a> { tcx: TyCtxt<'a, 'gcx, 'gcx>, rerast_definitions: RerastDefinitions<'gcx>, rules_mod_symbol: Symbol, rules: Rules<'gcx>, body_id: Option<hir::BodyId>, in_rules_module: bool, errors: Vec<ErrorWithSpan>, } impl<'a, 'gcx> RuleFinder<'a, 'gcx> { pub(crate) fn find_rules( tcx: TyCtxt<'a, 'gcx, 'gcx>, rerast_definitions: RerastDefinitions<'gcx>, krate: &'gcx hir::Crate, ) -> Result<Rules<'gcx>, Vec<ErrorWithSpan>> {
intravisit::walk_crate(&mut rule_finder, krate); if rule_finder.errors.is_empty() { Ok(rule_finder.rules) } else { Err(rule_finder.errors) } } fn maybe_add_rule( &mut self, arg_ty: ty::Ty<'gcx>, arms: &'gcx [hir::Arm], body_id: hir::BodyId, arg_ty_span: Span, ) -> Result<(), Vec<ErrorWithSpan>> { if self.maybe_add_typed_rule::<hir::Expr>(arg_ty, arms, body_id)? || self.maybe_add_typed_rule::<hir::Pat>(arg_ty, arms, body_id)? || self.maybe_add_typed_rule::<hir::TraitRef>(arg_ty, arms, body_id)? || self.maybe_add_typed_rule::<hir::Ty>(arg_ty, arms, body_id)? { Ok(()) } else { Err(vec![ ErrorWithSpan::new("Unexpected code found in rule function", arg_ty_span), ]) } } fn maybe_add_typed_rule<T: 'gcx + StartMatch>( &mut self, arg_ty: ty::Ty<'gcx>, arms: &'gcx [hir::Arm], body_id: hir::BodyId, ) -> Result<bool, Vec<ErrorWithSpan>> { fn get_arm(arms: &[hir::Arm], arm_name: Symbol) -> Option<&hir::Block> { for arm in arms { if let hir::PatKind::Path(hir::QPath::Resolved(None, ref path)) = arm.pats[0].node { if let Some(segment) = path.segments.last() { if segment.name == arm_name { if let hir::Expr_::ExprBlock(ref block) = arm.body.node { return Some(block); } } } } } None } if arg_ty != T::replace_marker_type(&self.rerast_definitions) { return Ok(false); } if let (Some(search_block), Some(replace_block)) = ( get_arm(arms, self.rerast_definitions.search_symbol), get_arm(arms, self.rerast_definitions.replace_symbol), ) { let search = T::extract_root(search_block)?; let replace = T::extract_root(replace_block)?; let placeholder_ids = self.tcx .hir .body(body_id) .arguments .iter() .map(|arg| arg.pat.id) .collect(); let rule = Rule { search, replace, body_id, declared_name_node_ids: DeclaredNamesFinder::find(self.tcx, search), placeholder_ids, }; rule.validate(self.tcx)?; T::add_rule(rule, &mut self.rules); } else { panic!("Missing search/replace pattern"); } Ok(true) } } impl<'a, 'gcx, 'tcx> intravisit::Visitor<'gcx> for RuleFinder<'a, 'gcx> { fn nested_visit_map<'this>(&'this mut self) -> intravisit::NestedVisitorMap<'this, 'gcx> { intravisit::NestedVisitorMap::All(&self.tcx.hir) } fn visit_item(&mut self, item: &'gcx hir::Item) { use hir::Item_::*; if let ItemMod(_) = item.node { if item.name == self.rules_mod_symbol { self.in_rules_module = true; intravisit::walk_item(self, item); self.in_rules_module = false; return; } else if !self.in_rules_module { return; } } intravisit::walk_item(self, item); } fn visit_expr(&mut self, expr: &'gcx hir::Expr) { if !self.in_rules_module { return; } use hir::Expr_::*; if let ExprMatch(ref match_expr, ref arms, _) = expr.node { if let ExprMethodCall(ref _name, ref _tys, ref args) = match_expr.node { if let Some(body_id) = self.body_id { let type_tables = self.tcx .typeck_tables_of(self.tcx.hir.body_owner_def_id(body_id)); let arg0 = &args[0]; let arg_ty = type_tables.node_id_to_type(self.tcx.hir.node_to_hir_id(arg0.id)); if let Err(errors) = self.maybe_add_rule(arg_ty, arms, body_id, arg0.span) { self.errors.extend(errors); } return; } } } intravisit::walk_expr(self, expr) } fn visit_body(&mut self, body: &'gcx hir::Body) { if !self.in_rules_module { return; } let old_body_id = self.body_id; self.body_id = Some(body.id()); intravisit::walk_body(self, body); self.body_id = old_body_id; } } pub(crate) trait StartMatch: Matchable { fn span(&self) -> Span; fn walk<'gcx, V: intravisit::Visitor<'gcx>>(visitor: &mut V, node: &'gcx Self); fn needs_parenthesis(_parent: Option<&Self>, _child: &Self) -> bool { false } fn extract_root(block: &hir::Block) -> Result<&Self, 
ErrorWithSpan>; fn add_rule<'gcx>(rule: Rule<'gcx, Self>, rules: &mut Rules<'gcx>) where Self: marker::Sized; fn replace_marker_type<'gcx>(rerast_definitions: &RerastDefinitions<'gcx>) -> ty::Ty<'gcx>; fn bindings_can_match_patterns() -> bool { false } fn node_id(&self) -> NodeId; } impl StartMatch for hir::Expr { fn span(&self) -> Span { self.span } fn walk<'gcx, V: intravisit::Visitor<'gcx>>(visitor: &mut V, node: &'gcx Self) { intravisit::walk_expr(visitor, node); } fn needs_parenthesis(parent: Option<&Self>, child: &Self) -> bool { OperatorPrecedence::needs_parenthesis(parent, child) } fn extract_root(block: &hir::Block) -> Result<&Self, ErrorWithSpan> { if block.stmts.len() == 1 && block.expr.is_none() { if let hir::Stmt_::StmtSemi(ref addr_expr, _) = block.stmts[0].node { if let hir::Expr_::ExprAddrOf(_, ref expr) = addr_expr.node { return Ok(&**expr); } } } Err(ErrorWithSpan::new( "replace! macro didn't produce expected structure", block.span, )) } fn add_rule<'gcx>(rule: Rule<'gcx, Self>, rules: &mut Rules<'gcx>) { rules.expr_rules.push(rule); } fn replace_marker_type<'gcx>(rerast_definitions: &RerastDefinitions<'gcx>) -> ty::Ty<'gcx> { rerast_definitions.expr_rule_marker } fn node_id(&self) -> NodeId { self.id } } impl StartMatch for hir::Ty { fn span(&self) -> Span { self.span } fn walk<'gcx, V: intravisit::Visitor<'gcx>>(visitor: &mut V, node: &'gcx Self) { intravisit::walk_ty(visitor, node); } fn extract_root(block: &hir::Block) -> Result<&Self, ErrorWithSpan> { if block.stmts.len() == 1 && block.expr.is_none() { if let hir::Stmt_::StmtDecl(ref decl, _) = block.stmts[0].node { if let hir::Decl_::DeclLocal(ref local) = decl.node { if let Some(ref ref_ty) = local.ty { if let hir::Ty_::TyRptr(_, ref mut_ty) = ref_ty.node { return Ok(&*mut_ty.ty); } } } } } Err(ErrorWithSpan::new( "replace_type! macro didn't produce expected structure", block.span, )) } fn add_rule<'gcx>(rule: Rule<'gcx, Self>, rules: &mut Rules<'gcx>) { rules.type_rules.push(rule); } fn replace_marker_type<'gcx>(rerast_definitions: &RerastDefinitions<'gcx>) -> ty::Ty<'gcx> { rerast_definitions.type_rule_marker } fn node_id(&self) -> NodeId { self.id } } impl StartMatch for hir::TraitRef { fn span(&self) -> Span { self.path.span } fn walk<'gcx, V: intravisit::Visitor<'gcx>>(visitor: &mut V, node: &'gcx Self) { intravisit::walk_trait_ref(visitor, node); } fn extract_root(block: &hir::Block) -> Result<&Self, ErrorWithSpan> { let ty = <hir::Ty as StartMatch>::extract_root(block)?; if let hir::Ty_::TyTraitObject(ref bounds, _) = ty.node { if bounds.len() == 1 { return Ok(&bounds[0].trait_ref); } else { return Err(ErrorWithSpan::new( "replace_trait_ref! requires exactly one trait", ty.span, )); } } else { return Err(ErrorWithSpan::new( "replace_trait_ref! 
requires a trait", ty.span, )); } } fn add_rule<'gcx>(rule: Rule<'gcx, Self>, rules: &mut Rules<'gcx>) { rules.trait_ref_rules.push(rule); } fn replace_marker_type<'gcx>(rerast_definitions: &RerastDefinitions<'gcx>) -> ty::Ty<'gcx> { rerast_definitions.trait_ref_rule_marker } fn node_id(&self) -> NodeId { self.ref_id } } impl StartMatch for hir::Pat { fn span(&self) -> Span { self.span } fn walk<'gcx, V: intravisit::Visitor<'gcx>>(visitor: &mut V, node: &'gcx Self) { intravisit::walk_pat(visitor, node); } fn extract_root(block: &hir::Block) -> Result<&Self, ErrorWithSpan> { if block.stmts.len() == 1 && block.expr.is_none() { if let hir::Stmt_::StmtSemi(ref expr, _) = block.stmts[0].node { if let hir::Expr_::ExprMatch(_, ref arms, _) = expr.node { if let hir::PatKind::TupleStruct(_, ref patterns, _) = arms[0].pats[0].node { return Ok(&patterns[0]); } } } } Err(ErrorWithSpan::new( "replace_pattern! macro didn't produce expected structure", block.span, )) } fn add_rule<'gcx>(rule: Rule<'gcx, Self>, rules: &mut Rules<'gcx>) { rules.pattern_rules.push(rule); } fn replace_marker_type<'gcx>(rerast_definitions: &RerastDefinitions<'gcx>) -> ty::Ty<'gcx> { rerast_definitions.pattern_rule_marker } fn bindings_can_match_patterns() -> bool { true } fn node_id(&self) -> NodeId { self.id } }
        let mut rule_finder = RuleFinder {
            tcx,
            rerast_definitions,
            rules_mod_symbol: Symbol::intern(super::RULES_MOD_NAME),
            rules: Rules::new(),
            body_id: None,
            in_rules_module: false,
            errors: Vec::new(),
        };
assignment_statement
[ { "content": "struct Placeholder<'gcx> {\n\n expr: &'gcx hir::Expr,\n\n uses: Vec<Span>,\n\n}\n\n\n", "file_path": "src/change_to_rule.rs", "rank": 0, "score": 126178.62619493643 }, { "content": "struct RuleFinder<'a, 'gcx: 'a> {\n\n tcx: TyCtxt<'a, 'gcx, 'gcx>,\n\n changed_span: Span,\n\n candidate: Node<'gcx>,\n\n body_id: Option<hir::BodyId>,\n\n current_item: Option<&'gcx hir::Item>,\n\n}\n\n\n\nimpl<'a, 'gcx: 'a> intravisit::Visitor<'gcx> for RuleFinder<'a, 'gcx> {\n\n fn nested_visit_map<'this>(&'this mut self) -> intravisit::NestedVisitorMap<'this, 'gcx> {\n\n intravisit::NestedVisitorMap::All(&self.tcx.hir)\n\n }\n\n\n\n fn visit_item(&mut self, item: &'gcx hir::Item) {\n\n // TODO: Avoid visiting items that we know don't contain the changed code. Just need to make\n\n // sure we still visit mod items where the module code is in another file.\n\n let old_item = self.current_item;\n\n self.current_item = Some(item);\n\n intravisit::walk_item(self, item);\n\n self.current_item = old_item;\n", "file_path": "src/change_to_rule.rs", "rank": 1, "score": 124998.14030160866 }, { "content": "fn build_rule<'a, 'gcx: 'a>(\n\n tcx: TyCtxt<'a, 'gcx, 'gcx>,\n\n placeholders: &[Placeholder<'gcx>],\n\n expr: &'gcx hir::Expr,\n\n body_id: hir::BodyId,\n\n item: &'gcx hir::Item,\n\n right_paths: &HashSet<String>,\n\n replacement_span: Span,\n\n) -> String {\n\n let codemap = tcx.sess.codemap();\n\n let type_tables = tcx.body_tables(body_id);\n\n let mut uses_type_params = false;\n\n for ph in placeholders {\n\n let ph_ty = type_tables.expr_ty(ph.expr);\n\n for subtype in ph_ty.walk() {\n\n if let TypeVariants::TyParam(..) = subtype.sty {\n\n uses_type_params = true;\n\n }\n\n }\n\n }\n", "file_path": "src/change_to_rule.rs", "rank": 2, "score": 123315.33719108412 }, { "content": "fn after_analysis<'a, 'gcx>(\n\n state: &mut rustc_driver::driver::CompileState<'a, 'gcx>,\n\n find_rules_state: &mut FindRulesState,\n\n) {\n\n state.session.abort_if_errors();\n\n let tcx = state.tcx.unwrap();\n\n let codemap = tcx.sess.codemap();\n\n let maybe_filemap = codemap.get_filemap(&syntax_pos::FileName::Real(PathBuf::from(\n\n &find_rules_state.modified_file_name,\n\n )));\n\n let filemap = if let Some(f) = maybe_filemap {\n\n f\n\n } else {\n\n return;\n\n };\n\n let span = find_rules_state.changed_span.to_span(&filemap);\n\n let mut rule_finder = RuleFinder {\n\n tcx,\n\n changed_span: span,\n\n candidate: Node::NotFound,\n", "file_path": "src/change_to_rule.rs", "rank": 3, "score": 121257.86087652747 }, { "content": "#[derive(Debug)]\n\nstruct Placeholder<'r, 'gcx: 'r> {\n\n contents: PlaceholderContents<'gcx>,\n\n // Matches found within contents. 
Populated if and only if the rule that owns this placeholder\n\n // succeeds.\n\n matches: Matches<'r, 'gcx>,\n\n}\n\n\n\nimpl<'r, 'gcx: 'r> Placeholder<'r, 'gcx> {\n\n fn new(contents: PlaceholderContents<'gcx>) -> Placeholder<'r, 'gcx> {\n\n Placeholder {\n\n contents,\n\n matches: Matches::new(),\n\n }\n\n }\n\n}\n\n\n\n#[derive(PartialEq, Eq, PartialOrd, Ord, Debug)]\n\npub(crate) enum OperatorPrecedence {\n\n Unary, // All unary operators\n\n MulDivMod, // * / %\n", "file_path": "src/rule_matcher.rs", "rank": 4, "score": 120355.45384864828 }, { "content": "fn find_and_apply_rules<'a, 'gcx>(\n\n state: &mut driver::CompileState<'a, 'gcx>,\n\n config: Config,\n\n) -> Result<RerastOutput, RerastErrors> {\n\n let tcx = state.tcx.unwrap();\n\n let krate = tcx.hir.krate();\n\n let rerast_definitions = RerastDefinitionsFinder::find_definitions(tcx, krate);\n\n let rules =\n\n rule_finder::RuleFinder::find_rules(tcx, rerast_definitions, krate).map_err(|errors| {\n\n RerastErrors::new(\n\n errors\n\n .into_iter()\n\n .map(|error| error.with_snippet(tcx))\n\n .collect(),\n\n )\n\n })?;\n\n if config.verbose {\n\n println!(\"Found {} rule(s)\", rules.len());\n\n }\n\n let replacer = Replacer::new(tcx, rerast_definitions, rules, config);\n\n Ok(RerastOutput {\n\n updated_files: replacer.apply_to_crate(krate),\n\n })\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 5, "score": 117383.75858561276 }, { "content": "#[derive(Debug)]\n\nstruct Match<'r, 'gcx: 'r, T: StartMatch + 'gcx> {\n\n rule: &'r Rule<'gcx, T>,\n\n node: &'gcx T,\n\n // Parent of the patched expression if the parent is also an expression. Used to determine if we\n\n // need to insert parenthesis.\n\n // TODO: For nested matches, this might not be quite what we want. We want to know what the\n\n // parent of the replacement will be. 
For a top-level match, the parent will always be the\n\n // parent of the matched code, but for a match within a placeholder, if the the top-level of the\n\n // placeholder matches, then the new parent will be from the replacement expression in the\n\n // parent rule.\n\n parent_node: Option<&'gcx T>,\n\n match_placeholders: MatchPlaceholders<'r, 'gcx>,\n\n original_span: Span,\n\n}\n\n\n\nimpl<'r, 'a, 'gcx, 'tcx, T: StartMatch> Match<'r, 'gcx, T> {\n\n fn get_replacement_source(&self, tcx: TyCtxt<'a, 'gcx, 'gcx>) -> String {\n\n let replacement = self.rule.replace;\n\n let mut replacement_visitor = ReplacementVisitor {\n\n tcx,\n", "file_path": "src/rule_matcher.rs", "rank": 6, "score": 116515.67335266538 }, { "content": "// Finds referenced item paths and builds use statements that import those paths.\n\nstruct ReferencedPathsFinder<'a, 'gcx: 'a> {\n\n tcx: TyCtxt<'a, 'gcx, 'gcx>,\n\n result: HashSet<String>,\n\n}\n\n\n\nimpl<'a, 'gcx: 'a> ReferencedPathsFinder<'a, 'gcx> {\n\n fn paths_in_expr(tcx: TyCtxt<'a, 'gcx, 'gcx>, expr: &'gcx hir::Expr) -> HashSet<String> {\n\n let mut finder = ReferencedPathsFinder {\n\n tcx,\n\n result: HashSet::new(),\n\n };\n\n intravisit::walk_expr(&mut finder, expr);\n\n finder.result\n\n }\n\n}\n\n\n\nimpl<'a, 'gcx: 'a> intravisit::Visitor<'gcx> for ReferencedPathsFinder<'a, 'gcx> {\n\n fn nested_visit_map<'this>(&'this mut self) -> intravisit::NestedVisitorMap<'this, 'gcx> {\n\n intravisit::NestedVisitorMap::All(&self.tcx.hir)\n\n }\n", "file_path": "src/change_to_rule.rs", "rank": 7, "score": 112942.20328687233 }, { "content": "fn analyse_original_source<'a, 'gcx: 'a>(\n\n tcx: TyCtxt<'a, 'gcx, 'gcx>,\n\n changed_side_state: &ChangedSideState,\n\n expr: &'gcx hir::Expr,\n\n changed_span: &ChangedSpan,\n\n modified_source: String,\n\n body_id: hir::BodyId,\n\n item: &'gcx hir::Item,\n\n) -> String {\n\n let codemap = tcx.sess.codemap();\n\n let mut others_by_hash = HashMap::new();\n\n populate_placeholder_map(\n\n &changed_side_state.candidate_placeholders,\n\n &mut others_by_hash,\n\n );\n\n let mut candidates =\n\n PlaceholderCandidateFinder::find_placeholder_candidates(tcx, expr, |child_expr| child_expr);\n\n let other_filemap = codemap.new_filemap(\n\n syntax_pos::FileName::Custom(\"__other_source\".to_owned()),\n\n modified_source,\n", "file_path": "src/change_to_rule.rs", "rank": 8, "score": 111263.42056983672 }, { "content": "// Visits the replacement AST looking for variables that need to be replaced with their bound values\n\n// from the matched source then recording the spans for said replacement.\n\nstruct ReplacementVisitor<'r, 'a: 'r, 'gcx: 'a, T: StartMatch + 'gcx> {\n\n tcx: TyCtxt<'a, 'gcx, 'gcx>,\n\n result: Vec<CodeSubstitution>,\n\n current_match: &'r Match<'r, 'gcx, T>,\n\n parent_expr: Option<&'gcx hir::Expr>,\n\n // Map from NodeIds of variables declared in the replacement pattern to NodeIds declared in the\n\n // code that should replace them.\n\n substitute_node_ids: HashMap<NodeId, NodeId>,\n\n}\n\n\n\nimpl<'r, 'a, 'gcx, T: StartMatch> ReplacementVisitor<'r, 'a, 'gcx, T> {\n\n // Returns a snippet of code for the supplied definition.\n\n fn node_id_snippet(&self, node_id: NodeId) -> String {\n\n let codemap = self.tcx.sess.codemap();\n\n codemap.span_to_snippet(self.tcx.hir.span(node_id)).unwrap()\n\n }\n\n\n\n // Check if the supplied expression is a placeholder variable. 
If it is, replace the supplied\n\n // span with whatever was bound to the placeholder and return true.\n\n fn process_expr(&mut self, expr: &'gcx hir::Expr, placeholder_span: Span) -> bool {\n", "file_path": "src/rule_matcher.rs", "rank": 9, "score": 110616.15416410656 }, { "content": "struct PlaceholderMatcher<'a, 'gcx: 'a, 'placeholders> {\n\n tcx: TyCtxt<'a, 'gcx, 'gcx>,\n\n other_filemap: Rc<FileMap>,\n\n other_candidates: HashMap<u64, Vec<&'placeholders PlaceholderCandidate<RelativeSpan>>>,\n\n placeholders_found: Vec<Placeholder<'gcx>>,\n\n used_placeholder_spans: Vec<Span>,\n\n}\n\n\n\nimpl<'a, 'gcx: 'a, 'placeholders> PlaceholderMatcher<'a, 'gcx, 'placeholders> {\n\n fn find_placeholders(&mut self, candidates: &mut [PlaceholderCandidate<&'gcx hir::Expr>]) {\n\n // Sort candidates with longest first so that they take precedence.\n\n candidates.sort_by_key(|p| p.data.span.lo() - p.data.span.hi());\n\n for candidate in candidates {\n\n let mut got_match = false;\n\n if let Some(matching_others) = self.other_candidates.get(&candidate.hash) {\n\n let codemap = self.tcx.sess.codemap();\n\n let source = codemap.span_to_snippet(candidate.data.span).unwrap();\n\n let mut placeholder = Placeholder {\n\n expr: candidate.data,\n\n uses: Vec::new(),\n", "file_path": "src/change_to_rule.rs", "rank": 10, "score": 110575.9580600526 }, { "content": "// Returns whether following the expansions of `rule_span` and `code_span` results in the same\n\n// sequence of expansions.\n\nfn all_expansions_equal(rule_span: Span, code_span: Span) -> bool {\n\n get_original_spans(rule_span, code_span).is_some()\n\n}\n\n\n", "file_path": "src/rule_matcher.rs", "rank": 11, "score": 103771.3090692537 }, { "content": "struct PlaceholderCandidateFinder<'a, 'gcx: 'a, T, F>\n\nwhere\n\n F: Fn(&'gcx hir::Expr) -> T,\n\n{\n\n tcx: TyCtxt<'a, 'gcx, 'gcx>,\n\n stack: Vec<PlaceholderCandidate<T>>,\n\n data_fn: F,\n\n}\n\n\n\nimpl<'a, 'gcx: 'a, T, F> PlaceholderCandidateFinder<'a, 'gcx, T, F>\n\nwhere\n\n F: Fn(&'gcx hir::Expr) -> T,\n\n{\n\n fn find_placeholder_candidates(\n\n tcx: TyCtxt<'a, 'gcx, 'gcx>,\n\n node: &'gcx hir::Expr,\n\n data_fn: F,\n\n ) -> Vec<PlaceholderCandidate<T>> {\n\n let mut state = PlaceholderCandidateFinder {\n\n tcx,\n", "file_path": "src/change_to_rule.rs", "rank": 12, "score": 102295.94482413976 }, { "content": "fn after_analysis<'a, 'gcx>(\n\n state: &mut driver::CompileState<'a, 'gcx>,\n\n output: &Rc<RefCell<Result<RerastOutput, RerastErrors>>>,\n\n config: &Config,\n\n) {\n\n state.session.abort_if_errors();\n\n *output.borrow_mut() = find_and_apply_rules(state, config.clone());\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 13, "score": 98720.99650987887 }, { "content": "struct Replacer<'a, 'gcx: 'a> {\n\n tcx: TyCtxt<'a, 'gcx, 'gcx>,\n\n rerast_definitions: RerastDefinitions<'gcx>,\n\n rules: Rules<'gcx>,\n\n config: Config,\n\n}\n\n\n\nimpl<'a, 'gcx> Replacer<'a, 'gcx> {\n\n fn new(\n\n tcx: TyCtxt<'a, 'gcx, 'gcx>,\n\n rerast_definitions: RerastDefinitions<'gcx>,\n\n rules: Rules<'gcx>,\n\n config: Config,\n\n ) -> Replacer<'a, 'gcx> {\n\n Replacer {\n\n tcx,\n\n rerast_definitions,\n\n rules,\n\n config,\n\n }\n", "file_path": "src/lib.rs", "rank": 14, "score": 97827.6915772519 }, { "content": "struct ReplacementValidator<'a, 'gcx: 'a> {\n\n state: ValidatorState<'a, 'gcx>,\n\n}\n\n\n\nimpl<'a, 'gcx: 'a> intravisit::Visitor<'gcx> for ReplacementValidator<'a, 'gcx> {\n\n fn nested_visit_map<'this>(&'this mut self) -> intravisit::NestedVisitorMap<'this, 'gcx> {\n\n 
intravisit::NestedVisitorMap::All(&self.state.tcx.hir)\n\n }\n\n\n\n fn visit_qpath(&mut self, qpath: &'gcx hir::QPath, id: NodeId, span: Span) {\n\n if let Some(node_id) = node_id_from_path(qpath) {\n\n if self.state.placeholders.contains(&node_id)\n\n && !self.state.bound_placeholders.contains(&node_id)\n\n {\n\n self.state.add_error(\n\n \"Placeholder used in replacement pattern, but never bound.\",\n\n span,\n\n );\n\n }\n\n }\n\n intravisit::walk_qpath(self, qpath, id, span);\n\n }\n\n}\n", "file_path": "src/validation.rs", "rank": 15, "score": 94863.16463145842 }, { "content": "struct SearchValidator<'a, 'gcx: 'a> {\n\n state: ValidatorState<'a, 'gcx>,\n\n}\n\n\n\nimpl<'a, 'gcx: 'a> intravisit::Visitor<'gcx> for SearchValidator<'a, 'gcx> {\n\n fn nested_visit_map<'this>(&'this mut self) -> intravisit::NestedVisitorMap<'this, 'gcx> {\n\n intravisit::NestedVisitorMap::All(&self.state.tcx.hir)\n\n }\n\n\n\n fn visit_qpath(&mut self, qpath: &'gcx hir::QPath, id: NodeId, span: Span) {\n\n if let Some(node_id) = node_id_from_path(qpath) {\n\n if self.state.placeholders.contains(&node_id)\n\n && !self.state.bound_placeholders.insert(node_id)\n\n {\n\n self.state.add_error(\n\n \"Placeholder is bound multiple times. This is not currently permitted.\",\n\n span,\n\n );\n\n }\n\n }\n\n intravisit::walk_qpath(self, qpath, id, span);\n\n }\n\n}\n\n\n", "file_path": "src/validation.rs", "rank": 16, "score": 94863.16463145842 }, { "content": "struct ValidatorState<'a, 'gcx: 'a> {\n\n tcx: TyCtxt<'a, 'gcx, 'gcx>,\n\n errors: Vec<ErrorWithSpan>,\n\n // Definitions that are defined as placeholders.\n\n placeholders: HashSet<NodeId>,\n\n // Placeholders that have been bound.\n\n bound_placeholders: HashSet<NodeId>,\n\n}\n\n\n\nimpl<'a, 'gcx: 'a> ValidatorState<'a, 'gcx> {\n\n fn add_error<T: Into<String>>(&mut self, message: T, span: Span) {\n\n self.errors.push(ErrorWithSpan::new(message, span));\n\n }\n\n}\n\n\n\nimpl<'gcx, T: StartMatch + 'gcx> Rule<'gcx, T> {\n\n pub(crate) fn validate<'a>(\n\n &self,\n\n tcx: TyCtxt<'a, 'gcx, 'gcx>,\n\n ) -> Result<(), Vec<ErrorWithSpan>> {\n", "file_path": "src/validation.rs", "rank": 17, "score": 94863.16463145842 }, { "content": "fn is_same_expansion(a: &codemap::NameAndSpan, b: &codemap::NameAndSpan) -> bool {\n\n use codemap::ExpnFormat::*;\n\n a.format == b.format && match a.format {\n\n MacroBang(_) => a.span == b.span,\n\n // Not sure what we want to do here\n\n MacroAttribute(_) => unimplemented!(),\n\n // For desugaring, we ignore the span since it seems to just duplicate the span of the\n\n // caller which definitely won't be the same for two separate occurences.\n\n CompilerDesugaring(_) => true,\n\n }\n\n}\n\n\n", "file_path": "src/rule_matcher.rs", "rank": 18, "score": 93948.6764322244 }, { "content": "// Searches for variables declared within the search code. 
For example in the pattern Some(a), \"a\"\n\n// will be found.\n\nstruct DeclaredNamesFinder<'a, 'gcx: 'a> {\n\n tcx: TyCtxt<'a, 'gcx, 'gcx>,\n\n names: HashMap<Symbol, NodeId>,\n\n}\n\n\n\nimpl<'a, 'gcx> DeclaredNamesFinder<'a, 'gcx> {\n\n fn find<T: StartMatch>(tcx: TyCtxt<'a, 'gcx, 'gcx>, node: &'gcx T) -> HashMap<Symbol, NodeId> {\n\n let mut finder = DeclaredNamesFinder {\n\n tcx,\n\n names: HashMap::new(),\n\n };\n\n T::walk(&mut finder, node);\n\n finder.names\n\n }\n\n}\n\n\n\nimpl<'a, 'gcx, 'tcx> intravisit::Visitor<'gcx> for DeclaredNamesFinder<'a, 'gcx> {\n\n fn nested_visit_map<'this>(&'this mut self) -> intravisit::NestedVisitorMap<'this, 'gcx> {\n\n intravisit::NestedVisitorMap::All(&self.tcx.hir)\n\n }\n", "file_path": "src/lib.rs", "rank": 19, "score": 92176.80355025717 }, { "content": "struct FindRulesState {\n\n modified_file_name: String,\n\n modified_source: String,\n\n changed_span: ChangedSpan,\n\n result: String,\n\n changed_side_state: Option<ChangedSideState>,\n\n}\n\n\n", "file_path": "src/change_to_rule.rs", "rank": 20, "score": 83554.35752172409 }, { "content": "pub fn determine_rule(\n\n command_lines: &[Vec<String>],\n\n modified_file_name: &str,\n\n original_file_contents: &str,\n\n) -> Result<String, RerastErrors> {\n\n determine_rule_with_file_loader(\n\n &ClonableRealFileLoader,\n\n command_lines,\n\n modified_file_name,\n\n original_file_contents,\n\n )\n\n}\n\n\n", "file_path": "src/change_to_rule.rs", "rank": 21, "score": 79820.85756032835 }, { "content": "#[derive(Eq, PartialEq, Debug, Copy, Clone)]\n\nstruct ChangedSpan {\n\n common_prefix: usize,\n\n common_suffix: usize,\n\n}\n\n\n\nimpl ChangedSpan {\n\n fn new(common_prefix: usize, common_suffix: usize) -> ChangedSpan {\n\n ChangedSpan {\n\n common_prefix,\n\n common_suffix,\n\n }\n\n }\n\n\n\n fn from_span(span: Span, filemap: &FileMap) -> ChangedSpan {\n\n ChangedSpan {\n\n common_prefix: (span.lo() - filemap.start_pos).to_usize(),\n\n common_suffix: (filemap.end_pos - span.hi()).to_usize(),\n\n }\n\n }\n\n\n\n fn to_span(&self, filemap: &FileMap) -> Span {\n\n Span::new(\n\n filemap.start_pos + BytePos::from_usize(self.common_prefix),\n\n filemap.end_pos - BytePos::from_usize(self.common_suffix),\n\n SyntaxContext::empty(),\n\n )\n\n }\n\n}\n\n\n", "file_path": "src/change_to_rule.rs", "rank": 22, "score": 77568.08576691004 }, { "content": "enum Node<'gcx> {\n\n NotFound,\n\n Expr(&'gcx hir::Expr, hir::BodyId, &'gcx hir::Item),\n\n}\n\n\n", "file_path": "src/change_to_rule.rs", "rank": 23, "score": 75918.8821242601 }, { "content": "fn substitute_placeholders(\n\n codemap: &CodeMap,\n\n span: Span,\n\n substitutions: &mut [(Span, String)],\n\n) -> String {\n\n substitutions.sort_by_key(|v| v.0.lo());\n\n let mut result = String::new();\n\n let mut start = span.lo();\n\n for &(subst_span, ref substitution) in substitutions.iter() {\n\n result += &codemap\n\n .span_to_snippet(Span::new(start, subst_span.lo(), syntax_pos::NO_EXPANSION))\n\n .unwrap();\n\n result += substitution;\n\n start = subst_span.hi();\n\n }\n\n result += &codemap\n\n .span_to_snippet(Span::new(start, span.hi(), syntax_pos::NO_EXPANSION))\n\n .unwrap();\n\n result\n\n}\n\n\n", "file_path": "src/change_to_rule.rs", "rank": 24, "score": 75495.52890985653 }, { "content": "struct ChangedSideState {\n\n candidate_placeholders: Vec<PlaceholderCandidate<RelativeSpan>>,\n\n required_paths: HashSet<String>,\n\n}\n\n\n", "file_path": "src/change_to_rule.rs", "rank": 25, "score": 75029.32074540065 }, { "content": "struct RCompilerCalls 
{\n\n find_rules_state: Rc<RefCell<FindRulesState>>,\n\n}\n\n\n\nimpl<'a> rustc_driver::CompilerCalls<'a> for RCompilerCalls {\n\n fn build_controller(\n\n &mut self,\n\n sess: &rustc::session::Session,\n\n matches: &getopts::Matches,\n\n ) -> rustc_driver::driver::CompileController<'a> {\n\n let mut defaults = rustc_driver::RustcDefaultCalls;\n\n let mut control = defaults.build_controller(sess, matches);\n\n let find_rules_state = Rc::clone(&self.find_rules_state);\n\n control.after_analysis.callback =\n\n Box::new(move |state| after_analysis(state, &mut *find_rules_state.borrow_mut()));\n\n control.after_analysis.stop = rustc_driver::Compilation::Stop;\n\n control\n\n }\n\n}\n\n\n", "file_path": "src/change_to_rule.rs", "rank": 26, "score": 75029.32074540065 }, { "content": "#[test]\n\nfn test_compilation_error() {\n\n cargo_rerast(\"tests/crates/compilation_error\")\n\n .with_args(&[\"-s\", \"file!()\", \"-r\", \"\\\"foo\\\"\"])\n\n .with_args(&[\"--diff\", \"--color=never\"])\n\n .stderr()\n\n .contains(\"this is not an i32\")\n\n .fails()\n\n .unwrap();\n\n}\n", "file_path": "tests/cargo_rerast_tests.rs", "rank": 27, "score": 73694.88740162933 }, { "content": "#[derive(Debug, PartialEq, Eq, Copy, Clone)]\n\nenum PlaceholderContents<'gcx> {\n\n Expr(&'gcx hir::Expr),\n\n Statements(&'gcx [hir::Stmt]),\n\n Pattern(&'gcx hir::Pat),\n\n}\n\n\n\nimpl<'gcx> PlaceholderContents<'gcx> {\n\n fn get_span(&self, target: Span) -> Span {\n\n use self::PlaceholderContents::*;\n\n match *self {\n\n Expr(expr) => span_within_span(expr.span, target),\n\n Statements(stmts) => if let Some(stmt) = stmts.get(0) {\n\n let result = span_within_span(stmt.span, target);\n\n let last_span = span_within_span(stmts[stmts.len() - 1].span, target);\n\n result.with_hi(last_span.hi())\n\n } else {\n\n syntax::ext::quote::rt::DUMMY_SP\n\n },\n\n Pattern(pattern) => pattern.span,\n\n }\n", "file_path": "src/rule_matcher.rs", "rank": 28, "score": 73379.3075802446 }, { "content": "struct PlaceholderCandidate<T> {\n\n hash: u64,\n\n children: Vec<PlaceholderCandidate<T>>,\n\n data: T,\n\n}\n\n\n\nimpl<T> PlaceholderCandidate<T> {\n\n fn new(data: T) -> PlaceholderCandidate<T> {\n\n PlaceholderCandidate {\n\n hash: 0,\n\n data,\n\n children: Vec::new(),\n\n }\n\n }\n\n}\n\n\n\nimpl<T> Hash for PlaceholderCandidate<T> {\n\n fn hash<H: Hasher>(&self, hasher: &mut H) {\n\n hasher.write_u64(self.hash);\n\n }\n\n}\n\n\n", "file_path": "src/change_to_rule.rs", "rank": 29, "score": 73213.26826635683 }, { "content": "// Allow rules files to contain extern crate rerast_macros and a corresponding\n\n// #[macro_use]. 
Replace these lines if present with empty lines so that the\n\n// rule compiles once it's in the context of a submodule.\n\nfn remove_extern_crate_rerast_from_rules(rules: &str) -> String {\n\n let mut result = String::new();\n\n let mut opt_pending_line = None;\n\n for line in rules.lines() {\n\n if line.trim() == \"#[macro_use] extern crate rerast_macros;\" {\n\n result.push('\\n');\n\n } else if line.trim() == \"extern crate rerast_macros;\" {\n\n result.push('\\n');\n\n if opt_pending_line.is_some() {\n\n result.push('\\n');\n\n }\n\n opt_pending_line = None;\n\n } else {\n\n if let Some(pending_line) = opt_pending_line.take() {\n\n result.push_str(pending_line);\n\n result.push('\\n');\n\n }\n\n if line.trim() == \"#[macro_use]\" {\n\n opt_pending_line = Some(line);\n\n } else {\n", "file_path": "src/lib.rs", "rank": 30, "score": 69184.68977759284 }, { "content": "fn populate_placeholder_map<'a, T>(\n\n candidates: &'a [PlaceholderCandidate<T>],\n\n map_out: &mut HashMap<u64, Vec<&'a PlaceholderCandidate<T>>>,\n\n) {\n\n for candidate in candidates {\n\n map_out\n\n .entry(candidate.hash)\n\n .or_insert_with(Vec::new)\n\n .push(candidate);\n\n populate_placeholder_map(&candidate.children, map_out);\n\n }\n\n}\n\n\n", "file_path": "src/change_to_rule.rs", "rank": 31, "score": 67562.82228574678 }, { "content": "struct RelativeSpan(Range<BytePos>);\n\n\n\nimpl RelativeSpan {\n\n fn new(absolute_span: Span, filemap: &FileMap) -> RelativeSpan {\n\n let absolute_span = span_within_span(\n\n absolute_span,\n\n Span::new(filemap.start_pos, filemap.end_pos, syntax_pos::NO_EXPANSION),\n\n );\n\n let start_pos = filemap.start_pos;\n\n assert!(absolute_span.lo() >= start_pos);\n\n assert!(absolute_span.hi() <= filemap.end_pos);\n\n RelativeSpan((absolute_span.lo() - start_pos)..(absolute_span.hi() - start_pos))\n\n }\n\n\n\n fn absolute(&self, filemap: &FileMap) -> Span {\n\n let start_pos = filemap.start_pos;\n\n let result = Span::new(\n\n self.0.start + start_pos,\n\n self.0.end + start_pos,\n\n syntax_pos::NO_EXPANSION,\n\n );\n\n assert!(result.lo() >= filemap.start_pos);\n\n assert!(result.hi() <= filemap.end_pos);\n\n result\n\n }\n\n}\n\n\n\n// The span of a file that has changed. 
Start and end are relative to the start and end of the file,\n\n// which makes the files the same in both the changed and the original version of the file.\n", "file_path": "src/change_to_rule.rs", "rank": 32, "score": 67234.0456312309 }, { "content": "pub fn foo() -> i32 {\n\n \"this is not an i32\"\n\n}\n\n\n", "file_path": "tests/crates/compilation_error/src/lib.rs", "rank": 33, "score": 66307.92821703004 }, { "content": "fn determine_rule_with_file_loader<T: FileLoader + Clone + Send + Sync + 'static>(\n\n file_loader: &T,\n\n command_lines: &[Vec<String>],\n\n modified_file_name: &str,\n\n original_file_contents: &str,\n\n) -> Result<String, RerastErrors> {\n\n let right = file_loader.read_file(Path::new(modified_file_name))?;\n\n let changed_span = match common(original_file_contents, &right) {\n\n Some(c) => c,\n\n None => {\n\n return Err(RerastErrors::with_message(\n\n \"Nothing appears to have changed\",\n\n ))\n\n }\n\n };\n\n let mut compiler_calls = RCompilerCalls {\n\n find_rules_state: Rc::new(RefCell::new(FindRulesState {\n\n modified_file_name: modified_file_name.to_owned(),\n\n modified_source: right.clone(),\n\n changed_span,\n", "file_path": "src/change_to_rule.rs", "rank": 34, "score": 60233.769114962815 }, { "content": "fn default_hash<T: Hash>(value: &T) -> u64 {\n\n let mut hasher = DefaultHasher::new();\n\n value.hash(&mut hasher);\n\n hasher.finish()\n\n}\n\n\n", "file_path": "src/change_to_rule.rs", "rank": 35, "score": 58469.81900906425 }, { "content": "// Searches the callsites of the first span until it finds one that is contained within the second\n\n// span.\n\nfn span_within_span(span: Span, target: Span) -> Span {\n\n if target.contains(span) {\n\n span\n\n } else if let Some(expn_info) = span.ctxt().outer().expn_info() {\n\n span_within_span(expn_info.call_site, target)\n\n } else {\n\n // TODO: Better error handling here.\n\n panic!(\"We never found a span within the target span\");\n\n }\n\n}\n\n\n", "file_path": "src/rule_matcher.rs", "rank": 36, "score": 56824.96023394172 }, { "content": "fn span_within_span(span: Span, target: Span) -> Span {\n\n if target.contains(span) {\n\n span\n\n } else if let Some(expn_info) = span.ctxt().outer().expn_info() {\n\n span_within_span(expn_info.call_site, target)\n\n } else {\n\n // TODO: Better error handling here.\n\n panic!(\"We never found a span within the target span\");\n\n }\n\n}\n\n\n", "file_path": "src/change_to_rule.rs", "rank": 37, "score": 56824.96023394172 }, { "content": "fn common_prefix(left: &str, right: &str) -> Option<usize> {\n\n for (i, (l, r)) in left.bytes().zip(right.bytes()).enumerate() {\n\n if l != r {\n\n return Some(i);\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/change_to_rule.rs", "rank": 38, "score": 55977.32141486289 }, { "content": "fn common(left: &str, right: &str) -> Option<ChangedSpan> {\n\n match (common_prefix(left, right), common_suffix(left, right)) {\n\n (Some(prefix), Some(suffix)) => Some(ChangedSpan::new(prefix, suffix)),\n\n _ => None,\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use tests::NullFileLoader;\n\n\n\n fn check_determine_rule_with_file_loader(\n\n file_loader: &InMemoryFileLoader<NullFileLoader>,\n\n changed_filename: &str,\n\n original_file_contents: &str,\n\n expected_rule: &str,\n\n ) {\n\n let expected_rule = expected_rule\n\n .lines()\n", "file_path": "src/change_to_rule.rs", "rank": 39, "score": 55977.32141486289 }, { "content": "fn common_suffix(left: &str, right: &str) -> Option<usize> {\n\n for (i, (l, r)) in 
left.bytes().rev().zip(right.bytes().rev()).enumerate() {\n\n if l != r {\n\n return Some(i);\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/change_to_rule.rs", "rank": 40, "score": 55977.32141486289 }, { "content": "fn hash_token_stream(stream: &TokenStream, hasher: &mut DefaultHasher) {\n\n for tt in stream.trees() {\n\n match tt {\n\n TokenTree::Token(_span, _token) => {\n\n // If hash collisions become enough of a problem that we get bad performance, we'll\n\n // probably need to look into the structure of the token and hash that. In the mean\n\n // time, lets just hash an arbitrary constant value. At least expressions with\n\n // different tree structures will likely get different hashes.\n\n 42.hash(hasher)\n\n }\n\n TokenTree::Delimited(_span, delimited) => {\n\n hash_token_stream(&delimited.stream(), hasher)\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/change_to_rule.rs", "rank": 41, "score": 53883.79352943879 }, { "content": "struct RerastCompilerCalls {\n\n // This needs to be an Rc because rust CompilerCalls::build_controller doesn't (at the time of\n\n // writing) provide any relationship between the lifetime of self and the the lifetime of the\n\n // returned CompileController.\n\n output: Rc<RefCell<Result<RerastOutput, RerastErrors>>>,\n\n config: Config,\n\n}\n\n\n\nimpl<'a> CompilerCalls<'a> for RerastCompilerCalls {\n\n fn build_controller(\n\n &mut self,\n\n sess: &Session,\n\n matches: &getopts::Matches,\n\n ) -> driver::CompileController<'a> {\n\n let mut defaults = RustcDefaultCalls;\n\n let mut control = defaults.build_controller(sess, matches);\n\n let output = Rc::clone(&self.output);\n\n let config = self.config.clone();\n\n control.after_analysis.callback =\n\n Box::new(move |state| after_analysis(state, &output, &config));\n\n control.after_analysis.stop = Compilation::Stop;\n\n control\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 42, "score": 52075.796549720166 }, { "content": "struct Chunk<'a> {\n\n lines: Vec<diff::Result<&'a str>>,\n\n left_range: Range<usize>,\n\n right_range: Range<usize>,\n\n}\n\n\n\nimpl<'a> Chunk<'a> {\n\n fn new() -> Chunk<'a> {\n\n Chunk {\n\n lines: Vec::new(),\n\n left_range: 0..0,\n\n right_range: 0..0,\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> fmt::Display for Chunk<'a> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n writeln!(\n\n f,\n", "file_path": "src/chunked_diff.rs", "rank": 43, "score": 51741.56087032067 }, { "content": "fn run_compiler(\n\n file_loader: Option<Box<FileLoader + Send + Sync + 'static>>,\n\n args: &[String],\n\n config: Config,\n\n) -> Result<RerastOutput, RerastErrors> {\n\n let mut compiler_calls = RerastCompilerCalls {\n\n output: Rc::new(RefCell::new(Ok(RerastOutput::new()))),\n\n config,\n\n };\n\n let (_, _) = rustc_driver::run_compiler(args, &mut compiler_calls, file_loader, None);\n\n Rc::try_unwrap(compiler_calls.output)\n\n .map_err(|_| {\n\n RerastErrors::with_message(\n\n \"Internal error: rustc_driver unexpectedly kept a reference to our data\",\n\n )\n\n })?\n\n .into_inner()\n\n}\n\n\n\npub struct RerastCompilerDriver {\n", "file_path": "src/lib.rs", "rank": 44, "score": 51475.95439705877 }, { "content": "// Recursively searches the expansion of search_span and code_span in parallel. If at any point the\n\n// expansions performed by the two spans are different, then that means the search pattern and the\n\n// code invoked different macros, so returns None. 
If both reach the top (no expansions remaining)\n\n// together, then returns their spans.\n\nfn get_original_spans(search_span: Span, code_span: Span) -> Option<(Span, Span)> {\n\n match (\n\n search_span.ctxt().outer().expn_info(),\n\n code_span.ctxt().outer().expn_info(),\n\n ) {\n\n (Some(search_expn), Some(code_expn)) => {\n\n if is_same_expansion(&search_expn.callee, &code_expn.callee) {\n\n get_original_spans(search_expn.call_site, code_expn.call_site)\n\n } else {\n\n None\n\n }\n\n }\n\n (None, None) => Some((search_span, code_span)),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "src/rule_matcher.rs", "rank": 45, "score": 49725.05716363006 }, { "content": "#[test]\n\nfn test_help() {\n\n cargo_rerast(\".\")\n\n .with_args(&[\"--help\"])\n\n .stdout()\n\n .contains(\"cargo rerast\")\n\n .execute()\n\n .unwrap();\n\n}\n\n\n", "file_path": "tests/cargo_rerast_tests.rs", "rank": 46, "score": 48668.71194576744 }, { "content": "// Currently we require use of rustup\n\nfn rustup_sysroot() -> String {\n\n env!(\"RUSTUP_HOME\").to_owned() + \"/toolchains/\" + env!(\"RUSTUP_TOOLCHAIN\")\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 47, "score": 48337.80941322887 }, { "content": "#[test]\n\nfn test_simple_diff() {\n\n cargo_rerast(\"tests/crates/simple\")\n\n .with_args(&[\"-p\", \"p0: i32, p1: i32\"])\n\n .with_args(&[\"-s\", \"p0 > p1\"])\n\n .with_args(&[\"-r\", \"p1 < p0\"])\n\n .with_args(&[\"--diff\", \"--color=never\"])\n\n .stdout()\n\n .is(r#\"\n\n--- src/lib.rs\n\n+++ src/lib.rs\n\n@@ -4,7 +4,7 @@\n\n \n\n /// A well documented function.\n\n pub fn foo(a: i32, b: i32) -> i32 {\n\n- if a > b {\n\n+ if b < a {\n\n 42\n\n } else {\n\n b\n\n\n", "file_path": "tests/cargo_rerast_tests.rs", "rank": 48, "score": 47453.79332473141 }, { "content": "#[test]\n\nfn test_invalid_cargo_toml() {\n\n cargo_rerast(\"tests/crates/invalid_cargo_toml\")\n\n .with_args(&[\"-s\", \"file!()\", \"-r\", \"\\\"foo\\\"\"])\n\n .with_args(&[\"--diff\", \"--color=never\"])\n\n .stderr()\n\n .contains(\"cargo metadata failed\")\n\n .stderr()\n\n .contains(\"could not parse input as TOML\")\n\n .fails()\n\n .unwrap();\n\n}\n\n\n", "file_path": "tests/cargo_rerast_tests.rs", "rank": 49, "score": 46343.093298669075 }, { "content": "fn cargo_rerast(crate_root: &str) -> assert_cli::Assert {\n\n // We can't use Assert.current_dir, because then Assert::cargo_binary doesn't work, instead we\n\n // pass the crate root as an argument and get our binary to change directories once it's\n\n // running.\n\n assert_cli::Assert::cargo_binary(\"cargo-rerast\").with_args(&[\n\n \"rerast\",\n\n \"--crate_root\",\n\n crate_root,\n\n ])\n\n}\n\n\n", "file_path": "tests/cargo_rerast_tests.rs", "rank": 50, "score": 37295.34642280718 }, { "content": "/// A well documented function.\n\npub fn foo(a: i32, b: i32) -> i32 {\n\n if a > b {\n\n 42\n\n } else {\n\n b\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n fn bar(a: i32, b: i32) -> i32 {\n\n if a > b {\n\n 42\n\n } else {\n\n b\n\n }\n\n }\n\n\n\n #[test]\n\n fn x() {\n\n assert_eq!(super::foo(1, 2), bar(1, 2));\n\n }\n\n}\n", "file_path": "tests/crates/simple/src/lib.rs", "rank": 51, "score": 37164.94443924856 }, { "content": "pub fn print_diff(filename: &str, left: &str, right: &str) {\n\n println!(\"{}\", format!(\"--- {}\", filename).red());\n\n println!(\"{}\", format!(\"+++ {}\", filename).green());\n\n for chunk in chunked_diff(left, right, 3) {\n\n println!(\"{}\", chunk);\n\n }\n\n}\n\n\n", "file_path": "src/chunked_diff.rs", "rank": 52, "score": 34735.535816400254 }, { 
"content": "\n\n#[derive(Debug, PartialEq, Eq)]\n\npub(crate) struct ErrorWithSpan {\n\n message: String,\n\n span: Span,\n\n}\n\n\n\nimpl ErrorWithSpan {\n\n pub(crate) fn new<T: Into<String>>(message: T, span: Span) -> ErrorWithSpan {\n\n ErrorWithSpan {\n\n message: message.into(),\n\n span,\n\n }\n\n }\n\n\n\n pub(crate) fn with_snippet<'a, 'gcx>(self, tcx: TyCtxt<'a, 'gcx, 'gcx>) -> RerastError {\n\n RerastError {\n\n message: self.message,\n\n file_lines: Some(tcx.sess.codemap().span_to_lines(self.span)),\n\n }\n", "file_path": "src/errors.rs", "rank": 53, "score": 33829.66208665484 }, { "content": "// Copyright 2017 Google Inc.\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// https://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse std::io;\n\nuse syntax::ext::quote::rt::Span;\n\nuse rustc::ty::TyCtxt;\n\nuse std::fmt;\n\nuse syntax_pos::{FileLinesResult, SpanLinesError};\n\nuse std;\n", "file_path": "src/errors.rs", "rank": 54, "score": 33824.60872243663 }, { "content": "impl RerastErrors {\n\n pub(crate) fn new(errors: Vec<RerastError>) -> RerastErrors {\n\n RerastErrors(errors)\n\n }\n\n pub(crate) fn with_message<T: Into<String>>(message: T) -> RerastErrors {\n\n RerastErrors(vec![\n\n RerastError {\n\n message: message.into(),\n\n file_lines: None,\n\n },\n\n ])\n\n }\n\n\n\n pub fn iter(&self) -> impl Iterator<Item = &RerastError> {\n\n self.0.iter()\n\n }\n\n}\n\n\n\nimpl std::ops::Index<usize> for RerastErrors {\n\n type Output = RerastError;\n", "file_path": "src/errors.rs", "rank": 55, "score": 33823.57697568052 }, { "content": "\n\n fn index(&self, index: usize) -> &RerastError {\n\n &self.0[index]\n\n }\n\n}\n\n\n\nimpl fmt::Debug for RerastErrors {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n fmt::Display::fmt(self, f)\n\n }\n\n}\n\n\n\nimpl fmt::Display for RerastErrors {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n for error in &self.0 {\n\n write!(f, \"{}\\n\", error)?;\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl From<io::Error> for RerastErrors {\n\n fn from(err: io::Error) -> RerastErrors {\n\n RerastErrors::with_message(err.to_string())\n\n }\n\n}\n", "file_path": "src/errors.rs", "rank": 56, "score": 33823.4943028766 }, { "content": " }\n\n}\n\n\n\nimpl From<ErrorWithSpan> for Vec<ErrorWithSpan> {\n\n fn from(error: ErrorWithSpan) -> Vec<ErrorWithSpan> {\n\n vec![error]\n\n }\n\n}\n\n\n\npub struct RerastError {\n\n pub(crate) message: String,\n\n pub(crate) file_lines: Option<FileLinesResult>,\n\n}\n\n\n\nimpl fmt::Display for RerastError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n writeln!(f, \"error: {}\", self.message)?;\n\n match self.file_lines {\n\n Some(Ok(ref file_lines)) => {\n\n if let Some(first_line) = file_lines.lines.get(0) {\n", "file_path": "src/errors.rs", "rank": 57, "score": 33823.37288719948 }, { "content": " )?;\n\n }\n\n }\n\n }\n\n Some(Err(ref span_lines_error)) => match *span_lines_error {\n\n SpanLinesError::IllFormedSpan(span) => {\n\n writeln!(f, \"Unable to report location. 
Ill-formed span: {:?}\", span)?;\n\n }\n\n SpanLinesError::DistinctSources(_) => {\n\n writeln!(f, \"Unable to report location. Spans distinct sources\")?;\n\n }\n\n },\n\n None => {}\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\npub struct RerastErrors(Vec<RerastError>);\n\n\n", "file_path": "src/errors.rs", "rank": 58, "score": 33823.216646479865 }, { "content": " writeln!(\n\n f,\n\n \" --> {}:{}:{}\",\n\n file_lines.file.name, first_line.line_index, first_line.start_col.0\n\n )?;\n\n }\n\n for line_info in &file_lines.lines {\n\n if let Some(line) = file_lines.file.get_line(line_info.line_index) {\n\n writeln!(f, \"{}\", line)?;\n\n writeln!(\n\n f,\n\n \"{}{}\",\n\n \" \".repeat(line_info.start_col.0),\n\n \"^\".repeat(line_info.end_col.0 - line_info.start_col.0)\n\n )?;\n\n } else {\n\n writeln!(\n\n f,\n\n \"Error occurred on non-existent line {}\",\n\n line_info.line_index\n", "file_path": "src/errors.rs", "rank": 59, "score": 33820.929272758665 }, { "content": "fn chunked_diff<'a>(left: &'a str, right: &'a str, context: usize) -> Vec<Chunk<'a>> {\n\n let mut chunks = Vec::new();\n\n let mut recent_common = VecDeque::new();\n\n let mut after_context_remaining = 0;\n\n let mut chunk = Chunk::new();\n\n let mut left_line_num = 1;\n\n let mut right_line_num = 1;\n\n for diff in diff::lines(left, right) {\n\n let line_delta = match diff {\n\n diff::Result::Left(_) => (1, 0),\n\n diff::Result::Right(_) => (0, 1),\n\n diff::Result::Both(_, _) => (1, 1),\n\n };\n\n left_line_num += line_delta.0;\n\n right_line_num += line_delta.1;\n\n match diff {\n\n diff::Result::Left(_) | diff::Result::Right(_) => {\n\n if chunk.lines.is_empty() {\n\n chunk.left_range =\n\n left_line_num - recent_common.len() - line_delta.0..left_line_num;\n", "file_path": "src/chunked_diff.rs", "rank": 60, "score": 31149.187328075506 }, { "content": "\n\n#[derive(Debug)]\n\npub(crate) struct Rule<'gcx, T: StartMatch + 'gcx> {\n\n pub(crate) search: &'gcx T,\n\n pub(crate) replace: &'gcx T,\n\n // The method in which the rule is defined.\n\n pub(crate) body_id: hir::BodyId,\n\n // Maps from the names of declared variables (which must be unique within the search pattern) to\n\n // their NodeId. This is used to pair up variables in the search pattern with their counterparts\n\n // in the replacement pattern. This is necessary since as far as rustc is concerned, they're\n\n // completely unrelated definitions. It isn't needed for expression placeholders since they're\n\n // declared as arguments to the function, so the search and replace pattern can both reference\n\n // the same placeholder variable.\n\n pub(crate) declared_name_node_ids: HashMap<Symbol, NodeId>,\n\n // IDs of the arguments to the function in which the rule was declared. 
When references to these\n\n // NodeIds are encountered in the search pattern, they should be treated as placeholders.\n\n pub(crate) placeholder_ids: HashSet<NodeId>,\n\n}\n\n\n\n#[derive(Debug)]\n", "file_path": "src/rules.rs", "rank": 61, "score": 29917.994266921804 }, { "content": "pub(crate) struct Rules<'gcx> {\n\n pub(crate) expr_rules: Vec<Rule<'gcx, hir::Expr>>,\n\n pub(crate) pattern_rules: Vec<Rule<'gcx, hir::Pat>>,\n\n pub(crate) type_rules: Vec<Rule<'gcx, hir::Ty>>,\n\n pub(crate) trait_ref_rules: Vec<Rule<'gcx, hir::TraitRef>>,\n\n}\n\n\n\nimpl<'gcx> Rules<'gcx> {\n\n pub(crate) fn new() -> Rules<'gcx> {\n\n Rules {\n\n expr_rules: Vec::new(),\n\n pattern_rules: Vec::new(),\n\n type_rules: Vec::new(),\n\n trait_ref_rules: Vec::new(),\n\n }\n\n }\n\n\n\n pub(crate) fn len(&self) -> usize {\n\n self.expr_rules.len() + self.pattern_rules.len() + self.type_rules.len()\n\n + self.trait_ref_rules.len()\n\n }\n\n}\n", "file_path": "src/rules.rs", "rank": 62, "score": 29917.955523145498 }, { "content": "// Copyright 2017 Google Inc.\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// https://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse syntax::ast::NodeId;\n\nuse syntax::symbol::Symbol;\n\nuse std::vec::Vec;\n\nuse std::collections::{HashMap, HashSet};\n\nuse rustc::hir;\n\nuse rule_finder::StartMatch;\n", "file_path": "src/rules.rs", "rank": 63, "score": 29917.685815341505 }, { "content": " tcx: TyCtxt<'a, 'gcx, 'gcx>,\n\n rerast_definitions: RerastDefinitions<'gcx>,\n\n krate: &'gcx hir::Crate,\n\n rules: &'r Rules<'gcx>,\n\n config: Config,\n\n ) -> Matches<'r, 'gcx> {\n\n let mut matcher = RuleMatcher {\n\n tcx,\n\n rules,\n\n matches: Matches::new(),\n\n rule_mod_symbol: Symbol::intern(super::RULES_MOD_NAME),\n\n parent_expr: None,\n\n body_id: None,\n\n rerast_definitions,\n\n config,\n\n debug_active: false,\n\n };\n\n intravisit::walk_crate(&mut matcher, krate);\n\n matcher.matches\n\n }\n", "file_path": "src/rule_matcher.rs", "rank": 66, "score": 28295.64827187148 }, { "content": " if $state.debug_active {\n\n println!($($args),*);\n\n }\n\n }\n\n}\n\n\n\npub(crate) struct RuleMatcher<'r, 'a, 'gcx: 'r + 'a> {\n\n tcx: TyCtxt<'a, 'gcx, 'gcx>,\n\n rules: &'r Rules<'gcx>,\n\n matches: Matches<'r, 'gcx>,\n\n rule_mod_symbol: Symbol,\n\n parent_expr: Option<&'gcx hir::Expr>,\n\n body_id: Option<hir::BodyId>,\n\n rerast_definitions: RerastDefinitions<'gcx>,\n\n config: Config,\n\n debug_active: bool,\n\n}\n\n\n\nimpl<'r, 'a, 'gcx> RuleMatcher<'r, 'a, 'gcx> {\n\n pub(crate) fn find_matches(\n", "file_path": "src/rule_matcher.rs", "rank": 68, "score": 28291.95132018123 }, { "content": " }\n\n}\n\n\n\nimpl Matchable for hir::Decl_ {\n\n fn attempt_match<'r, 'a, 'gcx, 'tcx>(\n\n &self,\n\n state: &mut MatchState<'r, 'a, 'gcx, 'tcx>,\n\n code: &'gcx Self,\n\n ) -> bool {\n\n use hir::Decl_::*;\n\n match (self, code) {\n\n (&DeclLocal(ref p), &DeclLocal(ref c)) => p.attempt_match(state, c),\n\n (&DeclItem(ref p), &DeclItem(ref c)) => {\n\n let krate = state.tcx.hir.krate();\n\n 
krate.item(p.id).attempt_match(state, krate.item(c.id))\n\n }\n\n _ => false,\n\n }\n\n }\n\n}\n", "file_path": "src/rule_matcher.rs", "rank": 69, "score": 28291.671824215253 }, { "content": " state: &mut MatchState<'r, 'a, 'gcx, 'tcx>,\n\n code: &'gcx Self,\n\n ) -> bool {\n\n self.node.attempt_match(state, &code.node)\n\n }\n\n}\n\n\n\nimpl Matchable for hir::Expr {\n\n fn attempt_match<'r, 'a, 'gcx, 'tcx>(\n\n &self,\n\n state: &mut MatchState<'r, 'a, 'gcx, 'tcx>,\n\n code: &'gcx Self,\n\n ) -> bool {\n\n use rustc::hir::Expr_::*;\n\n let result = match (&self.node, &code.node) {\n\n // TODO: ExprType, ExprInlineAsm (or more likely report that we don't support it).\n\n (&ExprCall(ref p_fn, ref p_args), &ExprCall(ref c_fn, ref c_args)) => {\n\n p_fn.attempt_match(state, c_fn) && p_args.attempt_match(state, c_args)\n\n }\n\n (\n", "file_path": "src/rule_matcher.rs", "rank": 70, "score": 28291.485995280338 }, { "content": "\n\n fn get_match<T: StartMatch + 'gcx>(\n\n &mut self,\n\n node: &'gcx T,\n\n parent_node: Option<&'gcx T>,\n\n original_span: Span,\n\n rule: &'r Rule<'gcx, T>,\n\n ) -> Option<Match<'r, 'gcx, T>> {\n\n let rule_fn_id = self.tcx.hir.body_owner_def_id(rule.body_id);\n\n let rule_tables = self.tcx.body_tables(rule.body_id);\n\n let rule_body = self.tcx.hir.body(rule.body_id);\n\n\n\n let maybe_match_placeholders = self.tcx.infer_ctxt().enter(|infcx| {\n\n let tcx = infcx.tcx;\n\n let substs = infcx.fresh_substs_for_item(\n\n tcx.def_span(rule_fn_id),\n\n rule_fn_id,\n\n );\n\n let placeholder_types_by_id = rule_body\n\n .arguments\n", "file_path": "src/rule_matcher.rs", "rank": 71, "score": 28291.442371511916 }, { "content": "}\n\n\n\nimpl<'r, 'a, 'gcx> intravisit::Visitor<'gcx> for RuleMatcher<'r, 'a, 'gcx> {\n\n fn nested_visit_map<'this>(&'this mut self) -> intravisit::NestedVisitorMap<'this, 'gcx> {\n\n intravisit::NestedVisitorMap::All(&self.tcx.hir)\n\n }\n\n\n\n fn visit_item(&mut self, item: &'gcx hir::Item) {\n\n if let hir::Item_::ItemMod(_) = item.node {\n\n // Avoid trying to find matches in the rules file.\n\n if item.name == self.rule_mod_symbol {\n\n return;\n\n }\n\n }\n\n intravisit::walk_item(self, item);\n\n }\n\n\n\n fn visit_trait_ref(&mut self, trait_ref: &'gcx hir::TraitRef) {\n\n if let Some(m) = self.get_first_match(trait_ref, None, &self.rules.trait_ref_rules) {\n\n self.matches.trait_ref_matches.push(m);\n", "file_path": "src/rule_matcher.rs", "rank": 72, "score": 28291.136627031672 }, { "content": " &self,\n\n _state: &mut MatchState<'r, 'a, 'gcx, 'tcx>,\n\n code: &'gcx Self,\n\n ) -> bool {\n\n self.node == code.node\n\n }\n\n}\n\n\n\nimpl Matchable for Symbol {\n\n fn attempt_match<'r, 'a, 'gcx, 'tcx>(\n\n &self,\n\n _state: &mut MatchState<'r, 'a, 'gcx, 'tcx>,\n\n code: &'gcx Self,\n\n ) -> bool {\n\n self == code\n\n }\n\n}\n\n\n\nimpl Matchable for usize {\n\n fn attempt_match<'r, 'a, 'gcx, 'tcx>(\n", "file_path": "src/rule_matcher.rs", "rank": 73, "score": 28290.74995307223 }, { "content": "pub(crate) struct MatchState<'r, 'a, 'gcx: 'r + 'a + 'tcx, 'tcx: 'a> {\n\n tcx: TyCtxt<'a, 'gcx, 'tcx>,\n\n infcx: InferCtxt<'a, 'gcx, 'tcx>,\n\n body_id: Option<hir::BodyId>,\n\n rule_type_tables: &'gcx ty::TypeckTables<'gcx>,\n\n match_placeholders: MatchPlaceholders<'r, 'gcx>,\n\n // This map should have all the same keys as the placeholders on match_placeholders. 
It's here\n\n // instead of on Match because it contains types that don't live as long as the match.\n\n placeholder_types_by_id: HashMap<NodeId, ty::Ty<'tcx>>,\n\n rerast_definitions: RerastDefinitions<'gcx>,\n\n placeholder_ids: &'r HashSet<NodeId>,\n\n // Whether bindings within a pattern are permitted to match any pattern. Otherwise, bindings are\n\n // only permitted to match bindings. This is enabled within replace_pattern, since the bindings\n\n // are only used within the pattern, not also as expressions, so binding to a pattern is\n\n // permissible.\n\n bindings_can_match_patterns: bool,\n\n debug_active: bool,\n\n}\n\n\n\nimpl<'r, 'a, 'gcx: 'a + 'tcx, 'tcx: 'a> MatchState<'r, 'a, 'gcx, 'tcx> {\n", "file_path": "src/rule_matcher.rs", "rank": 76, "score": 28289.370136057816 }, { "content": "use syntax::ast::NodeId;\n\nuse rustc::hir::{self, intravisit};\n\nuse rustc::ty::{self, TyCtxt};\n\nuse syntax::ext::quote::rt::Span;\n\nuse rules::{Rule, Rules};\n\nuse rustc::infer::{self, InferCtxt};\n\nuse syntax::symbol::Symbol;\n\nuse std::mem;\n\nuse std::fmt::Debug;\n\nuse rustc::traits::ObligationCause;\n\nuse definitions::RerastDefinitions;\n\nuse syntax;\n\nuse rule_finder::StartMatch;\n\nuse Config;\n\nuse code_substitution::CodeSubstitution;\n\nuse super::node_id_from_path;\n\n\n\n#[macro_export]\n\nmacro_rules! debug {\n\n ($state:expr, $($args:expr),*) => {\n", "file_path": "src/rule_matcher.rs", "rank": 77, "score": 28289.040289898836 }, { "content": " state: &mut MatchState<'r, 'a, 'gcx, 'tcx>,\n\n code: &'gcx Self,\n\n ) -> bool {\n\n self.mutbl == code.mutbl && self.ty.attempt_match(state, &code.ty)\n\n }\n\n}\n\n\n\nimpl Matchable for hir::Lifetime {\n\n fn attempt_match<'r, 'a, 'gcx, 'tcx>(\n\n &self,\n\n _state: &mut MatchState<'r, 'a, 'gcx, 'tcx>,\n\n _code: &'gcx Self,\n\n ) -> bool {\n\n // TODO: Probably want to check if both are 'static, otherwise attempt to bind with a\n\n // placeholder lifetime. 
Need to write test first.\n\n false\n\n }\n\n}\n\n\n\nimpl Matchable for hir::BareFnTy {\n", "file_path": "src/rule_matcher.rs", "rank": 79, "score": 28288.81962715811 }, { "content": " None\n\n }\n\n\n\n fn code_type_tables(&self) -> &'gcx ty::TypeckTables<'gcx> {\n\n self.tcx.body_tables(self.body_id.unwrap())\n\n }\n\n\n\n fn span_to_snippet(&self, span: Span) -> Result<String, SpanSnippetError> {\n\n self.tcx.sess.codemap().span_to_snippet(span)\n\n }\n\n}\n\n\n\npub(crate) trait Matchable: Debug {\n\n fn attempt_match<'r, 'a, 'gcx, 'tcx>(\n\n &self,\n\n state: &mut MatchState<'r, 'a, 'gcx, 'tcx>,\n\n code: &'gcx Self,\n\n ) -> bool;\n\n}\n\n\n", "file_path": "src/rule_matcher.rs", "rank": 80, "score": 28288.78757281713 }, { "content": " }\n\n}\n\n\n\nimpl Matchable for ast::Attribute {\n\n fn attempt_match<'r, 'a, 'gcx, 'tcx>(\n\n &self,\n\n _state: &mut MatchState<'r, 'a, 'gcx, 'tcx>,\n\n _code: &'gcx Self,\n\n ) -> bool {\n\n // TODO\n\n false\n\n }\n\n}\n\n\n\nimpl Matchable for hir::Block {\n\n fn attempt_match<'r, 'a, 'gcx, 'tcx>(\n\n &self,\n\n state: &mut MatchState<'r, 'a, 'gcx, 'tcx>,\n\n code: &'gcx Self,\n\n ) -> bool {\n", "file_path": "src/rule_matcher.rs", "rank": 82, "score": 28288.14363727286 }, { "content": " state: &mut MatchState<'r, 'a, 'gcx, 'tcx>,\n\n code: &'gcx Self,\n\n ) -> bool {\n\n self.name == code.name && self.parameters.attempt_match(state, &code.parameters)\n\n }\n\n}\n\n\n\nimpl Matchable for hir::Stmt {\n\n fn attempt_match<'r, 'a, 'gcx, 'tcx>(\n\n &self,\n\n state: &mut MatchState<'r, 'a, 'gcx, 'tcx>,\n\n code: &'gcx Self,\n\n ) -> bool {\n\n use rustc::hir::Stmt_::*;\n\n match (&self.node, &code.node) {\n\n (&StmtExpr(ref p, _), &StmtExpr(ref c, _))\n\n | (&StmtSemi(ref p, _), &StmtSemi(ref c, _)) => p.attempt_match(state, c),\n\n (&StmtDecl(ref p, _), &StmtDecl(ref c, _)) => p.attempt_match(state, c),\n\n _ => false,\n\n }\n", "file_path": "src/rule_matcher.rs", "rank": 83, "score": 28287.95099200833 }, { "content": "\n\nimpl<'a, 'gcx: 'a, T, F> intravisit::Visitor<'gcx> for PlaceholderCandidateFinder<'a, 'gcx, T, F>\n\nwhere\n\n F: Fn(&'gcx hir::Expr) -> T,\n\n{\n\n fn nested_visit_map<'this>(&'this mut self) -> intravisit::NestedVisitorMap<'this, 'gcx> {\n\n intravisit::NestedVisitorMap::All(&self.tcx.hir)\n\n }\n\n\n\n fn visit_expr(&mut self, expr: &'gcx hir::Expr) {\n\n self.stack\n\n .push(PlaceholderCandidate::new((self.data_fn)(expr)));\n\n self.walk_expr_children(expr);\n\n // We pushed to the stack. So long as all pushes and pops are matched, we should be able to\n\n // safely pop.\n\n let mut candidate = self.stack.pop().unwrap();\n\n candidate.hash = if candidate.children.is_empty() {\n\n // Leaf node. 
Get a token stream and hash its tokens.\n\n let snippet = self.tcx.sess.codemap().span_to_snippet(expr.span).unwrap();\n\n let session = ParseSess::new(FilePathMapping::empty());\n", "file_path": "src/change_to_rule.rs", "rank": 84, "score": 28287.890941114725 }, { "content": " &self,\n\n _state: &mut MatchState<'r, 'a, 'gcx, 'tcx>,\n\n code: &'gcx Self,\n\n ) -> bool {\n\n self == code\n\n }\n\n}\n\n\n\nimpl Matchable for hir::Destination {\n\n fn attempt_match<'r, 'a, 'gcx, 'tcx>(\n\n &self,\n\n state: &mut MatchState<'r, 'a, 'gcx, 'tcx>,\n\n code: &'gcx Self,\n\n ) -> bool {\n\n self.label.attempt_match(state, &code.label)\n\n }\n\n}\n\n\n\nimpl Matchable for hir::Label {\n\n fn attempt_match<'r, 'a, 'gcx, 'tcx>(\n", "file_path": "src/rule_matcher.rs", "rank": 85, "score": 28287.875136650353 }, { "content": "impl Matchable for hir::TraitRef {\n\n fn attempt_match<'r, 'a, 'gcx, 'tcx>(\n\n &self,\n\n _state: &mut MatchState<'r, 'a, 'gcx, 'tcx>,\n\n code: &'gcx Self,\n\n ) -> bool {\n\n self.path.def == code.path.def\n\n }\n\n}\n\n\n\nimpl Matchable for hir::TyParamBound {\n\n fn attempt_match<'r, 'a, 'gcx, 'tcx>(\n\n &self,\n\n _state: &mut MatchState<'r, 'a, 'gcx, 'tcx>,\n\n _code: &'gcx Self,\n\n ) -> bool {\n\n // TODO\n\n false\n\n }\n\n}\n", "file_path": "src/rule_matcher.rs", "rank": 86, "score": 28287.823025614915 }, { "content": " fn attempt_match<'r, 'a, 'gcx, 'tcx>(\n\n &self,\n\n _state: &mut MatchState<'r, 'a, 'gcx, 'tcx>,\n\n _code: &'gcx Self,\n\n ) -> bool {\n\n // TODO\n\n false\n\n }\n\n}\n\n\n\nimpl Matchable for hir::PolyTraitRef {\n\n fn attempt_match<'r, 'a, 'gcx, 'tcx>(\n\n &self,\n\n state: &mut MatchState<'r, 'a, 'gcx, 'tcx>,\n\n code: &'gcx Self,\n\n ) -> bool {\n\n self.trait_ref.attempt_match(state, &code.trait_ref)\n\n }\n\n}\n\n\n", "file_path": "src/rule_matcher.rs", "rank": 87, "score": 28287.823025614915 }, { "content": "\n\nimpl Matchable for hir::Visibility {\n\n fn attempt_match<'r, 'a, 'gcx, 'tcx>(\n\n &self,\n\n state: &mut MatchState<'r, 'a, 'gcx, 'tcx>,\n\n code: &'gcx Self,\n\n ) -> bool {\n\n use hir::Visibility::*;\n\n match (self, code) {\n\n (\n\n &Restricted {\n\n path: ref p_path, ..\n\n },\n\n &Restricted {\n\n path: ref c_path, ..\n\n },\n\n ) => p_path.attempt_match(state, c_path),\n\n (&Public, &Public) | (&Crate, &Crate) | (&Inherited, &Inherited) => true,\n\n _ => false,\n\n }\n", "file_path": "src/rule_matcher.rs", "rank": 88, "score": 28287.790370752577 }, { "content": " state: &mut MatchState<'r, 'a, 'gcx, 'tcx>,\n\n code: &'gcx Self,\n\n ) -> bool {\n\n self.node.attempt_match(state, &code.node)\n\n }\n\n}\n\n\n\nimpl Matchable for hir::Ty_ {\n\n fn attempt_match<'r, 'a, 'gcx, 'tcx>(\n\n &self,\n\n state: &mut MatchState<'r, 'a, 'gcx, 'tcx>,\n\n code: &'gcx Self,\n\n ) -> bool {\n\n use hir::Ty_::*;\n\n match (self, code) {\n\n (&TySlice(ref p), &TySlice(ref c)) | (&TyArray(ref p, _), &TyArray(ref c, _)) => {\n\n p.attempt_match(state, c)\n\n }\n\n (&TyPtr(ref p), &TyPtr(ref c)) => p.attempt_match(state, c),\n\n (&TyRptr(ref p_lifetime, ref p_ty), &TyRptr(ref c_lifetime, ref c_ty)) => {\n", "file_path": "src/rule_matcher.rs", "rank": 89, "score": 28287.67079668671 }, { "content": " &self,\n\n state: &mut MatchState<'r, 'a, 'gcx, 'tcx>,\n\n code: &'gcx Self,\n\n ) -> bool {\n\n self.name.attempt_match(state, &code.name)\n\n }\n\n}\n\n\n\nimpl Matchable for ast::Ident {\n\n fn attempt_match<'r, 'a, 'gcx, 'tcx>(\n\n &self,\n\n state: &mut MatchState<'r, 'a, 'gcx, 'tcx>,\n\n code: &'gcx Self,\n\n ) -> bool {\n\n 
self.name.attempt_match(state, &code.name)\n\n }\n\n}\n\n\n\nimpl Matchable for hir::QPath {\n\n fn attempt_match<'r, 'a, 'gcx, 'tcx>(\n", "file_path": "src/rule_matcher.rs", "rank": 90, "score": 28287.569955182265 }, { "content": "{\n\n fn nested_visit_map<'this>(&'this mut self) -> intravisit::NestedVisitorMap<'this, 'gcx> {\n\n intravisit::NestedVisitorMap::All(&self.tcx.hir)\n\n }\n\n\n\n fn visit_expr(&mut self, expr: &'gcx hir::Expr) {\n\n self.process_expr(expr, expr.span);\n\n let old_parent = self.parent_expr;\n\n self.parent_expr = Some(expr);\n\n intravisit::walk_expr(self, expr);\n\n self.parent_expr = old_parent;\n\n }\n\n\n\n fn visit_stmt(&mut self, stmt: &'gcx hir::Stmt) {\n\n if let hir::Stmt_::StmtSemi(ref expr, _) = stmt.node {\n\n if let hir::Expr_::ExprCall(ref expr_fn, _) = expr.node {\n\n if self.process_expr(expr_fn, stmt.span) {\n\n return;\n\n }\n\n }\n", "file_path": "src/rule_matcher.rs", "rank": 91, "score": 28287.466512474828 }, { "content": "impl<T: Matchable> Matchable for Option<T> {\n\n fn attempt_match<'r, 'a, 'gcx, 'tcx>(\n\n &self,\n\n state: &mut MatchState<'r, 'a, 'gcx, 'tcx>,\n\n code: &'gcx Self,\n\n ) -> bool {\n\n match (self.as_ref(), code.as_ref()) {\n\n (None, None) => true,\n\n (None, Some(_)) | (Some(_), None) => false,\n\n (Some(p), Some(c)) => p.attempt_match(state, c),\n\n }\n\n }\n\n}\n\n\n\nimpl<T: Matchable> Matchable for [T] {\n\n fn attempt_match<'r, 'a, 'gcx, 'tcx>(\n\n &self,\n\n state: &mut MatchState<'r, 'a, 'gcx, 'tcx>,\n\n code: &'gcx Self,\n\n ) -> bool {\n", "file_path": "src/rule_matcher.rs", "rank": 92, "score": 28287.423822622673 }, { "content": " code: &'gcx Self,\n\n ) -> bool {\n\n self.arguments.attempt_match(state, &code.arguments)\n\n && self.value.attempt_match(state, &code.value)\n\n }\n\n}\n\n\n\nimpl Matchable for hir::Arg {\n\n fn attempt_match<'r, 'a, 'gcx, 'tcx>(\n\n &self,\n\n state: &mut MatchState<'r, 'a, 'gcx, 'tcx>,\n\n code: &'gcx Self,\n\n ) -> bool {\n\n self.pat.attempt_match(state, &code.pat)\n\n }\n\n}\n\n\n\nimpl Matchable for hir::Ty {\n\n fn attempt_match<'r, 'a, 'gcx, 'tcx>(\n\n &self,\n", "file_path": "src/rule_matcher.rs", "rank": 94, "score": 28287.158364624047 }, { "content": "\n\nimpl Matchable for hir::Local {\n\n fn attempt_match<'r, 'a, 'gcx, 'tcx>(\n\n &self,\n\n state: &mut MatchState<'r, 'a, 'gcx, 'tcx>,\n\n code: &'gcx Self,\n\n ) -> bool {\n\n self.pat.attempt_match(state, &code.pat) && self.ty.attempt_match(state, &code.ty)\n\n && self.init.attempt_match(state, &code.init)\n\n && self.attrs.attempt_match(state, &code.attrs)\n\n }\n\n}\n\n\n\nimpl Matchable for hir::Item {\n\n fn attempt_match<'r, 'a, 'gcx, 'tcx>(\n\n &self,\n\n state: &mut MatchState<'r, 'a, 'gcx, 'tcx>,\n\n code: &'gcx Self,\n\n ) -> bool {\n\n state\n", "file_path": "src/rule_matcher.rs", "rank": 95, "score": 28287.008978511396 }, { "content": " &self,\n\n state: &mut MatchState<'r, 'a, 'gcx, 'tcx>,\n\n code: &'gcx Self,\n\n ) -> bool {\n\n self.name.attempt_match(state, &code.name) && self.expr.attempt_match(state, &code.expr)\n\n }\n\n}\n\n\n\nimpl Matchable for hir::FieldPat {\n\n fn attempt_match<'r, 'a, 'gcx, 'tcx>(\n\n &self,\n\n state: &mut MatchState<'r, 'a, 'gcx, 'tcx>,\n\n code: &'gcx Self,\n\n ) -> bool {\n\n self.name.attempt_match(state, &code.name) && self.pat.attempt_match(state, &code.pat)\n\n }\n\n}\n\n\n\nimpl Matchable for hir::BinOp {\n\n fn attempt_match<'r, 'a, 'gcx, 'tcx>(\n", "file_path": "src/rule_matcher.rs", "rank": 96, "score": 28286.964928270416 }, { "content": "\n\nimpl Matchable for 
hir::Arm {\n\n fn attempt_match<'r, 'a, 'gcx, 'tcx>(\n\n &self,\n\n state: &mut MatchState<'r, 'a, 'gcx, 'tcx>,\n\n code: &'gcx Self,\n\n ) -> bool {\n\n // For now only accept if attrs is empty\n\n self.attrs.is_empty() && code.attrs.is_empty() && self.pats.attempt_match(state, &code.pats)\n\n && self.guard.attempt_match(state, &code.guard)\n\n && self.body.attempt_match(state, &code.body)\n\n }\n\n}\n\n\n\nimpl Matchable for hir::Pat {\n\n fn attempt_match<'r, 'a, 'gcx, 'tcx>(\n\n &self,\n\n state: &mut MatchState<'r, 'a, 'gcx, 'tcx>,\n\n code: &'gcx Self,\n\n ) -> bool {\n", "file_path": "src/rule_matcher.rs", "rank": 97, "score": 28286.92126517346 }, { "content": " stack: vec![PlaceholderCandidate::new(data_fn(node))],\n\n data_fn,\n\n };\n\n state.walk_expr_children(node);\n\n state.stack.pop().unwrap().children\n\n }\n\n\n\n fn walk_expr_children(&mut self, expr: &'gcx hir::Expr) {\n\n if let hir::Expr_::ExprCall(ref _expr_fn, ref args) = expr.node {\n\n println!(\"** ExprCall: {:?}\", expr.node);\n\n // Ignore expr_fn as a candidate, just consider the args.\n\n for arg in args {\n\n use rustc::hir::intravisit::Visitor;\n\n self.visit_expr(arg);\n\n }\n\n } else {\n\n intravisit::walk_expr(self, expr);\n\n }\n\n }\n\n}\n", "file_path": "src/change_to_rule.rs", "rank": 99, "score": 28286.798362084282 } ]
Rust
kernel/env/mod.rs
pwoolcoc/redox
87f5ea23d26502494439cbf2a094bb5102f989b8
use alloc::boxed::Box; use collections::string::{String, ToString}; use collections::vec::Vec; use core::cell::UnsafeCell; use arch::context::ContextManager; use common::event::Event; use common::time::Duration; use disk::Disk; use network::Nic; use fs::{KScheme, Resource, Scheme, VecResource, Url}; use sync::WaitQueue; use system::error::{Error, Result, ENOENT, EEXIST}; use system::syscall::{O_CREAT, Stat}; use self::console::Console; use self::log::Log; pub mod console; pub mod log; pub struct Environment { pub contexts: UnsafeCell<ContextManager>, pub clock_realtime: UnsafeCell<Duration>, pub clock_monotonic: UnsafeCell<Duration>, pub console: UnsafeCell<Console>, pub disks: UnsafeCell<Vec<Box<Disk>>>, pub nics: UnsafeCell<Vec<Box<Nic>>>, pub events: WaitQueue<Event>, pub log: UnsafeCell<Log>, pub schemes: UnsafeCell<Vec<Box<KScheme>>>, pub interrupts: UnsafeCell<[u64; 256]>, } impl Environment { pub fn new() -> Box<Environment> { box Environment { contexts: UnsafeCell::new(ContextManager::new()), clock_realtime: UnsafeCell::new(Duration::new(0, 0)), clock_monotonic: UnsafeCell::new(Duration::new(0, 0)), console: UnsafeCell::new(Console::new()), disks: UnsafeCell::new(Vec::new()), nics: UnsafeCell::new(Vec::new()), events: WaitQueue::new(), log: UnsafeCell::new(Log::new()), schemes: UnsafeCell::new(Vec::new()), interrupts: UnsafeCell::new([0; 256]), } } pub fn on_irq(&self, irq: u8) { for mut scheme in unsafe { &mut *self.schemes.get() }.iter_mut() { scheme.on_irq(irq); } } pub fn open(&self, url: Url, flags: usize) -> Result<Box<Resource>> { let url_scheme = url.scheme(); if url_scheme.is_empty() { let url_path = url.reference(); if url_path.trim_matches('/').is_empty() { let mut list = String::new(); for scheme in unsafe { &mut *self.schemes.get() }.iter() { let scheme_str = scheme.scheme(); if !scheme_str.is_empty() { if !list.is_empty() { list = list + "\n" + scheme_str; } else { list = scheme_str.to_string(); } } } Ok(box VecResource::new(":".to_string(), list.into_bytes())) } else if flags & O_CREAT == O_CREAT { for scheme in unsafe { &mut *self.schemes.get() }.iter_mut() { if scheme.scheme() == url_path { return Err(Error::new(EEXIST)); } } match Scheme::new(url_path) { Ok((scheme, server)) => { unsafe { &mut *self.schemes.get() }.push(scheme); Ok(server) }, Err(err) => Err(err) } } else { Err(Error::new(ENOENT)) } } else { for mut scheme in unsafe { &mut *self.schemes.get() }.iter_mut() { if scheme.scheme() == url_scheme { return scheme.open(url, flags); } } Err(Error::new(ENOENT)) } } pub fn mkdir(&self, url: Url, flags: usize) -> Result<()> { let url_scheme = url.scheme(); if !url_scheme.is_empty() { for mut scheme in unsafe { &mut *self.schemes.get() }.iter_mut() { if scheme.scheme() == url_scheme { return scheme.mkdir(url, flags); } } } Err(Error::new(ENOENT)) } pub fn rmdir(&self, url: Url) -> Result<()> { let url_scheme = url.scheme(); if !url_scheme.is_empty() { for mut scheme in unsafe { &mut *self.schemes.get() }.iter_mut() { if scheme.scheme() == url_scheme { return scheme.rmdir(url); } } } Err(Error::new(ENOENT)) } pub fn stat(&self, url: Url, stat: &mut Stat) -> Result<()> { let url_scheme = url.scheme(); if !url_scheme.is_empty() { for mut scheme in unsafe { &mut *self.schemes.get() }.iter_mut() { if scheme.scheme() == url_scheme { return scheme.stat(url, stat); } } } Err(Error::new(ENOENT)) } pub fn unlink(&self, url: Url) -> Result<()> { let url_scheme = url.scheme(); if !url_scheme.is_empty() { for mut scheme in unsafe { &mut *self.schemes.get() }.iter_mut() { if scheme.scheme() == url_scheme { return scheme.unlink(url); } } } Err(Error::new(ENOENT)) } }
use alloc::boxed::Box; use collections::string::{String, ToString}; use collections::vec::Vec; use core::cell::UnsafeCell; use arch::context::ContextManager; use common::event::Event; use common::time::Duration; use disk::Disk; use network::Nic; use fs::{KScheme, Resource, Scheme, VecResource, Url}; use sync::WaitQueue; use system::error::{Error, Result, ENOENT, EEXIST}; use system::syscall::{O_CREAT, Stat}; use self::console::Console; use self::log::Log; pub mod console; pub mod log; pub struct Environment { pub contexts: UnsafeCell<ContextManager>, pub clock_realtime: UnsafeCell<Duration>, pub clock_monotonic: UnsafeCell<Duration>, pub console: UnsafeCell<Console>, pub disks: UnsafeCell<Vec<Box<Disk>>>, pub nics: UnsafeCell<Vec<Box<Nic>>>, pub events: WaitQueue<Event>, pub log: UnsafeCell<Log>, pub schemes: UnsafeCell<Vec<Box<KScheme>>>,
} } else { for mut scheme in unsafe { &mut *self.schemes.get() }.iter_mut() { if scheme.scheme() == url_scheme { return scheme.open(url, flags); } } Err(Error::new(ENOENT)) } } pub fn mkdir(&self, url: Url, flags: usize) -> Result<()> { let url_scheme = url.scheme(); if !url_scheme.is_empty() { for mut scheme in unsafe { &mut *self.schemes.get() }.iter_mut() { if scheme.scheme() == url_scheme { return scheme.mkdir(url, flags); } } } Err(Error::new(ENOENT)) } pub fn rmdir(&self, url: Url) -> Result<()> { let url_scheme = url.scheme(); if !url_scheme.is_empty() { for mut scheme in unsafe { &mut *self.schemes.get() }.iter_mut() { if scheme.scheme() == url_scheme { return scheme.rmdir(url); } } } Err(Error::new(ENOENT)) } pub fn stat(&self, url: Url, stat: &mut Stat) -> Result<()> { let url_scheme = url.scheme(); if !url_scheme.is_empty() { for mut scheme in unsafe { &mut *self.schemes.get() }.iter_mut() { if scheme.scheme() == url_scheme { return scheme.stat(url, stat); } } } Err(Error::new(ENOENT)) } pub fn unlink(&self, url: Url) -> Result<()> { let url_scheme = url.scheme(); if !url_scheme.is_empty() { for mut scheme in unsafe { &mut *self.schemes.get() }.iter_mut() { if scheme.scheme() == url_scheme { return scheme.unlink(url); } } } Err(Error::new(ENOENT)) } }
pub interrupts: UnsafeCell<[u64; 256]>, } impl Environment { pub fn new() -> Box<Environment> { box Environment { contexts: UnsafeCell::new(ContextManager::new()), clock_realtime: UnsafeCell::new(Duration::new(0, 0)), clock_monotonic: UnsafeCell::new(Duration::new(0, 0)), console: UnsafeCell::new(Console::new()), disks: UnsafeCell::new(Vec::new()), nics: UnsafeCell::new(Vec::new()), events: WaitQueue::new(), log: UnsafeCell::new(Log::new()), schemes: UnsafeCell::new(Vec::new()), interrupts: UnsafeCell::new([0; 256]), } } pub fn on_irq(&self, irq: u8) { for mut scheme in unsafe { &mut *self.schemes.get() }.iter_mut() { scheme.on_irq(irq); } } pub fn open(&self, url: Url, flags: usize) -> Result<Box<Resource>> { let url_scheme = url.scheme(); if url_scheme.is_empty() { let url_path = url.reference(); if url_path.trim_matches('/').is_empty() { let mut list = String::new(); for scheme in unsafe { &mut *self.schemes.get() }.iter() { let scheme_str = scheme.scheme(); if !scheme_str.is_empty() { if !list.is_empty() { list = list + "\n" + scheme_str; } else { list = scheme_str.to_string(); } } } Ok(box VecResource::new(":".to_string(), list.into_bytes())) } else if flags & O_CREAT == O_CREAT { for scheme in unsafe { &mut *self.schemes.get() }.iter_mut() { if scheme.scheme() == url_path { return Err(Error::new(EEXIST)); } } match Scheme::new(url_path) { Ok((scheme, server)) => { unsafe { &mut *self.schemes.get() }.push(scheme); Ok(server) }, Err(err) => Err(err) } } else { Err(Error::new(ENOENT))
random
[ { "content": "pub trait Disk {\n\n fn name(&self) -> String;\n\n fn on_irq(&mut self, irq: u8);\n\n fn size(&self) -> u64;\n\n fn read(&mut self, block: u64, buffer: &mut [u8]) -> Result<usize>;\n\n fn write(&mut self, block: u64, buffer: &[u8]) -> Result<usize>;\n\n}\n", "file_path": "kernel/disk/mod.rs", "rank": 0, "score": 213604.32309858882 }, { "content": "pub fn stat(path: *const u8, stat: *mut Stat) -> Result<usize> {\n\n let contexts = unsafe { & *::env().contexts.get() };\n\n let current = try!(contexts.current());\n\n let path_string = current.canonicalize(c_string_to_str(path));\n\n let url = Url::from_str(&path_string)?;\n\n let stat_safe = current.get_ref_mut(stat)?;\n\n\n\n *stat_safe = Stat::default();\n\n ::env().stat(url, stat_safe).and(Ok(0))\n\n}\n\n\n", "file_path": "kernel/syscall/fs.rs", "rank": 1, "score": 211726.59645842086 }, { "content": "pub fn fstat(fd: usize, stat: *mut Stat) -> Result<usize> {\n\n let contexts = unsafe { & *::env().contexts.get() };\n\n let current = contexts.current()?;\n\n let resource = current.get_file(fd)?;\n\n let stat_safe = current.get_ref_mut(stat)?;\n\n resource.stat(stat_safe)\n\n}\n\n\n\n/** <!-- @MANSTART{sys_fsync} -->\n\nNAME\n\n sys_fsync - synchronize a file's in-core state with storage device\n\n\n\nSYNOPSIS\n\n sys_fsync(fd: usize) -> Result<usize>;\n\n\n\nDESCRIPTION\n\n sys_fsync transfers all modified in-core data of the file refered to by the file descriptor fd\n\n to the underlying device\n\n\n\nRETURN VALUE\n", "file_path": "kernel/syscall/fs.rs", "rank": 2, "score": 205378.2574577485 }, { "content": "pub trait Nic {\n\n fn name(&self) -> String;\n\n fn read(&mut self, buffer: &mut [u8]) -> Result<usize>;\n\n fn write(&mut self, buffer: &[u8]) -> Result<usize>;\n\n}\n", "file_path": "kernel/network/mod.rs", "rank": 3, "score": 201863.97042758352 }, { "content": "pub fn sys_fstat(fd: usize, stat: &mut Stat) -> Result<usize> {\n\n unsafe { syscall2(SYS_FSTAT, fd, stat as *mut Stat as usize) }\n\n}\n\n\n", "file_path": "crates/system/syscall/unix.rs", "rank": 4, "score": 199737.50270145037 }, { "content": "pub trait Scheme {\n\n fn handle(&mut self, packet: &mut Packet) {\n\n packet.a = Error::mux(match packet.a {\n\n SYS_OPEN => self.open(c_string_to_str(packet.b as *const u8), packet.c, packet.d),\n\n SYS_MKDIR => self.mkdir(c_string_to_str(packet.b as *const u8), packet.c),\n\n SYS_RMDIR => self.rmdir(c_string_to_str(packet.b as *const u8)),\n\n SYS_STAT => self.stat(c_string_to_str(packet.b as *const u8), unsafe { &mut *(packet.c as *mut Stat) }),\n\n SYS_UNLINK => self.unlink(c_string_to_str(packet.b as *const u8)),\n\n\n\n SYS_DUP => self.dup(packet.b),\n\n SYS_READ => self.read(packet.b, unsafe { slice::from_raw_parts_mut(packet.c as *mut u8, packet.d) }),\n\n SYS_WRITE => self.write(packet.b, unsafe { slice::from_raw_parts(packet.c as *const u8, packet.d) }),\n\n SYS_LSEEK => self.seek(packet.b, packet.c, packet.d),\n\n SYS_FPATH => self.fpath(packet.b, unsafe { slice::from_raw_parts_mut(packet.c as *mut u8, packet.d) }),\n\n SYS_FSTAT => self.fstat(packet.b, unsafe { &mut *(packet.c as *mut Stat) }),\n\n SYS_FSYNC => self.fsync(packet.b),\n\n SYS_FTRUNCATE => self.ftruncate(packet.b, packet.c),\n\n SYS_CLOSE => self.close(packet.b),\n\n\n\n _ => Err(Error::new(ENOSYS))\n", "file_path": "crates/system/scheme.rs", "rank": 5, "score": 168123.65250424203 }, { "content": "/// Resolve the host specified by `host` as a number of `SocketAddr` instances.\n\n///\n\n/// This method may perform a DNS query to resolve 
`host` and may also inspect\n\n/// system configuration to resolve the specified hostname.\n\n///\n\n/// # Examples\n\n///\n\n/// ```no_run\n\n/// #![feature(lookup_host)]\n\n///\n\n/// use std::net;\n\n///\n\n/// # fn foo() -> std::io::Result<()> {\n\n/// for host in try!(net::lookup_host(\"rust-lang.org\")) {\n\n/// println!(\"found address: {}\", try!(host));\n\n/// }\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\npub fn lookup_host(host: &str) -> io::Result<LookupHost> {\n\n net_imp::lookup_host(host).map(LookupHost)\n\n}\n", "file_path": "libstd/src/net/mod.rs", "rank": 6, "score": 157178.90409307578 }, { "content": "pub fn getpid() -> Result<usize> {\n\n let contexts = unsafe { & *::env().contexts.get() };\n\n let current = try!(contexts.current());\n\n Ok(current.pid)\n\n}\n\n\n", "file_path": "kernel/syscall/process.rs", "rank": 7, "score": 142582.93633884407 }, { "content": "pub fn sched_yield() -> Result<usize> {\n\n unsafe {\n\n context_switch();\n\n }\n\n Ok(0)\n\n}\n\n\n", "file_path": "kernel/syscall/process.rs", "rank": 8, "score": 139948.5286153587 }, { "content": "struct SchemeInner {\n\n name: String,\n\n context: *mut Context,\n\n next_id: Cell<usize>,\n\n todo: WaitQueue<Packet>,\n\n done: WaitMap<usize, (usize, usize, usize, usize)>,\n\n}\n\n\n\nimpl SchemeInner {\n\n fn new(name: &str, context: *mut Context) -> SchemeInner {\n\n SchemeInner {\n\n name: name.to_owned(),\n\n context: context,\n\n next_id: Cell::new(1),\n\n todo: WaitQueue::new(),\n\n done: WaitMap::new(),\n\n }\n\n }\n\n\n\n fn call(inner: &Weak<SchemeInner>, a: usize, b: usize, c: usize, d: usize) -> Result<usize> {\n", "file_path": "kernel/fs/scheme.rs", "rank": 9, "score": 139716.38962077338 }, { "content": "pub fn sys_yield() -> Result<usize> {\n\n unsafe { syscall0(SYS_YIELD) }\n\n}\n", "file_path": "crates/system/syscall/unix.rs", "rank": 10, "score": 137472.08191328237 }, { "content": "/// Method to return the current directory\n\npub fn current_dir() -> Result<PathBuf> {\n\n // Return the current path\n\n get_path_from(\"./\")\n\n}\n\n\n", "file_path": "libstd/src/env.rs", "rank": 11, "score": 137472.08191328237 }, { "content": "pub fn sys_getpid() -> Result<usize> {\n\n unsafe { syscall0(SYS_GETPID) }\n\n}\n\n\n\npub unsafe fn sys_iopl(level: usize) -> Result<usize> {\n\n syscall1(SYS_IOPL, level)\n\n}\n\n\n\npub unsafe fn sys_link(old: *const u8, new: *const u8) -> Result<usize> {\n\n syscall2(SYS_LINK, old as usize, new as usize)\n\n}\n\n\n", "file_path": "crates/system/syscall/unix.rs", "rank": 12, "score": 137472.08191328237 }, { "content": "#[allow(unused_variables)]\n\npub trait Resource {\n\n /// Duplicate the resource\n\n /// Returns `EPERM` if the operation is not supported.\n\n fn dup(&self) -> Result<Box<Resource>> {\n\n Err(Error::new(EPERM))\n\n }\n\n\n\n /// Return the path of this resource\n\n /// Returns `EPERM` if the operation is not supported.\n\n fn path(&self, buf: &mut [u8]) -> Result<usize> {\n\n Err(Error::new(EPERM))\n\n }\n\n\n\n /// Read data to buffer\n\n /// Returns `EPERM` if the operation is not supported.\n\n fn read(&mut self, buf: &mut [u8]) -> Result<usize> {\n\n Err(Error::new(EPERM))\n\n }\n\n\n\n /// Write to resource\n", "file_path": "kernel/fs/resource.rs", "rank": 13, "score": 132745.04667410514 }, { "content": "/// Supervise a child process of the current context.\n\n///\n\n/// This will make all syscalls the given process makes mark the process as blocked, until it is\n\n/// handled by the supervisor (parrent process) through the returned handle (for 
details, see the\n\n/// docs in the `system` crate).\n\n///\n\n/// This routine is done by having a field defining whether the process is blocked by a syscall.\n\n/// When the syscall is read from the file handle, this field is set to false, but the process is\n\n/// still stopped (because it is marked as `blocked`), until the new value of the EAX register is\n\n/// written to the file handle.\n\npub fn supervise(pid: usize) -> Result<usize> {\n\n let contexts = unsafe { &mut *::env().contexts.get() };\n\n let cur_pid = try!(contexts.current_mut()).pid;\n\n\n\n let procc;\n\n\n\n {\n\n let jailed = try!(contexts.find_mut(pid));\n\n\n\n // Make sure that this is actually a child process of the invoker.\n\n if jailed.ppid != cur_pid {\n\n return Err(Error::new(EACCES));\n\n }\n\n\n\n jailed.supervised = true;\n\n\n\n procc = &mut **jailed as *mut _;\n\n }\n\n\n\n let current = try!(contexts.current_mut());\n", "file_path": "kernel/syscall/process.rs", "rank": 14, "score": 132442.40862602554 }, { "content": "pub fn clone(regs: &Regs) -> Result<usize> {\n\n unsafe { context_clone(regs) }\n\n}\n\n\n", "file_path": "kernel/syscall/process.rs", "rank": 15, "score": 132438.82358155173 }, { "content": "pub fn brk(addr: usize) -> Result<usize> {\n\n let mut ret = 0;\n\n\n\n let contexts = unsafe { & *::env().contexts.get() };\n\n if let Ok(current) = contexts.current() {\n\n ret = unsafe { (*current.heap.get()).next_mem() };\n\n\n\n // TODO: Make this smarter, currently it attempt to resize the entire data segment\n\n if let Some(mut mem) = unsafe { (*current.heap.get()).memory.last_mut() } {\n\n if mem.writeable && mem.allocated {\n\n if addr >= mem.virtual_address {\n\n unsafe { mem.unmap() };\n\n\n\n let size = addr - mem.virtual_address;\n\n let physical_address = unsafe { memory::realloc_aligned(mem.physical_address, size, 4096) };\n\n if physical_address > 0 {\n\n mem.physical_address = physical_address;\n\n mem.virtual_size = size;\n\n ret = mem.virtual_address + mem.virtual_size;\n\n } else {\n", "file_path": "kernel/syscall/memory.rs", "rank": 16, "score": 132438.82358155173 }, { "content": "pub fn dup(fd: usize) -> Result<usize> {\n\n let contexts = unsafe { & *::env().contexts.get() };\n\n let current = try!(contexts.current());\n\n let resource = try!(current.get_file(fd));\n\n let new_resource = try!(resource.dup());\n\n let new_fd = current.next_fd();\n\n\n\n unsafe {\n\n (*current.files.get()).push(ContextFile {\n\n fd: new_fd,\n\n resource: new_resource,\n\n });\n\n }\n\n Ok(new_fd)\n\n}\n\n\n", "file_path": "kernel/syscall/fs.rs", "rank": 17, "score": 132438.82358155173 }, { "content": "pub fn fsync(fd: usize) -> Result<usize> {\n\n let contexts = unsafe { &mut *::env().contexts.get() };\n\n let mut current = try!(contexts.current_mut());\n\n let mut resource = try!(current.get_file_mut(fd));\n\n resource.sync().and(Ok(0))\n\n}\n\n\n\n/** <!-- @MANSTART{sys_ftruncate} -->\n\nNAME\n\n sys_ftruncate - truncate a file to a specified length\n\n\n\nSYNOPSIS\n\n sys_ftruncate(fd: usize, length: usize) -> Result<usize>;\n\n\n\nDESCRIPTION\n\n sys_ftruncate causes the file referenced by fd to be truncated to a size of precisely length\n\n bytes\n\n\n\nRETURN VALUE\n\n On success, Ok(0) is returned. 
On error, Err(err) is returned where err is one of the following\n", "file_path": "kernel/syscall/fs.rs", "rank": 18, "score": 132438.82358155173 }, { "content": "pub fn close(fd: usize) -> Result<usize> {\n\n let contexts = unsafe { & *::env().contexts.get() };\n\n let current = try!(contexts.current());\n\n\n\n for i in 0..unsafe { (*current.files.get()).len() } {\n\n let mut remove = false;\n\n if let Some(file) = unsafe { (*current.files.get()).get(i) } {\n\n if file.fd == fd {\n\n remove = true;\n\n }\n\n }\n\n\n\n if remove {\n\n if i < unsafe { (*current.files.get()).len() } {\n\n drop(unsafe { (*current.files.get()).remove(i) });\n\n\n\n return Ok(0);\n\n }\n\n }\n\n }\n", "file_path": "kernel/syscall/fs.rs", "rank": 19, "score": 132438.82358155173 }, { "content": "struct Prdt {\n\n reg: Pio<u32>,\n\n mem: Memory<Prd>,\n\n}\n\n\n\nimpl Prdt {\n\n fn new(port: u16) -> Self {\n\n let mut reg = Pio::<u32>::new(port);\n\n reg.write(0);\n\n\n\n Prdt {\n\n reg: reg,\n\n mem: Memory::new_aligned(512, 65536).unwrap(),\n\n }\n\n }\n\n}\n\n\n\nimpl Drop for Prdt {\n\n fn drop(&mut self) {\n\n self.reg.write(0);\n", "file_path": "kernel/disk/ide.rs", "rank": 20, "score": 131312.05491682555 }, { "content": "#[repr(packed)]\n\nstruct Prd {\n\n addr: u32,\n\n size: u16,\n\n rsv: u8,\n\n eot: u8,\n\n}\n\n\n", "file_path": "kernel/disk/ide.rs", "rank": 21, "score": 131312.05491682555 }, { "content": "pub trait NetworkScheme {\n\n fn add(&mut self, resource: *mut NetworkResource);\n\n fn remove(&mut self, resource: *mut NetworkResource);\n\n fn sync(&mut self);\n\n}\n\n\n\npub struct NetworkResource {\n\n pub nic: *mut NetworkScheme,\n\n pub ptr: *mut NetworkResource,\n\n pub inbound: WaitQueue<Vec<u8>>,\n\n pub outbound: UnsafeCell<VecDeque<Vec<u8>>>,\n\n}\n\n\n\nimpl NetworkResource {\n\n pub fn new(nic: *mut NetworkScheme) -> Box<Self> {\n\n let mut ret = box NetworkResource {\n\n nic: nic,\n\n ptr: 0 as *mut NetworkResource,\n\n inbound: WaitQueue::new(),\n\n outbound: UnsafeCell::new(VecDeque::new()),\n", "file_path": "kernel/network/scheme.rs", "rank": 22, "score": 129596.77187482972 }, { "content": "/// Returns the environment variable `key` from the current process. 
If `key` is not valid Unicode\n\n/// or if the variable is not present then `Err` is returned\n\npub fn var<K: AsRef<OsStr>>(key: K) -> ::core::result::Result<String, VarError> {\n\n if let Some(key_str) = key.as_ref().to_str() {\n\n let mut file = try!(File::open(&(\"env:\".to_owned() + key_str)).or(Err(VarError::NotPresent)));\n\n let mut string = String::new();\n\n try!(file.read_to_string(&mut string).or(Err(VarError::NotPresent)));\n\n Ok(string)\n\n } else {\n\n Err(VarError::NotUnicode(key.as_ref().to_owned()))\n\n }\n\n}\n\n\n", "file_path": "libstd/src/env.rs", "rank": 23, "score": 128377.95951726928 }, { "content": "struct Ac97Resource {\n\n audio: usize,\n\n bus_master: usize,\n\n bdl: *mut Bd,\n\n}\n\n\n\nimpl Resource for Ac97Resource {\n\n fn dup(&self) -> syscall::Result<Box<Resource>> {\n\n Ok(box Ac97Resource {\n\n audio: self.audio,\n\n bus_master: self.bus_master,\n\n bdl: self.bdl\n\n })\n\n }\n\n\n\n fn path(&self, buf: &mut [u8]) -> syscall::Result <usize> {\n\n let path = b\"audio:\";\n\n\n\n let mut i = 0;\n\n while i < buf.len() && i < path.len() {\n", "file_path": "kernel/audio/ac97.rs", "rank": 24, "score": 128216.2037140755 }, { "content": "/// <!-- @MANSTART{supervise} -->\n\n/// Supervise a given child process' system calls.\n\n///\n\n/// SUPERVISE allows a process to run another process in a restricted, traced, and supervised\n\n/// environment, which is useful for various purposes, such as emulation, virtualisation, tracing,\n\n/// logging, and debugging.\n\n///\n\n/// SUPERVISE takes a PID specifing the process to be supervised. This PID must be a child process\n\n/// of the invoker. If not, EACCES will be returned.\n\n///\n\n/// A process can only have one supervisor at a time. If SUPERVISE is called on a process, which\n\n/// already have a supervisor EPERM will be returned.\n\n///\n\n/// The process identified by the given PID will be restricted in such a way, that every syscall\n\n/// made will mark the process as blocked and store the syscall until it is handled by the parrent.\n\n///\n\n/// The return value (if successful) is a file descriptor, from which syscalls can be read and written:\n\n/// the syscalls are read in `Packet` sized packages, containing the respective blocking syscall. If\n\n/// no syscall is blocking (or the last blocking syscall have been handled), 0 bytes will be read to\n\n/// the buffer.\n\n///\n\n/// Writing pointer sized integers to this file handle will set the EAX register of the particular\n\n/// process, after which the process is unblocked and the syscall buffer is emptied. 
The behavior of\n\n/// writing packages of unexpected size is unspecified.\n\n///\n\n/// Note that a process blocked by a syscall will have its potential sleep cleared (i.e., it will\n\n/// not wake up after the sleep is finished).\n\n///\n\n/// Passing a non-existent PID results in ESRCH.\n\n///\n\n/// A process being supervised is referred to as 'jailed' or 'supervised'.\n\n/// <!-- @MANEND -->\n\npub fn sys_supervise(pid: usize) -> Result<usize> {\n\n unsafe { syscall1(SYS_SUPERVISE, pid) }\n\n}\n", "file_path": "crates/system/syscall/redox.rs", "rank": 25, "score": 127914.48222179717 }, { "content": "pub fn rmdir(path: *const u8) -> Result<usize> {\n\n let contexts = unsafe { & *::env().contexts.get() };\n\n let current = try!(contexts.current());\n\n let path_string = current.canonicalize(c_string_to_str(path));\n\n ::env().rmdir(try!(Url::from_str(&path_string))).and(Ok(0))\n\n}\n\n\n", "file_path": "kernel/syscall/fs.rs", "rank": 26, "score": 127907.31157584942 }, { "content": "pub fn chdir(path: *const u8) -> Result<usize> {\n\n let contexts = unsafe { & *::env().contexts.get() };\n\n let current = try!(contexts.current());\n\n unsafe {\n\n *current.cwd.get() = current.canonicalize(c_string_to_str(path));\n\n }\n\n Ok(0)\n\n}\n\n\n\n/** <!-- @MANSTART{sys_close} -->\n\nNAME\n\n sys_close - close a file descriptor\n\n\n\nSYNOPSIS\n\n sys_close(fd: usize) -> Result<usize>;\n\n\n\nDESCRIPTION\n\n sys_close closes a file descriptor, so that it no longer refers to any file and may be reused.\n\n\n\nRETURN VALUE\n", "file_path": "kernel/syscall/fs.rs", "rank": 27, "score": 127907.31157584942 }, { "content": "#[cfg(target_arch = \"x86_64\")]\n\npub fn iopl(regs: &mut Regs) -> Result<usize> {\n\n let level = regs.bx;\n\n if level <= 3 {\n\n let contexts = unsafe { &mut *::env().contexts.get() };\n\n let mut current = try!(contexts.current_mut());\n\n current.iopl = level;\n\n\n\n regs.flags &= 0xFFFFFFFFFFFFFFFF - 0x3000;\n\n regs.flags |= (current.iopl << 12) & 0x3000;\n\n\n\n Ok(0)\n\n } else {\n\n Err(Error::new(EINVAL))\n\n }\n\n}\n\n\n", "file_path": "kernel/syscall/process.rs", "rank": 28, "score": 127907.31157584942 }, { "content": "pub fn unlink(path: *const u8) -> Result<usize> {\n\n let contexts = unsafe { & *::env().contexts.get() };\n\n let current = try!(contexts.current());\n\n let path_string = current.canonicalize(c_string_to_str(path));\n\n ::env().unlink(try!(Url::from_str(&path_string))).and(Ok(0))\n\n}\n\n\n\n/** <!-- @MANSTART{sys_write} -->\n\nNAME\n\n sys_write - read from a file descriptor\n\n\n\nSYNOPSIS\n\n sys_write(fd: usize, buf: *mut u8, count: usize) -> Result<usize>;\n\n\n\nDESCRIPTION\n\n sys_write attempts to read up to count bytes from file descriptor fd into the buffer starting at\n\n buf\n\n\n\nRETURN VALUE\n\n On success, Ok(count) is returned, where count is the number of bytes read into buf. 
On error,\n", "file_path": "kernel/syscall/fs.rs", "rank": 29, "score": 127907.31157584942 }, { "content": "pub fn sys_exit(status: usize) -> Result<usize> {\n\n unsafe { syscall1(SYS_EXIT, status) }\n\n}\n\n\n", "file_path": "crates/system/syscall/unix.rs", "rank": 30, "score": 127906.19922450146 }, { "content": "pub fn sys_fsync(fd: usize) -> Result<usize> {\n\n unsafe { syscall1(SYS_FSYNC, fd) }\n\n}\n\n\n", "file_path": "crates/system/syscall/unix.rs", "rank": 31, "score": 127906.19922450146 }, { "content": "pub fn sys_dup(fd: usize) -> Result<usize> {\n\n unsafe { syscall1(SYS_DUP, fd) }\n\n}\n\n\n\npub unsafe fn sys_execve(path: *const u8, args: *const *const u8) -> Result<usize> {\n\n syscall2(SYS_EXECVE, path as usize, args as usize)\n\n}\n\n\n", "file_path": "crates/system/syscall/unix.rs", "rank": 32, "score": 127906.19922450146 }, { "content": "pub fn sys_close(fd: usize) -> Result<usize> {\n\n unsafe { syscall1(SYS_CLOSE, fd) }\n\n}\n\n\n", "file_path": "crates/system/syscall/unix.rs", "rank": 33, "score": 127906.19922450146 }, { "content": "struct OrbitalScheme {\n\n start: Instant,\n\n image: Image,\n\n background: Image,\n\n cursor: Image,\n\n cursor_x: i32,\n\n cursor_y: i32,\n\n dragging: bool,\n\n drag_x: i32,\n\n drag_y: i32,\n\n next_id: isize,\n\n next_x: i32,\n\n next_y: i32,\n\n order: VecDeque<usize>,\n\n windows: BTreeMap<usize, Window>,\n\n redraws: Vec<Rect>,\n\n todo: Vec<Packet>\n\n}\n\n\n\nimpl OrbitalScheme {\n", "file_path": "crates/orbital/main.rs", "rank": 34, "score": 127860.72155513777 }, { "content": "struct ExampleScheme {\n\n next_id: isize,\n\n files: BTreeMap<usize, ExampleFile>\n\n}\n\n\n\nimpl ExampleScheme {\n\n fn new() -> ExampleScheme {\n\n ExampleScheme {\n\n next_id: 1,\n\n files: BTreeMap::new()\n\n }\n\n }\n\n}\n\n\n\nimpl Scheme for ExampleScheme {\n\n fn open(&mut self, path: &str, flags: usize, mode: usize) -> Result<usize> {\n\n println!(\"open {:X} = {}, {:X}, {:X}\", path.as_ptr() as usize, path, flags, mode);\n\n let id = self.next_id as usize;\n\n self.next_id += 1;\n\n if self.next_id < 0 {\n", "file_path": "crates/example/main.rs", "rank": 35, "score": 127860.72155513777 }, { "content": "struct IntelHdaResource {\n\n base: usize,\n\n}\n\n\n\nimpl Resource for IntelHdaResource {\n\n fn dup(&self) -> syscall::Result<Box<Resource>> {\n\n Ok(box IntelHdaResource { base: self.base })\n\n }\n\n\n\n fn path(&self, buf: &mut [u8]) -> syscall::Result <usize> {\n\n let path = b\"audio:\";\n\n\n\n let mut i = 0;\n\n while i < buf.len() && i < path.len() {\n\n buf[i] = path[i];\n\n i += 1;\n\n }\n\n\n\n Ok(i)\n\n }\n", "file_path": "kernel/audio/intelhda.rs", "rank": 36, "score": 125203.40916863081 }, { "content": "pub fn lookup_host(host: &str) -> Result<LookupHost> {\n\n let mut dns = [0; 4];\n\n try!(try!(File::open(\"netcfg:dns\")).read(&mut dns));\n\n\n\n let tid = (time::SystemTime::now().duration_since(time::UNIX_EPOCH).unwrap().subsec_nanos() >> 16) as u16;\n\n\n\n let packet = Dns {\n\n transaction_id: tid,\n\n flags: 0x0100,\n\n queries: vec![DnsQuery {\n\n name: host.to_string(),\n\n q_type: 0x0001,\n\n q_class: 0x0001,\n\n }],\n\n answers: vec![]\n\n };\n\n\n\n let packet_data = packet.compile();\n\n\n\n let mut socket = try!(File::open(&format!(\"udp:{}.{}.{}.{}:53\", dns[0], dns[1], dns[2], dns[3])));\n", "file_path": "libstd/src/sys/common/net.rs", "rank": 37, "score": 123858.92940732709 }, { "content": "pub fn execute_thread(context_ptr: *mut Context, entry: usize, mut args: Vec<String>) -> ! 
{\n\n Context::spawn(\"kexec\".into(),\n\n box move || {\n\n let context = unsafe { &mut *context_ptr };\n\n\n\n let mut context_args: Vec<usize> = Vec::new();\n\n context_args.push(0); // ENVP\n\n context_args.push(0); // ARGV NULL\n\n let mut argc = 0;\n\n while let Some(mut arg) = args.pop() {\n\n if ! arg.ends_with('\\0') {\n\n arg.push('\\0');\n\n }\n\n\n\n let mut physical_address = arg.as_ptr() as usize;\n\n if physical_address >= 0x80000000 {\n\n physical_address -= 0x80000000;\n\n }\n\n\n\n let virtual_address = unsafe { (*context.image.get()).next_mem() };\n", "file_path": "kernel/syscall/execute.rs", "rank": 38, "score": 123827.49102080651 }, { "content": "pub fn ftruncate(fd: usize, length: usize) -> Result<usize> {\n\n let contexts = unsafe { &mut *::env().contexts.get() };\n\n let mut current = try!(contexts.current_mut());\n\n let mut resource = try!(current.get_file_mut(fd));\n\n resource.truncate(length).and(Ok(0))\n\n}\n\n\n\n//TODO: Link\n\n\n\n/** <!-- @MANSTART{sys_lseek} -->\n\nNAME\n\n sys_lseek - reposition read/write file offset\n\n\n\nSYNOPSIS\n\n sys_lseek(fd: usize, offset: isize, whence: usize) -> Result<usize>;\n\n\n\nDESCRIPTION\n\n sys_lseek repositions the offset of the file referenced by fd to the offset according to whence\n\n\n\n SEEK_SET: 0\n", "file_path": "kernel/syscall/fs.rs", "rank": 39, "score": 123685.18488048812 }, { "content": "/// Execute an executable\n\npub fn execute(mut args: Vec<String>) -> Result<usize> {\n\n let contexts = unsafe { &mut *::env().contexts.get() };\n\n let current = try!(contexts.current_mut());\n\n\n\n let mut vec: Vec<u8> = Vec::new();\n\n\n\n let path = current.canonicalize(args.get(0).map_or(\"\", |p| &p));\n\n let url = try!(Url::from_str(&path));\n\n {\n\n let mut resource = try!(url.open());\n\n\n\n // Hack to allow file scheme to find memory in context's memory space\n\n unsafe {\n\n let mmap = &mut *current.mmap.get();\n\n\n\n let virtual_size = 1024*1024;\n\n let virtual_address = mmap.next_mem();\n\n\n\n let physical_address = memory::alloc_aligned(virtual_size, 4096);\n\n if physical_address == 0 {\n", "file_path": "kernel/syscall/execute.rs", "rank": 40, "score": 123685.18488048812 }, { "content": "#[repr(packed)]\n\nstruct HbaCmdHeader {\n\n // DW0\n\n cfl: Mmio<u8>, /* Command FIS length in DWORDS, 2 ~ 16, atapi: 4, write - host to device: 2, prefetchable: 1 */\n\n pm: Mmio<u8>, // Reset - 0x80, bist: 0x40, clear busy on ok: 0x20, port multiplier\n\n\n\n prdtl: Mmio<u16>, // Physical region descriptor table length in entries\n\n\n\n // DW1\n\n prdbc: Mmio<u32>, // Physical region descriptor byte count transferred\n\n\n\n // DW2, 3\n\n ctba: Mmio<u64>, // Command table descriptor base address\n\n\n\n // DW4 - 7\n\n rsv1: [Mmio<u32>; 4], // Reserved\n\n}\n", "file_path": "kernel/disk/ahci/hba.rs", "rank": 41, "score": 122265.72458346872 }, { "content": "#[repr(packed)]\n\nstruct HbaCmdTable {\n\n // 0x00\n\n cfis: [Mmio<u8>; 64], // Command FIS\n\n\n\n // 0x40\n\n acmd: [Mmio<u8>; 16], // ATAPI command, 12 or 16 bytes\n\n\n\n // 0x50\n\n rsv: [Mmio<u8>; 48], // Reserved\n\n\n\n // 0x80\n\n prdt_entry: [HbaPrdtEntry; 65536], // Physical region descriptor table entries, 0 ~ 65535\n\n}\n\n\n", "file_path": "kernel/disk/ahci/hba.rs", "rank": 42, "score": 122265.72458346872 }, { "content": "#[repr(packed)]\n\nstruct HbaPrdtEntry {\n\n dba: Mmio<u64>, // Data base address\n\n rsv0: Mmio<u32>, // Reserved\n\n dbc: Mmio<u32>, // Byte count, 4M max, interrupt = 1\n\n}\n\n\n", "file_path": "kernel/disk/ahci/hba.rs", 
"rank": 43, "score": 122265.72458346872 }, { "content": "/// Create a new directory, using a path\n\n/// The default mode of the directory is 744\n\npub fn create_dir<P: AsRef<Path>>(path: P) -> Result<()> {\n\n let path_str = path.as_ref().as_os_str().as_inner();\n\n let mut path_c = path_str.to_owned();\n\n path_c.push_str(\"\\0\");\n\n unsafe {\n\n sys_mkdir(path_c.as_ptr(), 755).and(Ok(())).map_err(|x| Error::from_sys(x))\n\n }\n\n}\n\n\n", "file_path": "libstd/src/fs.rs", "rank": 44, "score": 121611.10558701036 }, { "content": "/// Removes a file from the filesystem\n\npub fn remove_file<P: AsRef<Path>>(path: P) -> Result<()> {\n\n let path_str = path.as_ref().as_os_str().as_inner();\n\n let mut path_c = path_str.to_owned();\n\n path_c.push_str(\"\\0\");\n\n unsafe {\n\n sys_unlink(path_c.as_ptr()).and(Ok(()))\n\n }.map_err(|x| Error::from_sys(x))\n\n}\n", "file_path": "libstd/src/fs.rs", "rank": 45, "score": 121605.88703983228 }, { "content": "/// Removes an existing, empty directory\n\npub fn remove_dir<P: AsRef<Path>>(path: P) -> Result<()> {\n\n let path_str = path.as_ref().as_os_str().as_inner();\n\n let mut path_c = path_str.to_owned();\n\n path_c.push_str(\"\\0\");\n\n unsafe {\n\n sys_rmdir(path_c.as_ptr()).and(Ok(()))\n\n }.map_err(|x| Error::from_sys(x))\n\n}\n\n\n", "file_path": "libstd/src/fs.rs", "rank": 46, "score": 121605.88703983228 }, { "content": "pub fn pipe2(fds: *mut usize, _flags: usize) -> Result<usize> {\n\n let contexts = unsafe { & *::env().contexts.get() };\n\n let current = try!(contexts.current());\n\n if fds as usize > 0 {\n\n let read = box PipeRead::new();\n\n let write = box PipeWrite::new(&read);\n\n\n\n unsafe {\n\n *fds.offset(0) = current.next_fd();\n\n (*current.files.get()).push(ContextFile {\n\n fd: *fds.offset(0),\n\n resource: read,\n\n });\n\n\n\n *fds.offset(1) = current.next_fd();\n\n (*current.files.get()).push(ContextFile {\n\n fd: *fds.offset(1),\n\n resource: write,\n\n });\n\n }\n", "file_path": "kernel/syscall/fs.rs", "rank": 47, "score": 119740.91208051662 }, { "content": "pub fn open(path_c: *const u8, flags: usize) -> Result<usize> {\n\n let contexts = unsafe { & *::env().contexts.get() };\n\n let current = try!(contexts.current());\n\n let path = current.canonicalize(c_string_to_str(path_c));\n\n let url = try!(Url::from_str(&path));\n\n let resource = try!(::env().open(url, flags));\n\n let fd = current.next_fd();\n\n unsafe {\n\n (*current.files.get()).push(ContextFile {\n\n fd: fd,\n\n resource: resource,\n\n });\n\n }\n\n Ok(fd)\n\n}\n\n\n", "file_path": "kernel/syscall/fs.rs", "rank": 48, "score": 119740.91208051662 }, { "content": "/// Get information about a file\n\npub fn metadata<P: AsRef<Path>>(path: P) -> Result<Metadata> {\n\n let mut stat = Stat::default();\n\n let path_str = path.as_ref().as_os_str().as_inner();\n\n let mut path_c = path_str.to_owned();\n\n path_c.push_str(\"\\0\");\n\n unsafe {\n\n try!(sys_stat(path_c.as_ptr(), &mut stat).map_err(|x| Error::from_sys(x)));\n\n }\n\n Ok(Metadata {\n\n stat: stat\n\n })\n\n}\n\n\n", "file_path": "libstd/src/fs.rs", "rank": 49, "score": 119740.91208051662 }, { "content": "pub fn mkdir(path: *const u8, flags: usize) -> Result<usize> {\n\n let contexts = unsafe { & *::env().contexts.get() };\n\n let current = try!(contexts.current());\n\n let path_string = current.canonicalize(c_string_to_str(path));\n\n ::env().mkdir(try!(Url::from_str(&path_string)), flags).and(Ok(0))\n\n}\n\n\n\n/** <!-- @MANSTART{sys_open} -->\n\nNAME\n\n sys_open - open and possibly create a 
file\n\n\n\nSYNOPSIS\n\n sys_open(path: *const u8, flags: usize) -> Result<usize>;\n\n\n\nDESCRIPTION\n\n sys_open returns a file descriptor referencing path, creating path if O_CREAT is provided\n\n\n\n TODO: Open is very complicated, and has a lot of flags\n\n\n\nRETURN VALUE\n", "file_path": "kernel/syscall/fs.rs", "rank": 50, "score": 119740.91208051662 }, { "content": "pub fn sys_write(fd: usize, buf: &[u8]) -> Result<usize> {\n\n unsafe { syscall3(SYS_WRITE, fd, buf.as_ptr() as usize, buf.len()) }\n\n}\n\n\n", "file_path": "crates/system/syscall/unix.rs", "rank": 51, "score": 119637.91506331376 }, { "content": "/// Set the current directory\n\npub fn set_current_dir<P: AsRef<Path>>(path: P) -> Result<()> {\n\n let path_str = path.as_ref().as_os_str().as_inner();\n\n let file_result = if path_str.is_empty() || path_str.ends_with('/') {\n\n File::open(path_str)\n\n } else {\n\n let mut path_string = path_str.to_owned();\n\n path_string.push_str(\"/\");\n\n File::open(path_string)\n\n };\n\n\n\n match file_result {\n\n Ok(file) => {\n\n match file.path() {\n\n Ok(path) => {\n\n if let Some(path_str) = path.to_str() {\n\n let mut path_c = path_str.to_owned();\n\n path_c.push_str(\"\\0\");\n\n unsafe {\n\n sys_chdir(path_c.as_ptr()).and(Ok(()))\n\n }.map_err(|x| Error::from_sys(x))\n", "file_path": "libstd/src/env.rs", "rank": 52, "score": 119637.91506331376 }, { "content": "pub fn sys_ftruncate(fd: usize, len: usize) -> Result<usize> {\n\n unsafe { syscall2(SYS_FTRUNCATE, fd, len) }\n\n}\n\n\n", "file_path": "crates/system/syscall/unix.rs", "rank": 53, "score": 119637.91506331376 }, { "content": "/// Get information about a file without following symlinks\n\n/// Warning: Redox does not currently support symlinks\n\npub fn symlink_metadata<P: AsRef<Path>>(path: P) -> Result<Metadata> {\n\n metadata(path)\n\n}\n\n\n", "file_path": "libstd/src/fs.rs", "rank": 54, "score": 117772.94010399809 }, { "content": "/// Find the canonical path of a file\n\npub fn canonicalize<P: AsRef<Path>>(path: P) -> Result<PathBuf> {\n\n match File::open(path) {\n\n Ok(file) => {\n\n match file.path() {\n\n Ok(realpath) => Ok(realpath),\n\n Err(err) => Err(err)\n\n }\n\n },\n\n Err(err) => Err(err)\n\n }\n\n}\n\n\n", "file_path": "libstd/src/fs.rs", "rank": 55, "score": 117772.94010399809 }, { "content": "/// Add `message` to the kernel logs, with a priority level of `level`\n\npub fn syslog(level: LogLevel, message: &str) {\n\n syslog_inner(level, format_args!(\"{}\", message));\n\n}\n\n\n", "file_path": "kernel/logging.rs", "rank": 56, "score": 117572.68579714872 }, { "content": "#[allow(unused_variables)]\n\npub trait KScheme {\n\n fn on_irq(&mut self, irq: u8) {\n\n\n\n }\n\n\n\n fn scheme(&self) -> &str {\n\n \"\"\n\n }\n\n\n\n fn open(&mut self, path: Url, flags: usize) -> Result<Box<Resource>> {\n\n Err(Error::new(EPERM))\n\n }\n\n\n\n fn mkdir(&mut self, path: Url, flags: usize) -> Result<()> {\n\n Err(Error::new(EPERM))\n\n }\n\n\n\n fn rmdir(&mut self, path: Url) -> Result<()> {\n\n Err(Error::new(EPERM))\n\n }\n\n\n\n fn stat(&mut self, path: Url, stat: &mut Stat) -> Result<()> {\n\n Err(Error::new(EPERM))\n\n }\n\n\n\n fn unlink(&mut self, path: Url) -> Result<()> {\n\n Err(Error::new(EPERM))\n\n }\n\n}\n", "file_path": "kernel/fs/kscheme.rs", "rank": 57, "score": 117449.66789348086 }, { "content": "/// The `Seek` trait provides a cursor which can be moved within a stream of\n\n/// bytes.\n\n///\n\n/// The stream typically has a fixed size, allowing seeking relative to either\n\n/// end or the 
current offset.\n\n///\n\n/// # Examples\n\n///\n\n/// [`File`][file]s implement `Seek`:\n\n///\n\n/// [file]: ../fs/struct.File.html\n\n///\n\n/// ```\n\n/// use std::io;\n\n/// use std::io::prelude::*;\n\n/// use std::fs::File;\n\n/// use std::io::SeekFrom;\n\n///\n\n/// # fn foo() -> io::Result<()> {\n\n/// let mut f = try!(File::open(\"foo.txt\"));\n\n///\n\n/// // move the cursor 42 bytes from the start of the file\n\n/// try!(f.seek(SeekFrom::Start(42)));\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\npub trait Seek {\n\n /// Seek to an offset, in bytes, in a stream.\n\n ///\n\n /// A seek beyond the end of a stream is allowed, but implementation\n\n /// defined.\n\n ///\n\n /// If the seek operation completed successfully,\n\n /// this method returns the new position from the start of the stream.\n\n /// That position can be used later with `SeekFrom::Start`.\n\n ///\n\n /// # Errors\n\n ///\n\n /// Seeking to a negative offset is considered an error.\n\n fn seek(&mut self, pos: SeekFrom) -> Result<u64>;\n\n}\n\n\n\n/// Enumeration of possible methods to seek within an I/O object.\n\n#[derive(Copy, PartialEq, Eq, Clone, Debug)]\n\npub enum SeekFrom {\n\n /// Set the offset to the provided number of bytes.\n", "file_path": "libstd/src/io/mod.rs", "rank": 58, "score": 117267.74891754967 }, { "content": "/// The `Read` trait allows for reading bytes from a source.\n\n///\n\n/// Implementors of the `Read` trait are sometimes called 'readers'.\n\n///\n\n/// Readers are defined by one required method, `read()`. Each call to `read`\n\n/// will attempt to pull bytes from this source into a provided buffer. A\n\n/// number of other methods are implemented in terms of `read()`, giving\n\n/// implementors a number of ways to read bytes while only needing to implement\n\n/// a single method.\n\n///\n\n/// Readers are intended to be composable with one another. Many implementors\n\n/// throughout `std::io` take and provide types which implement the `Read`\n\n/// trait.\n\n///\n\n/// Please note that each call to `read` may involve a system call, and\n\n/// therefore, using something that implements [`BufRead`][bufread], such as\n\n/// [`BufReader`][bufreader], will be more efficient.\n\n///\n\n/// [bufread]: trait.BufRead.html\n\n/// [bufreader]: struct.BufReader.html\n\n///\n\n/// # Examples\n\n///\n\n/// [`File`][file]s implement `Read`:\n\n///\n\n/// [file]: ../fs/struct.File.html\n\n///\n\n/// ```\n\n/// use std::io;\n\n/// use std::io::prelude::*;\n\n/// use std::fs::File;\n\n///\n\n/// # fn foo() -> io::Result<()> {\n\n/// let mut f = try!(File::open(\"foo.txt\"));\n\n/// let mut buffer = [0; 10];\n\n///\n\n/// // read up to 10 bytes\n\n/// try!(f.read(&mut buffer));\n\n///\n\n/// let mut buffer = vec![0; 10];\n\n/// // read the whole file\n\n/// try!(f.read_to_end(&mut buffer));\n\n///\n\n/// // read into a String, so that you don't need to do the conversion.\n\n/// let mut buffer = String::new();\n\n/// try!(f.read_to_string(&mut buffer));\n\n///\n\n/// // and more! 
See the other methods for more details.\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\npub trait Read {\n\n /// Pull some bytes from this source into the specified buffer, returning\n\n /// how many bytes were read.\n\n ///\n\n /// This function does not provide any guarantees about whether it blocks\n\n /// waiting for data, but if an object needs to block for a read but cannot\n\n /// it will typically signal this via an `Err` return value.\n\n ///\n\n /// If the return value of this method is `Ok(n)`, then it must be\n\n /// guaranteed that `0 <= n <= buf.len()`. A nonzero `n` value indicates\n\n /// that the buffer `buf` has been filled in with `n` bytes of data from this\n\n /// source. If `n` is `0`, then it can indicate one of two scenarios:\n\n ///\n\n /// 1. This reader has reached its \"end of file\" and will likely no longer\n\n /// be able to produce bytes. Note that this does not mean that the\n\n /// reader will *always* no longer be able to produce bytes.\n\n /// 2. The buffer specified was 0 bytes in length.\n\n ///\n\n /// No guarantees are provided about the contents of `buf` when this\n\n /// function is called, implementations cannot rely on any property of the\n", "file_path": "libstd/src/io/mod.rs", "rank": 59, "score": 117265.16708151608 }, { "content": "/// A trait for objects which are byte-oriented sinks.\n\n///\n\n/// Implementors of the `Write` trait are sometimes called 'writers'.\n\n///\n\n/// Writers are defined by two required methods, `write()` and `flush()`:\n\n///\n\n/// * The `write()` method will attempt to write some data into the object,\n\n/// returning how many bytes were successfully written.\n\n///\n\n/// * The `flush()` method is useful for adaptors and explicit buffers\n\n/// themselves for ensuring that all buffered data has been pushed out to the\n\n/// 'true sink'.\n\n///\n\n/// Writers are intended to be composable with one another. Many implementors\n\n/// throughout `std::io` take and provide types which implement the `Write`\n\n/// trait.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use std::io::prelude::*;\n\n/// use std::fs::File;\n\n///\n\n/// # fn foo() -> std::io::Result<()> {\n\n/// let mut buffer = try!(File::create(\"foo.txt\"));\n\n///\n\n/// try!(buffer.write(b\"some bytes\"));\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\npub trait Write {\n\n /// Write a buffer into this object, returning how many bytes were written.\n\n ///\n\n /// This function will attempt to write the entire contents of `buf`, but\n\n /// the entire write may not succeed, or the write may also generate an\n\n /// error. A call to `write` represents *at most one* attempt to write to\n\n /// any wrapped object.\n\n ///\n\n /// Calls to `write` are not guaranteed to block waiting for data to be\n\n /// written, and a write which would otherwise block can be indicated through\n\n /// an `Err` variant.\n\n ///\n\n /// If the return value is `Ok(n)` then it must be guaranteed that\n\n /// `0 <= n <= buf.len()`. 
A return value of `0` typically means that the\n\n /// underlying object is no longer able to accept bytes and will likely not\n\n /// be able to in the future as well, or that the buffer provided is empty.\n\n ///\n\n /// # Errors\n\n ///\n\n /// Each call to `write` may generate an I/O error indicating that the\n", "file_path": "libstd/src/io/mod.rs", "rank": 60, "score": 117263.34261138132 }, { "content": "pub fn reset() {\n\n let mut port: Pio<u8> = Pio::new(0x64);\n\n while port.readf(0x02) {}\n\n port.write(0xFE);\n\n}\n", "file_path": "drivers/reboot/power/mod.rs", "rank": 61, "score": 117255.75825635632 }, { "content": "pub fn tsa<A: ToSocketAddrs>(a: A) -> Result<Vec<SocketAddr>, String> {\n\n match a.to_socket_addrs() {\n\n Ok(a) => Ok(a.collect()),\n\n Err(e) => Err(e.to_string()),\n\n }\n\n}\n\n\n", "file_path": "libstd/src/net/test.rs", "rank": 62, "score": 116047.21029829129 }, { "content": "pub fn lseek(fd: usize, offset: isize, whence: usize) -> Result<usize> {\n\n let contexts = unsafe { &mut *::env().contexts.get() };\n\n let mut current = try!(contexts.current_mut());\n\n let mut resource = try!(current.get_file_mut(fd));\n\n match whence {\n\n SEEK_SET => resource.seek(ResourceSeek::Start(offset as usize)),\n\n SEEK_CUR => resource.seek(ResourceSeek::Current(offset)),\n\n SEEK_END => resource.seek(ResourceSeek::End(offset)),\n\n _ => Err(Error::new(EINVAL)),\n\n }\n\n}\n\n\n\n/** <!-- @MANSTART{sys_mkdir} -->\n\nNAME\n\n sys_mkdir - create a directory\n\n\n\nSYNOPSIS\n\n sys_mkdir(path: *const u8, flags: usize) -> Result<usize>;\n\n\n\nDESCRIPTION\n", "file_path": "kernel/syscall/fs.rs", "rank": 63, "score": 116047.21029829129 }, { "content": "pub fn sys_fpath(fd: usize, buf: &mut [u8]) -> Result<usize> {\n\n unsafe { syscall3(SYS_FPATH, fd, buf.as_mut_ptr() as usize, buf.len()) }\n\n}\n\n\n", "file_path": "crates/system/syscall/unix.rs", "rank": 64, "score": 115907.58649109674 }, { "content": "/// Get the time of a given clock.\n\npub fn clock_gettime(clock: usize, tp: *mut TimeSpec) -> Result<usize> {\n\n let contexts = unsafe { & *::env().contexts.get() };\n\n let current = contexts.current()?;\n\n let tp_safe = current.get_ref_mut(tp)?;\n\n\n\n match clock {\n\n CLOCK_REALTIME => {\n\n let clock_realtime = Duration::realtime();\n\n tp_safe.tv_sec = clock_realtime.secs;\n\n tp_safe.tv_nsec = clock_realtime.nanos;\n\n Ok(0)\n\n }\n\n CLOCK_MONOTONIC => {\n\n let clock_monotonic = Duration::monotonic();\n\n tp_safe.tv_sec = clock_monotonic.secs;\n\n tp_safe.tv_nsec = clock_monotonic.nanos;\n\n Ok(0)\n\n }\n\n _ => Err(Error::new(EINVAL)),\n\n }\n\n}\n\n\n", "file_path": "kernel/syscall/time.rs", "rank": 65, "score": 115907.58649109674 }, { "content": "/// Return an iterator over the entries within a directory\n\npub fn read_dir<P: AsRef<Path>>(path: P) -> Result<ReadDir> {\n\n let path_buf = path.as_ref().to_owned();\n\n File::open(&path_buf).map(|file| ReadDir { path: path_buf, file: BufReader::new(file) })\n\n}\n\n\n", "file_path": "libstd/src/fs.rs", "rank": 66, "score": 115907.58649109674 }, { "content": "pub fn sys_read(fd: usize, buf: &mut [u8]) -> Result<usize> {\n\n unsafe { syscall3(SYS_READ, fd, buf.as_mut_ptr() as usize, buf.len()) }\n\n}\n\n\n\npub unsafe fn sys_rmdir(path: *const u8) -> Result<usize> {\n\n syscall1(SYS_RMDIR, path as usize)\n\n}\n\n\n\npub unsafe fn sys_stat(path: *const u8, stat: &mut Stat) -> Result<usize> {\n\n syscall2(SYS_STAT, path as usize, stat as *mut Stat as usize)\n\n}\n\n\n\npub unsafe fn sys_unlink(path: *const u8) -> 
Result<usize> {\n\n syscall1(SYS_UNLINK, path as usize)\n\n}\n\n\n", "file_path": "crates/system/syscall/unix.rs", "rank": 67, "score": 115907.58649109674 }, { "content": "/// Get the environment pointer.\n\n///\n\n/// This is unsafe, due to reading of a mutable static variable.\n\npub fn env() -> &'static Environment {\n\n unsafe {\n\n match ENV_PTR {\n\n Some(&mut ref p) => p,\n\n None => unreachable!(),\n\n }\n\n }\n\n}\n\n\n\n/// The PIT (programmable interval timer) duration.\n\n///\n\n/// This duration defines the PIT interval, which is added to the monotonic clock and the real time\n\n/// clock, when interrupt 0x20 is received.\n\nstatic PIT_DURATION: Duration = Duration {\n\n secs: 0,\n\n nanos: 4500572,\n\n};\n\n\n", "file_path": "kernel/main.rs", "rank": 68, "score": 114996.07570387745 }, { "content": "pub fn memory_used() -> usize {\n\n let mut ret = 0;\n\n\n\n unsafe {\n\n for i in 0..CLUSTER_COUNT {\n\n if cluster(i) != 0 && cluster(i) != 0xFFFFFFFF {\n\n ret += CLUSTER_SIZE;\n\n }\n\n }\n\n }\n\n\n\n ret\n\n}\n\n\n", "file_path": "kernel/arch/memory.rs", "rank": 69, "score": 113531.3318438503 }, { "content": "/// Create a standard input\n\npub fn stdin() -> Stdin {\n\n Stdin\n\n}\n\n\n\nimpl Stdin {\n\n pub fn lock(&self) -> StdinLock {\n\n StdinLock\n\n }\n\n\n\n pub fn read_line(&mut self, string: &mut String) -> Result<usize> {\n\n let mut i = 0;\n\n loop {\n\n let mut byte = [0];\n\n match sys_read(0, &mut byte) {\n\n Ok(0) => return Ok(i),\n\n Ok(_) => {\n\n unsafe { string.as_mut_vec().push(byte[0]) };\n\n i += 1;\n\n if byte[0] == b'\\n' {\n\n return Ok(i);\n", "file_path": "libstd/src/io/mod.rs", "rank": 70, "score": 112963.55006766717 }, { "content": "/// Create a standard output\n\npub fn stdout() -> Stdout {\n\n Stdout\n\n}\n\n\n\nimpl Stdout {\n\n pub fn lock(&self) -> StdoutLock {\n\n StdoutLock\n\n }\n\n}\n\n\n\n/// Write implementation for standard output\n\nimpl Write for Stdout {\n\n fn write(&mut self, buf: &[u8]) -> Result<usize> {\n\n sys_write(1, buf).map_err(|x| Error::from_sys(x))\n\n }\n\n\n\n fn flush(&mut self) -> Result<()> {\n\n sys_fsync(1).map_err(|x| Error::from_sys(x)).and(Ok(()))\n\n }\n\n}\n", "file_path": "libstd/src/io/mod.rs", "rank": 71, "score": 112963.55006766717 }, { "content": "/// Create a standard error\n\npub fn stderr() -> Stderr {\n\n Stderr\n\n}\n\n\n\nimpl Stderr {\n\n pub fn lock(&self) -> StderrLock {\n\n StderrLock\n\n }\n\n}\n\n\n\n/// Write implementation for standard error\n\nimpl Write for Stderr {\n\n fn write(&mut self, buf: &[u8]) -> Result<usize> {\n\n sys_write(2, buf).map_err(|x| Error::from_sys(x))\n\n }\n\n\n\n fn flush(&mut self) -> Result<()> {\n\n sys_fsync(2).map_err(|x| Error::from_sys(x)).and(Ok(()))\n\n }\n\n}\n", "file_path": "libstd/src/io/mod.rs", "rank": 72, "score": 112963.55006766717 }, { "content": "pub fn execve(path: *const u8, args: *const *const u8) -> Result<usize> {\n\n let mut args_vec = Vec::new();\n\n args_vec.push(c_string_to_str(path).to_string());\n\n for arg in c_array_to_slice(args) {\n\n args_vec.push(c_string_to_str(*arg).to_string());\n\n }\n\n\n\n execute(args_vec)\n\n}\n\n\n", "file_path": "kernel/syscall/process.rs", "rank": 73, "score": 112580.33850892204 }, { "content": "pub fn read(fd: usize, buf: *mut u8, count: usize) -> Result<usize> {\n\n let contexts = unsafe { &mut *::env().contexts.get() };\n\n let mut current = contexts.current_mut()?;\n\n let mut resource = current.get_file_mut(fd)?;\n\n if count > 0 {\n\n let buf_safe = current.get_slice_mut(buf, count)?;\n\n 
resource.read(buf_safe)\n\n } else {\n\n Ok(0)\n\n }\n\n}\n\n\n", "file_path": "kernel/syscall/fs.rs", "rank": 74, "score": 112580.33850892204 }, { "content": "pub fn write(fd: usize, buf: *const u8, count: usize) -> Result<usize> {\n\n let contexts = unsafe { &mut *::env().contexts.get() };\n\n let mut current = contexts.current_mut()?;\n\n let mut resource = current.get_file_mut(fd)?;\n\n if count > 0 {\n\n let buf_safe = current.get_slice(buf, count)?;\n\n resource.write(buf_safe)\n\n } else {\n\n Ok(0)\n\n }\n\n}\n", "file_path": "kernel/syscall/fs.rs", "rank": 75, "score": 112580.33850892204 }, { "content": "pub fn fpath(fd: usize, buf: *mut u8, count: usize) -> Result<usize> {\n\n let contexts = unsafe { & *::env().contexts.get() };\n\n let current = contexts.current()?;\n\n let resource = current.get_file(fd)?;\n\n if count > 0 {\n\n let buf_safe = current.get_slice_mut(buf, count)?;\n\n resource.path(buf_safe)\n\n } else {\n\n Ok(0)\n\n }\n\n}\n\n\n", "file_path": "kernel/syscall/fs.rs", "rank": 76, "score": 112580.33850892204 }, { "content": "pub fn sys_nanosleep(req: &TimeSpec, rem: &mut TimeSpec) -> Result<usize> {\n\n unsafe { syscall2(SYS_NANOSLEEP, req as *const TimeSpec as usize, rem as *mut TimeSpec as usize) }\n\n}\n\n\n\npub unsafe fn sys_open(path: *const u8, flags: usize, mode: usize) -> Result<usize> {\n\n syscall3(SYS_OPEN, path as usize, flags, mode)\n\n}\n\n\n\npub unsafe fn sys_pipe2(fds: *mut usize, flags: usize) -> Result<usize> {\n\n syscall2(SYS_PIPE2, fds as usize, flags)\n\n}\n\n\n", "file_path": "crates/system/syscall/unix.rs", "rank": 77, "score": 112454.2197873851 }, { "content": "pub fn sys_clock_gettime(clock: usize, tp: &mut TimeSpec) -> Result<usize> {\n\n unsafe { syscall2(SYS_CLOCK_GETTIME, clock, tp as *mut TimeSpec as usize) }\n\n}\n\n\n", "file_path": "crates/system/syscall/unix.rs", "rank": 78, "score": 112454.2197873851 }, { "content": "/// Sleep in N nanoseconds.\n\npub fn nanosleep(req: *const TimeSpec, rem: *mut TimeSpec) -> Result<usize> {\n\n {\n\n let contexts = unsafe { &mut *::env().contexts.get() };\n\n let mut current = try!(contexts.current_mut());\n\n\n\n // Copied with * to avoid borrow issue on current.blocked = true\n\n let req_safe = *current.get_ref(req)?;\n\n\n\n current.block(\"nanosleep\");\n\n current.wake = Some(Duration::monotonic() + Duration::new(req_safe.tv_sec, req_safe.tv_nsec));\n\n }\n\n\n\n unsafe { context_switch(); }\n\n\n\n {\n\n let contexts = unsafe { & *::env().contexts.get() };\n\n let current = try!(contexts.current());\n\n\n\n if let Ok(rem_safe) = current.get_ref_mut(rem) {\n\n rem_safe.tv_sec = 0;\n\n rem_safe.tv_nsec = 0;\n\n }\n\n }\n\n\n\n Ok(0)\n\n}\n", "file_path": "kernel/syscall/time.rs", "rank": 79, "score": 112411.29895296018 }, { "content": "pub fn sys_lseek(fd: usize, offset: isize, whence: usize) -> Result<usize> {\n\n unsafe { syscall3(SYS_LSEEK, fd, offset as usize, whence) }\n\n}\n\n\n\npub unsafe fn sys_mkdir(path: *const u8, mode: usize) -> Result<usize> {\n\n syscall2(SYS_MKDIR, path as usize, mode)\n\n}\n\n\n", "file_path": "crates/system/syscall/unix.rs", "rank": 80, "score": 112411.29895296018 }, { "content": "//TODO: Limit log message size\n\npub fn syslog_inner(level: LogLevel, message: fmt::Arguments) {\n\n let time = Duration::monotonic();\n\n\n\n let (prefix, display) = match level {\n\n LogLevel::Debug => (\"DEBUG \", false),\n\n LogLevel::Info => (\"INFO \", true),\n\n LogLevel::Warning => (\"WARN \", true),\n\n LogLevel::Error => (\"ERROR \", true),\n\n LogLevel::Critical => 
(\"CRIT \", true),\n\n };\n\n\n\n let _ = write!(unsafe { &mut *::env().log.get() }, \"[{}.{:>03}] {}{}\\n\", time.secs, time.nanos/1000000, prefix, message);\n\n if display {\n\n let _ = write!(::common::debug::SerialConsole::new(), \"[{}.{:>03}] {}{}\\n\", time.secs, time.nanos/1000000, prefix, message);\n\n }\n\n}\n", "file_path": "kernel/logging.rs", "rank": 81, "score": 112261.32223665515 }, { "content": "//TODO: Finish implementation, add more functions to WaitMap so that matching any or using WNOHANG works\n\npub fn waitpid(pid: isize, status_ptr: *mut usize, _options: usize) -> Result<usize> {\n\n let contexts = unsafe { &mut *::env().contexts.get() };\n\n let current = try!(contexts.current_mut());\n\n\n\n if pid > 0 {\n\n let status = current.statuses.receive(&(pid as usize), \"waitpid status\");\n\n\n\n if let Ok(status_safe) = current.get_ref_mut(status_ptr) {\n\n *status_safe = status;\n\n }\n\n\n\n Ok(pid as usize)\n\n } else {\n\n Err(Error::new(ECHILD))\n\n }\n\n}\n\n\n", "file_path": "kernel/syscall/process.rs", "rank": 82, "score": 110814.82658550648 }, { "content": "pub fn test() -> bool {\n\n use common::slice::GetSlice;\n\n let array = [1, 2, 3, 4, 5];\n\n\n\n test!(array.get_slice(100..100) == &[]);\n\n test!(array.get_slice(..100) == &array);\n\n test!(array.get_slice(1..) == &array[1..]);\n\n test!(array.get_slice(1..2) == &[2]);\n\n test!(array.get_slice(3..5) == &[4, 5]);\n\n test!(array.get_slice(3..7) == &[4, 5]);\n\n test!(array.get_slice(3..4) == &[4]);\n\n test!(array.get_slice(4..2) == &[]);\n\n test!(array.get_slice(4..1) == &[]);\n\n test!(array.get_slice(20..) == &[]);\n\n //test!(array.get_slice(..) == &array);\n\n succ!();\n\n}\n", "file_path": "kernel/schemes/test/get_slice.rs", "rank": 83, "score": 110523.81673283735 }, { "content": "/// A `BufRead` is a type of `Read`er which has an internal buffer, allowing it\n\n/// to perform extra ways of reading.\n\n///\n\n/// For example, reading line-by-line is inefficient without using a buffer, so\n\n/// if you want to read by line, you'll need `BufRead`, which includes a\n\n/// [`read_line()`][readline] method as well as a [`lines()`][lines] iterator.\n\n///\n\n/// [readline]: #method.read_line\n\n/// [lines]: #method.lines\n\n///\n\n/// # Examples\n\n///\n\n/// A locked standard input implements `BufRead`:\n\n///\n\n/// ```\n\n/// use std::io;\n\n/// use std::io::prelude::*;\n\n///\n\n/// let stdin = io::stdin();\n\n/// for line in stdin.lock().lines() {\n\n/// println!(\"{}\", line.unwrap());\n\n/// }\n\n/// ```\n\n///\n\n/// If you have something that implements `Read`, you can use the [`BufReader`\n\n/// type][bufreader] to turn it into a `BufRead`.\n\n///\n\n/// For example, [`File`][file] implements `Read`, but not `BufRead`.\n\n/// `BufReader` to the rescue!\n\n///\n\n/// [bufreader]: struct.BufReader.html\n\n/// [file]: ../fs/struct.File.html\n\n///\n\n/// ```\n\n/// use std::io::{self, BufReader};\n\n/// use std::io::prelude::*;\n\n/// use std::fs::File;\n\n///\n\n/// # fn foo() -> io::Result<()> {\n\n/// let f = try!(File::open(\"foo.txt\"));\n\n/// let f = BufReader::new(f);\n\n///\n\n/// for line in f.lines() {\n\n/// println!(\"{}\", line.unwrap());\n\n/// }\n\n///\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\n///\n\npub trait BufRead: Read {\n\n /// Fills the internal buffer of this object, returning the buffer contents.\n\n ///\n\n /// This function is a lower-level call. It needs to be paired with the\n\n /// [`consume`][consume] method to function properly. 
When calling this\n\n /// method, none of the contents will be \"read\" in the sense that later\n\n /// calling `read` may return the same contents. As such, `consume` must be\n\n /// called with the number of bytes that are consumed from this buffer to\n\n /// ensure that the bytes are never returned twice.\n\n ///\n\n /// [consume]: #tymethod.consume\n\n ///\n\n /// An empty buffer returned indicates that the stream has reached EOF.\n\n ///\n\n /// # Errors\n\n ///\n\n /// This function will return an I/O error if the underlying reader was\n\n /// read, but returned an error.\n\n ///\n\n /// # Examples\n", "file_path": "libstd/src/io/mod.rs", "rank": 84, "score": 110353.0236411027 }, { "content": "/// A trait for extracting representations from std types\n\npub trait IntoInner<Inner> {\n\n fn into_inner(self) -> Inner;\n\n}\n\n\n", "file_path": "libstd/src/sys/common/mod.rs", "rank": 85, "score": 110341.89341416009 }, { "content": "/// A trait for creating std types from internal representations\n\npub trait FromInner<Inner> {\n\n fn from_inner(inner: Inner) -> Self;\n\n}\n", "file_path": "libstd/src/sys/common/mod.rs", "rank": 86, "score": 110341.89341416009 }, { "content": "/// Rename a file or directory to a new name\n\npub fn rename<P: AsRef<Path>, Q: AsRef<Path>>(from: P, to: Q) -> Result<()> {\n\n try!(copy(Path::new(from.as_ref()), to));\n\n remove_file(from)\n\n}\n\n\n", "file_path": "libstd/src/fs.rs", "rank": 87, "score": 109699.896987688 }, { "content": "pub fn sys_waitpid(pid: usize, status: &mut usize, options: usize) -> Result<usize> {\n\n unsafe { syscall3(SYS_WAITPID, pid, status as *mut usize as usize, options) }\n\n}\n\n\n", "file_path": "crates/system/syscall/unix.rs", "rank": 88, "score": 109126.9718052104 }, { "content": "pub fn meta_test_woah() -> bool {\n\n test!(true == true);\n\n test!(true);\n\n succ!();\n\n}\n", "file_path": "kernel/schemes/test/meta.rs", "rank": 89, "score": 108053.90240933865 }, { "content": "/// Handle the syscall defined by the given registers.\n\n///\n\n/// AX defines which syscall to use. 
The arguments are provided in other registers, as specified by\n\n/// the specific sycall.\n\n///\n\n/// The return value is placed in AX, unless otherwise specified.\n\npub fn handle(regs: &mut Regs) {\n\n {\n\n let contexts = unsafe { &mut *::env().contexts.get() };\n\n if let Ok(cur) = contexts.current_mut() {\n\n cur.current_syscall = Some((regs.ip, regs.ax, regs.bx, regs.cx, regs.dx));\n\n // debugln!(\"PID {}: {} @ {:X}: {} {} {:X} {:X} {:X}\", cur.pid, cur.name, regs.ip, regs.ax, name(regs.ax), regs.bx, regs.cx, regs.dx);\n\n if cur.supervised {\n\n // Block the process.\n\n cur.blocked_syscall = true;\n\n cur.block(\"syscall::handle Supervise\");\n\n // Clear the timer.\n\n cur.wake = None;\n\n\n\n loop {\n\n if cur.blocked > 0 {\n\n unsafe { context_switch() };\n\n } else {\n\n return;\n\n }\n\n }\n", "file_path": "kernel/syscall/mod.rs", "rank": 90, "score": 107763.2455890821 }, { "content": "/// Copy the contents of one file to another\n\npub fn copy<P: AsRef<Path>, Q: AsRef<Path>>(from: P, to: Q) -> Result<u64> {\n\n let mut infile = try!(File::open(from));\n\n let mut outfile = try!(File::create(to));\n\n io::copy(&mut infile, &mut outfile)\n\n}\n\n\n", "file_path": "libstd/src/fs.rs", "rank": 91, "score": 106629.45929990684 }, { "content": "/// A trait for viewing representations from std types\n\npub trait AsInner<Inner: ?Sized> {\n\n fn as_inner(&self) -> &Inner;\n\n}\n\n\n", "file_path": "libstd/src/sys/common/mod.rs", "rank": 92, "score": 106523.72552351652 }, { "content": "pub fn meta_test_woah_fail() -> bool {\n\n test!(true == false);\n\n test!(false);\n\n fail!();\n\n}\n\n\n", "file_path": "kernel/schemes/test/meta.rs", "rank": 93, "score": 105727.77517255925 }, { "content": "#[allow(unused_must_use)]\n\npub fn _print(args: fmt::Arguments) {\n\n stdout().write_fmt(args);\n\n}\n", "file_path": "libstd/src/io/mod.rs", "rank": 94, "score": 105299.75385180018 }, { "content": "pub fn name(number: usize) -> &'static str {\n\n match number {\n\n // Redox\n\n SYS_SUPERVISE => \"supervise\",\n\n\n\n // Unix\n\n SYS_BRK => \"brk\",\n\n SYS_CHDIR => \"chdir\",\n\n SYS_CLONE => \"clone\",\n\n SYS_CLOSE => \"close\",\n\n SYS_CLOCK_GETTIME => \"clock_gettime\",\n\n SYS_DUP => \"dup\",\n\n SYS_EXECVE => \"execve\",\n\n SYS_EXIT => \"exit\",\n\n SYS_FPATH => \"fpath\",\n\n SYS_FSTAT => \"fstat\",\n\n SYS_FSYNC => \"fsync\",\n\n SYS_FTRUNCATE => \"ftruncate\",\n\n SYS_GETPID => \"getpid\",\n\n SYS_IOPL => \"iopl\",\n", "file_path": "kernel/syscall/mod.rs", "rank": 95, "score": 104172.75335972267 }, { "content": "/// Copies the entire contents of a reader into a writer.\n\n///\n\n/// This function will continuously read data from `reader` and then\n\n/// write it into `writer` in a streaming fashion until `reader`\n\n/// returns EOF.\n\n///\n\n/// On success, the total number of bytes that were copied from\n\n/// `reader` to `writer` is returned.\n\n///\n\n/// # Errors\n\n///\n\n/// This function will return an error immediately if any call to `read` or\n\n/// `write` returns an error. 
All instances of `ErrorKind::Interrupted` are\n\n/// handled by this function and the underlying operation is retried.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use std::io;\n\n///\n\n/// # fn foo() -> io::Result<()> {\n\n/// let mut reader: &[u8] = b\"hello\";\n\n/// let mut writer: Vec<u8> = vec![];\n\n///\n\n/// try!(io::copy(&mut reader, &mut writer));\n\n///\n\n/// assert_eq!(reader, &writer[..]);\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\npub fn copy<R: ?Sized, W: ?Sized>(reader: &mut R, writer: &mut W) -> io::Result<u64>\n\n where R: Read, W: Write\n\n{\n\n let mut buf = [0; super::DEFAULT_BUF_SIZE];\n\n let mut written = 0;\n\n loop {\n\n let len = match reader.read(&mut buf) {\n\n Ok(0) => return Ok(written),\n\n Ok(len) => len,\n\n Err(ref e) if e.kind() == ErrorKind::Interrupted => continue,\n\n Err(e) => return Err(e),\n\n };\n\n try!(writer.write_all(&buf[..len]));\n\n written += len as u64;\n\n }\n\n}\n\n\n\n/// A reader which is always at EOF.\n\n///\n\n/// This struct is generally created by calling [`empty()`][empty]. Please see\n\n/// the documentation of `empty()` for more details.\n\n///\n\n/// [empty]: fn.empty.html\n\npub struct Empty { _priv: () }\n\n\n", "file_path": "libstd/src/io/util.rs", "rank": 96, "score": 99151.77401425918 }, { "content": "use alloc::arc::Arc;\n\nuse alloc::boxed::Box;\n\n\n\nuse collections::borrow::ToOwned;\n\nuse collections::{String, Vec};\n\n\n\nuse core::cell::UnsafeCell;\n\nuse core::cmp;\n\nuse disk::Disk;\n\nuse fs::{KScheme, Resource, ResourceSeek, Url, VecResource};\n\n\n\nuse syscall::{MODE_DIR, MODE_FILE, Stat};\n\n\n\nuse system::error::{Error, Result, ENOENT};\n\n\n\n/// A disk resource\n\npub struct DiskResource {\n\n pub path: String,\n\n pub disk: Arc<UnsafeCell<Box<Disk>>>,\n\n pub seek: u64,\n", "file_path": "kernel/schemes/disk.rs", "rank": 97, "score": 97705.79314523931 }, { "content": "use alloc::boxed::Box;\n\n\n\nuse collections::string::{String, ToString};\n\n\n\nuse arch::context;\n\n\n\nuse fs::{KScheme, Resource, Url, VecResource};\n\n\n\nuse system::error::Result;\n\n\n\npub struct ContextScheme;\n\n\n\nimpl KScheme for ContextScheme {\n\n fn scheme(&self) -> &str {\n\n \"context\"\n\n }\n\n\n\n fn open(&mut self, _: Url, _: usize) -> Result<Box<Resource>> {\n\n let mut string = format!(\"{:<6}{:<6}{:<8}{:<8}{:<8}{:<6}{:<6}{:<6}{}\\n\",\n\n \"PID\",\n", "file_path": "kernel/schemes/context.rs", "rank": 98, "score": 97695.69833241189 } ]
Rust
src/lib.rs
antoyo/password-store-rs
b2615c12fec5d8957798fa8a745bdec1f1274719
/* * Copyright (c) 2016-2020 Boucher, Antoni <[email protected]> * * Permission is hereby granted, free of charge, to any person obtaining a copy of * this software and associated documentation files (the "Software"), to deal in * the Software without restriction, including without limitation the rights to * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of * the Software, and to permit persons to whom the Software is furnished to do so, * subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ #[macro_use] extern crate json; mod chomp; use std::error; use std::ffi::OsStr; use std::fmt::{self, Display, Formatter}; use std::io::{self, Write}; use std::process::{Command, Stdio}; use std::str::{self, Utf8Error}; use std::string; use json::JsonValue; use Error::*; use chomp::Chomp; macro_rules! validate_path { ($path:expr) => { if $path.trim().is_empty() { return Err(InvalidInput); } }; } const MSG_SIZE: usize = 4; #[derive(Debug)] pub enum Error { FromUtf8(string::FromUtf8Error), Json(json::Error), Io(io::Error), InvalidInput, InvalidOutput, Pass(String), Utf8(Utf8Error), } impl From<json::Error> for Error { fn from(error: json::Error) -> Self { Json(error) } } impl From<io::Error> for Error { fn from(error: io::Error) -> Self { Io(error) } } impl From<Utf8Error> for Error { fn from(error: Utf8Error) -> Self { Utf8(error) } } impl From<string::FromUtf8Error> for Error { fn from(error: string::FromUtf8Error) -> Self { FromUtf8(error) } } impl Display for Error { fn fmt(&self, formatter: &mut Formatter) -> fmt::Result { let string = match *self { FromUtf8(ref error) => error.to_string(), Json(ref error) => error.to_string(), Io(ref error) => error.to_string(), InvalidInput => "invalid input".to_string(), InvalidOutput => "invalid output".to_string(), Pass(ref error) => error.clone(), Utf8(ref error) => error.to_string(), }; write!(formatter, "{}", string) } } impl error::Error for Error { fn description(&self) -> &str { match *self { FromUtf8(ref error) => error.description(), Json(ref error) => error.description(), Io(ref error) => error.description(), InvalidInput => "invalid input", InvalidOutput => "invalid output", Pass(ref error) => error, Utf8(ref error) => error.description(), } } } pub type Result<T> = std::result::Result<T, Error>; pub struct PasswordStore; impl PasswordStore { pub fn get(path: &str) -> Result<(String, String)> { validate_path!(path); let mut response = gopass_ipc(object! { "type" => "getLogin", "entry" => path })?; if let (Some(mut username), Some(password)) = (response["username"].take_string(), response["password"].take_string()) { if username.is_empty() { username = path.to_string(); } Ok((username, password)) } else { Err(InvalidOutput) } } pub fn get_usernames(path: &str) -> Result<Vec<String>> { validate_path!(path); let response = gopass_ipc(object! 
{ "type" => "query", "query" => path })?; let mut result = vec![]; match response { JsonValue::Array(usernames) => { for username in usernames { let username = match username.as_str() { Some(username) => username, None => return Err(InvalidOutput), }; let index = username.rfind('/').map(|index| index + 1).unwrap_or(0); result.push(username[index..].to_string()); } }, _ => return Err(InvalidOutput), } Ok(result) } pub fn generate(path: &str, use_symbols: bool, length: i32) -> Result<()> { validate_path!(path); let response = gopass_ipc(object! { "type" => "create", "entry_name" => path, "password" => "", "generate" => true, "length" => length, "use_symbols" => use_symbols })?; if response["username"].as_str().is_none() { return Err(InvalidOutput); } Ok(()) } pub fn insert(path: &str, password: &str) -> Result<()> { validate_path!(path); let response = gopass_ipc(object! { "type" => "create", "entry_name" => path, "password" => password })?; if let Some(inserted_password) = response["password"].as_str() { if password != inserted_password { return Err(InvalidOutput); } } Ok(()) } pub fn remove(path: &str) -> Result<()> { validate_path!(path); exec_pass("rm", &["-f", path])?; Ok(()) } } fn exec_pass<S: AsRef<OsStr>>(command: &str, args: &[S]) -> Result<String> { let mut process = Command::new("gopass"); if !command.trim().is_empty() { process.arg(command); } let child = process.args(args) .stderr(Stdio::piped()) .stdin(Stdio::piped()) .stdout(Stdio::piped()) .spawn()?; let output = child.wait_with_output()?; let mut stderr = String::from_utf8(output.stderr)?; if !stderr.is_empty() { stderr.chomp(); Err(Pass(stderr)) } else { Ok(String::from_utf8(output.stdout)?) } } fn gopass_ipc(json_query: JsonValue) -> Result<JsonValue> { let mut process = Command::new("gopass-jsonapi"); let mut child = process.args(&["listen"]) .stderr(Stdio::piped()) .stdin(Stdio::piped()) .stdout(Stdio::piped()) .spawn()?; if let Some(stdin) = child.stdin.as_mut() { let json_string = json_query.dump(); stdin.write_all(&i32_to_bytes(json_string.len() as i32))?; write!(stdin, "{}", json_string)?; } let output = child.wait_with_output()?; let mut stderr = String::from_utf8(output.stderr)?; if !stderr.is_empty() { stderr.chomp(); Err(Pass(stderr)) } else { json::parse(str::from_utf8(&output.stdout[MSG_SIZE..])?) .map_err(Into::into) } } fn i32_to_bytes(num: i32) -> Vec<u8> { vec![ (num & 0xFF) as u8, ((num >> 8) & 0xFF) as u8, ((num >> 16) & 0xFF) as u8, ((num >> 24) & 0xFF) as u8, ] }
/* * Copyright (c) 2016-2020 Boucher, Antoni <[email protected]> * * Permission is hereby granted, free of charge, to any person obtaining a copy of * this software and associated documentation files (the "Software"), to deal in * the Software without restriction, including without limitation the rights to * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of * the Software, and to permit persons to whom the Software is furnished to do so, * subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ #[macro_use] extern crate json; mod chomp; use std::error; use std::ffi::OsStr; use std::fmt::{self, Display, Formatter}; use std::io::{self, Write}; use std::process::{Command, Stdio}; use std::str::{self, Utf8Error}; use std::string; use json::JsonValue; use Error::*; use chomp::Chomp; macro_rules! validate_path { ($path:expr) => { if $path.trim().is_empty() { return Err(InvalidInput); } }; } const MSG_SIZE: usize = 4; #[derive(Debug)] pub enum Error { FromUtf8(string::FromUtf8Error), Json(json::Error), Io(io::Error), InvalidInput, InvalidOutput, Pass(String), Utf8(Utf8Error), } impl From<json::Error> for Error { fn from(error: json::Error) -> Self { Json(error) } } impl From<io::Error> for Error { fn from(error: io::Error) -> Self { Io(error) } } impl From<Utf8Error> for Error { fn from(error: Utf8Error) -> Self { Utf8(error) } } impl From<string::FromUtf8Error> for Error { fn from(error: string::FromUtf8Error) -> Self { FromUtf8(error) } } impl Display for Error { fn fmt(&self, fo
=> "invalid output".to_string(), Pass(ref error) => error.clone(), Utf8(ref error) => error.to_string(), }; write!(formatter, "{}", string) } } impl error::Error for Error { fn description(&self) -> &str { match *self { FromUtf8(ref error) => error.description(), Json(ref error) => error.description(), Io(ref error) => error.description(), InvalidInput => "invalid input", InvalidOutput => "invalid output", Pass(ref error) => error, Utf8(ref error) => error.description(), } } } pub type Result<T> = std::result::Result<T, Error>; pub struct PasswordStore; impl PasswordStore { pub fn get(path: &str) -> Result<(String, String)> { validate_path!(path); let mut response = gopass_ipc(object! { "type" => "getLogin", "entry" => path })?; if let (Some(mut username), Some(password)) = (response["username"].take_string(), response["password"].take_string()) { if username.is_empty() { username = path.to_string(); } Ok((username, password)) } else { Err(InvalidOutput) } } pub fn get_usernames(path: &str) -> Result<Vec<String>> { validate_path!(path); let response = gopass_ipc(object! { "type" => "query", "query" => path })?; let mut result = vec![]; match response { JsonValue::Array(usernames) => { for username in usernames { let username = match username.as_str() { Some(username) => username, None => return Err(InvalidOutput), }; let index = username.rfind('/').map(|index| index + 1).unwrap_or(0); result.push(username[index..].to_string()); } }, _ => return Err(InvalidOutput), } Ok(result) } pub fn generate(path: &str, use_symbols: bool, length: i32) -> Result<()> { validate_path!(path); let response = gopass_ipc(object! { "type" => "create", "entry_name" => path, "password" => "", "generate" => true, "length" => length, "use_symbols" => use_symbols })?; if response["username"].as_str().is_none() { return Err(InvalidOutput); } Ok(()) } pub fn insert(path: &str, password: &str) -> Result<()> { validate_path!(path); let response = gopass_ipc(object! { "type" => "create", "entry_name" => path, "password" => password })?; if let Some(inserted_password) = response["password"].as_str() { if password != inserted_password { return Err(InvalidOutput); } } Ok(()) } pub fn remove(path: &str) -> Result<()> { validate_path!(path); exec_pass("rm", &["-f", path])?; Ok(()) } } fn exec_pass<S: AsRef<OsStr>>(command: &str, args: &[S]) -> Result<String> { let mut process = Command::new("gopass"); if !command.trim().is_empty() { process.arg(command); } let child = process.args(args) .stderr(Stdio::piped()) .stdin(Stdio::piped()) .stdout(Stdio::piped()) .spawn()?; let output = child.wait_with_output()?; let mut stderr = String::from_utf8(output.stderr)?; if !stderr.is_empty() { stderr.chomp(); Err(Pass(stderr)) } else { Ok(String::from_utf8(output.stdout)?) } } fn gopass_ipc(json_query: JsonValue) -> Result<JsonValue> { let mut process = Command::new("gopass-jsonapi"); let mut child = process.args(&["listen"]) .stderr(Stdio::piped()) .stdin(Stdio::piped()) .stdout(Stdio::piped()) .spawn()?; if let Some(stdin) = child.stdin.as_mut() { let json_string = json_query.dump(); stdin.write_all(&i32_to_bytes(json_string.len() as i32))?; write!(stdin, "{}", json_string)?; } let output = child.wait_with_output()?; let mut stderr = String::from_utf8(output.stderr)?; if !stderr.is_empty() { stderr.chomp(); Err(Pass(stderr)) } else { json::parse(str::from_utf8(&output.stdout[MSG_SIZE..])?) 
.map_err(Into::into) } } fn i32_to_bytes(num: i32) -> Vec<u8> { vec![ (num & 0xFF) as u8, ((num >> 8) & 0xFF) as u8, ((num >> 16) & 0xFF) as u8, ((num >> 24) & 0xFF) as u8, ] }
rmatter: &mut Formatter) -> fmt::Result { let string = match *self { FromUtf8(ref error) => error.to_string(), Json(ref error) => error.to_string(), Io(ref error) => error.to_string(), InvalidInput => "invalid input".to_string(), InvalidOutput
function_block-random_span
[ { "content": "pub trait Chomp {\n\n fn chomp(&mut self);\n\n}\n\n\n\nimpl Chomp for String {\n\n fn chomp(&mut self) {\n\n if self.chars().last() == Some('\\n') {\n\n self.pop();\n\n }\n\n }\n\n}\n", "file_path": "src/chomp.rs", "rank": 0, "score": 50258.440840050505 }, { "content": "fn main() {\n\n PasswordStore::insert(\"pass\", \"password\").unwrap();\n\n let password = PasswordStore::get(\"pass\");\n\n println!(\"Password: {:?}\", password);\n\n let usernames = PasswordStore::get_usernames(\"pass\").unwrap();\n\n println!(\"Usernames: {:?}\", usernames);\n\n PasswordStore::remove(\"pass\").unwrap();\n\n\n\n PasswordStore::insert(\"test/pass\", \"password\").unwrap();\n\n let password = PasswordStore::get(\"test/pass\");\n\n println!(\"Password: {:?}\", password);\n\n let usernames = PasswordStore::get_usernames(\"test/pass\").unwrap();\n\n println!(\"Usernames: {:?}\", usernames);\n\n PasswordStore::remove(\"test/pass\").unwrap();\n\n\n\n PasswordStore::insert(\"test with spaces/pass with spaces\", \"password\").unwrap();\n\n let password = PasswordStore::get(\"test with spaces/pass with spaces\");\n\n println!(\"{:?}\", password);\n\n let usernames = PasswordStore::get_usernames(\"test with spaces\").unwrap();\n\n println!(\"{:?}\", usernames);\n\n PasswordStore::remove(\"test with spaces/pass with spaces\").unwrap();\n\n\n\n PasswordStore::generate(\"pass\", true, 25).unwrap();\n\n let password = PasswordStore::get(\"pass\");\n\n println!(\"Password: {:?}\", password);\n\n let usernames = PasswordStore::get_usernames(\"pass\").unwrap();\n\n println!(\"Usernames: {:?}\", usernames);\n\n PasswordStore::remove(\"pass\").unwrap();\n\n}\n", "file_path": "examples/main.rs", "rank": 2, "score": 19795.131253338852 }, { "content": "/*\n\n * Copyright (c) 2017 Boucher, Antoni <[email protected]>\n\n *\n\n * Permission is hereby granted, free of charge, to any person obtaining a copy of\n\n * this software and associated documentation files (the \"Software\"), to deal in\n\n * the Software without restriction, including without limitation the rights to\n\n * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of\n\n * the Software, and to permit persons to whom the Software is furnished to do so,\n\n * subject to the following conditions:\n\n *\n\n * The above copyright notice and this permission notice shall be included in all\n\n * copies or substantial portions of the Software.\n\n *\n\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS\n\n * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR\n\n * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER\n\n * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN\n\n * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n */\n\n\n", "file_path": "src/chomp.rs", "rank": 3, "score": 14843.545250147115 }, { "content": "/*\n\n * Copyright (c) 2016 Boucher, Antoni <[email protected]>\n\n *\n\n * Permission is hereby granted, free of charge, to any person obtaining a copy of\n\n * this software and associated documentation files (the \"Software\"), to deal in\n\n * the Software without restriction, including without limitation the rights to\n\n * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of\n\n * the Software, and to permit persons to whom the Software is furnished to do so,\n\n * subject to the following conditions:\n\n *\n\n * The above copyright notice and this permission notice shall be included in all\n\n * copies or substantial portions of the Software.\n\n *\n\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS\n\n * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR\n\n * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER\n\n * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN\n\n * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n */\n\n\n\nextern crate password_store;\n\n\n\nuse password_store::PasswordStore;\n\n\n", "file_path": "examples/main.rs", "rank": 7, "score": 49.26240690946592 } ]
Rust
gstreamer-app/src/app_src.rs
kad3nce/gstreamer-rs
fcc361f920c9c4d0926cc90de997a0293a21b0b1
use glib::translate::*;
use glib_sys::{gboolean, gpointer};
use gst;
use gst_app_sys;
use std::cell::RefCell;
use std::mem;
use std::ptr;
use AppSrc;

#[allow(clippy::type_complexity)]
pub struct AppSrcCallbacks {
    need_data: Option<RefCell<Box<dyn FnMut(&AppSrc, u32) + Send + 'static>>>,
    enough_data: Option<Box<dyn Fn(&AppSrc) + Send + Sync + 'static>>,
    seek_data: Option<Box<dyn Fn(&AppSrc, u64) -> bool + Send + Sync + 'static>>,
    callbacks: gst_app_sys::GstAppSrcCallbacks,
}

unsafe impl Send for AppSrcCallbacks {}
unsafe impl Sync for AppSrcCallbacks {}

impl AppSrcCallbacks {
    #[allow(clippy::new_ret_no_self)]
    pub fn new() -> AppSrcCallbacksBuilder {
        skip_assert_initialized!();

        AppSrcCallbacksBuilder {
            need_data: None,
            enough_data: None,
            seek_data: None,
        }
    }
}

#[allow(clippy::type_complexity)]
pub struct AppSrcCallbacksBuilder {
    need_data: Option<RefCell<Box<dyn FnMut(&AppSrc, u32) + Send + 'static>>>,
    enough_data: Option<Box<dyn Fn(&AppSrc) + Send + Sync + 'static>>,
    seek_data: Option<Box<dyn Fn(&AppSrc, u64) -> bool + Send + Sync + 'static>>,
}

impl AppSrcCallbacksBuilder {
    pub fn need_data<F: FnMut(&AppSrc, u32) + Send + 'static>(self, need_data: F) -> Self {
        Self {
            need_data: Some(RefCell::new(Box::new(need_data))),
            ..self
        }
    }

    pub fn enough_data<F: Fn(&AppSrc) + Send + Sync + 'static>(self, enough_data: F) -> Self {
        Self {
            enough_data: Some(Box::new(enough_data)),
            ..self
        }
    }

    pub fn seek_data<F: Fn(&AppSrc, u64) -> bool + Send + Sync + 'static>(
        self,
        seek_data: F,
    ) -> Self {
        Self {
            seek_data: Some(Box::new(seek_data)),
            ..self
        }
    }

    pub fn build(self) -> AppSrcCallbacks {
        let have_need_data = self.need_data.is_some();
        let have_enough_data = self.enough_data.is_some();
        let have_seek_data = self.seek_data.is_some();

        AppSrcCallbacks {
            need_data: self.need_data,
            enough_data: self.enough_data,
            seek_data: self.seek_data,
            callbacks: gst_app_sys::GstAppSrcCallbacks {
                need_data: if have_need_data {
                    Some(trampoline_need_data)
                } else {
                    None
                },
                enough_data: if have_enough_data {
                    Some(trampoline_enough_data)
                } else {
                    None
                },
                seek_data: if have_seek_data {
                    Some(trampoline_seek_data)
                } else {
                    None
                },
                _gst_reserved: [
                    ptr::null_mut(),
                    ptr::null_mut(),
                    ptr::null_mut(),
                    ptr::null_mut(),
                ],
            },
        }
    }
}

unsafe extern "C" fn trampoline_need_data(
    appsrc: *mut gst_app_sys::GstAppSrc,
    length: u32,
    callbacks: gpointer,
) {
    let callbacks = &*(callbacks as *const AppSrcCallbacks);

    if let Some(ref need_data) = callbacks.need_data {
        (&mut *need_data.borrow_mut())(&from_glib_borrow(appsrc), length);
    }
}

unsafe extern "C" fn trampoline_enough_data(
    appsrc: *mut gst_app_sys::GstAppSrc,
    callbacks: gpointer,
) {
    let callbacks = &*(callbacks as *const AppSrcCallbacks);

    if let Some(ref enough_data) = callbacks.enough_data {
        (*enough_data)(&from_glib_borrow(appsrc));
    }
}

unsafe extern "C" fn trampoline_seek_data(
    appsrc: *mut gst_app_sys::GstAppSrc,
    offset: u64,
    callbacks: gpointer,
) -> gboolean {
    let callbacks = &*(callbacks as *const AppSrcCallbacks);

    let ret = if let Some(ref seek_data) = callbacks.seek_data {
        (*seek_data)(&from_glib_borrow(appsrc), offset)
    } else {
        false
    };

    ret.to_glib()
}

unsafe extern "C" fn destroy_callbacks(ptr: gpointer) {
    Box::<AppSrcCallbacks>::from_raw(ptr as *mut _);
}

impl AppSrc {
    pub fn end_of_stream(&self) -> Result<gst::FlowSuccess, gst::FlowError> {
        let ret: gst::FlowReturn = unsafe {
            from_glib(gst_app_sys::gst_app_src_end_of_stream(
                self.to_glib_none().0,
            ))
        };
        ret.into_result()
    }

    pub fn push_buffer(&self, buffer: gst::Buffer) -> Result<gst::FlowSuccess, gst::FlowError> {
        let ret: gst::FlowReturn = unsafe {
            from_glib(gst_app_sys::gst_app_src_push_buffer(
                self.to_glib_none().0,
                buffer.into_ptr(),
            ))
        };
        ret.into_result()
    }

    #[cfg(any(feature = "v1_14", feature = "dox"))]
    pub fn push_buffer_list(
        &self,
        list: gst::BufferList,
    ) -> Result<gst::FlowSuccess, gst::FlowError> {
        let ret: gst::FlowReturn = unsafe {
            from_glib(gst_app_sys::gst_app_src_push_buffer_list(
                self.to_glib_none().0,
                list.into_ptr(),
            ))
        };
        ret.into_result()
    }

    pub fn push_sample(&self, sample: &gst::Sample) -> Result<gst::FlowSuccess, gst::FlowError> {
        let ret: gst::FlowReturn = unsafe {
            from_glib(gst_app_sys::gst_app_src_push_sample(
                self.to_glib_none().0,
                sample.to_glib_none().0,
            ))
        };
        ret.into_result()
    }

    pub fn set_callbacks(&self, callbacks: AppSrcCallbacks) {
        unsafe {
            gst_app_sys::gst_app_src_set_callbacks(
                self.to_glib_none().0,
                mut_override(&callbacks.callbacks),
                Box::into_raw(Box::new(callbacks)) as *mut _,
                Some(destroy_callbacks),
            );
        }
    }

    pub fn set_latency(&self, min: gst::ClockTime, max: gst::ClockTime) {
        unsafe {
            gst_app_sys::gst_app_src_set_latency(
                self.to_glib_none().0,
                min.to_glib(),
                max.to_glib(),
            );
        }
    }

    pub fn get_latency(&self) -> (gst::ClockTime, gst::ClockTime) {
        unsafe {
            let mut min = mem::MaybeUninit::uninit();
            let mut max = mem::MaybeUninit::uninit();
            gst_app_sys::gst_app_src_get_latency(
                self.to_glib_none().0,
                min.as_mut_ptr(),
                max.as_mut_ptr(),
            );
            (from_glib(min.assume_init()), from_glib(max.assume_init()))
        }
    }
}
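For context, a minimal usage sketch of the builder defined above might look like the following. This is illustrative only and not part of the record: it assumes the bindings are imported as gst and gst_app, that an "appsrc" element can be created from the element factory and downcast to AppSrc, and that pushing an empty buffer stands in for real data.

// Illustrative sketch (not part of the dataset record): registering
// AppSrcCallbacks on an appsrc element and pushing buffers on demand.
use gst::prelude::*;
use gst_app::{AppSrc, AppSrcCallbacks};

fn main() {
    gst::init().expect("failed to initialize GStreamer");

    // Create a generic appsrc element and downcast it to the typed wrapper.
    let appsrc = gst::ElementFactory::make("appsrc", None)
        .expect("could not create appsrc")
        .dynamic_cast::<AppSrc>()
        .expect("element is not an appsrc");

    // Wire up the callbacks built with AppSrcCallbacksBuilder.
    appsrc.set_callbacks(
        AppSrcCallbacks::new()
            .need_data(|appsrc, _length| {
                // The element asks for more data: push a (placeholder) buffer.
                let buffer = gst::Buffer::new();
                let _ = appsrc.push_buffer(buffer);
            })
            .enough_data(|_appsrc| {
                // The internal queue is full: pause production until
                // need_data fires again.
            })
            .build(),
    );

    // Signal that no further buffers will follow.
    let _ = appsrc.end_of_stream();
}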
function_block-full_function
[ { "content": "fn into_raw_pad_task<F: FnMut() + Send + 'static>(func: F) -> gpointer {\n\n #[allow(clippy::type_complexity)]\n\n let func: Box<RefCell<F>> = Box::new(RefCell::new(func));\n\n Box::into_raw(func) as gpointer\n\n}\n\n\n\nunsafe extern \"C\" fn destroy_closure_pad_task<F>(ptr: gpointer) {\n\n Box::<RefCell<F>>::from_raw(ptr as *mut _);\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use prelude::*;\n\n use std::sync::mpsc::channel;\n\n use std::sync::{Arc, Mutex};\n\n\n\n #[test]\n\n fn test_event_chain_functions() {\n\n ::init().unwrap();\n", "file_path": "gstreamer/src/pad.rs", "rank": 0, "score": 381959.3618226971 }, { "content": "fn into_raw_sync<F: Fn(&Bus, &Message) -> BusSyncReply + Send + Sync + 'static>(\n\n func: F,\n\n) -> gpointer {\n\n let func: Box<F> = Box::new(func);\n\n Box::into_raw(func) as gpointer\n\n}\n\n\n\nimpl Bus {\n\n pub fn add_signal_watch_full(&self, priority: Priority) {\n\n unsafe {\n\n gst_sys::gst_bus_add_signal_watch_full(self.to_glib_none().0, priority.to_glib());\n\n }\n\n }\n\n\n\n pub fn create_watch<F>(&self, name: Option<&str>, priority: Priority, func: F) -> glib::Source\n\n where\n\n F: FnMut(&Bus, &Message) -> Continue + Send + 'static,\n\n {\n\n skip_assert_initialized!();\n\n unsafe {\n", "file_path": "gstreamer/src/bus.rs", "rank": 1, "score": 377891.92075775843 }, { "content": "fn into_raw_wait_async<F: Fn(&Clock, ClockTime, &ClockId) + Send + 'static>(func: F) -> gpointer {\n\n #[allow(clippy::type_complexity)]\n\n let func: Box<F> = Box::new(func);\n\n Box::into_raw(func) as gpointer\n\n}\n\n\n\nimpl ClockId {\n\n pub fn get_time(&self) -> ClockTime {\n\n unsafe { from_glib(gst_sys::gst_clock_id_get_time(self.to_glib_none().0)) }\n\n }\n\n\n\n pub fn unschedule(&self) {\n\n unsafe { gst_sys::gst_clock_id_unschedule(self.to_glib_none().0) }\n\n }\n\n\n\n pub fn wait(&self) -> (Result<ClockSuccess, ClockError>, ClockTimeDiff) {\n\n unsafe {\n\n let mut jitter = 0;\n\n let res: ClockReturn = from_glib(gst_sys::gst_clock_id_wait(\n\n self.to_glib_none().0,\n", "file_path": "gstreamer/src/clock.rs", "rank": 2, "score": 358677.3186175135 }, { "content": "#[cfg(target_os = \"macos\")]\n\npub fn run<T, F: FnOnce() -> T + Send + 'static>(main: F) -> T\n\nwhere\n\n T: Send + 'static,\n\n{\n\n use std::thread;\n\n\n\n let l = runloop::CFRunLoop::get_main();\n\n let t = thread::spawn(move || {\n\n let res = main();\n\n l.stop();\n\n res\n\n });\n\n\n\n runloop::CFRunLoop::run();\n\n\n\n t.join().unwrap()\n\n}\n", "file_path": "tutorials/src/tutorials-common.rs", "rank": 3, "score": 358516.4700370481 }, { "content": "#[cfg(not(target_os = \"macos\"))]\n\npub fn run<T, F: FnOnce() -> T + Send + 'static>(main: F) -> T\n\nwhere\n\n T: Send + 'static,\n\n{\n\n main()\n\n}\n\n\n", "file_path": "examples/src/examples-common.rs", "rank": 4, "score": 358516.47003704816 }, { "content": "pub trait PipelineImpl: BinImpl + Send + Sync + 'static {}\n\n\n\nunsafe impl<T: ObjectSubclass + PipelineImpl> IsSubclassable<T> for PipelineClass\n\nwhere\n\n <T as ObjectSubclass>::Instance: PanicPoison,\n\n{\n\n fn override_vfuncs(&mut self) {\n\n <::BinClass as IsSubclassable<T>>::override_vfuncs(self);\n\n unsafe {\n\n let _klass = &mut *(self as *mut Self as *mut gst_sys::GstPipelineClass);\n\n // Nothing to do here\n\n }\n\n }\n\n}\n", "file_path": "gstreamer/src/subclass/pipeline.rs", "rank": 5, "score": 352956.85648169345 }, { "content": "pub trait GhostPadImpl: PadImpl + Send + Sync + 'static {}\n\n\n\nunsafe impl<T: ObjectSubclass + 
GhostPadImpl> IsSubclassable<T> for GhostPadClass {\n\n fn override_vfuncs(&mut self) {\n\n <::PadClass as IsSubclassable<T>>::override_vfuncs(self);\n\n unsafe {\n\n let _klass = &mut *(self as *mut Self as *mut gst_sys::GstGhostPadClass);\n\n // Nothing to do here\n\n }\n\n }\n\n}\n", "file_path": "gstreamer/src/subclass/ghost_pad.rs", "rank": 6, "score": 343857.28537119564 }, { "content": "fn into_raw_watch<F: FnMut(&RTSPSessionPool) -> Continue + Send + 'static>(func: F) -> gpointer {\n\n #[allow(clippy::type_complexity)]\n\n let func: Box<RefCell<F>> = Box::new(RefCell::new(func));\n\n Box::into_raw(func) as gpointer\n\n}\n\n\n", "file_path": "gstreamer-rtsp-server/src/rtsp_session_pool.rs", "rank": 7, "score": 343219.3050413131 }, { "content": "pub trait PadImpl: PadImplExt + ObjectImpl + Send + Sync + 'static {\n\n fn linked(&self, pad: &Pad, peer: &Pad) {\n\n self.parent_linked(pad, peer)\n\n }\n\n\n\n fn unlinked(&self, pad: &Pad, peer: &Pad) {\n\n self.parent_unlinked(pad, peer)\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/subclass/pad.rs", "rank": 8, "score": 340305.0196462622 }, { "content": "pub trait BinImpl: BinImplExt + ElementImpl + Send + Sync + 'static {\n\n fn add_element(&self, bin: &Bin, element: &Element) -> Result<(), LoggableError> {\n\n self.parent_add_element(bin, element)\n\n }\n\n\n\n fn remove_element(&self, bin: &Bin, element: &Element) -> Result<(), LoggableError> {\n\n self.parent_remove_element(bin, element)\n\n }\n\n\n\n fn handle_message(&self, bin: &Bin, message: Message) {\n\n self.parent_handle_message(bin, message)\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/subclass/bin.rs", "rank": 9, "score": 340305.0196462622 }, { "content": "pub trait ElementImpl: ElementImplExt + ObjectImpl + Send + Sync + 'static {\n\n fn change_state(\n\n &self,\n\n element: &::Element,\n\n transition: StateChange,\n\n ) -> Result<StateChangeSuccess, StateChangeError> {\n\n self.parent_change_state(element, transition)\n\n }\n\n\n\n fn request_new_pad(\n\n &self,\n\n element: &::Element,\n\n templ: &::PadTemplate,\n\n name: Option<String>,\n\n caps: Option<&::Caps>,\n\n ) -> Option<::Pad> {\n\n self.parent_request_new_pad(element, templ, name, caps)\n\n }\n\n\n\n fn release_pad(&self, element: &::Element, pad: &::Pad) {\n", "file_path": "gstreamer/src/subclass/element.rs", "rank": 10, "score": 340305.0196462622 }, { "content": "pub trait AggregatorImpl: AggregatorImplExt + ElementImpl + Send + Sync + 'static {\n\n fn flush(&self, aggregator: &Aggregator) -> Result<gst::FlowSuccess, gst::FlowError> {\n\n self.parent_flush(aggregator)\n\n }\n\n\n\n fn clip(\n\n &self,\n\n aggregator: &Aggregator,\n\n aggregator_pad: &AggregatorPad,\n\n buffer: gst::Buffer,\n\n ) -> Option<gst::Buffer> {\n\n self.parent_clip(aggregator, aggregator_pad, buffer)\n\n }\n\n\n\n fn finish_buffer(\n\n &self,\n\n aggregator: &Aggregator,\n\n buffer: gst::Buffer,\n\n ) -> Result<gst::FlowSuccess, gst::FlowError> {\n\n self.parent_finish_buffer(aggregator, buffer)\n", "file_path": "gstreamer-base/src/subclass/aggregator.rs", "rank": 11, "score": 336282.66611909505 }, { "content": "pub trait BaseSinkImpl: BaseSinkImplExt + ElementImpl + Send + Sync + 'static {\n\n fn start(&self, element: &BaseSink) -> Result<(), gst::ErrorMessage> {\n\n self.parent_start(element)\n\n }\n\n\n\n fn stop(&self, element: &BaseSink) -> Result<(), gst::ErrorMessage> {\n\n self.parent_stop(element)\n\n }\n\n\n\n fn render(\n\n &self,\n\n element: &BaseSink,\n\n buffer: &gst::Buffer,\n\n ) -> Result<gst::FlowSuccess, 
gst::FlowError>;\n\n\n\n fn prepare(\n\n &self,\n\n element: &BaseSink,\n\n buffer: &gst::Buffer,\n\n ) -> Result<gst::FlowSuccess, gst::FlowError> {\n", "file_path": "gstreamer-base/src/subclass/base_sink.rs", "rank": 12, "score": 325026.85345838114 }, { "content": "pub trait VideoEncoderImpl: VideoEncoderImplExt + ElementImpl + Send + Sync + 'static {\n\n fn open(&self, element: &VideoEncoder) -> Result<(), gst::ErrorMessage> {\n\n self.parent_open(element)\n\n }\n\n\n\n fn close(&self, element: &VideoEncoder) -> Result<(), gst::ErrorMessage> {\n\n self.parent_close(element)\n\n }\n\n\n\n fn start(&self, element: &VideoEncoder) -> Result<(), gst::ErrorMessage> {\n\n self.parent_start(element)\n\n }\n\n\n\n fn stop(&self, element: &VideoEncoder) -> Result<(), gst::ErrorMessage> {\n\n self.parent_stop(element)\n\n }\n\n\n\n fn finish(&self, element: &VideoEncoder) -> Result<gst::FlowSuccess, gst::FlowError> {\n\n self.parent_finish(element)\n\n }\n", "file_path": "gstreamer-video/src/subclass/video_encoder.rs", "rank": 13, "score": 325026.85345838114 }, { "content": "pub trait AggregatorPadImpl: AggregatorPadImplExt + PadImpl + Send + Sync + 'static {\n\n fn flush(\n\n &self,\n\n aggregator_pad: &AggregatorPad,\n\n aggregator: &Aggregator,\n\n ) -> Result<gst::FlowSuccess, gst::FlowError> {\n\n self.parent_flush(aggregator_pad, aggregator)\n\n }\n\n\n\n fn skip_buffer(\n\n &self,\n\n aggregator_pad: &AggregatorPad,\n\n aggregator: &Aggregator,\n\n buffer: &gst::Buffer,\n\n ) -> bool {\n\n self.parent_skip_buffer(aggregator_pad, aggregator, buffer)\n\n }\n\n}\n\n\n", "file_path": "gstreamer-base/src/subclass/aggregator_pad.rs", "rank": 14, "score": 325026.85345838114 }, { "content": "pub trait BaseSrcImpl: BaseSrcImplExt + ElementImpl + Send + Sync + 'static {\n\n fn start(&self, element: &BaseSrc) -> Result<(), gst::ErrorMessage> {\n\n self.parent_start(element)\n\n }\n\n\n\n fn stop(&self, element: &BaseSrc) -> Result<(), gst::ErrorMessage> {\n\n self.parent_stop(element)\n\n }\n\n\n\n fn is_seekable(&self, element: &BaseSrc) -> bool {\n\n self.parent_is_seekable(element)\n\n }\n\n\n\n fn get_size(&self, element: &BaseSrc) -> Option<u64> {\n\n self.parent_get_size(element)\n\n }\n\n\n\n fn fill(\n\n &self,\n\n element: &BaseSrc,\n", "file_path": "gstreamer-base/src/subclass/base_src.rs", "rank": 15, "score": 325026.85345838114 }, { "content": "pub trait BaseTransformImpl: BaseTransformImplExt + ElementImpl + Send + Sync + 'static {\n\n fn start(&self, element: &BaseTransform) -> Result<(), gst::ErrorMessage> {\n\n self.parent_start(element)\n\n }\n\n\n\n fn stop(&self, element: &BaseTransform) -> Result<(), gst::ErrorMessage> {\n\n self.parent_stop(element)\n\n }\n\n\n\n fn transform_caps(\n\n &self,\n\n element: &BaseTransform,\n\n direction: gst::PadDirection,\n\n caps: &gst::Caps,\n\n filter: Option<&gst::Caps>,\n\n ) -> Option<gst::Caps> {\n\n self.parent_transform_caps(element, direction, caps, filter)\n\n }\n\n\n\n fn fixate_caps(\n", "file_path": "gstreamer-base/src/subclass/base_transform.rs", "rank": 16, "score": 325026.85345838114 }, { "content": "pub trait BaseParseImpl: BaseParseImplExt + ElementImpl + Send + Sync + 'static {\n\n fn start(&self, element: &BaseParse) -> Result<(), gst::ErrorMessage> {\n\n self.parent_start(element)\n\n }\n\n\n\n fn set_sink_caps(\n\n &self,\n\n element: &BaseParse,\n\n caps: &gst::Caps,\n\n ) -> Result<(), gst::ErrorMessage> {\n\n self.parent_set_sink_caps(element, caps)\n\n }\n\n\n\n fn handle_frame<'a>(\n\n &'a self,\n\n element: 
&BaseParse,\n\n frame: BaseParseFrame,\n\n ) -> Result<(gst::FlowSuccess, u32), gst::FlowError> {\n\n self.parent_handle_frame(element, frame)\n\n }\n\n\n\n fn convert<V: Into<gst::GenericFormattedValue>>(\n\n &self,\n\n element: &BaseParse,\n\n src_val: V,\n\n dest_format: gst::Format,\n\n ) -> Option<gst::GenericFormattedValue>;\n\n}\n\n\n", "file_path": "gstreamer-base/src/subclass/base_parse.rs", "rank": 17, "score": 325026.85345838114 }, { "content": "pub trait VideoDecoderImpl: VideoDecoderImplExt + ElementImpl + Send + Sync + 'static {\n\n fn open(&self, element: &VideoDecoder) -> Result<(), gst::ErrorMessage> {\n\n self.parent_open(element)\n\n }\n\n\n\n fn close(&self, element: &VideoDecoder) -> Result<(), gst::ErrorMessage> {\n\n self.parent_close(element)\n\n }\n\n\n\n fn start(&self, element: &VideoDecoder) -> Result<(), gst::ErrorMessage> {\n\n self.parent_start(element)\n\n }\n\n\n\n fn stop(&self, element: &VideoDecoder) -> Result<(), gst::ErrorMessage> {\n\n self.parent_stop(element)\n\n }\n\n\n\n fn finish(&self, element: &VideoDecoder) -> Result<gst::FlowSuccess, gst::FlowError> {\n\n self.parent_finish(element)\n\n }\n", "file_path": "gstreamer-video/src/subclass/video_decoder.rs", "rank": 18, "score": 325026.85345838114 }, { "content": "pub trait URIHandlerImpl: super::element::ElementImpl + Send + Sync + 'static {\n\n fn get_uri(&self, element: &URIHandler) -> Option<String>;\n\n fn set_uri(&self, element: &URIHandler, uri: &str) -> Result<(), glib::Error>;\n\n fn get_uri_type() -> URIType;\n\n fn get_protocols() -> Vec<String>;\n\n}\n\n\n\nunsafe impl<T: ObjectSubclass + URIHandlerImpl> IsImplementable<T> for URIHandler {\n\n unsafe extern \"C\" fn interface_init(\n\n iface: glib_sys::gpointer,\n\n _iface_data: glib_sys::gpointer,\n\n ) {\n\n let uri_handler_iface = &mut *(iface as *mut gst_sys::GstURIHandlerInterface);\n\n\n\n // Store the protocols in the interface data for later use\n\n let mut data = T::type_data();\n\n let protocols = T::get_protocols();\n\n let protocols: *mut *const libc::c_char = protocols.to_glib_full();\n\n let data = data.as_mut();\n\n if data.interface_data.is_null() {\n", "file_path": "gstreamer/src/subclass/uri_handler.rs", "rank": 19, "score": 323564.9906602029 }, { "content": "pub trait ChildProxyImpl: super::element::ElementImpl + Send + Sync + 'static {\n\n fn get_child_by_name(&self, object: &ChildProxy, name: &str) -> Option<glib::Object> {\n\n unsafe {\n\n let type_ = gst_sys::gst_child_proxy_get_type();\n\n let iface = gobject_sys::g_type_default_interface_ref(type_)\n\n as *mut gst_sys::GstChildProxyInterface;\n\n assert!(!iface.is_null());\n\n\n\n let ret = ((*iface).get_child_by_name.as_ref().unwrap())(\n\n object.to_glib_none().0,\n\n name.to_glib_none().0,\n\n );\n\n\n\n gobject_sys::g_type_default_interface_unref(iface as glib_sys::gpointer);\n\n\n\n from_glib_full(ret)\n\n }\n\n }\n\n\n\n fn get_child_by_index(&self, object: &ChildProxy, index: u32) -> Option<glib::Object>;\n", "file_path": "gstreamer/src/subclass/child_proxy.rs", "rank": 20, "score": 323564.9906602029 }, { "content": "fn into_raw_watch<F: FnMut(&Bus, &Message) -> Continue + 'static>(func: F) -> gpointer {\n\n #[allow(clippy::type_complexity)]\n\n let func: Box<RefCell<F>> = Box::new(RefCell::new(func));\n\n Box::into_raw(func) as gpointer\n\n}\n\n\n\nunsafe extern \"C\" fn trampoline_sync<\n\n F: Fn(&Bus, &Message) -> BusSyncReply + Send + Sync + 'static,\n\n>(\n\n bus: *mut gst_sys::GstBus,\n\n msg: *mut gst_sys::GstMessage,\n\n func: gpointer,\n\n) -> 
gst_sys::GstBusSyncReply {\n\n let f: &F = &*(func as *const F);\n\n let res = f(&from_glib_borrow(bus), &Message::from_glib_borrow(msg)).to_glib();\n\n\n\n if res == gst_sys::GST_BUS_DROP {\n\n gst_sys::gst_mini_object_unref(msg as *mut _);\n\n }\n\n\n", "file_path": "gstreamer/src/bus.rs", "rank": 21, "score": 299985.42212260334 }, { "content": "pub fn is_force_key_unit_event(event: &gst::EventRef) -> bool {\n\n unsafe {\n\n from_glib(gst_video_sys::gst_video_event_is_force_key_unit(\n\n event.as_mut_ptr(),\n\n ))\n\n }\n\n}\n\n\n\n// FIXME: Copy from gstreamer/src/event.rs\n\nmacro_rules! event_builder_generic_impl {\n\n ($new_fn:expr) => {\n\n pub fn seqnum(self, seqnum: gst::Seqnum) -> Self {\n\n Self {\n\n seqnum: Some(seqnum),\n\n .. self\n\n }\n\n }\n\n\n\n pub fn running_time_offset(self, running_time_offset: i64) -> Self {\n\n Self {\n", "file_path": "gstreamer-video/src/video_event.rs", "rank": 22, "score": 281293.29926329805 }, { "content": "pub fn is_video_overlay_prepare_window_handle_message(msg: &gst::MessageRef) -> bool {\n\n unsafe {\n\n from_glib(\n\n gst_video_sys::gst_is_video_overlay_prepare_window_handle_message(msg.as_mut_ptr()),\n\n )\n\n }\n\n}\n", "file_path": "gstreamer-video/src/video_overlay.rs", "rank": 23, "score": 274631.3147195485 }, { "content": "pub trait IteratorImpl<T>: Clone + Send + 'static\n\nwhere\n\n for<'a> T: FromValueOptional<'a> + StaticType + ToValue + Send + 'static,\n\n{\n\n fn next(&mut self) -> Option<Result<T, IteratorError>>;\n\n fn resync(&mut self);\n\n}\n\n\n\nunsafe extern \"C\" fn rs_iterator_copy<T, I: IteratorImpl<T>>(\n\n it: *const gst_sys::GstIterator,\n\n copy: *mut gst_sys::GstIterator,\n\n) where\n\n for<'a> T: FromValueOptional<'a> + StaticType + ToValue + Send + 'static,\n\n{\n\n let it = it as *const RsIterator<T, I>;\n\n let copy = copy as *mut RsIterator<T, I>;\n\n\n\n ptr::write(&mut (*copy).imp, (*it).imp.clone());\n\n}\n\n\n", "file_path": "gstreamer/src/iterator.rs", "rank": 24, "score": 273368.2254260463 }, { "content": "pub fn version() -> (u32, u32, u32, u32) {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n let mut major = mem::MaybeUninit::uninit();\n\n let mut minor = mem::MaybeUninit::uninit();\n\n let mut micro = mem::MaybeUninit::uninit();\n\n let mut nano = mem::MaybeUninit::uninit();\n\n gst_sys::gst_version(\n\n major.as_mut_ptr(),\n\n minor.as_mut_ptr(),\n\n micro.as_mut_ptr(),\n\n nano.as_mut_ptr(),\n\n );\n\n let major = major.assume_init();\n\n let minor = minor.assume_init();\n\n let micro = micro.assume_init();\n\n let nano = nano.assume_init();\n\n (major, minor, micro, nano)\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 25, "score": 268488.56970986474 }, { "content": "pub fn type_find_helper<P: IsA<gst::Pad>>(src: &P, size: u64) -> Option<gst::Caps> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n from_glib_full(gst_base_sys::gst_type_find_helper(\n\n src.as_ref().to_glib_none().0,\n\n size,\n\n ))\n\n }\n\n}\n\n\n", "file_path": "gstreamer-base/src/auto/functions.rs", "rank": 26, "score": 260870.97348140937 }, { "content": "pub fn plugins_base_version() -> (u32, u32, u32, u32) {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n let mut major = mem::MaybeUninit::uninit();\n\n let mut minor = mem::MaybeUninit::uninit();\n\n let mut micro = mem::MaybeUninit::uninit();\n\n let mut nano = mem::MaybeUninit::uninit();\n\n gst_pbutils_sys::gst_plugins_base_version(\n\n major.as_mut_ptr(),\n\n minor.as_mut_ptr(),\n\n micro.as_mut_ptr(),\n\n 
nano.as_mut_ptr(),\n\n );\n\n let major = major.assume_init();\n\n let minor = minor.assume_init();\n\n let micro = micro.assume_init();\n\n let nano = nano.assume_init();\n\n (major, minor, micro, nano)\n\n }\n\n}\n\n\n", "file_path": "gstreamer-pbutils/src/auto/functions.rs", "rank": 27, "score": 260800.0018291702 }, { "content": "pub fn debug_is_active() -> bool {\n\n assert_initialized_main_thread!();\n\n unsafe { from_glib(gst_sys::gst_debug_is_active()) }\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 28, "score": 249625.89866065021 }, { "content": "pub fn debug_is_colored() -> bool {\n\n assert_initialized_main_thread!();\n\n unsafe { from_glib(gst_sys::gst_debug_is_colored()) }\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 29, "score": 249625.89866065021 }, { "content": "pub fn new_still_frame_event<'a>(in_still: bool) -> StillFrameEventBuilder<'a> {\n\n StillFrameEventBuilder::new(in_still)\n\n}\n\n\n\npub struct StillFrameEventBuilder<'a> {\n\n seqnum: Option<gst::Seqnum>,\n\n running_time_offset: Option<i64>,\n\n other_fields: Vec<(&'a str, &'a dyn ToSendValue)>,\n\n in_still: bool,\n\n}\n\n\n\nimpl<'a> StillFrameEventBuilder<'a> {\n\n fn new(in_still: bool) -> Self {\n\n skip_assert_initialized!();\n\n Self {\n\n seqnum: None,\n\n running_time_offset: None,\n\n other_fields: Vec::new(),\n\n in_still,\n\n }\n", "file_path": "gstreamer-video/src/video_event.rs", "rank": 30, "score": 246273.33763775675 }, { "content": "pub fn convert_sample_async<F>(\n\n sample: &gst::Sample,\n\n caps: &gst::Caps,\n\n timeout: gst::ClockTime,\n\n func: F,\n\n) where\n\n F: FnOnce(Result<gst::Sample, glib::Error>) + Send + 'static,\n\n{\n\n unsafe { convert_sample_async_unsafe(sample, caps, timeout, func) }\n\n}\n\n\n", "file_path": "gstreamer-video/src/functions.rs", "rank": 31, "score": 246198.10215620836 }, { "content": "pub fn convert_sample_async_local<F>(\n\n sample: &gst::Sample,\n\n caps: &gst::Caps,\n\n timeout: gst::ClockTime,\n\n func: F,\n\n) where\n\n F: FnOnce(Result<gst::Sample, glib::Error>) + Send + 'static,\n\n{\n\n unsafe {\n\n assert!(glib::MainContext::ref_thread_default().is_owner());\n\n convert_sample_async_unsafe(sample, caps, timeout, func)\n\n }\n\n}\n\n\n\nunsafe fn convert_sample_async_unsafe<F>(\n\n sample: &gst::Sample,\n\n caps: &gst::Caps,\n\n timeout: gst::ClockTime,\n\n func: F,\n\n) where\n", "file_path": "gstreamer-video/src/functions.rs", "rank": 32, "score": 242937.20568400528 }, { "content": "pub fn debug_set_colored(colored: bool) {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n gst_sys::gst_debug_set_colored(colored.to_glib());\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 33, "score": 235911.7510410711 }, { "content": "pub fn debug_set_active(active: bool) {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n gst_sys::gst_debug_set_active(active.to_glib());\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 34, "score": 235911.7510410711 }, { "content": "pub fn tag_exists(name: &str) -> bool {\n\n unsafe { from_glib(gst_sys::gst_tag_exists(name.to_glib_none().0)) }\n\n}\n\n\n", "file_path": "gstreamer/src/tags.rs", "rank": 35, "score": 233088.8304921491 }, { "content": "#[cfg(any(feature = \"v1_14\", feature = \"dox\"))]\n\npub fn debug_add_ring_buffer_logger(max_size_per_thread: u32, thread_timeout: u32) {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n gst_sys::gst_debug_add_ring_buffer_logger(max_size_per_thread, thread_timeout);\n\n 
}\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 36, "score": 228232.5158158309 }, { "content": "pub fn type_find_helper_for_buffer<P: IsA<gst::Object>>(\n\n obj: Option<&P>,\n\n buf: &gst::Buffer,\n\n) -> Option<(gst::Caps, gst::TypeFindProbability)> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n let mut prob = mem::MaybeUninit::uninit();\n\n let ret = gst_base_sys::gst_type_find_helper_for_buffer(\n\n obj.map(|p| p.as_ref()).to_glib_none().0,\n\n buf.to_glib_none().0,\n\n prob.as_mut_ptr(),\n\n );\n\n if ret.is_null() {\n\n None\n\n } else {\n\n Some((from_glib_full(ret), from_glib(prob.assume_init())))\n\n }\n\n }\n\n}\n\n\n", "file_path": "gstreamer-base/src/functions.rs", "rank": 37, "score": 218357.11765590758 }, { "content": "pub fn debug_set_threshold_from_string(list: &str, reset: bool) {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n gst_sys::gst_debug_set_threshold_from_string(list.to_glib_none().0, reset.to_glib());\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 38, "score": 215536.86590004963 }, { "content": "pub fn type_find_helper_for_extension<P: IsA<gst::Object>>(\n\n obj: Option<&P>,\n\n extension: &str,\n\n) -> Option<gst::Caps> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n from_glib_full(gst_base_sys::gst_type_find_helper_for_extension(\n\n obj.map(|p| p.as_ref()).to_glib_none().0,\n\n extension.to_glib_none().0,\n\n ))\n\n }\n\n}\n\n\n\n//pub fn type_find_helper_get_range<P: IsA<gst::Object>, Q: IsA<gst::Object>, R: FnMut(&gst::Object, Option<&gst::Object>, u64, u32, &gst::Buffer) -> gst::FlowReturn>(obj: &P, parent: Option<&Q>, func: R, size: u64, extension: Option<&str>) -> (Option<gst::Caps>, gst::TypeFindProbability) {\n\n// unsafe { TODO: call gst_base_sys:gst_type_find_helper_get_range() }\n\n//}\n\n\n\n//#[cfg(any(feature = \"v1_14_3\", feature = \"dox\"))]\n\n//pub fn type_find_helper_get_range_full<P: IsA<gst::Object>, Q: IsA<gst::Object>, R: FnMut(&gst::Object, Option<&gst::Object>, u64, u32, &gst::Buffer) -> gst::FlowReturn>(obj: &P, parent: Option<&Q>, func: R, size: u64, extension: Option<&str>) -> (gst::FlowReturn, gst::Caps, gst::TypeFindProbability) {\n\n// unsafe { TODO: call gst_base_sys:gst_type_find_helper_get_range_full() }\n\n//}\n", "file_path": "gstreamer-base/src/auto/functions.rs", "rank": 39, "score": 215487.64895959437 }, { "content": "#[cfg(any(feature = \"v1_16\", feature = \"dox\"))]\n\npub fn type_find_helper_for_buffer_with_extension<P: IsA<gst::Object>>(\n\n obj: Option<&P>,\n\n buf: &gst::Buffer,\n\n extension: Option<&str>,\n\n) -> Option<(gst::Caps, gst::TypeFindProbability)> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n let mut prob = mem::MaybeUninit::uninit();\n\n let ret = gst_base_sys::gst_type_find_helper_for_buffer_with_extension(\n\n obj.map(|p| p.as_ref()).to_glib_none().0,\n\n buf.to_glib_none().0,\n\n extension.to_glib_none().0,\n\n prob.as_mut_ptr(),\n\n );\n\n if ret.is_null() {\n\n None\n\n } else {\n\n Some((from_glib_full(ret), from_glib(prob.assume_init())))\n\n }\n\n }\n\n}\n", "file_path": "gstreamer-base/src/functions.rs", "rank": 40, "score": 215487.64895959437 }, { "content": "pub trait GstBinExtManual: 'static {\n\n fn add_many<E: IsA<Element>>(&self, elements: &[&E]) -> Result<(), glib::BoolError>;\n\n fn remove_many<E: IsA<Element>>(&self, elements: &[&E]) -> Result<(), glib::BoolError>;\n\n\n\n fn connect_do_latency<F: Fn(&Self) -> Result<(), LoggableError> + Send + Sync + 'static>(\n\n &self,\n\n f: F,\n\n ) -> 
SignalHandlerId;\n\n\n\n fn iterate_all_by_interface(&self, iface: glib::types::Type) -> ::Iterator<Element>;\n\n fn iterate_elements(&self) -> ::Iterator<Element>;\n\n fn iterate_recurse(&self) -> ::Iterator<Element>;\n\n fn iterate_sinks(&self) -> ::Iterator<Element>;\n\n fn iterate_sorted(&self) -> ::Iterator<Element>;\n\n fn iterate_sources(&self) -> ::Iterator<Element>;\n\n fn get_children(&self) -> Vec<Element>;\n\n\n\n fn debug_to_dot_data(&self, details: ::DebugGraphDetails) -> GString;\n\n fn debug_to_dot_file<Q: AsRef<path::Path>>(&self, details: ::DebugGraphDetails, file_name: Q);\n\n fn debug_to_dot_file_with_ts<Q: AsRef<path::Path>>(\n", "file_path": "gstreamer/src/bin.rs", "rank": 41, "score": 212795.18950135703 }, { "content": "pub trait GstBinExt: 'static {\n\n fn add<P: IsA<Element>>(&self, element: &P) -> Result<(), glib::error::BoolError>;\n\n\n\n //fn add_many<P: IsA<Element>>(&self, element_1: &P, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs);\n\n\n\n fn find_unlinked_pad(&self, direction: PadDirection) -> Option<Pad>;\n\n\n\n fn get_by_interface(&self, iface: glib::types::Type) -> Option<Element>;\n\n\n\n fn get_by_name(&self, name: &str) -> Option<Element>;\n\n\n\n fn get_by_name_recurse_up(&self, name: &str) -> Option<Element>;\n\n\n\n #[cfg(any(feature = \"v1_10\", feature = \"dox\"))]\n\n fn get_suppressed_flags(&self) -> ElementFlags;\n\n\n\n //fn iterate_all_by_interface(&self, iface: glib::types::Type) -> /*Ignored*/Option<Iterator>;\n\n\n\n //fn iterate_elements(&self) -> /*Ignored*/Option<Iterator>;\n\n\n", "file_path": "gstreamer/src/auto/bin.rs", "rank": 42, "score": 212795.18950135703 }, { "content": "pub trait GstPluginExtManual: 'static {\n\n fn get_plugin_flags(&self) -> PluginFlags;\n\n}\n\n\n\nimpl<O: IsA<::Plugin>> GstPluginExtManual for O {\n\n fn get_plugin_flags(&self) -> PluginFlags {\n\n unsafe {\n\n let ptr: *mut gst_sys::GstObject = self.as_ptr() as *mut _;\n\n let _guard = ::utils::MutexGuard::lock(&(*ptr).lock);\n\n from_glib((*ptr).flags)\n\n }\n\n }\n\n}\n", "file_path": "gstreamer/src/plugin.rs", "rank": 43, "score": 212795.18950135703 }, { "content": "pub trait GstPipelineExtManual: 'static {\n\n fn set_pipeline_flags(&self, flags: PipelineFlags);\n\n\n\n fn unset_pipeline_flags(&self, flags: PipelineFlags);\n\n\n\n fn get_pipeline_flags(&self) -> PipelineFlags;\n\n}\n\n\n\nimpl<O: IsA<::Pipeline>> GstPipelineExtManual for O {\n\n fn set_pipeline_flags(&self, flags: PipelineFlags) {\n\n unsafe {\n\n let ptr: *mut gst_sys::GstObject = self.as_ptr() as *mut _;\n\n let _guard = ::utils::MutexGuard::lock(&(*ptr).lock);\n\n (*ptr).flags |= flags.to_glib();\n\n }\n\n }\n\n\n\n fn unset_pipeline_flags(&self, flags: PipelineFlags) {\n\n unsafe {\n\n let ptr: *mut gst_sys::GstObject = self.as_ptr() as *mut _;\n", "file_path": "gstreamer/src/pipeline.rs", "rank": 44, "score": 212795.18950135703 }, { "content": "pub trait GstObjectExtManual: 'static {\n\n fn connect_deep_notify<F: Fn(&Self, &::Object, &glib::ParamSpec) + Send + Sync + 'static>(\n\n &self,\n\n name: Option<&str>,\n\n f: F,\n\n ) -> SignalHandlerId;\n\n\n\n fn set_object_flags(&self, flags: ObjectFlags);\n\n\n\n fn unset_object_flags(&self, flags: ObjectFlags);\n\n\n\n fn get_object_flags(&self) -> ObjectFlags;\n\n}\n\n\n\nimpl<O: IsA<::Object>> GstObjectExtManual for O {\n\n fn connect_deep_notify<F: Fn(&Self, &::Object, &glib::ParamSpec) + Send + Sync + 'static>(\n\n &self,\n\n name: Option<&str>,\n\n f: F,\n\n ) -> SignalHandlerId {\n", "file_path": 
"gstreamer/src/object.rs", "rank": 45, "score": 212795.18950135703 }, { "content": "pub trait GstObjectExt: 'static {\n\n //fn add_control_binding(&self, binding: /*Ignored*/&ControlBinding) -> bool;\n\n\n\n fn default_error(&self, error: &Error, debug: Option<&str>);\n\n\n\n //fn get_control_binding(&self, property_name: &str) -> /*Ignored*/Option<ControlBinding>;\n\n\n\n fn get_control_rate(&self) -> ClockTime;\n\n\n\n //fn get_g_value_array(&self, property_name: &str, timestamp: ClockTime, interval: ClockTime, values: /*Ignored*/&[&glib::Value]) -> bool;\n\n\n\n fn get_name(&self) -> GString;\n\n\n\n fn get_parent(&self) -> Option<Object>;\n\n\n\n fn get_path_string(&self) -> GString;\n\n\n\n //fn get_value(&self, property_name: &str, timestamp: ClockTime) -> /*Ignored*/Option<glib::Value>;\n\n\n\n //fn get_value_array(&self, property_name: &str, timestamp: ClockTime, interval: ClockTime, n_values: u32, values: /*Unimplemented*/Option<Fundamental: Pointer>) -> bool;\n", "file_path": "gstreamer/src/auto/object.rs", "rank": 46, "score": 212795.18950135703 }, { "content": "pub fn tag_get_description(name: &str) -> Option<&'static str> {\n\n unsafe {\n\n let ptr = gst_sys::gst_tag_get_description(name.to_glib_none().0);\n\n\n\n if ptr.is_null() {\n\n None\n\n } else {\n\n Some(CStr::from_ptr(ptr).to_str().unwrap())\n\n }\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/tags.rs", "rank": 47, "score": 212596.6642980082 }, { "content": "pub fn tag_get_nick(name: &str) -> Option<&'static str> {\n\n unsafe {\n\n let ptr = gst_sys::gst_tag_get_nick(name.to_glib_none().0);\n\n\n\n if ptr.is_null() {\n\n None\n\n } else {\n\n Some(CStr::from_ptr(ptr).to_str().unwrap())\n\n }\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/tags.rs", "rank": 48, "score": 212596.6642980082 }, { "content": "fn buffer_n_gl_memory(buffer: &gst::BufferRef) -> Option<u32> {\n\n unsafe {\n\n let buf = buffer.as_mut_ptr();\n\n let num = gst_sys::gst_buffer_n_memory(buf);\n\n for i in 0..num - 1 {\n\n let mem = gst_sys::gst_buffer_peek_memory(buf, i);\n\n if gst_gl_sys::gst_is_gl_memory(mem) != glib_sys::GTRUE {\n\n return None;\n\n }\n\n }\n\n Some(num as u32)\n\n }\n\n}\n", "file_path": "gstreamer-gl/src/gl_video_frame.rs", "rank": 49, "score": 212425.6520810233 }, { "content": "pub trait GObjectExtManualGst: 'static {\n\n fn set_property_from_str(&self, name: &str, value: &str);\n\n}\n\n\n\nimpl<O: IsA<glib::Object>> GObjectExtManualGst for O {\n\n fn set_property_from_str(&self, name: &str, value: &str) {\n\n unsafe {\n\n gst_sys::gst_util_set_object_arg(\n\n self.as_ref().to_glib_none().0,\n\n name.to_glib_none().0,\n\n value.to_glib_none().0,\n\n );\n\n }\n\n }\n\n}\n", "file_path": "gstreamer/src/gobject.rs", "rank": 50, "score": 209534.9176676413 }, { "content": "pub fn init() -> Result<(), BoolError> {\n\n if gst::init().is_err() {\n\n return Err(glib_bool_error!(\"Could not initialize GStreamer.\"));\n\n }\n\n\n\n unsafe {\n\n if from_glib(ges_sys::ges_init()) {\n\n Ok(())\n\n } else {\n\n Err(glib_bool_error!(\"Could not initialize GES.\"))\n\n }\n\n }\n\n}\n\n\n\npub unsafe fn deinit() {\n\n ges_sys::ges_deinit();\n\n}\n\n\n\nmacro_rules! 
assert_initialized_main_thread {\n\n () => {\n", "file_path": "gstreamer-editing-services/src/lib.rs", "rank": 51, "score": 204950.44822391355 }, { "content": "pub fn pb_utils_get_encoder_description(caps: &gst::CapsRef) -> Option<String> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n from_glib_full(gst_pbutils_sys::gst_pb_utils_get_encoder_description(\n\n caps.as_ptr(),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "gstreamer-pbutils/src/functions.rs", "rank": 52, "score": 202873.32114013107 }, { "content": "pub fn pb_utils_get_decoder_description(caps: &gst::CapsRef) -> Option<String> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n from_glib_full(gst_pbutils_sys::gst_pb_utils_get_decoder_description(\n\n caps.as_ptr(),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "gstreamer-pbutils/src/functions.rs", "rank": 53, "score": 202873.32114013107 }, { "content": "pub fn pb_utils_get_codec_description(caps: &gst::CapsRef) -> Option<String> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n from_glib_full(gst_pbutils_sys::gst_pb_utils_get_codec_description(\n\n caps.as_ptr(),\n\n ))\n\n }\n\n}\n", "file_path": "gstreamer-pbutils/src/functions.rs", "rank": 54, "score": 202873.32114013107 }, { "content": "pub fn merge_use_first(src: &Value) -> Value {\n\n assert_eq!(src.type_(), ::List::static_type());\n\n\n\n unsafe {\n\n use glib::translate::Uninitialized;\n\n\n\n let mut res = Value::uninitialized();\n\n gst_sys::gst_tag_merge_use_first(res.to_glib_none_mut().0, src.to_glib_none().0);\n\n res\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/tags.rs", "rank": 55, "score": 200987.44873502865 }, { "content": "pub fn parse_still_frame_event(event: &gst::EventRef) -> Option<StillFrameEvent> {\n\n unsafe {\n\n let mut in_still = mem::MaybeUninit::uninit();\n\n\n\n let res: bool = from_glib(gst_video_sys::gst_video_event_parse_still_frame(\n\n event.as_mut_ptr(),\n\n in_still.as_mut_ptr(),\n\n ));\n\n if res {\n\n Some(StillFrameEvent {\n\n in_still: from_glib(in_still.assume_init()),\n\n })\n\n } else {\n\n None\n\n }\n\n }\n\n}\n", "file_path": "gstreamer-video/src/video_event.rs", "rank": 56, "score": 198066.8027971264 }, { "content": "pub fn type_find_helper_for_data<P: IsA<gst::Object>, R: AsRef<[u8]>>(\n\n obj: Option<&P>,\n\n data: R,\n\n) -> Option<(gst::Caps, gst::TypeFindProbability)> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n let mut prob = mem::MaybeUninit::uninit();\n\n let data = data.as_ref();\n\n let (ptr, len) = (data.as_ptr(), data.len());\n\n let ret = gst_base_sys::gst_type_find_helper_for_data(\n\n obj.map(|p| p.as_ref()).to_glib_none().0,\n\n mut_override(ptr),\n\n len,\n\n prob.as_mut_ptr(),\n\n );\n\n if ret.is_null() {\n\n None\n\n } else {\n\n Some((from_glib_full(ret), from_glib(prob.assume_init())))\n\n }\n\n }\n\n}\n\n\n", "file_path": "gstreamer-base/src/functions.rs", "rank": 57, "score": 197581.29200045185 }, { "content": "#[cfg(any(feature = \"v1_16\", feature = \"dox\"))]\n\npub fn type_find_helper_for_data_with_extension<P: IsA<gst::Object>, R: AsRef<[u8]>>(\n\n obj: Option<&P>,\n\n data: R,\n\n extension: Option<&str>,\n\n) -> Option<(gst::Caps, gst::TypeFindProbability)> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n let mut prob = mem::MaybeUninit::uninit();\n\n let data = data.as_ref();\n\n let (ptr, len) = (data.as_ptr(), data.len());\n\n let ret = gst_base_sys::gst_type_find_helper_for_data_with_extension(\n\n obj.map(|p| p.as_ref()).to_glib_none().0,\n\n mut_override(ptr),\n\n len,\n\n extension.to_glib_none().0,\n\n 
prob.as_mut_ptr(),\n\n );\n\n if ret.is_null() {\n\n None\n\n } else {\n\n Some((from_glib_full(ret), from_glib(prob.assume_init())))\n\n }\n\n }\n\n}\n\n\n", "file_path": "gstreamer-base/src/functions.rs", "rank": 58, "score": 195036.5895312618 }, { "content": "pub fn parse_force_key_unit_event(event: &gst::EventRef) -> Option<ForceKeyUnitEvent> {\n\n if event.is_upstream() {\n\n parse_upstream_force_key_unit_event(event).map(ForceKeyUnitEvent::Upstream)\n\n } else {\n\n parse_downstream_force_key_unit_event(event).map(ForceKeyUnitEvent::Downstream)\n\n }\n\n}\n\n\n", "file_path": "gstreamer-video/src/video_event.rs", "rank": 59, "score": 193603.7733109168 }, { "content": "pub fn update_registry() -> Result<(), glib::error::BoolError> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n glib_result_from_gboolean!(\n\n gst_sys::gst_update_registry(),\n\n \"Failed to update the registry\"\n\n )\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 60, "score": 191396.51939864305 }, { "content": "pub fn new_downstream_force_key_unit_event<'a>() -> DownstreamForceKeyUnitEventBuilder<'a> {\n\n DownstreamForceKeyUnitEventBuilder::new()\n\n}\n\n\n\npub struct DownstreamForceKeyUnitEventBuilder<'a> {\n\n seqnum: Option<gst::Seqnum>,\n\n running_time_offset: Option<i64>,\n\n other_fields: Vec<(&'a str, &'a dyn ToSendValue)>,\n\n timestamp: gst::ClockTime,\n\n stream_time: gst::ClockTime,\n\n running_time: gst::ClockTime,\n\n all_headers: bool,\n\n count: u32,\n\n}\n\n\n\nimpl<'a> DownstreamForceKeyUnitEventBuilder<'a> {\n\n fn new() -> Self {\n\n skip_assert_initialized!();\n\n Self {\n\n seqnum: None,\n", "file_path": "gstreamer-video/src/video_event.rs", "rank": 61, "score": 179246.8489900821 }, { "content": "pub fn new_upstream_force_key_unit_event<'a>() -> UpstreamForceKeyUnitEventBuilder<'a> {\n\n UpstreamForceKeyUnitEventBuilder::new()\n\n}\n\n\n\npub struct UpstreamForceKeyUnitEventBuilder<'a> {\n\n seqnum: Option<gst::Seqnum>,\n\n running_time_offset: Option<i64>,\n\n other_fields: Vec<(&'a str, &'a dyn ToSendValue)>,\n\n running_time: gst::ClockTime,\n\n all_headers: bool,\n\n count: u32,\n\n}\n\n\n\nimpl<'a> UpstreamForceKeyUnitEventBuilder<'a> {\n\n fn new() -> Self {\n\n skip_assert_initialized!();\n\n Self {\n\n seqnum: None,\n\n running_time_offset: None,\n\n other_fields: Vec::new(),\n", "file_path": "gstreamer-video/src/video_event.rs", "rank": 62, "score": 179246.8489900821 }, { "content": "#[cfg(any(feature = \"v1_12\", feature = \"dox\"))]\n\npub fn calculate_linear_regression(\n\n xy: &[(u64, u64)],\n\n temp: Option<&mut [(u64, u64)]>,\n\n) -> Option<(u64, u64, u64, u64, f64)> {\n\n unsafe {\n\n assert_eq!(mem::size_of::<u64>() * 2, mem::size_of::<(u64, u64)>());\n\n assert_eq!(mem::align_of::<u64>(), mem::align_of::<(u64, u64)>());\n\n assert!(temp.as_ref().map(|temp| temp.len()).unwrap_or(xy.len()) >= xy.len());\n\n\n\n let mut m_num = mem::MaybeUninit::uninit();\n\n let mut m_denom = mem::MaybeUninit::uninit();\n\n let mut b = mem::MaybeUninit::uninit();\n\n let mut xbase = mem::MaybeUninit::uninit();\n\n let mut r_squared = mem::MaybeUninit::uninit();\n\n\n\n let res = from_glib(gst_sys::gst_calculate_linear_regression(\n\n xy.as_ptr() as *const u64,\n\n temp.map(|temp| temp.as_mut_ptr() as *mut u64)\n\n .unwrap_or(ptr::null_mut()),\n\n xy.len() as u32,\n", "file_path": "gstreamer/src/functions.rs", "rank": 63, "score": 172753.564898781 }, { "content": "pub fn convert_sample(\n\n sample: &gst::Sample,\n\n caps: &gst::Caps,\n\n timeout: 
gst::ClockTime,\n\n) -> Result<gst::Sample, glib::Error> {\n\n unsafe {\n\n let mut error = ptr::null_mut();\n\n let ret = gst_video_sys::gst_video_convert_sample(\n\n sample.to_glib_none().0,\n\n caps.to_glib_none().0,\n\n timeout.to_glib(),\n\n &mut error,\n\n );\n\n\n\n if error.is_null() {\n\n Ok(from_glib_full(ret))\n\n } else {\n\n Err(from_glib_full(error))\n\n }\n\n }\n\n}\n\n\n", "file_path": "gstreamer-video/src/functions.rs", "rank": 64, "score": 172753.564898781 }, { "content": "pub fn parse_launch_full(\n\n pipeline_description: &str,\n\n mut context: Option<&mut ParseContext>,\n\n flags: ParseFlags,\n\n) -> Result<Element, Error> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n let mut error = ptr::null_mut();\n\n let ret = gst_sys::gst_parse_launch_full(\n\n pipeline_description.to_glib_none().0,\n\n context.to_glib_none_mut().0,\n\n flags.to_glib(),\n\n &mut error,\n\n );\n\n if error.is_null() {\n\n Ok(from_glib_none(ret))\n\n } else {\n\n Err(from_glib_full(error))\n\n }\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/functions.rs", "rank": 65, "score": 172753.564898781 }, { "content": "pub fn parse_launchv_full(\n\n argv: &[&str],\n\n mut context: Option<&mut ParseContext>,\n\n flags: ParseFlags,\n\n) -> Result<Element, Error> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n let mut error = ptr::null_mut();\n\n let ret = gst_sys::gst_parse_launchv_full(\n\n argv.to_glib_none().0,\n\n context.to_glib_none_mut().0,\n\n flags.to_glib(),\n\n &mut error,\n\n );\n\n if error.is_null() {\n\n Ok(from_glib_none(ret))\n\n } else {\n\n Err(from_glib_full(error))\n\n }\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/functions.rs", "rank": 66, "score": 172753.564898781 }, { "content": "pub fn parse_bin_from_description_full(\n\n bin_description: &str,\n\n ghost_unlinked_pads: bool,\n\n mut context: Option<&mut ParseContext>,\n\n flags: ParseFlags,\n\n) -> Result<Element, Error> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n let mut error = ptr::null_mut();\n\n let ret = gst_sys::gst_parse_bin_from_description_full(\n\n bin_description.to_glib_none().0,\n\n ghost_unlinked_pads.to_glib(),\n\n context.to_glib_none_mut().0,\n\n flags.to_glib(),\n\n &mut error,\n\n );\n\n if error.is_null() {\n\n Ok(from_glib_none(ret))\n\n } else {\n\n Err(from_glib_full(error))\n\n }\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/functions.rs", "rank": 67, "score": 170377.8887983358 }, { "content": "pub fn audio_buffer_clip(\n\n buffer: gst::Buffer,\n\n segment: &gst::Segment,\n\n rate: u32,\n\n bpf: u32,\n\n) -> Option<gst::Buffer> {\n\n skip_assert_initialized!();\n\n\n\n unsafe {\n\n from_glib_full(gst_audio_sys::gst_audio_buffer_clip(\n\n buffer.into_ptr(),\n\n segment.to_glib_none().0,\n\n rate as i32,\n\n bpf as i32,\n\n ))\n\n }\n\n}\n\n\n\n// Re-export all the traits in a prelude module, so that applications\n\n// can always \"use gst::prelude::*\" without getting conflicts\n\npub mod prelude {\n\n pub use glib::prelude::*;\n\n pub use gst::prelude::*;\n\n\n\n pub use auto::traits::*;\n\n}\n", "file_path": "gstreamer-audio/src/lib.rs", "rank": 68, "score": 170377.8887983358 }, { "content": "pub fn parse_bin_from_description(\n\n bin_description: &str,\n\n ghost_unlinked_pads: bool,\n\n) -> Result<Bin, Error> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n let mut error = ptr::null_mut();\n\n let ret = gst_sys::gst_parse_bin_from_description(\n\n bin_description.to_glib_none().0,\n\n ghost_unlinked_pads.to_glib(),\n\n &mut error,\n\n );\n\n if error.is_null() 
{\n\n Ok(from_glib_none(ret))\n\n } else {\n\n Err(from_glib_full(error))\n\n }\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 69, "score": 170377.8887983358 }, { "content": "pub fn reorder_channels(\n\n data: &mut [u8],\n\n format: ::AudioFormat,\n\n channels: u32,\n\n from: &[AudioChannelPosition],\n\n to: &[AudioChannelPosition],\n\n) -> Result<(), glib::BoolError> {\n\n assert_initialized_main_thread!();\n\n\n\n if from.len() != to.len() || from.len() > 64 {\n\n return Err(glib_bool_error!(\"Invalid number of channels\"));\n\n }\n\n\n\n let from_len = from.len();\n\n let to_len = to.len();\n\n\n\n let from_raw: [gst_audio_sys::GstAudioChannelPosition; 64] = array_init::array_init_copy(|i| {\n\n if i >= from_len as usize {\n\n gst_audio_sys::GST_AUDIO_CHANNEL_POSITION_INVALID\n\n } else {\n", "file_path": "gstreamer-audio/src/audio_channel_position.rs", "rank": 70, "score": 168107.86188604683 }, { "content": "pub fn debug_print_stack_trace() {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n gst_sys::gst_debug_print_stack_trace();\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 71, "score": 168107.86188604683 }, { "content": "pub fn center_video_rectangle(\n\n src: &VideoRectangle,\n\n dst: &VideoRectangle,\n\n scale: bool,\n\n) -> VideoRectangle {\n\n let mut result = gst_video_sys::GstVideoRectangle {\n\n x: 0,\n\n y: 0,\n\n w: 0,\n\n h: 0,\n\n };\n\n let src_rect = gst_video_sys::GstVideoRectangle {\n\n x: src.x,\n\n y: src.y,\n\n w: src.w,\n\n h: src.h,\n\n };\n\n let dst_rect = gst_video_sys::GstVideoRectangle {\n\n x: dst.x,\n\n y: dst.y,\n\n w: dst.w,\n\n h: dst.h,\n\n };\n\n unsafe {\n\n gst_video_sys::gst_video_sink_center_rect(src_rect, dst_rect, &mut result, scale.to_glib());\n\n }\n\n VideoRectangle::new(result.x, result.y, result.w, result.h)\n\n}\n", "file_path": "gstreamer-video/src/video_rectangle.rs", "rank": 72, "score": 168107.86188604683 }, { "content": "pub fn buffer_reorder_channels(\n\n buffer: &mut gst::BufferRef,\n\n format: ::AudioFormat,\n\n channels: u32,\n\n from: &[AudioChannelPosition],\n\n to: &[AudioChannelPosition],\n\n) -> Result<(), glib::BoolError> {\n\n assert_initialized_main_thread!();\n\n\n\n if from.len() != to.len() || from.len() > 64 {\n\n return Err(glib_bool_error!(\"Invalid number of channels\"));\n\n }\n\n\n\n let from_len = from.len();\n\n let to_len = to.len();\n\n\n\n let from_raw: [gst_audio_sys::GstAudioChannelPosition; 64] = array_init::array_init_copy(|i| {\n\n if i >= from_len as usize {\n\n gst_audio_sys::GST_AUDIO_CHANNEL_POSITION_INVALID\n\n } else {\n", "file_path": "gstreamer-audio/src/audio_channel_position.rs", "rank": 73, "score": 165936.5899384639 }, { "content": "#[cfg(any(feature = \"v1_14\", feature = \"dox\"))]\n\npub fn debug_remove_ring_buffer_logger() {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n gst_sys::gst_debug_remove_ring_buffer_logger();\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 74, "score": 165936.5899384639 }, { "content": "pub fn util_seqnum_next() -> ::Seqnum {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n let v = from_glib(gst_sys::gst_util_seqnum_next());\n\n if v == ::SEQNUM_INVALID {\n\n return from_glib(gst_sys::gst_util_seqnum_next());\n\n }\n\n v\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/functions.rs", "rank": 75, "score": 164988.8450065541 }, { "content": "pub trait ClockExt: 'static {\n\n fn add_observation(&self, slave: ClockTime, master: ClockTime) -> Option<f64>;\n\n\n\n fn 
add_observation_unapplied(\n\n &self,\n\n slave: ClockTime,\n\n master: ClockTime,\n\n ) -> Option<(f64, ClockTime, ClockTime, ClockTime, ClockTime)>;\n\n\n\n fn adjust_unlocked(&self, internal: ClockTime) -> ClockTime;\n\n\n\n fn get_calibration(&self) -> (ClockTime, ClockTime, ClockTime, ClockTime);\n\n\n\n fn get_internal_time(&self) -> ClockTime;\n\n\n\n fn get_master(&self) -> Option<Clock>;\n\n\n\n fn get_resolution(&self) -> ClockTime;\n\n\n\n fn get_time(&self) -> ClockTime;\n", "file_path": "gstreamer/src/auto/clock.rs", "rank": 76, "score": 164984.24416937472 }, { "content": "pub trait PadExt: 'static {\n\n fn activate_mode(&self, mode: PadMode, active: bool) -> Result<(), glib::error::BoolError>;\n\n\n\n fn can_link<P: IsA<Pad>>(&self, sinkpad: &P) -> bool;\n\n\n\n fn check_reconfigure(&self) -> bool;\n\n\n\n fn create_stream_id<P: IsA<Element>>(\n\n &self,\n\n parent: &P,\n\n stream_id: Option<&str>,\n\n ) -> Option<GString>;\n\n\n\n //fn create_stream_id_printf<P: IsA<Element>>(&self, parent: &P, stream_id: Option<&str>, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs) -> Option<GString>;\n\n\n\n //fn create_stream_id_printf_valist<P: IsA<Element>>(&self, parent: &P, stream_id: Option<&str>, var_args: /*Unknown conversion*//*Unimplemented*/Unsupported) -> Option<GString>;\n\n\n\n fn forward<P: FnMut(&Pad) -> bool>(&self, forward: P) -> bool;\n\n\n\n fn get_allowed_caps(&self) -> Option<Caps>;\n", "file_path": "gstreamer/src/auto/pad.rs", "rank": 77, "score": 164984.24416937472 }, { "content": "pub trait ElementExt: 'static {\n\n fn abort_state(&self);\n\n\n\n fn add_pad<P: IsA<Pad>>(&self, pad: &P) -> Result<(), glib::error::BoolError>;\n\n\n\n fn create_all_pads(&self);\n\n\n\n #[cfg(any(feature = \"v1_14\", feature = \"dox\"))]\n\n fn foreach_pad<P: FnMut(&Element, &Pad) -> bool>(&self, func: P) -> bool;\n\n\n\n #[cfg(any(feature = \"v1_14\", feature = \"dox\"))]\n\n fn foreach_sink_pad<P: FnMut(&Element, &Pad) -> bool>(&self, func: P) -> bool;\n\n\n\n #[cfg(any(feature = \"v1_14\", feature = \"dox\"))]\n\n fn foreach_src_pad<P: FnMut(&Element, &Pad) -> bool>(&self, func: P) -> bool;\n\n\n\n fn get_base_time(&self) -> ClockTime;\n\n\n\n fn get_bus(&self) -> Option<Bus>;\n\n\n", "file_path": "gstreamer/src/auto/element.rs", "rank": 78, "score": 164984.24416937472 }, { "content": "pub trait AllocatorExt: 'static {\n\n //fn alloc(&self, size: usize, params: /*Ignored*/Option<&mut AllocationParams>) -> /*Ignored*/Option<Memory>;\n\n\n\n //fn free(&self, memory: /*Ignored*/&mut Memory);\n\n\n\n fn set_default(&self);\n\n}\n\n\n\nimpl<O: IsA<Allocator>> AllocatorExt for O {\n\n //fn alloc(&self, size: usize, params: /*Ignored*/Option<&mut AllocationParams>) -> /*Ignored*/Option<Memory> {\n\n // unsafe { TODO: call gst_sys:gst_allocator_alloc() }\n\n //}\n\n\n\n //fn free(&self, memory: /*Ignored*/&mut Memory) {\n\n // unsafe { TODO: call gst_sys:gst_allocator_free() }\n\n //}\n\n\n\n fn set_default(&self) {\n\n unsafe {\n\n gst_sys::gst_allocator_set_default(self.as_ref().to_glib_full());\n\n }\n\n }\n\n}\n", "file_path": "gstreamer/src/auto/allocator.rs", "rank": 79, "score": 164984.24416937472 }, { "content": "pub trait PipelineExt: 'static {\n\n fn auto_clock(&self);\n\n\n\n fn get_auto_flush_bus(&self) -> bool;\n\n\n\n fn get_delay(&self) -> ClockTime;\n\n\n\n fn get_latency(&self) -> ClockTime;\n\n\n\n fn get_pipeline_clock(&self) -> Option<Clock>;\n\n\n\n fn set_auto_flush_bus(&self, auto_flush: bool);\n\n\n\n fn set_delay(&self, delay: ClockTime);\n\n\n\n fn 
set_latency(&self, latency: ClockTime);\n\n\n\n fn use_clock<P: IsA<Clock>>(&self, clock: Option<&P>);\n\n\n\n fn connect_property_auto_flush_bus_notify<F: Fn(&Self) + Send + Sync + 'static>(\n", "file_path": "gstreamer/src/auto/pipeline.rs", "rank": 80, "score": 164984.24416937472 }, { "content": "pub trait DeviceExt: 'static {\n\n fn create_element(&self, name: Option<&str>) -> Option<Element>;\n\n\n\n fn get_caps(&self) -> Option<Caps>;\n\n\n\n fn get_device_class(&self) -> GString;\n\n\n\n fn get_display_name(&self) -> GString;\n\n\n\n fn get_properties(&self) -> Option<Structure>;\n\n\n\n fn has_classes(&self, classes: &str) -> bool;\n\n\n\n fn has_classesv(&self, classes: &[&str]) -> bool;\n\n\n\n fn reconfigure_element<P: IsA<Element>>(\n\n &self,\n\n element: &P,\n\n ) -> Result<(), glib::error::BoolError>;\n\n\n", "file_path": "gstreamer/src/auto/device.rs", "rank": 81, "score": 164984.24416937472 }, { "content": "pub trait ClockExtManual: 'static {\n\n fn new_periodic_id(&self, start_time: ClockTime, interval: ClockTime) -> Option<ClockId>;\n\n\n\n fn periodic_id_reinit(\n\n &self,\n\n id: &ClockId,\n\n start_time: ClockTime,\n\n interval: ClockTime,\n\n ) -> Result<(), glib::BoolError>;\n\n\n\n fn new_single_shot_id(&self, time: ClockTime) -> Option<ClockId>;\n\n\n\n fn single_shot_id_reinit(&self, id: &ClockId, time: ClockTime) -> Result<(), glib::BoolError>;\n\n}\n\n\n\nimpl<O: IsA<Clock>> ClockExtManual for O {\n\n fn new_periodic_id(&self, start_time: ClockTime, interval: ClockTime) -> Option<ClockId> {\n\n unsafe {\n\n from_glib_full(gst_sys::gst_clock_new_periodic_id(\n\n self.as_ref().to_glib_none().0,\n", "file_path": "gstreamer/src/clock.rs", "rank": 82, "score": 164984.24416937472 }, { "content": "pub trait PadExtManual: 'static {\n\n fn add_probe<F>(&self, mask: PadProbeType, func: F) -> Option<PadProbeId>\n\n where\n\n F: Fn(&Self, &mut PadProbeInfo) -> PadProbeReturn + Send + Sync + 'static;\n\n fn remove_probe(&self, id: PadProbeId);\n\n\n\n fn chain(&self, buffer: Buffer) -> Result<FlowSuccess, FlowError>;\n\n fn push(&self, buffer: Buffer) -> Result<FlowSuccess, FlowError>;\n\n\n\n fn chain_list(&self, list: BufferList) -> Result<FlowSuccess, FlowError>;\n\n fn push_list(&self, list: BufferList) -> Result<FlowSuccess, FlowError>;\n\n\n\n fn pull_range(&self, offset: u64, size: u32) -> Result<Buffer, FlowError>;\n\n fn get_range(&self, offset: u64, size: u32) -> Result<Buffer, FlowError>;\n\n\n\n fn peer_query(&self, query: &mut QueryRef) -> bool;\n\n fn query(&self, query: &mut QueryRef) -> bool;\n\n fn query_default<P: IsA<::Object>>(&self, parent: Option<&P>, query: &mut QueryRef) -> bool;\n\n fn proxy_query_caps(&self, query: &mut QueryRef) -> bool;\n\n fn proxy_query_accept_caps(&self, query: &mut QueryRef) -> bool;\n", "file_path": "gstreamer/src/pad.rs", "rank": 83, "score": 164984.24416937472 }, { "content": "pub trait ElementExtManual: 'static {\n\n fn get_element_class(&self) -> &ElementClass;\n\n\n\n fn change_state(&self, transition: StateChange)\n\n -> Result<StateChangeSuccess, StateChangeError>;\n\n fn continue_state(\n\n &self,\n\n ret: StateChangeReturn,\n\n ) -> Result<StateChangeSuccess, StateChangeError>;\n\n\n\n fn get_state(\n\n &self,\n\n timeout: ClockTime,\n\n ) -> (Result<StateChangeSuccess, StateChangeError>, State, State);\n\n fn set_state(&self, state: State) -> Result<StateChangeSuccess, StateChangeError>;\n\n\n\n fn query(&self, query: &mut QueryRef) -> bool;\n\n\n\n fn send_event(&self, event: Event) -> bool;\n\n\n", "file_path": 
"gstreamer/src/element.rs", "rank": 84, "score": 164984.24416937472 }, { "content": "pub trait AllocatorExtManual: 'static {\n\n fn alloc(&self, size: usize, params: Option<&AllocationParams>) -> Option<Memory>;\n\n}\n\n\n\nimpl<O: IsA<Allocator>> AllocatorExtManual for O {\n\n fn alloc(&self, size: usize, params: Option<&AllocationParams>) -> Option<Memory> {\n\n unsafe {\n\n let ret = gst_sys::gst_allocator_alloc(\n\n self.as_ptr() as *mut _,\n\n size,\n\n match params {\n\n Some(val) => val.as_ptr() as *mut _,\n\n None => ptr::null_mut(),\n\n },\n\n );\n\n if ret.is_null() {\n\n None\n\n } else {\n\n Some(from_glib_full(ret))\n\n }\n\n }\n\n }\n\n}\n", "file_path": "gstreamer/src/allocator.rs", "rank": 85, "score": 164984.24416937472 }, { "content": "pub trait PresetExt: 'static {\n\n fn delete_preset(&self, name: &str) -> Result<(), glib::error::BoolError>;\n\n\n\n fn get_meta(&self, name: &str, tag: &str) -> Option<GString>;\n\n\n\n fn get_preset_names(&self) -> Vec<GString>;\n\n\n\n fn get_property_names(&self) -> Vec<GString>;\n\n\n\n fn is_editable(&self) -> bool;\n\n\n\n fn load_preset(&self, name: &str) -> Result<(), glib::error::BoolError>;\n\n\n\n fn rename_preset(&self, old_name: &str, new_name: &str) -> Result<(), glib::error::BoolError>;\n\n\n\n fn save_preset(&self, name: &str) -> Result<(), glib::error::BoolError>;\n\n\n\n fn set_meta(\n\n &self,\n\n name: &str,\n", "file_path": "gstreamer/src/auto/preset.rs", "rank": 86, "score": 164984.24416937472 }, { "content": "pub fn get_channel_reorder_map(\n\n from: &[AudioChannelPosition],\n\n to: &[AudioChannelPosition],\n\n reorder_map: &mut [usize],\n\n) -> Result<(), glib::BoolError> {\n\n assert_initialized_main_thread!();\n\n\n\n if from.len() != to.len() || from.len() != reorder_map.len() || from.len() > 64 {\n\n return Err(glib_bool_error!(\"Invalid number of channels\"));\n\n }\n\n\n\n let from_len = from.len();\n\n let to_len = to.len();\n\n\n\n let from_raw: [gst_audio_sys::GstAudioChannelPosition; 64] = array_init::array_init_copy(|i| {\n\n if i >= from_len as usize {\n\n gst_audio_sys::GST_AUDIO_CHANNEL_POSITION_INVALID\n\n } else {\n\n from[i].to_glib()\n\n }\n", "file_path": "gstreamer-audio/src/audio_channel_position.rs", "rank": 87, "score": 163857.76581285032 }, { "content": "pub fn version_string() -> GString {\n\n assert_initialized_main_thread!();\n\n unsafe { from_glib_full(gst_sys::gst_version_string()) }\n\n}\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 88, "score": 162718.81809426515 }, { "content": "pub trait AggregatorExtManual: 'static {\n\n fn finish_buffer(&self, buffer: gst::Buffer) -> Result<gst::FlowSuccess, gst::FlowError>;\n\n #[cfg(any(feature = \"v1_16\", feature = \"dox\"))]\n\n fn get_property_min_upstream_latency(&self) -> gst::ClockTime;\n\n\n\n #[cfg(any(feature = \"v1_16\", feature = \"dox\"))]\n\n fn set_property_min_upstream_latency(&self, min_upstream_latency: gst::ClockTime);\n\n\n\n #[cfg(any(feature = \"v1_16\", feature = \"dox\"))]\n\n fn connect_property_min_upstream_latency_notify<F: Fn(&Self) + Send + Sync + 'static>(\n\n &self,\n\n f: F,\n\n ) -> SignalHandlerId;\n\n}\n\n\n\nimpl<O: IsA<Aggregator>> AggregatorExtManual for O {\n\n fn finish_buffer(&self, buffer: gst::Buffer) -> Result<gst::FlowSuccess, gst::FlowError> {\n\n let ret: gst::FlowReturn = unsafe {\n\n from_glib(gst_base_sys::gst_aggregator_finish_buffer(\n\n self.as_ref().to_glib_none().0,\n", "file_path": "gstreamer-base/src/aggregator.rs", "rank": 89, "score": 162714.58348968482 }, { "content": "pub 
trait AggregatorExt: 'static {\n\n //#[cfg(any(feature = \"v1_14\", feature = \"dox\"))]\n\n //fn get_allocator(&self, allocator: /*Ignored*/gst::Allocator, params: /*Ignored*/gst::AllocationParams);\n\n\n\n #[cfg(any(feature = \"v1_14\", feature = \"dox\"))]\n\n fn get_buffer_pool(&self) -> Option<gst::BufferPool>;\n\n\n\n #[cfg(any(feature = \"v1_14\", feature = \"dox\"))]\n\n fn get_latency(&self) -> gst::ClockTime;\n\n\n\n #[cfg(any(feature = \"v1_14\", feature = \"dox\"))]\n\n fn set_latency(&self, min_latency: gst::ClockTime, max_latency: gst::ClockTime);\n\n\n\n #[cfg(any(feature = \"v1_14\", feature = \"dox\"))]\n\n fn set_src_caps(&self, caps: &gst::Caps);\n\n\n\n #[cfg(any(feature = \"v1_16\", feature = \"dox\"))]\n\n fn simple_get_next_time(&self) -> gst::ClockTime;\n\n\n\n fn get_property_start_time(&self) -> u64;\n", "file_path": "gstreamer-base/src/auto/aggregator.rs", "rank": 90, "score": 162714.58348968482 }, { "content": "pub fn parse_upstream_force_key_unit_event(\n\n event: &gst::EventRef,\n\n) -> Option<UpstreamForceKeyUnitEvent> {\n\n unsafe {\n\n let mut running_time = mem::MaybeUninit::uninit();\n\n let mut all_headers = mem::MaybeUninit::uninit();\n\n let mut count = mem::MaybeUninit::uninit();\n\n\n\n let res: bool = from_glib(\n\n gst_video_sys::gst_video_event_parse_upstream_force_key_unit(\n\n event.as_mut_ptr(),\n\n running_time.as_mut_ptr(),\n\n all_headers.as_mut_ptr(),\n\n count.as_mut_ptr(),\n\n ),\n\n );\n\n if res {\n\n Some(UpstreamForceKeyUnitEvent {\n\n running_time: from_glib(running_time.assume_init()),\n\n all_headers: from_glib(all_headers.assume_init()),\n", "file_path": "gstreamer-video/src/video_event.rs", "rank": 91, "score": 161865.6082584178 }, { "content": "pub fn pb_utils_add_codec_description_to_tag_list(\n\n taglist: &mut gst::TagListRef,\n\n caps: &gst::CapsRef,\n\n) -> Result<(), glib::BoolError> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n glib_result_from_gboolean!(\n\n gst_pbutils_sys::gst_pb_utils_add_codec_description_to_tag_list(\n\n taglist.as_mut_ptr(),\n\n ptr::null_mut(),\n\n caps.as_ptr(),\n\n ),\n\n \"Failed to find codec description\",\n\n )\n\n }\n\n}\n\n\n", "file_path": "gstreamer-pbutils/src/functions.rs", "rank": 92, "score": 161865.6082584178 }, { "content": "pub fn parse_downstream_force_key_unit_event(\n\n event: &gst::EventRef,\n\n) -> Option<DownstreamForceKeyUnitEvent> {\n\n unsafe {\n\n let mut timestamp = mem::MaybeUninit::uninit();\n\n let mut stream_time = mem::MaybeUninit::uninit();\n\n let mut running_time = mem::MaybeUninit::uninit();\n\n let mut all_headers = mem::MaybeUninit::uninit();\n\n let mut count = mem::MaybeUninit::uninit();\n\n\n\n let res: bool = from_glib(\n\n gst_video_sys::gst_video_event_parse_downstream_force_key_unit(\n\n event.as_mut_ptr(),\n\n timestamp.as_mut_ptr(),\n\n stream_time.as_mut_ptr(),\n\n running_time.as_mut_ptr(),\n\n all_headers.as_mut_ptr(),\n\n count.as_mut_ptr(),\n\n ),\n\n );\n", "file_path": "gstreamer-video/src/video_event.rs", "rank": 93, "score": 161865.6082584178 }, { "content": "pub fn util_get_timestamp() -> ClockTime {\n\n assert_initialized_main_thread!();\n\n unsafe { from_glib(gst_sys::gst_util_get_timestamp()) }\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 94, "score": 160547.54614668223 }, { "content": "pub fn util_group_id_next() -> ::GroupId {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n let v = from_glib(gst_sys::gst_util_group_id_next());\n\n if v == ::GROUP_ID_INVALID {\n\n return 
from_glib(gst_sys::gst_util_group_id_next());\n\n }\n\n v\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/functions.rs", "rank": 95, "score": 160547.54614668223 }, { "content": "pub trait PluginFeatureExt: 'static {\n\n fn check_version(&self, min_major: u32, min_minor: u32, min_micro: u32) -> bool;\n\n\n\n fn get_plugin(&self) -> Option<Plugin>;\n\n\n\n fn get_plugin_name(&self) -> Option<GString>;\n\n\n\n fn load(&self) -> Option<PluginFeature>;\n\n}\n\n\n\nimpl<O: IsA<PluginFeature>> PluginFeatureExt for O {\n\n fn check_version(&self, min_major: u32, min_minor: u32, min_micro: u32) -> bool {\n\n unsafe {\n\n from_glib(gst_sys::gst_plugin_feature_check_version(\n\n self.as_ref().to_glib_none().0,\n\n min_major,\n\n min_minor,\n\n min_micro,\n\n ))\n\n }\n", "file_path": "gstreamer/src/auto/plugin_feature.rs", "rank": 96, "score": 160543.6618421641 }, { "content": "pub trait ChildProxyExt: 'static {\n\n fn child_added<P: IsA<glib::Object>>(&self, child: &P, name: &str);\n\n\n\n fn child_removed<P: IsA<glib::Object>>(&self, child: &P, name: &str);\n\n\n\n //fn get(&self, first_property_name: &str, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs);\n\n\n\n fn get_child_by_index(&self, index: u32) -> Option<glib::Object>;\n\n\n\n fn get_child_by_name(&self, name: &str) -> Option<glib::Object>;\n\n\n\n fn get_children_count(&self) -> u32;\n\n\n\n //fn get_property(&self, name: &str, value: /*Ignored*/glib::Value);\n\n\n\n //fn get_valist(&self, first_property_name: &str, var_args: /*Unknown conversion*//*Unimplemented*/Unsupported);\n\n\n\n //fn lookup(&self, name: &str, pspec: /*Ignored*/glib::ParamSpec) -> Option<glib::Object>;\n\n\n\n //fn set(&self, first_property_name: &str, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs);\n", "file_path": "gstreamer/src/auto/child_proxy.rs", "rank": 97, "score": 160543.6618421641 } ]
Rust
tests/sweep.rs
hellow554/flo_curves
8807b446271f0d2212d3f27d86741beeab5abebb
use flo_curves::geo::{ sweep_against, sweep_self, BoundingBox, Bounds, Coord2, Coordinate, Coordinate2D, Coordinate3D, }; use rand::prelude::*; use std::cmp::Ordering; #[test] fn sweep_self_single_overlap() { let mut bounds = vec![ Bounds::from_min_max(Coord2(100.0, 200.0), Coord2(200.0, 300.0)), Bounds::from_min_max(Coord2(150.0, 250.0), Coord2(250.0, 350.0)), ]; bounds.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); let collisions = sweep_self(bounds.iter()); assert!(collisions.count() == 1); } #[test] fn sweep_self_double_overlap() { let mut bounds = vec![ Bounds::from_min_max(Coord2(100.0, 200.0), Coord2(200.0, 300.0)), Bounds::from_min_max(Coord2(150.0, 250.0), Coord2(250.0, 350.0)), Bounds::from_min_max(Coord2(220.0, 330.0), Coord2(350.0, 450.0)), ]; bounds.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); let collisions = sweep_self(bounds.iter()); assert!(collisions.count() == 2); } #[test] fn sweep_self_triple_overlap() { let mut bounds = vec![ Bounds::from_min_max(Coord2(100.0, 200.0), Coord2(200.0, 300.0)), Bounds::from_min_max(Coord2(150.0, 250.0), Coord2(250.0, 350.0)), Bounds::from_min_max(Coord2(190.0, 290.0), Coord2(290.0, 390.0)), ]; bounds.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); let collisions = sweep_self(bounds.iter()); assert!(collisions.count() == 3); } #[test] fn sweep_self_quad_overlap() { let mut bounds = vec![ Bounds::from_min_max(Coord2(100.0, 200.0), Coord2(200.0, 300.0)), Bounds::from_min_max(Coord2(150.0, 250.0), Coord2(250.0, 350.0)), Bounds::from_min_max(Coord2(190.0, 290.0), Coord2(290.0, 390.0)), Bounds::from_min_max(Coord2(0.0, 0.0), Coord2(1000.0, 1000.0)), ]; bounds.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); let collisions = sweep_self(bounds.iter()); assert!(collisions.count() == 6); } #[test] fn sweep_against_single_overlap() { let mut bounds1 = vec![Bounds::from_min_max( Coord2(100.0, 200.0), Coord2(200.0, 300.0), )]; let mut bounds2 = vec![Bounds::from_min_max( Coord2(150.0, 250.0), Coord2(250.0, 350.0), )]; bounds1.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); bounds2.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); let collisions = sweep_against(bounds1.iter(), bounds2.iter()); assert!(collisions.count() == 1); } #[test] fn sweep_against_double_overlap_1() { let mut bounds1 = vec![ Bounds::from_min_max(Coord2(100.0, 200.0), Coord2(200.0, 300.0)), Bounds::from_min_max(Coord2(220.0, 330.0), Coord2(350.0, 450.0)), ]; let mut bounds2 = vec![Bounds::from_min_max( Coord2(150.0, 250.0), Coord2(250.0, 350.0), )]; bounds1.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); bounds2.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); let collisions = sweep_against(bounds1.iter(), bounds2.iter()); assert!(collisions.count() == 2); } #[test] fn sweep_against_double_overlap_2() { let mut bounds1 = vec![Bounds::from_min_max( Coord2(150.0, 250.0), Coord2(250.0, 350.0), )]; let mut bounds2 = vec![ Bounds::from_min_max(Coord2(100.0, 200.0), Coord2(200.0, 300.0)), Bounds::from_min_max(Coord2(220.0, 330.0), Coord2(350.0, 450.0)), ]; bounds1.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); bounds2.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) 
.unwrap_or(Ordering::Equal) }); let collisions = sweep_against(bounds1.iter(), bounds2.iter()); assert!(collisions.count() == 2); } #[test] fn sweep_against_quad_overlap() { let mut bounds1 = vec![ Bounds::from_min_max(Coord2(100.0, 200.0), Coord2(200.0, 300.0)), Bounds::from_min_max(Coord2(150.0, 250.0), Coord2(250.0, 350.0)), ]; let mut bounds2 = vec![ Bounds::from_min_max(Coord2(190.0, 290.0), Coord2(290.0, 390.0)), Bounds::from_min_max(Coord2(0.0, 0.0), Coord2(1000.0, 1000.0)), ]; bounds1.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); bounds2.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); let collisions = sweep_against(bounds1.iter(), bounds2.iter()); assert!(collisions.count() == 4); } #[test] fn sweep_self_1000_random() { let mut rng = StdRng::from_seed([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, ]); let mut bounds = (0..1000) .into_iter() .map(|_| { let x = rng.gen::<f64>() * 900.0; let y = rng.gen::<f64>() * 900.0; let w = rng.gen::<f64>() * 400.0; let h = rng.gen::<f64>() * 400.0; Bounds::from_min_max(Coord2(x, y), Coord2(x + w, y + h)) }) .collect::<Vec<_>>(); bounds.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); let collisions = sweep_self(bounds.iter()).collect::<Vec<_>>(); let mut slow_collisions = vec![]; for i1 in 0..bounds.len() { for i2 in 0..i1 { if i1 == i2 { continue; } if bounds[i1].overlaps(&bounds[i2]) { slow_collisions.push((&bounds[i1], &bounds[i2])); } } } assert!(collisions.len() == slow_collisions.len()); }
use flo_curves::geo::{ sweep_against, sweep_self, BoundingBox, Bounds, Coord2, Coordinate, Coordinate2D, Coordinate3D, }; use rand::prelude::*; use std::cmp::Ordering; #[test] fn sweep_self_single_overlap() { let mut bounds = vec![ Bounds::from_min_max(Coord2(100.0, 200.0), Coord2(200.0, 300.0)), Bounds::from_min_max(Coord2(150.0, 250.0), Coord2(250.0, 350.0)), ]; bounds.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); let collisions = sweep_self(bounds.iter()); assert!(collisions.count() == 1); } #[test] fn sweep_self_double_overlap() { let mut bounds = vec![ Bounds::from_min_max(Coord2(100.0, 200.0), Coord2(200.0, 300.0)), Bounds::from_min_max(Coord2(150.0, 250.0), Coord2(250.0, 350.0)), Bounds::from_min_max(Coord2(220.0, 330.0), Coord2(350.0, 450.0)), ]; bounds.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); let collisions = sweep_self(bounds.iter()); assert!(collisions.count() == 2); } #[test] fn sweep_self_triple_overlap() { let mut bounds = vec![ Bounds::from_min_max(Coord2(100.0, 200.0), Coord2(200.0, 300.0)), Bounds::from_min_max(Coord2(150.0, 250.0), Coord2(250.0, 350.0)), Bounds::from_min_max(Coord2(190.0, 290.0), Coord2(290.0, 390.0)), ]; bounds.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); let collisions = sweep_self(bounds.iter()); assert!(collisions.count() == 3); } #[test] fn sweep_self_quad_overlap() { let mut bounds = vec![ Bounds::from_min_max(Coord2(100.0, 200.0), Coord2(200.0, 300.0)), Bounds::from_min_max(Coord2(150.0, 250.0), Coord2(250.0, 350.0)), Bounds::from_min_max(Coord2(190.0, 290.0), Coord2(290.0, 390.0)), Bounds::from_min_max(Coord2(0.0, 0.0), Coord2(1000.0, 1000.0)), ]; bounds.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); let collisions = sweep_self(bounds.iter()); assert!(collisions.count() == 6); } #[test] fn sweep_against_single_overlap() { let mut bounds1 = vec![Bounds::from_min_max( Coord2(100.0, 200.0), Coord2(200.0, 300.0), )]; let mut bounds2 = vec![Bounds::from_min_max( Coord2(150.0, 250.0), Coord2(250.0, 350.0), )]; bounds1.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); bounds2.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); let collisions = sweep_against(bounds1.iter(), bounds2.iter()); assert!(collisions.count() == 1); } #[test] fn sweep_against_double_overlap_1() { let mut bounds1 = vec![ Bounds::from_min_max(Coord2(100.0, 200.0), Coord2(200.0, 300.0)), Bounds::from_min_max(Coord2(220.0, 330.0), Coord2(350.0, 450.0)), ]; let mut bounds2 = vec![Bounds::from_min_max( Coord2(150.0, 250.0), Coord2(250.0, 350.0), )]; bounds1.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); bounds2.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); let collisions = sweep_against(bounds1.iter(), bounds2.iter()); assert!(collisions.count() == 2); } #[test] fn sweep_against_double_overlap_2() { let mut bounds1 = vec![Bounds::from_min_max( Coord2(150.0, 250.0), Coord2(250.0, 350.0), )]; let mut bounds2 = vec![ Bounds::from_min_max(Coord2(100.0, 200.0), Coord2(200.0, 300.0)), Bounds::from_min_max(Coord2(220.0, 330.0), Coord2(350.0, 450.0)), ]; bounds1.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); bounds2.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) 
.unwrap_or(Ordering::Equal) }); let collisions = sweep_against(bounds1.iter(), bounds2.iter()); assert!(collisions.count() == 2); } #[test]
#[test] fn sweep_self_1000_random() { let mut rng = StdRng::from_seed([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, ]); let mut bounds = (0..1000) .into_iter() .map(|_| { let x = rng.gen::<f64>() * 900.0; let y = rng.gen::<f64>() * 900.0; let w = rng.gen::<f64>() * 400.0; let h = rng.gen::<f64>() * 400.0; Bounds::from_min_max(Coord2(x, y), Coord2(x + w, y + h)) }) .collect::<Vec<_>>(); bounds.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); let collisions = sweep_self(bounds.iter()).collect::<Vec<_>>(); let mut slow_collisions = vec![]; for i1 in 0..bounds.len() { for i2 in 0..i1 { if i1 == i2 { continue; } if bounds[i1].overlaps(&bounds[i2]) { slow_collisions.push((&bounds[i1], &bounds[i2])); } } } assert!(collisions.len() == slow_collisions.len()); }
fn sweep_against_quad_overlap() { let mut bounds1 = vec![ Bounds::from_min_max(Coord2(100.0, 200.0), Coord2(200.0, 300.0)), Bounds::from_min_max(Coord2(150.0, 250.0), Coord2(250.0, 350.0)), ]; let mut bounds2 = vec![ Bounds::from_min_max(Coord2(190.0, 290.0), Coord2(290.0, 390.0)), Bounds::from_min_max(Coord2(0.0, 0.0), Coord2(1000.0, 1000.0)), ]; bounds1.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); bounds2.sort_by(|b1, b2| { b1.min() .x() .partial_cmp(&b2.min().x()) .unwrap_or(Ordering::Equal) }); let collisions = sweep_against(bounds1.iter(), bounds2.iter()); assert!(collisions.count() == 4); }
function_block-full_function
[ { "content": "fn detect_collisions(mut graph_path: GraphPath<Coord2, ()>) {\n\n graph_path.self_collide(0.1);\n\n}\n\n\n", "file_path": "benches/sweep.rs", "rank": 0, "score": 169984.23598104744 }, { "content": "///\n\n/// Finds the upper and lower points in a cubic curve's bounding box\n\n///\n\npub fn bounding_box4<Point: Coordinate, Bounds: BoundingBox<Point = Point>>(\n\n w1: Point,\n\n w2: Point,\n\n w3: Point,\n\n w4: Point,\n\n) -> Bounds {\n\n // The 't' values where this curve has extremities we need to examine\n\n let t_extremes = find_extremities(w1, w2, w3, w4);\n\n\n\n // Start with the point at 0,0 as the minimum position\n\n let mut min_pos = de_casteljau4(0.0, w1, w2, w3, w4);\n\n let mut max_pos = min_pos;\n\n\n\n for t in t_extremes {\n\n let point = de_casteljau4(t, w1, w2, w3, w4);\n\n\n\n min_pos = Point::from_smallest_components(min_pos, point);\n\n max_pos = Point::from_biggest_components(max_pos, point);\n\n }\n\n\n\n Bounds::from_min_max(min_pos, max_pos)\n\n}\n", "file_path": "src/bezier/bounds.rs", "rank": 1, "score": 150049.83977328244 }, { "content": "fn create_graph_path(rng: &mut StdRng, n: usize) -> GraphPath<Coord2, ()> {\n\n let mut x = 100.0;\n\n let mut y = 100.0;\n\n let mut path_builder = BezierPathBuilder::<SimpleBezierPath>::start(Coord2(x, y));\n\n\n\n for _ in 0..n {\n\n let xo = rng.gen::<f64>() * 50.0;\n\n let yo = rng.gen::<f64>() * 50.0;\n\n\n\n x += xo;\n\n y += yo;\n\n\n\n path_builder = path_builder.line_to(Coord2(x, y));\n\n }\n\n\n\n let path = path_builder.build();\n\n\n\n GraphPath::from_path(&path, ())\n\n}\n\n\n", "file_path": "benches/sweep.rs", "rank": 2, "score": 148672.47842943025 }, { "content": "#[test]\n\nfn same_rects() {\n\n let r1 = (Coord2(30.0, 30.0), Coord2(60.0, 40.0));\n\n\n\n assert!(r1.overlaps(&r1));\n\n}\n\n\n", "file_path": "tests/bounds.rs", "rank": 3, "score": 147814.03185971995 }, { "content": "#[test]\n\nfn from_points() {\n\n let r = Bounds::<Coord2>::bounds_for_points(vec![\n\n Coord2(30.0, 30.0),\n\n Coord2(60.0, 40.0),\n\n Coord2(45.0, 70.0),\n\n Coord2(10.0, 35.0),\n\n ]);\n\n\n\n assert!(r.min() == Coord2(10.0, 30.0));\n\n assert!(r.max() == Coord2(60.0, 70.00));\n\n}\n", "file_path": "tests/bounds.rs", "rank": 4, "score": 147814.03185971995 }, { "content": "#[test]\n\nfn overlapping_rects() {\n\n let r1 = (Coord2(30.0, 30.0), Coord2(60.0, 40.0));\n\n let r2 = (Coord2(20.0, 25.0), Coord2(35.0, 35.0));\n\n\n\n assert!(r1.overlaps(&r2));\n\n}\n\n\n", "file_path": "tests/bounds.rs", "rank": 5, "score": 142931.0458140219 }, { "content": "#[test]\n\nfn touching_rects() {\n\n let r1 = (Coord2(30.0, 30.0), Coord2(60.0, 40.0));\n\n let r2 = (Coord2(20.0, 25.0), Coord2(30.0, 30.0));\n\n\n\n assert!(r1.overlaps(&r2));\n\n}\n\n\n", "file_path": "tests/bounds.rs", "rank": 6, "score": 142931.0458140219 }, { "content": "#[test]\n\nfn round_to_units() {\n\n assert!(Coord2(1.1111, 2.2222).round(1.0) == Coord2(1.0, 2.0));\n\n}\n\n\n", "file_path": "tests/coordinates.rs", "rank": 7, "score": 142863.09304787545 }, { "content": "#[test]\n\nfn round_up_to_units() {\n\n assert!(Coord2(1.1111, 2.5555).round(1.0) == Coord2(1.0, 3.0));\n\n}\n\n\n", "file_path": "tests/coordinates.rs", "rank": 8, "score": 142863.09304787545 }, { "content": "#[test]\n\nfn round_to_hundredths() {\n\n assert!(Coord2(1.1111, 2.2222).round(0.01) == Coord2(1.11, 2.22));\n\n}\n\n\n", "file_path": "tests/coordinates.rs", "rank": 9, "score": 142863.09304787545 }, { "content": "#[test]\n\nfn unit_vector_of_0_0_is_0_0() {\n\n assert!(Coord2(0.0, 
0.0).to_unit_vector() == Coord2(0.0, 0.0));\n\n}\n\n\n", "file_path": "tests/coordinates.rs", "rank": 10, "score": 142863.09304787545 }, { "content": "#[test]\n\nfn get_straight_line_bounds() {\n\n let straight_line = bezier::Curve::from_points(\n\n Coord2(0.0, 1.0),\n\n (Coord2(0.5, 1.5), Coord2(1.5, 2.5)),\n\n Coord2(2.0, 3.0),\n\n );\n\n\n\n let bounds: (Coord2, Coord2) = straight_line.bounding_box();\n\n\n\n assert!(bounds == (Coord2(0.0, 1.0), Coord2(2.0, 3.0)));\n\n}\n\n\n", "file_path": "tests/bezier/bounds.rs", "rank": 11, "score": 142098.08270533144 }, { "content": "#[test]\n\nfn circle_path_bounds() {\n\n let center = Coord2(5.0, 5.0);\n\n let radius = 4.0;\n\n\n\n // Create a path from a circle\n\n let circle: SimpleBezierPath = Circle::new(center, radius).to_path();\n\n\n\n let bounds: (Coord2, Coord2) = circle.bounding_box();\n\n\n\n assert!(bounds.0.distance_to(&Coord2(1.0, 1.0)) < 0.1);\n\n assert!(bounds.1.distance_to(&Coord2(9.0, 9.0)) < 0.1);\n\n}\n\n\n", "file_path": "tests/bezier/path/bounds.rs", "rank": 12, "score": 142098.08270533144 }, { "content": "#[test]\n\nfn get_curved_line_bounds() {\n\n let curved_line = bezier::Curve::from_points(\n\n Coord2(0.0, 1.0),\n\n (Coord2(-1.1875291, 1.5), Coord2(1.5, 2.5)),\n\n Coord2(2.0, 3.0),\n\n );\n\n\n\n let bounds: (Coord2, Coord2) = curved_line.bounding_box();\n\n\n\n assert!(bounds.0.distance_to(&Coord2(-0.3, 1.0)) < 0.0001);\n\n assert!(bounds.1.distance_to(&Coord2(2.0, 3.0)) < 0.0001);\n\n}\n", "file_path": "tests/bezier/bounds.rs", "rank": 13, "score": 142098.08270533144 }, { "content": "#[test]\n\nfn circle_path_fast_bounds() {\n\n let center = Coord2(5.0, 5.0);\n\n let radius = 4.0;\n\n\n\n // Create a path from a circle\n\n let circle: SimpleBezierPath = Circle::new(center, radius).to_path();\n\n\n\n let bounds: (Coord2, Coord2) = circle.fast_bounding_box();\n\n\n\n assert!(bounds.0.x() <= 1.0);\n\n assert!(bounds.0.y() <= 1.0);\n\n assert!(bounds.1.x() >= 9.0);\n\n assert!(bounds.1.y() >= 9.0);\n\n}\n", "file_path": "tests/bezier/path/bounds.rs", "rank": 14, "score": 138625.3749553266 }, { "content": "#[test]\n\nfn overlap_interior_rect() {\n\n let r1 = (Coord2(30.0, 30.0), Coord2(60.0, 50.0));\n\n let r2 = (Coord2(35.0, 35.0), Coord2(55.0, 45.0));\n\n\n\n assert!(r1.overlaps(&r2));\n\n}\n\n\n", "file_path": "tests/bounds.rs", "rank": 15, "score": 138410.21026171395 }, { "content": "#[test]\n\nfn solve_t_for_out_of_bounds() {\n\n let curve1 = Curve::from_points(\n\n Coord2(10.0, 100.0),\n\n (Coord2(90.0, 30.0), Coord2(40.0, 140.0)),\n\n Coord2(220.0, 220.0),\n\n );\n\n\n\n let solved = curve1.t_for_point(&Coord2(45.0, 23.0));\n\n assert!(solved.is_none());\n\n}\n", "file_path": "tests/bezier/solve.rs", "rank": 16, "score": 138410.21026171395 }, { "content": "#[test]\n\nfn line_in_bounds() {\n\n let line = (Coord2(5.0, 3.0), Coord2(7.0, 9.0));\n\n let bounds = (Coord2(1.0, 1.0), Coord2(10.0, 10.0));\n\n let clipped = line_clip_to_bounds(&line, &bounds);\n\n\n\n assert!(clipped.is_some());\n\n\n\n let clipped = clipped.unwrap();\n\n assert!(clipped.0.distance_to(&Coord2(5.0, 3.0)) < 0.01);\n\n assert!(clipped.1.distance_to(&Coord2(7.0, 9.0)) < 0.01);\n\n}\n\n\n", "file_path": "tests/line/intersection.rs", "rank": 17, "score": 138410.21026171395 }, { "content": "#[test]\n\nfn overlap_exterior_rect() {\n\n let r1 = (Coord2(30.0, 30.0), Coord2(60.0, 40.0));\n\n let r2 = (Coord2(20.0, 20.0), Coord2(70.0, 50.0));\n\n\n\n assert!(r1.overlaps(&r2));\n\n}\n\n\n", "file_path": "tests/bounds.rs", "rank": 18, "score": 
138410.21026171395 }, { "content": "#[test]\n\nfn non_overlapping_rects() {\n\n let r1 = (Coord2(30.0, 30.0), Coord2(60.0, 40.0));\n\n let r2 = (Coord2(20.0, 25.0), Coord2(9.0, 10.0));\n\n\n\n assert!(!r1.overlaps(&r2));\n\n}\n\n\n", "file_path": "tests/bounds.rs", "rank": 19, "score": 138410.21026171395 }, { "content": "#[test]\n\nfn search_for_x_coordinate() {\n\n // Initial curve\n\n let (w1, w2, w3, w4) = (1.0, -2.0, 3.0, 4.0);\n\n\n\n // Search for the t value for a particular X coord\n\n let x_coord = 1.5;\n\n let matching_values =\n\n bezier::search_bounds4(0.01, w1, w2, w3, w4, |p1, p2| p1 < x_coord && p2 > x_coord);\n\n\n\n // Should be only 1 coordinate with this curve\n\n assert!(matching_values.len() == 1);\n\n\n\n // Basis function should be within 0.01\n\n let actual_val = bezier::basis(matching_values[0], w1, w2, w3, w4);\n\n assert!((actual_val - x_coord).abs() < 0.01);\n\n}\n\n\n", "file_path": "tests/bezier/search.rs", "rank": 20, "score": 138344.7773806907 }, { "content": "#[test]\n\nfn unit_vector_0_degrees() {\n\n assert!(Coord2::unit_vector_at_angle(0.0).distance_to(&Coord2(1.0, 0.0)) < 0.001);\n\n}\n\n\n", "file_path": "tests/coordinates.rs", "rank": 21, "score": 138344.7773806907 }, { "content": "#[test]\n\nfn unit_vector_90_degrees() {\n\n assert!(\n\n Coord2::unit_vector_at_angle(f64::consts::PI / 2.0).distance_to(&Coord2(0.0, 1.0)) < 0.001\n\n );\n\n}\n", "file_path": "tests/coordinates.rs", "rank": 22, "score": 138344.7773806907 }, { "content": "///\n\n/// Chord-length parameterizes a set of points\n\n///\n\n/// This is an estimate of the 't' value for these points on the final curve.\n\n///\n\nfn chords_for_points<Point: Coordinate>(points: &[Point]) -> Vec<f64> {\n\n let mut distances = vec![];\n\n let mut total_distance = 0.0;\n\n\n\n // Compute the distances for each point\n\n distances.push(total_distance);\n\n for ps in points.windows(2) {\n\n total_distance += ps[0].distance_to(&ps[1]);\n\n distances.push(total_distance);\n\n }\n\n\n\n // Normalize to the range 0..1\n\n for distance in &mut distances {\n\n *distance /= total_distance;\n\n }\n\n\n\n distances\n\n}\n\n\n", "file_path": "src/bezier/fit.rs", "rank": 26, "score": 134926.19453921993 }, { "content": "#[test]\n\nfn line_out_of_bounds_left() {\n\n let line = (Coord2(-11.0, 9.5), Coord2(-20.0, 9.0));\n\n let bounds = (Coord2(1.0, 1.0), Coord2(10.0, 10.0));\n\n let clipped = line_clip_to_bounds(&line, &bounds);\n\n\n\n assert!(clipped.is_none());\n\n}\n\n\n", "file_path": "tests/line/intersection.rs", "rank": 27, "score": 134212.67704152095 }, { "content": "#[test]\n\nfn line_out_of_bounds_crossing() {\n\n let line = (Coord2(9.0, 0.0), Coord2(20.0, 9.0));\n\n let bounds = (Coord2(1.0, 1.0), Coord2(10.0, 10.0));\n\n let clipped = line_clip_to_bounds(&line, &bounds);\n\n\n\n assert!(clipped.is_none());\n\n}\n", "file_path": "tests/line/intersection.rs", "rank": 28, "score": 134212.67704152095 }, { "content": "#[test]\n\nfn line_out_of_bounds_right() {\n\n let line = (Coord2(11.0, 9.5), Coord2(20.0, 9.0));\n\n let bounds = (Coord2(1.0, 1.0), Coord2(10.0, 10.0));\n\n let clipped = line_clip_to_bounds(&line, &bounds);\n\n\n\n assert!(clipped.is_none());\n\n}\n\n\n", "file_path": "tests/line/intersection.rs", "rank": 29, "score": 134212.67704152095 }, { "content": "#[test]\n\nfn can_get_dot_product() {\n\n assert!(Coord2(2.0, 1.0).dot(&Coord2(3.0, 4.0)) == 10.0);\n\n}\n\n\n", "file_path": "tests/coordinates.rs", "rank": 30, "score": 134149.58383894732 }, { "content": "#[test]\n\nfn 
can_get_distance_between_points() {\n\n assert!(Coord2(1.0, 1.0).distance_to(&Coord2(1.0, 8.0)) == 7.0);\n\n}\n\n\n", "file_path": "tests/coordinates.rs", "rank": 31, "score": 134149.58383894732 }, { "content": "#[test]\n\nfn can_find_unit_vector() {\n\n assert!(Coord2(0.0, 1.0).to_unit_vector() == Coord2(0.0, 1.0));\n\n assert!(Coord2(0.0, 2.0).to_unit_vector() == Coord2(0.0, 1.0));\n\n\n\n assert!(\n\n f64::abs(\n\n Coord2(4.0, 2.0)\n\n .to_unit_vector()\n\n .distance_to(&Coord2(0.0, 0.0))\n\n - 1.0\n\n ) < 0.01\n\n );\n\n}\n\n\n", "file_path": "tests/coordinates.rs", "rank": 32, "score": 134149.58383894732 }, { "content": "///\n\n/// Finds the t values of the extremities of a curve (these are the points at which\n\n/// the x or y value is at a minimum or maximum)\n\n///\n\npub fn find_extremities<Point: Coordinate>(w1: Point, w2: Point, w3: Point, w4: Point) -> Vec<f64> {\n\n // The 't' values where this curve has extremities we need to examine\n\n let mut t_extremes = vec![1.0];\n\n\n\n // The derivative is a quadratic function, so we can compute the locations of these (t values) by solving the quadratic formula for them\n\n for component_index in 0..Point::len() {\n\n // Fetch the parameters for this component\n\n let p1 = w1.get(component_index);\n\n let p2 = w2.get(component_index);\n\n let p3 = w3.get(component_index);\n\n let p4 = w4.get(component_index);\n\n\n\n // Compute the bezier coefficients\n\n let a = (-p1 + p2 * 3.0 - p3 * 3.0 + p4) * 3.0;\n\n let b = (p1 - p2 * 2.0 + p3) * 6.0;\n\n let c = (p2 - p1) * 3.0;\n\n\n\n // Extremities are points at which the curve has a 0 gradient (in any of its dimensions)\n\n let root1 = (-b + f64::sqrt(b * b - a * c * 4.0)) / (a * 2.0);\n\n let root2 = (-b - f64::sqrt(b * b - a * c * 4.0)) / (a * 2.0);\n", "file_path": "src/bezier/bounds.rs", "rank": 33, "score": 134130.40503274425 }, { "content": "#[test]\n\nfn coordinate_outside_curve_produces_no_results() {\n\n // Initial curve\n\n let (w1, w2, w3, w4) = (1.0, -2.0, 3.0, 4.0);\n\n\n\n // Search for the t value for a particular X coord, which is outside the curve\n\n let x_coord = 5.0;\n\n let matching_values =\n\n bezier::search_bounds4(0.01, w1, w2, w3, w4, |p1, p2| p1 < x_coord && p2 > x_coord);\n\n\n\n // No points on the curve match this coordinate\n\n assert!(matching_values.is_empty());\n\n}\n", "file_path": "tests/bezier/search.rs", "rank": 38, "score": 126599.17027897271 }, { "content": "#[test]\n\nfn points_outside_bounds_are_outside_path() {\n\n // Path is a square\n\n let path = (\n\n Coord2(1.0, 2.0),\n\n vec![\n\n (Coord2(3.0, 2.0), Coord2(6.0, 2.0), Coord2(9.0, 2.0)),\n\n (Coord2(9.0, 4.0), Coord2(9.0, 6.0), Coord2(9.0, 8.0)),\n\n (Coord2(6.0, 8.0), Coord2(3.0, 8.0), Coord2(1.0, 8.0)),\n\n (Coord2(1.0, 6.0), Coord2(1.0, 4.0), Coord2(1.0, 2.0)),\n\n ],\n\n );\n\n\n\n // Points far outside the path should be outside\n\n assert!(!path_contains_point(&path, &Coord2(5.0, 20.0)));\n\n assert!(!path_contains_point(&path, &Coord2(5.0, -5.0)));\n\n assert!(!path_contains_point(&path, &Coord2(20.0, 5.0)));\n\n assert!(!path_contains_point(&path, &Coord2(-5.0, 5.0)));\n\n assert!(!path_contains_point(&path, &Coord2(3.0, 20.0)));\n\n}\n\n\n", "file_path": "tests/bezier/path/point.rs", "rank": 39, "score": 123246.68081812214 }, { "content": "fn merge_paths(path1: GraphPath<Coord2, ()>, path2: GraphPath<Coord2, ()>) {\n\n path1.collide(path2, 0.1);\n\n}\n\n\n", "file_path": "benches/sweep.rs", "rank": 40, "score": 122408.91518456495 }, { "content": "fn criterion_benchmark(c: &mut 
Criterion) {\n\n let mut rng = StdRng::from_seed([\n\n 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24,\n\n 25, 26, 27, 28, 29, 30, 31,\n\n ]);\n\n let graph_path = create_graph_path(&mut rng, 1000);\n\n let merge_path = create_graph_path(&mut rng, 500);\n\n\n\n c.bench_function(\"detect_collisions 1000\", |b| {\n\n b.iter(|| detect_collisions(black_box(graph_path.clone())))\n\n });\n\n c.bench_function(\"merge_paths 1000\", |b| {\n\n b.iter(|| merge_paths(black_box(graph_path.clone()), black_box(merge_path.clone())))\n\n });\n\n\n\n c.bench_function(\"sweep 10\", |b| b.iter(|| sweep(black_box(10))));\n\n c.bench_function(\"sweep_slow 10\", |b| b.iter(|| sweep_slow(black_box(10))));\n\n\n\n c.bench_function(\"sweep 100\", |b| b.iter(|| sweep(black_box(100))));\n\n c.bench_function(\"sweep_slow 100\", |b| b.iter(|| sweep_slow(black_box(100))));\n\n\n\n c.bench_function(\"sweep 1000\", |b| b.iter(|| sweep(black_box(1000))));\n\n c.bench_function(\"sweep_slow 1000\", |b| {\n\n b.iter(|| sweep_slow(black_box(1000)))\n\n });\n\n}\n\n\n\ncriterion_group!(benches, criterion_benchmark);\n\ncriterion_main!(benches);\n", "file_path": "benches/sweep.rs", "rank": 41, "score": 121787.94213560846 }, { "content": "///\n\n/// Converts a set of points to a 'canonical' curve\n\n///\n\n/// This is the curve such that w1 = (0.0), w2 = (1, 0) and w3 = (1, 1), if such a curve exists. The return value is the point w4\n\n/// for this curve.\n\n///\n\nfn to_canonical_curve<Point: Coordinate + Coordinate2D>(\n\n w1: &Point,\n\n w2: &Point,\n\n w3: &Point,\n\n w4: &Point,\n\n) -> Option<Point> {\n\n // Retrieve the affine transform for the curve\n\n if let Some((a, b, c, d, e, f)) = canonical_curve_transform(w1, w2, w3) {\n\n // Calculate the free point w4 based on the transform\n\n let x4 = w4.x();\n\n let y4 = w4.y();\n\n\n\n let x = a * x4 + b * y4 + c;\n\n let y = d * x4 + e * y4 + f;\n\n\n\n Some(Point::from_components(&[x, y]))\n\n } else {\n\n None\n\n }\n\n}\n\n\n\n///\n\n/// Returns the category of a curve given its characteristic point in the canonical form\n\n///\n", "file_path": "src/bezier/characteristics.rs", "rank": 42, "score": 114041.09121588297 }, { "content": "///\n\n/// Sweeps two sets of objects to find the collisions between them\n\n///\n\n/// This will only collide between objects in src and objects in tgt. Both must be sorted into order by\n\n/// their min-x position, with the lowest first\n\n///\n\npub fn sweep_against<'a, TItem, SrcBoundsIter, TgtBoundsIter>(\n\n src: SrcBoundsIter,\n\n tgt: TgtBoundsIter,\n\n) -> impl 'a + Iterator<Item = (&'a TItem, &'a TItem)>\n\nwhere\n\n SrcBoundsIter: 'a + Iterator<Item = &'a TItem>,\n\n TgtBoundsIter: 'a + Iterator<Item = &'a TItem>,\n\n TItem: 'a + HasBoundingBox,\n\n TItem::Point: Coordinate2D,\n\n{\n\n SweepAgainstIterator {\n\n src_iterator: Some(src),\n\n tgt_iterator: tgt,\n\n pending: smallvec![],\n\n src_by_max_x: Vec::new(),\n\n src_last_min_x: f64::MIN,\n\n }\n\n}\n\n\n", "file_path": "src/geo/sweep.rs", "rank": 43, "score": 112309.32618058306 }, { "content": "///\n\n/// Computes an affine transform that translates from an arbitrary bezier curve to one that has the first three control points\n\n/// fixed at w1 = (0,0), w2 = (0, 1) and w3 = (1, 1).\n\n///\n\n/// Bezier curves maintain their properties when transformed so this provides a curve with equivalent properties to the input\n\n/// curve but only a single free point (w4). 
This will return 'None' for the degenerate cases: where two points overlap or\n\n/// where the points are collinear.\n\n///\n\nfn canonical_curve_transform<Point: Coordinate + Coordinate2D>(\n\n w1: &Point,\n\n w2: &Point,\n\n w3: &Point,\n\n) -> Option<(f64, f64, f64, f64, f64, f64)> {\n\n // Fetch the coordinates\n\n let (x0, y0) = (w1.x(), w1.y());\n\n let (x1, y1) = (w2.x(), w2.y());\n\n let (x2, y2) = (w3.x(), w3.y());\n\n\n\n let a_divisor = (y2 - y1) * (x0 - x1) - (x2 - x1) * (y0 - y1);\n\n if a_divisor.abs() > SMALL_DIVISOR {\n\n // Transform is:\n\n //\n\n // [ a, b, c ] [ x ]\n\n // [ d, e, f ] . [ y ]\n\n // [ 0, 0, 1 ] [ 1 ]\n\n //\n\n // This will move w1 to 0,0, w2 to 0, 1 and w3 to 1, 1, which will form our canonical curve that we use for the classification algorithm\n\n let a = (-(y0 - y1)) / a_divisor;\n", "file_path": "src/bezier/characteristics.rs", "rank": 44, "score": 111858.24538845241 }, { "content": "///\n\n/// Returns the features from a curve where we have discovered the canonical point\n\n///\n\nfn features_from_canonical_point<Point: Coordinate + Coordinate2D>(\n\n x: f64,\n\n y: f64,\n\n w1: &Point,\n\n w2: &Point,\n\n w3: &Point,\n\n w4: &Point,\n\n accuracy: f64,\n\n) -> CurveFeatures {\n\n match characterize_from_canonical_point((x, y)) {\n\n CurveCategory::Arch => CurveFeatures::Arch,\n\n CurveCategory::Linear => CurveFeatures::Linear,\n\n CurveCategory::Cusp => CurveFeatures::Cusp,\n\n CurveCategory::Parabolic => CurveFeatures::Parabolic,\n\n CurveCategory::Point => CurveFeatures::Point,\n\n CurveCategory::DoubleInflectionPoint | CurveCategory::SingleInflectionPoint => {\n\n find_inflection_points((x, y)).into()\n\n }\n\n CurveCategory::Loop => {\n\n let curve = Curve::from_points(*w1, (*w2, *w3), *w4);\n\n let loop_pos = find_self_intersection_point(&curve, accuracy);\n\n\n\n // TODO: if we can't find the loop_pos, we could probably find a cusp position instead\n\n loop_pos\n\n .map(|(t1, t2)| CurveFeatures::Loop(t1, t2))\n\n .unwrap_or(CurveFeatures::Arch)\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/bezier/characteristics.rs", "rank": 45, "score": 111858.24538845241 }, { "content": "///\n\n/// Finds the bounds of a path using the looser 'fast' algorithm\n\n///\n\npub fn path_fast_bounding_box<P: BezierPath, Bounds: BoundingBox<Point = P::Point>>(\n\n path: &P,\n\n) -> Bounds {\n\n path_to_curves(path)\n\n .map(|curve: Curve<P::Point>| curve.fast_bounding_box())\n\n .reduce(|first: Bounds, second| first.union_bounds(second))\n\n .unwrap_or_else(|| Bounds::from_min_max(P::Point::origin(), P::Point::origin()))\n\n}\n", "file_path": "src/bezier/path/bounds.rs", "rank": 46, "score": 109390.16074592932 }, { "content": "///\n\n/// Sweeps a set of objects with bounding boxes to find the potential collisions between them\n\n///\n\n/// The objects must be sorted into order by their min-x position, with the lowest first\n\n///\n\npub fn sweep_self<'a, TItem, BoundsIter>(\n\n ordered_items: BoundsIter,\n\n) -> impl 'a + Iterator<Item = (&'a TItem, &'a TItem)>\n\nwhere\n\n BoundsIter: 'a + Iterator<Item = &'a TItem>,\n\n TItem: 'a + HasBoundingBox,\n\n TItem::Point: Coordinate2D,\n\n{\n\n SweepSelfIterator {\n\n bounds_iterator: ordered_items,\n\n pending: smallvec![],\n\n by_max_x: Vec::new(),\n\n }\n\n}\n\n\n", "file_path": "src/geo/sweep.rs", "rank": 47, "score": 108613.69381522806 }, { "content": "///\n\n/// Determines the characteristics of a paritcular bezier curve: whether or not it is an arch, or changes directions\n\n/// (has inflection points), or 
self-intersects (has a loop)\n\n///\n\npub fn features_for_cubic_bezier<Point: Coordinate + Coordinate2D>(\n\n w1: &Point,\n\n w2: &Point,\n\n w3: &Point,\n\n w4: &Point,\n\n accuracy: f64,\n\n) -> CurveFeatures {\n\n // b4 is the end point of an equivalent curve with the other control points fixed at (0, 0), (0, 1) and (1, 1)\n\n let b4 = to_canonical_curve(w1, w2, w3, w4);\n\n\n\n if let Some(b4) = b4 {\n\n // For the inflection points, we rely on the fact that the canonical curve is generated by an affine transform of the original\n\n // (and the features are invariant in such a situation)\n\n let x = b4.x();\n\n let y = b4.y();\n\n\n\n features_from_canonical_point(x, y, w1, w2, w3, w4, accuracy)\n\n } else {\n\n // Degenerate case: there's no canonical form for this curve\n\n if w2.is_near_to(w3, SMALL_DISTANCE) {\n", "file_path": "src/bezier/characteristics.rs", "rank": 48, "score": 106440.60308989766 }, { "content": "///\n\n/// Determines the characteristics of a particular bezier curve: whether or not it is an arch, or changes directions\n\n/// (has inflection points), or self-intersects (has a loop)\n\n///\n\npub fn characterize_cubic_bezier<Point: Coordinate + Coordinate2D>(\n\n w1: &Point,\n\n w2: &Point,\n\n w3: &Point,\n\n w4: &Point,\n\n) -> CurveCategory {\n\n // b4 is the end point of an equivalent curve with the other control points fixed at (0, 0), (0, 1) and (1, 1)\n\n let b4 = to_canonical_curve(w1, w2, w3, w4);\n\n\n\n if let Some(b4) = b4 {\n\n let x = b4.x();\n\n let y = b4.y();\n\n\n\n characterize_from_canonical_point((x, y))\n\n } else {\n\n // Degenerate case: there's no canonical form for this curve\n\n if w2.is_near_to(w3, SMALL_DISTANCE) {\n\n if w2.is_near_to(w1, SMALL_DISTANCE) {\n\n if w3.is_near_to(w4, SMALL_DISTANCE) {\n\n // All 4 control points at the same position\n", "file_path": "src/bezier/characteristics.rs", "rank": 49, "score": 106440.60308989766 }, { "content": "///\n\n/// Finds the bounds of a path\n\n///\n\npub fn path_bounding_box<P: BezierPath, Bounds: BoundingBox<Point = P::Point>>(path: &P) -> Bounds {\n\n path_to_curves(path)\n\n .map(|curve: Curve<P::Point>| curve.bounding_box())\n\n .reduce(|first: Bounds, second| first.union_bounds(second))\n\n .unwrap_or_else(|| Bounds::from_min_max(P::Point::origin(), P::Point::origin()))\n\n}\n\n\n", "file_path": "src/bezier/path/bounds.rs", "rank": 50, "score": 105538.00663529083 }, { "content": "///\n\n/// Writes out the graph path as an SVG string\n\n///\n\npub fn graph_path_svg_string<P: Coordinate + Coordinate2D>(\n\n path: &GraphPath<P, PathLabel>,\n\n rays: Vec<(P, P)>,\n\n) -> String {\n\n let mut result = String::new();\n\n\n\n let bounds = path\n\n .all_edges()\n\n .fold(Bounds::empty(), |a, b| a.union_bounds(b.bounding_box()));\n\n let offset = bounds.min();\n\n let scale = 1000.0 / (bounds.max() - bounds.min()).x();\n\n\n\n let mut index = 0;\n\n\n\n for kinds in vec![\n\n vec![\n\n GraphPathEdgeKind::Uncategorised,\n\n GraphPathEdgeKind::Visited,\n\n GraphPathEdgeKind::Interior,\n\n ],\n", "file_path": "src/debug/graph_path_debug.rs", "rank": 51, "score": 100973.45579082219 }, { "content": "#[test]\n\nfn intersection_at_0_0() {\n\n assert!(\n\n line_intersects_line(\n\n &(Coord2(-1.0, 0.0), Coord2(1.0, 0.0)),\n\n &(Coord2(0.0, 1.0), Coord2(0.0, -1.0))\n\n )\n\n .unwrap()\n\n .distance_to(&Coord2(0.0, 0.0))\n\n < 0.01\n\n );\n\n}\n\n\n", "file_path": "tests/line/intersection.rs", "rank": 52, "score": 100078.05453582745 }, { "content": "#[test]\n\nfn subdivide_1() {\n\n // Initial 
curve\n\n let (w1, w2, w3, w4) = (1.0, 2.0, 3.0, 4.0);\n\n\n\n // Subdivide at 33%, creating two curves\n\n let ((wa1, wa2, wa3, wa4), (_wb1, _wb2, _wb3, _wb4)) = bezier::subdivide4(0.33, w1, w2, w3, w4);\n\n\n\n // Check that the original curve corresponds to the basis function for wa\n\n for x in 0..100 {\n\n let t = (x as f64) / 100.0;\n\n\n\n let original = bezier::basis(t * 0.33, w1, w2, w3, w4);\n\n let subdivision = bezier::basis(t, wa1, wa2, wa3, wa4);\n\n\n\n assert!(approx_equal(original, subdivision));\n\n }\n\n}\n\n\n", "file_path": "tests/bezier/subdivide.rs", "rank": 53, "score": 100078.05453582745 }, { "content": "#[test]\n\nfn no_intersection() {\n\n assert!(\n\n line_intersects_line(\n\n &(Coord2(12.0, 13.0), Coord2(24.0, 30.0)),\n\n &(Coord2(1.0, 1.0), Coord2(0.0, -1.0))\n\n ) == None\n\n );\n\n}\n\n\n", "file_path": "tests/line/intersection.rs", "rank": 54, "score": 100078.05453582745 }, { "content": "#[test]\n\nfn subdivide_2() {\n\n // Initial curve\n\n let (w1, w2, w3, w4) = (1.0, 2.0, 3.0, 4.0);\n\n\n\n // Subdivide at 33%, creating two curves\n\n let ((_wa1, _wa2, _wa3, _wa4), (wb1, wb2, wb3, wb4)) = bezier::subdivide4(0.33, w1, w2, w3, w4);\n\n\n\n // Check that the original curve corresponds to the basis function for wb\n\n for x in 0..100 {\n\n let t = (x as f64) / 100.0;\n\n\n\n let original = bezier::basis(0.33 + (t * (1.0 - 0.33)), w1, w2, w3, w4);\n\n let subdivision = bezier::basis(t, wb1, wb2, wb3, wb4);\n\n\n\n assert!(approx_equal(original, subdivision));\n\n }\n\n}\n", "file_path": "tests/bezier/subdivide.rs", "rank": 55, "score": 100078.05453582745 }, { "content": "#[test]\n\nfn even_walk_6() {\n\n let c = Curve::from_points(\n\n Coord2(771.375, 195.0959930419922),\n\n (\n\n Coord2(771.375, 195.0959930419922),\n\n Coord2(629.2169799804688, 161.80499267578125),\n\n ),\n\n Coord2(622.0430297851563, 160.3459930419922),\n\n );\n\n let sections = walk_curve_evenly(&c, 2.0, 0.5).collect::<Vec<_>>();\n\n let actual_length = curve_length(&c, 0.1);\n\n\n\n let mut total_length = 0.0;\n\n let mut last_t = 0.0;\n\n for section in sections.iter() {\n\n println!(\n\n \"{:?} {:?}\",\n\n chord_length(section) - 2.0,\n\n section.original_curve_t_values()\n\n );\n", "file_path": "tests/bezier/walk.rs", "rank": 56, "score": 97146.33185312434 }, { "content": "#[test]\n\nfn detect_loop_1() {\n\n let curve = Curve::from_points(\n\n Coord2(110.0, 150.0),\n\n (Coord2(287.0, 227.0), Coord2(70.0, 205.0)),\n\n Coord2(205.0, 159.0),\n\n );\n\n assert!(curve.characteristics() == bezier::CurveCategory::Loop);\n\n}\n\n\n", "file_path": "tests/bezier/characteristics.rs", "rank": 57, "score": 97146.33185312434 }, { "content": "#[test]\n\nfn measure_length_4() {\n\n let c = Curve::from_points(\n\n Coord2(170.83203, 534.28906),\n\n (Coord2(35.15625, 502.65625), Coord2(0.52734375, 478.67188)),\n\n Coord2(262.95313, 533.2656),\n\n );\n\n let by_subdivision = subdivide_length(&c);\n\n let by_measuring = curve_length(&c, 0.5);\n\n\n\n assert!((by_measuring - by_subdivision).abs() < 1.0);\n\n}\n", "file_path": "tests/bezier/length.rs", "rank": 58, "score": 97146.33185312434 }, { "content": "#[test]\n\nfn simple_offset_2() {\n\n let c = Curve::from_points(\n\n Coord2(110.0, 110.0),\n\n (Coord2(110.0, 300.0), Coord2(500.0, 300.0)),\n\n Coord2(500.0, 110.0),\n\n );\n\n let offset = offset(&c, 10.0, 10.0);\n\n let error = max_error(&c, &offset, 10.0, 10.0);\n\n\n\n assert!(error <= 2.0);\n\n}\n\n\n", "file_path": "tests/bezier/offset.rs", "rank": 59, "score": 97146.33185312434 }, { "content": 
"#[test]\n\nfn resize_offset_3() {\n\n let c = Curve::from_points(\n\n Coord2(516.170654296875, 893.27001953125),\n\n (\n\n Coord2(445.1522921545783, 856.2028149461783),\n\n Coord2(447.7831664737134, 878.3276285260063),\n\n ),\n\n Coord2(450.51018453430754, 901.260980294519),\n\n );\n\n let offset = offset(&c, 10.0, 40.0);\n\n let error = max_error(&c, &offset, 10.0, 40.0);\n\n\n\n // The error seems to get so high because we're using the 't' value as a ratio for determining width rather than curve length\n\n // This also results in this offset curve not being particularly smooth\n\n assert!(error <= 15.0);\n\n}\n\n\n", "file_path": "tests/bezier/offset.rs", "rank": 60, "score": 97146.33185312434 }, { "content": "#[test]\n\nfn overlapping_lines_same() {\n\n let curve1 = Curve::from_points(\n\n Coord2(10.0, 100.0),\n\n (Coord2(30.0, 100.0), Coord2(200.0, 100.0)),\n\n Coord2(220.0, 100.0),\n\n );\n\n let section = Curve::from_points(\n\n Coord2(10.0, 100.0),\n\n (Coord2(30.0, 100.0), Coord2(200.0, 100.0)),\n\n Coord2(220.0, 100.0),\n\n );\n\n\n\n let overlaps = overlapping_region(&curve1, &section).unwrap();\n\n\n\n assert!(((overlaps.0).0 - 0.0).abs() < 0.001);\n\n assert!(((overlaps.0).1 - 1.0).abs() < 0.001);\n\n}\n\n\n", "file_path": "tests/bezier/overlaps.rs", "rank": 61, "score": 97146.33185312434 }, { "content": "#[test]\n\nfn solve_t_for_pos() {\n\n let curve1 = Curve::from_points(\n\n Coord2(10.0, 100.0),\n\n (Coord2(90.0, 30.0), Coord2(40.0, 140.0)),\n\n Coord2(220.0, 220.0),\n\n );\n\n\n\n let point_at_one_third = curve1.point_at_pos(0.3333);\n\n let solved = curve1.t_for_point(&point_at_one_third);\n\n\n\n assert!(solved.is_some());\n\n assert!((solved.unwrap() - 0.3333).abs() < 0.001);\n\n}\n\n\n", "file_path": "tests/bezier/solve.rs", "rank": 62, "score": 97146.33185312434 }, { "content": "#[test]\n\nfn intersection_at_other_point() {\n\n assert!(\n\n line_intersects_line(\n\n &(Coord2(10.0, 20.0), Coord2(50.0, 60.0)),\n\n &(Coord2(10.0, 45.0), Coord2(50.0, 35.0))\n\n )\n\n .unwrap()\n\n .distance_to(&Coord2(30.0, 40.0))\n\n < 0.01\n\n );\n\n}\n\n\n", "file_path": "tests/line/intersection.rs", "rank": 63, "score": 97146.33185312434 }, { "content": "#[test]\n\nfn detect_loop_2() {\n\n let curve = Curve::from_points(\n\n Coord2(549.2899780273438, 889.4202270507813),\n\n (\n\n Coord2(553.4288330078125, 893.8638305664063),\n\n Coord2(542.5203247070313, 889.04931640625),\n\n ),\n\n Coord2(548.051025390625, 891.1853637695313),\n\n );\n\n assert!(characterize_curve(&curve) == bezier::CurveCategory::Loop);\n\n}\n\n\n", "file_path": "tests/bezier/characteristics.rs", "rank": 64, "score": 97146.33185312434 }, { "content": "#[test]\n\nfn even_walk_5() {\n\n let c = Curve::from_points(\n\n Coord2(128.51366414207797, 100.43540868606826),\n\n (\n\n Coord2(128.8517120419268, 100.53996562501626),\n\n Coord2(131.79687993559304, 99.36123524249854),\n\n ),\n\n Coord2(131.8239019605053, 99.36980615298116),\n\n );\n\n let sections = walk_curve_evenly(&c, 1.0, 0.1).collect::<Vec<_>>();\n\n let actual_length = curve_length(&c, 0.1);\n\n\n\n let mut total_length = 0.0;\n\n let mut last_t = 0.0;\n\n for section in sections.iter() {\n\n println!(\n\n \"{:?} {:?}\",\n\n chord_length(section) - 1.0,\n\n section.original_curve_t_values()\n\n );\n", "file_path": "tests/bezier/walk.rs", "rank": 65, "score": 97146.33185312434 }, { "content": "#[test]\n\nfn measure_length_1() {\n\n let c = Curve::from_points(\n\n Coord2(412.0, 500.0),\n\n (Coord2(412.0, 500.0), Coord2(163.0, 504.0)),\n\n Coord2(308.0, 
665.0),\n\n );\n\n let by_subdivision = subdivide_length(&c);\n\n let by_measuring = curve_length(&c, 0.5);\n\n\n\n assert!((by_measuring - by_subdivision).abs() < 1.0);\n\n}\n\n\n", "file_path": "tests/bezier/length.rs", "rank": 66, "score": 97146.33185312434 }, { "content": "#[test]\n\nfn solve_t_for_end() {\n\n let curve1 = Curve::from_points(\n\n Coord2(10.0, 100.0),\n\n (Coord2(90.0, 30.0), Coord2(40.0, 140.0)),\n\n Coord2(220.0, 220.0),\n\n );\n\n\n\n let solved = curve1.t_for_point(&Coord2(220.0, 220.0));\n\n\n\n assert!(solved.is_some());\n\n assert!((solved.unwrap() - 1.0).abs() < 0.001);\n\n}\n\n\n", "file_path": "tests/bezier/solve.rs", "rank": 67, "score": 97146.33185312434 }, { "content": "#[test]\n\nfn even_walk_4() {\n\n let c = Curve::from_points(\n\n Coord2(222.37538991853827, 99.16540392815092),\n\n (\n\n Coord2(224.47523575883392, 100.31557953334229),\n\n Coord2(223.19303980237945, 101.8075327562316),\n\n ),\n\n Coord2(225.42363518033414, 99.716688142193),\n\n );\n\n let sections = walk_curve_evenly(&c, 1.0, 0.1).collect::<Vec<_>>();\n\n let actual_length = curve_length(&c, 0.1);\n\n\n\n let mut total_length = 0.0;\n\n let mut last_t = 0.0;\n\n for section in sections.iter() {\n\n let (_, t_max) = section.original_curve_t_values();\n\n assert!(t_max > last_t);\n\n last_t = t_max;\n\n\n\n assert!(\n", "file_path": "tests/bezier/walk.rs", "rank": 68, "score": 97146.33185312434 }, { "content": "#[test]\n\nfn measure_length_2() {\n\n let c = Curve::from_points(\n\n Coord2(987.7637, 993.9645),\n\n (Coord2(991.1699, 994.0231), Coord2(1043.5605, 853.44885)),\n\n Coord2(1064.9473, 994.277),\n\n );\n\n let by_subdivision = subdivide_length(&c);\n\n let by_measuring = curve_length(&c, 0.5);\n\n\n\n assert!((by_measuring - by_subdivision).abs() < 1.0);\n\n}\n\n\n", "file_path": "tests/bezier/length.rs", "rank": 69, "score": 97146.33185312434 }, { "content": "#[test]\n\nfn simple_offset_5() {\n\n // This curve has a point approaching a cusp, so it produces 'strange' values\n\n\n\n // We bulge out slightly around the cusp so there's a large error\n\n let c = Curve::from_points(\n\n Coord2(170.83203, 534.28906),\n\n (Coord2(140.99219, 492.1289), Coord2(0.52734375, 478.67188)),\n\n Coord2(262.95313, 533.2656),\n\n );\n\n let offset_1 = offset(&c, 10.0, 10.0);\n\n let error_1 = max_error(&c, &offset_1, 10.0, 10.0);\n\n assert!(error_1 <= 12.0);\n\n\n\n // Offsetting too much 'inside' the curve starts to produce chaotic behaviour around the cusp with this algorithm\n\n let offset_2 = offset(&c, -2.0, -2.0);\n\n let error_2 = max_error(&c, &offset_2, 2.0, 2.0);\n\n assert!(error_2 <= 4.0);\n\n}\n\n\n", "file_path": "tests/bezier/offset.rs", "rank": 71, "score": 97146.33185312434 }, { "content": "#[test]\n\nfn varying_walk_1() {\n\n let c = Curve::from_points(\n\n Coord2(412.0, 500.0),\n\n (Coord2(412.0, 500.0), Coord2(163.0, 504.0)),\n\n Coord2(308.0, 665.0),\n\n );\n\n let sections = walk_curve_evenly(&c, 1.0, 0.1)\n\n .vary_by(vec![1.0, 2.0, 3.0].into_iter().cycle())\n\n .collect::<Vec<_>>();\n\n let actual_length = curve_length(&c, 0.1);\n\n\n\n let mut total_length = 0.0;\n\n let mut last_t = 0.0;\n\n let mut expected_length = vec![1.0, 2.0, 3.0].into_iter().cycle();\n\n for section in sections.iter().take(sections.len() - 1) {\n\n let (_, t_max) = section.original_curve_t_values();\n\n assert!(t_max > last_t);\n\n last_t = t_max;\n\n\n\n let expected_length = expected_length.next().unwrap();\n\n assert!((chord_length(section) - expected_length).abs() <= 0.1);\n\n total_length += 
chord_length(section);\n\n }\n\n\n\n assert!(sections[sections.len() - 1].original_curve_t_values().1 == 1.0);\n\n\n\n println!(\"{:?}\", (total_length - actual_length).abs());\n\n assert!((total_length - actual_length).abs() < 4.0);\n\n}\n", "file_path": "tests/bezier/walk.rs", "rank": 72, "score": 97146.33185312434 }, { "content": "#[test]\n\nfn even_walk_1() {\n\n let c = Curve::from_points(\n\n Coord2(412.0, 500.0),\n\n (Coord2(412.0, 500.0), Coord2(163.0, 504.0)),\n\n Coord2(308.0, 665.0),\n\n );\n\n let sections = walk_curve_evenly(&c, 1.0, 0.1).collect::<Vec<_>>();\n\n let actual_length = curve_length(&c, 0.1);\n\n\n\n let mut total_length = 0.0;\n\n let mut last_t = 0.0;\n\n for section in sections.iter() {\n\n let (_, t_max) = section.original_curve_t_values();\n\n assert!(t_max > last_t);\n\n last_t = t_max;\n\n\n\n assert!(\n\n (chord_length(section) - 1.0).abs() <= 0.1\n\n || (t_max >= 1.0 && chord_length(section) - 1.0 <= 0.0)\n\n );\n\n total_length += chord_length(section);\n\n }\n\n\n\n assert!(sections[sections.len() - 1].original_curve_t_values().1 == 1.0);\n\n\n\n println!(\"{:?}\", (total_length - actual_length).abs());\n\n assert!((total_length - actual_length).abs() < 4.0);\n\n}\n\n\n", "file_path": "tests/bezier/walk.rs", "rank": 74, "score": 97146.33185312434 }, { "content": "#[test]\n\nfn measure_length_3() {\n\n let c = Curve::from_points(\n\n Coord2(170.83203, 534.28906),\n\n (Coord2(140.99219, 492.1289), Coord2(0.52734375, 478.67188)),\n\n Coord2(262.95313, 533.2656),\n\n );\n\n let by_subdivision = subdivide_length(&c);\n\n let by_measuring = curve_length(&c, 0.5);\n\n\n\n assert!((by_measuring - by_subdivision).abs() < 1.0);\n\n}\n\n\n", "file_path": "tests/bezier/length.rs", "rank": 75, "score": 97146.33185312434 }, { "content": "#[test]\n\nfn even_walk_2() {\n\n let c = Curve::from_points(\n\n Coord2(170.83203, 534.28906),\n\n (Coord2(140.99219, 492.1289), Coord2(0.52734375, 478.67188)),\n\n Coord2(262.95313, 533.2656),\n\n );\n\n let sections = walk_curve_evenly(&c, 1.0, 0.1).collect::<Vec<_>>();\n\n let actual_length = curve_length(&c, 0.1);\n\n\n\n let mut total_length = 0.0;\n\n let mut last_t = 0.0;\n\n for section in sections.iter() {\n\n let (_, t_max) = section.original_curve_t_values();\n\n assert!(t_max > last_t);\n\n last_t = t_max;\n\n\n\n assert!(\n\n (chord_length(section) - 1.0).abs() <= 0.1\n\n || (t_max >= 1.0 && chord_length(section) - 1.0 <= 0.0)\n\n );\n\n total_length += chord_length(section);\n\n }\n\n\n\n assert!(sections[sections.len() - 1].original_curve_t_values().1 == 1.0);\n\n\n\n println!(\"{:?}\", (total_length - actual_length).abs());\n\n assert!((total_length - actual_length).abs() < 4.0);\n\n}\n\n\n", "file_path": "tests/bezier/walk.rs", "rank": 76, "score": 97146.33185312434 }, { "content": "#[test]\n\nfn even_walk_3() {\n\n let c = Curve::from_points(\n\n Coord2(987.7637, 993.9645),\n\n (Coord2(991.1699, 994.0231), Coord2(1043.5605, 853.44885)),\n\n Coord2(1064.9473, 994.277),\n\n );\n\n let sections = walk_curve_evenly(&c, 1.0, 0.1).collect::<Vec<_>>();\n\n let actual_length = curve_length(&c, 0.1);\n\n\n\n let mut total_length = 0.0;\n\n let mut last_t = 0.0;\n\n for section in sections.iter() {\n\n let (_, t_max) = section.original_curve_t_values();\n\n assert!(t_max > last_t);\n\n last_t = t_max;\n\n\n\n assert!(\n\n (chord_length(section) - 1.0).abs() <= 0.1\n\n || (t_max >= 1.0 && chord_length(section) - 1.0 <= 0.0)\n\n );\n\n total_length += chord_length(section);\n\n }\n\n\n\n assert!(sections[sections.len() - 
1].original_curve_t_values().1 == 1.0);\n\n\n\n println!(\"{:?}\", (total_length - actual_length).abs());\n\n assert!((total_length - actual_length).abs() < 4.0);\n\n}\n\n\n", "file_path": "tests/bezier/walk.rs", "rank": 77, "score": 97146.33185312434 }, { "content": "#[test]\n\nfn simple_offset_4() {\n\n // This curve seems to produce a huge spike\n\n let c = Curve::from_points(\n\n Coord2(987.7637, 993.9645),\n\n (Coord2(991.1699, 994.0231), Coord2(1043.5605, 853.44885)),\n\n Coord2(1064.9473, 994.277),\n\n );\n\n let offset = offset(&c, 10.0, 10.0);\n\n let error = max_error(&c, &offset, 10.0, 10.0);\n\n\n\n assert!(error <= 10.0);\n\n}\n\n\n", "file_path": "tests/bezier/offset.rs", "rank": 78, "score": 97146.33185312434 }, { "content": "#[test]\n\nfn move_offset_1() {\n\n let c = Curve::from_points(\n\n Coord2(163.0, 579.0),\n\n (Coord2(163.0, 579.0), Coord2(405.0, 684.0)),\n\n Coord2(405.0, 684.0),\n\n );\n\n let offset = offset(&c, 10.0, 10.0);\n\n let error = max_error(&c, &offset, 10.0, 10.0);\n\n\n\n assert!(offset.len() == 1);\n\n\n\n let w1 = offset[0].start_point();\n\n let (w2, w3) = offset[0].control_points();\n\n let w4 = offset[0].end_point();\n\n\n\n assert!((w2, w3).distance_to(&w1) < 0.01);\n\n assert!((w2, w3).distance_to(&w4) < 0.01);\n\n assert!(error <= 1.0);\n\n}\n\n\n", "file_path": "tests/bezier/offset.rs", "rank": 79, "score": 97146.33185312434 }, { "content": "#[test]\n\nfn resizing_offset_1() {\n\n let c = Curve::from_points(\n\n Coord2(412.0, 500.0),\n\n (Coord2(163.0, 589.0), Coord2(163.0, 504.0)),\n\n Coord2(308.0, 665.0),\n\n );\n\n let offset = offset(&c, 10.0, 40.0);\n\n let error = max_error(&c, &offset, 10.0, 40.0);\n\n\n\n assert!(error <= 2.0);\n\n}\n\n\n", "file_path": "tests/bezier/offset.rs", "rank": 80, "score": 97146.33185312434 }, { "content": "#[test]\n\nfn simple_offset_6() {\n\n let c = Curve::from_points(\n\n Coord2(170.83203, 534.28906),\n\n (Coord2(35.15625, 502.65625), Coord2(0.52734375, 478.67188)),\n\n Coord2(262.95313, 533.2656),\n\n );\n\n\n\n // This is a very tight curve, so there's no good solution in this direction for large offsets (the scaling algorithm produces a very chaotic curve)\n\n let offset_1 = offset(&c, 2.0, 2.0);\n\n let error_1 = max_error(&c, &offset_1, 2.0, 2.0);\n\n assert!(error_1 <= 2.0);\n\n\n\n let offset_2 = offset(&c, -10.0, -10.0);\n\n let error_2 = max_error(&c, &offset_2, 10.0, 10.0);\n\n\n\n assert!(error_2 <= 1.0);\n\n}\n\n\n", "file_path": "tests/bezier/offset.rs", "rank": 81, "score": 97146.33185312434 }, { "content": "#[test]\n\nfn simple_offset_1() {\n\n let c = Curve::from_points(\n\n Coord2(412.0, 500.0),\n\n (Coord2(163.0, 589.0), Coord2(163.0, 504.0)),\n\n Coord2(308.0, 665.0),\n\n );\n\n let offset = offset(&c, 10.0, 10.0);\n\n let error = max_error(&c, &offset, 10.0, 10.0);\n\n\n\n assert!(error <= 2.0);\n\n}\n\n\n", "file_path": "tests/bezier/offset.rs", "rank": 82, "score": 97146.33185312434 }, { "content": "#[test]\n\nfn uneven_walk_1() {\n\n let c = Curve::from_points(\n\n Coord2(412.0, 500.0),\n\n (Coord2(412.0, 500.0), Coord2(163.0, 504.0)),\n\n Coord2(308.0, 665.0),\n\n );\n\n let sections = walk_curve_unevenly(&c, 10).collect::<Vec<_>>();\n\n\n\n assert!(sections.len() == 10);\n\n assert!(sections[0].original_curve_t_values() == (0.0, 0.1));\n\n\n\n for section_num in 0..10 {\n\n let expected_t_min = (section_num as f64) / 10.0;\n\n let expected_t_max = (section_num as f64) / 10.0 + 0.1;\n\n\n\n let (actual_t_min, actual_t_max) = sections[section_num].original_curve_t_values();\n\n\n\n 
assert!((actual_t_min - expected_t_min).abs() < 0.0001);\n\n assert!((actual_t_max - expected_t_max).abs() < 0.0001);\n\n }\n\n}\n\n\n", "file_path": "tests/bezier/walk.rs", "rank": 83, "score": 97146.33185312434 }, { "content": "#[test]\n\nfn section_of_section() {\n\n let original_curve = Curve::from_points(\n\n Coord2(2.0, 3.0),\n\n (Coord2(4.0, 5.0), Coord2(5.0, 0.0)),\n\n Coord2(6.0, 2.0),\n\n );\n\n let mut mid_section = original_curve.section(0.25, 0.75);\n\n mid_section = mid_section.subsection(0.25, 0.75);\n\n\n\n for t in 0..=10 {\n\n let t = (t as f64) / 10.0;\n\n let t2 = t * 0.25 + 0.375;\n\n\n\n let p1 = mid_section.point_at_pos(t);\n\n let p2 = original_curve.point_at_pos(t2);\n\n\n\n assert!(p1.distance_to(&p2) < 0.0001);\n\n }\n\n}\n\n\n", "file_path": "tests/bezier/section.rs", "rank": 84, "score": 97146.33185312434 }, { "content": "#[test]\n\nfn solve_t_for_start() {\n\n let curve1 = Curve::from_points(\n\n Coord2(10.0, 100.0),\n\n (Coord2(90.0, 30.0), Coord2(40.0, 140.0)),\n\n Coord2(220.0, 220.0),\n\n );\n\n\n\n let solved = curve1.t_for_point(&Coord2(10.0, 100.0));\n\n\n\n assert!(solved.is_some());\n\n assert!((solved.unwrap() - 0.0).abs() < 0.001);\n\n}\n\n\n", "file_path": "tests/bezier/solve.rs", "rank": 85, "score": 97146.33185312434 }, { "content": "#[test]\n\nfn resizing_offset_2() {\n\n let c = Curve::from_points(\n\n Coord2(110.0, 110.0),\n\n (Coord2(110.0, 300.0), Coord2(500.0, 300.0)),\n\n Coord2(500.0, 110.0),\n\n );\n\n let offset = offset(&c, 10.0, 40.0);\n\n let error = max_error(&c, &offset, 10.0, 40.0);\n\n\n\n assert!(error <= 6.0);\n\n}\n\n\n", "file_path": "tests/bezier/offset.rs", "rank": 86, "score": 97146.33185312434 }, { "content": "#[test]\n\nfn simple_offset_3() {\n\n // This curve doesn't produce a very satisfying result, so it's interesting it has a low error value\n\n let c = Curve::from_points(\n\n Coord2(516.170654296875, 893.27001953125),\n\n (\n\n Coord2(445.1522921545783, 856.2028149461783),\n\n Coord2(447.7831664737134, 878.3276285260063),\n\n ),\n\n Coord2(450.51018453430754, 901.260980294519),\n\n );\n\n let offset = offset(&c, 10.0, 10.0);\n\n let error = max_error(&c, &offset, 10.0, 10.0);\n\n\n\n assert!(error <= 2.0);\n\n}\n\n\n", "file_path": "tests/bezier/offset.rs", "rank": 87, "score": 97146.33185312434 }, { "content": "#[test]\n\nfn normal_for_point() {\n\n let line = bezier::Curve::from_points(\n\n Coord2(0.0, 0.0),\n\n (Coord2(0.0, 0.0), Coord2(0.0, 0.0)),\n\n Coord2(0.0, 0.0),\n\n );\n\n let normal = line.normal_at_pos(0.5);\n\n\n\n // Normal should be the (0,0) vector (points don't have normals)\n\n assert!(normal.x().abs() < 0.0001);\n\n assert!(normal.y() < 0.0001);\n\n}\n\n\n", "file_path": "tests/bezier/normal.rs", "rank": 88, "score": 97146.33185312434 }, { "content": "#[test]\n\nfn overlaps_with_known_curve_3() {\n\n // These curves should overlap\n\n let curve1 = Curve::from_points(\n\n Coord2(510.6888427734375, 684.9293212890625),\n\n (\n\n Coord2(511.68206787109375, 683.7874145507813),\n\n Coord2(512.7827758789063, 682.6954345703125),\n\n ),\n\n Coord2(513.9757080078125, 681.668212890625),\n\n );\n\n let curve2 = Curve::from_points(\n\n Coord2(510.6888427734375, 684.9293212890625),\n\n (\n\n Coord2(511.66473388671875, 683.8077392578125),\n\n Coord2(512.7447509765625, 682.73388671875),\n\n ),\n\n Coord2(513.9143676757813, 681.7202758789063),\n\n );\n\n\n\n assert!(overlapping_region(&curve1, &curve2).is_some());\n\n assert!(overlapping_region(&curve2, &curve1).is_some());\n\n}\n", "file_path": 
"tests/bezier/overlaps.rs", "rank": 89, "score": 94424.2679313882 }, { "content": "#[test]\n\nfn simple_overlapping_curves() {\n\n let curve1 = Curve::from_points(\n\n Coord2(10.0, 100.0),\n\n (Coord2(90.0, 30.0), Coord2(40.0, 140.0)),\n\n Coord2(220.0, 220.0),\n\n );\n\n let section = curve1.section(0.3333, 0.6666);\n\n\n\n let overlaps = overlapping_region(&curve1, &section).unwrap();\n\n\n\n assert!(((overlaps.0).0 - 0.3333).abs() < 0.001);\n\n assert!(((overlaps.0).1 - 0.6666).abs() < 0.001);\n\n}\n\n\n", "file_path": "tests/bezier/overlaps.rs", "rank": 90, "score": 94424.2679313882 }, { "content": "#[test]\n\nfn simple_overlapping_curves_same() {\n\n let curve1 = Curve::from_points(\n\n Coord2(10.0, 100.0),\n\n (Coord2(90.0, 30.0), Coord2(40.0, 140.0)),\n\n Coord2(220.0, 220.0),\n\n );\n\n let section = Curve::from_points(\n\n Coord2(10.0, 100.0),\n\n (Coord2(90.0, 30.0), Coord2(40.0, 140.0)),\n\n Coord2(220.0, 220.0),\n\n );\n\n\n\n let overlaps = overlapping_region(&curve1, &section).unwrap();\n\n\n\n assert!(((overlaps.0).0 - 0.0).abs() < 0.001);\n\n assert!(((overlaps.0).1 - 1.0).abs() < 0.001);\n\n}\n\n\n", "file_path": "tests/bezier/overlaps.rs", "rank": 91, "score": 94424.2679313882 }, { "content": "#[test]\n\nfn measure_point_length() {\n\n let c = Curve::from_points(\n\n Coord2(412.0, 500.0),\n\n (Coord2(412.0, 500.0), Coord2(412.0, 500.0)),\n\n Coord2(412.0, 500.0),\n\n );\n\n let by_subdivision = subdivide_length(&c);\n\n let by_measuring = curve_length(&c, 0.5);\n\n\n\n assert!((by_measuring - by_subdivision).abs() < 1.0);\n\n assert!(by_measuring.abs() < 0.1);\n\n}\n\n\n", "file_path": "tests/bezier/length.rs", "rank": 92, "score": 94424.2679313882 }, { "content": "#[test]\n\nfn overlaps_with_known_curve_1() {\n\n // These curves should overlap\n\n let curve1 = Curve::from_points(\n\n Coord2(346.69864, 710.2048),\n\n (Coord2(350.41446, 706.8076), Coord2(353.61026, 702.4266)),\n\n Coord2(356.28525, 698.20306),\n\n );\n\n let curve2 = Curve::from_points(\n\n Coord2(350.22574, 706.551),\n\n (Coord2(354.72943, 701.2933), Coord2(358.0882, 695.26)),\n\n Coord2(361.0284, 690.2511),\n\n );\n\n\n\n // They currently don't\n\n assert!(\n\n curve1.t_for_point(&curve2.start_point()).is_some()\n\n || curve2.t_for_point(&curve1.start_point()).is_some()\n\n );\n\n assert!(\n\n curve1.t_for_point(&curve2.end_point()).is_some()\n\n || curve2.t_for_point(&curve1.end_point()).is_some()\n\n );\n\n\n\n assert!(overlapping_region(&curve1, &curve2).is_none());\n\n}\n\n\n", "file_path": "tests/bezier/overlaps.rs", "rank": 93, "score": 94424.2679313882 }, { "content": "#[test]\n\nfn overlaps_with_known_curve_2() {\n\n // These curves should overlap\n\n let curve1 = Curve::from_points(\n\n Coord2(305.86907958984375, 882.2529296875),\n\n (\n\n Coord2(305.41015625, 880.7345581054688),\n\n Coord2(303.0707092285156, 879.744140625),\n\n ),\n\n Coord2(298.0640869140625, 875.537353515625),\n\n );\n\n let curve2 = Curve::from_points(\n\n Coord2(302.7962341308594, 879.1681518554688),\n\n (\n\n Coord2(299.5769348144531, 876.8582763671875),\n\n Coord2(297.1976318359375, 874.7939453125),\n\n ),\n\n Coord2(301.4282531738281, 878.26220703125),\n\n );\n\n\n\n // They currently don't\n", "file_path": "tests/bezier/overlaps.rs", "rank": 94, "score": 94424.2679313882 }, { "content": "#[test]\n\nfn read_curve_points() {\n\n let curve = bezier::Curve::from_points(\n\n Coord2(1.0, 1.0),\n\n (Coord2(3.0, 3.0), Coord2(4.0, 4.0)),\n\n Coord2(2.0, 2.0),\n\n );\n\n\n\n for x in 0..100 {\n\n let t = (x as f64) / 
100.0;\n\n\n\n let point = curve.point_at_pos(t);\n\n let another_point = bezier::de_casteljau4(\n\n t,\n\n Coord2(1.0, 1.0),\n\n Coord2(3.0, 3.0),\n\n Coord2(4.0, 4.0),\n\n Coord2(2.0, 2.0),\n\n );\n\n\n\n assert!(point.distance_to(&another_point) < 0.001);\n\n }\n\n}\n", "file_path": "tests/bezier/mod.rs", "rank": 95, "score": 94424.2679313882 }, { "content": "#[test]\n\nfn simple_overlapping_lines() {\n\n let curve1 = Curve::from_points(\n\n Coord2(10.0, 100.0),\n\n (Coord2(30.0, 100.0), Coord2(200.0, 100.0)),\n\n Coord2(220.0, 100.0),\n\n );\n\n let section = curve1.section(0.3333, 0.6666);\n\n\n\n let overlaps = overlapping_region(&curve1, &section).unwrap();\n\n\n\n assert!(((overlaps.0).0 - 0.3333).abs() < 0.001);\n\n assert!(((overlaps.0).1 - 0.6666).abs() < 0.001);\n\n}\n\n\n", "file_path": "tests/bezier/overlaps.rs", "rank": 96, "score": 94424.2679313882 }, { "content": "#[test]\n\nfn basis_at_t1_is_w4() {\n\n assert!(bezier::basis(1.0, 2.0, 3.0, 4.0, 5.0) == 5.0);\n\n}\n\n\n", "file_path": "tests/bezier/basis.rs", "rank": 97, "score": 94424.2679313882 }, { "content": "#[test]\n\nfn normal_for_line_is_straight_up() {\n\n let line = bezier::Curve::from_points(\n\n Coord2(0.0, 0.0),\n\n (Coord2(3.0, 0.0), Coord2(7.0, 0.0)),\n\n Coord2(10.0, 0.0),\n\n );\n\n let normal = line.normal_at_pos(0.5);\n\n\n\n // Normal should be a line facing up\n\n assert!(normal.x().abs() < 0.01);\n\n assert!(normal.y() > 0.01);\n\n}\n\n\n", "file_path": "tests/bezier/normal.rs", "rank": 98, "score": 94424.2679313882 }, { "content": "#[test]\n\nfn basis_at_t0_is_w1() {\n\n assert!(bezier::basis(0.0, 2.0, 3.0, 4.0, 5.0) == 2.0);\n\n}\n\n\n", "file_path": "tests/bezier/basis.rs", "rank": 99, "score": 94424.2679313882 } ]
Rust
crates/examples/src/readobj/mod.rs
sunfishcode/object
aaf312e51fc6e4511e19a32c05d4b2ddf248b5b6
use std::io::Write; use std::{fmt, str}; use object::read::archive::ArchiveFile; use object::read::macho::{FatArch, FatHeader}; use object::Endianness; pub fn print(w: &'_ mut dyn Write, e: &'_ mut dyn Write, file: &[u8]) { let mut printer = Printer::new(w, e); print_object(&mut printer, &*file); } struct Printer<'a> { w: &'a mut dyn Write, e: &'a mut dyn Write, indent: usize, } impl<'a> Printer<'a> { fn new(w: &'a mut dyn Write, e: &'a mut dyn Write) -> Self { Self { w, e, indent: 0 } } fn w(&mut self) -> &mut dyn Write { self.w } fn blank(&mut self) { writeln!(self.w).unwrap(); } fn print_indent(&mut self) { if self.indent != 0 { write!(self.w, "{:-1$}", " ", self.indent * 4).unwrap(); } } fn print_string(&mut self, s: &[u8]) { if let Ok(s) = str::from_utf8(s) { write!(self.w, "\"{}\"", s).unwrap(); } else { write!(self.w, "{:X?}", s).unwrap(); } } fn indent<F: FnOnce(&mut Self)>(&mut self, f: F) { self.indent += 1; f(self); self.indent -= 1; } fn group<F: FnOnce(&mut Self)>(&mut self, name: &str, f: F) { self.print_indent(); writeln!(self.w, "{} {{", name).unwrap(); self.indent(f); self.print_indent(); writeln!(self.w, "}}").unwrap(); } fn field_name(&mut self, name: &str) { self.print_indent(); if !name.is_empty() { write!(self.w, "{}: ", name).unwrap(); } } fn field<T: fmt::Display>(&mut self, name: &str, value: T) { self.field_name(name); writeln!(self.w, "{}", value).unwrap(); } fn field_hex<T: fmt::UpperHex>(&mut self, name: &str, value: T) { self.field_name(name); writeln!(self.w, "0x{:X}", value).unwrap(); } fn field_bytes(&mut self, name: &str, value: &[u8]) { self.field_name(name); writeln!(self.w, "{:X?}", value).unwrap(); } fn field_string_option<T: fmt::UpperHex>(&mut self, name: &str, value: T, s: Option<&[u8]>) { if let Some(s) = s { self.field_name(name); self.print_string(s); writeln!(self.w, " (0x{:X})", value).unwrap(); } else { self.field_hex(name, value); } } fn field_string<T: fmt::UpperHex, E: fmt::Display>( &mut self, name: &str, value: T, s: Result<&[u8], E>, ) { let s = s.print_err(self); self.field_string_option(name, value, s); } fn field_inline_string(&mut self, name: &str, s: &[u8]) { self.field_name(name); self.print_string(s); writeln!(self.w).unwrap(); } fn field_enum<T: Eq + fmt::UpperHex>(&mut self, name: &str, value: T, flags: &[Flag<T>]) { for flag in flags { if value == flag.value { self.field_name(name); writeln!(self.w, "{} (0x{:X})", flag.name, value).unwrap(); return; } } self.field_hex(name, value); } fn field_enums<T: Eq + fmt::UpperHex>(&mut self, name: &str, value: T, enums: &[&[Flag<T>]]) { for flags in enums { for flag in *flags { if value == flag.value { self.field_name(name); writeln!(self.w, "{} (0x{:X})", flag.name, value).unwrap(); return; } } } self.field_hex(name, value); } fn flags<T: Into<u64>, U: Copy + Into<u64>>(&mut self, value: T, mask: U, flags: &[Flag<U>]) { let value = value.into(); let mask = mask.into(); self.indent(|p| { if mask != 0 { for flag in flags { if value & mask == flag.value.into() { p.print_indent(); writeln!(p.w, "{} (0x{:X})", flag.name, flag.value.into()).unwrap(); return; } } p.print_indent(); writeln!(p.w, "<unknown> (0x{:X})", value & mask).unwrap(); } else { for flag in flags { if value & flag.value.into() == flag.value.into() { p.print_indent(); writeln!(p.w, "{} (0x{:X})", flag.name, flag.value.into()).unwrap(); } } } }); } } struct Flag<T> { value: T, name: &'static str, } macro_rules! flags { ($($name:ident),+ $(,)?) 
=> ( [ $(Flag { value: $name, name: stringify!($name), }),+ ] ) } fn print_object(p: &mut Printer<'_>, data: &[u8]) { let kind = match object::FileKind::parse(data) { Ok(file) => file, Err(err) => { println!("Failed to parse file: {}", err); return; } }; match kind { object::FileKind::Archive => print_archive(p, data), object::FileKind::Coff => pe::print_coff(p, data), object::FileKind::DyldCache => macho::print_dyld_cache(p, data), object::FileKind::Elf32 => elf::print_elf32(p, data), object::FileKind::Elf64 => elf::print_elf64(p, data), object::FileKind::MachO32 => macho::print_macho32(p, data, 0), object::FileKind::MachO64 => macho::print_macho64(p, data, 0), object::FileKind::MachOFat32 => macho::print_macho_fat32(p, data), object::FileKind::MachOFat64 => macho::print_macho_fat64(p, data), object::FileKind::Pe32 => pe::print_pe32(p, data), object::FileKind::Pe64 => pe::print_pe64(p, data), _ => {} } } fn print_object_at(p: &mut Printer<'_>, data: &[u8], offset: u64) { let kind = match object::FileKind::parse_at(data, offset) { Ok(file) => file, Err(err) => { println!("Failed to parse file: {}", err); return; } }; match kind { object::FileKind::MachO32 => macho::print_macho32(p, data, offset), object::FileKind::MachO64 => macho::print_macho64(p, data, offset), _ => {} } } fn print_archive(p: &mut Printer<'_>, data: &[u8]) { if let Some(archive) = ArchiveFile::parse(data).print_err(p) { p.field("Format", format!("Archive ({:?})", archive.kind())); for member in archive.members() { if let Some(member) = member.print_err(p) { p.blank(); p.field("Member", String::from_utf8_lossy(member.name())); if let Some(data) = member.data(data).print_err(p) { print_object(p, data); } } } } } trait PrintErr<T> { fn print_err(self, p: &mut Printer<'_>) -> Option<T>; } impl<T, E: fmt::Display> PrintErr<T> for Result<T, E> { fn print_err(self, p: &mut Printer<'_>) -> Option<T> { match self { Ok(val) => Some(val), Err(err) => { writeln!(p.e, "Error: {}", err).unwrap(); None } } } } mod elf; mod macho; mod pe;
use std::io::Write; use std::{fmt, str}; use object::read::archive::ArchiveFile; use object::read::macho::{FatArch, FatHeader}; use object::Endianness; pub fn print(w: &'_ mut dyn Write, e: &'_ mut dyn Write, file: &[u8]) { let mut printer = Printer::new(w, e); print_object(&mut printer, &*file); } struct Printer<'a> { w: &'a mut dyn Write, e: &'a mut dyn Write, indent: usize, } impl<'a> Printer<'a> { fn new(w: &'a mut dyn Write, e: &'a mut dyn Write) -> Self { Self { w, e, indent: 0 } } fn w(&mut self) -> &mut dyn Write { self.w } fn blank(&mut self) { writeln!(self.w).unwrap(); } fn print_indent(&mut self) { if self.indent != 0 { write!(self.w, "{:-1$}", " ", self.indent * 4).unwrap(); } } fn print_string(&mut self, s: &[u8]) { if let Ok(s) = str::from_utf8(s) { write!(self.w, "\"{}\"", s).unwrap(); } else { write!(self.w, "{:X?}", s).unwrap(); } } fn indent<F: FnOnce(&mut Self)>(&mut self, f: F) { self.indent += 1; f(self); self.indent -= 1; } fn group<F: FnOnce(&mut Self)>(&mut self, name: &str, f: F) { self.print_indent(); writeln!(self.w, "{} {{", name).unwrap(); self.indent(f); self.print_indent(); writeln!(self.w, "}}").unwrap(); }
fn field<T: fmt::Display>(&mut self, name: &str, value: T) { self.field_name(name); writeln!(self.w, "{}", value).unwrap(); } fn field_hex<T: fmt::UpperHex>(&mut self, name: &str, value: T) { self.field_name(name); writeln!(self.w, "0x{:X}", value).unwrap(); } fn field_bytes(&mut self, name: &str, value: &[u8]) { self.field_name(name); writeln!(self.w, "{:X?}", value).unwrap(); } fn field_string_option<T: fmt::UpperHex>(&mut self, name: &str, value: T, s: Option<&[u8]>) { if let Some(s) = s { self.field_name(name); self.print_string(s); writeln!(self.w, " (0x{:X})", value).unwrap(); } else { self.field_hex(name, value); } } fn field_string<T: fmt::UpperHex, E: fmt::Display>( &mut self, name: &str, value: T, s: Result<&[u8], E>, ) { let s = s.print_err(self); self.field_string_option(name, value, s); } fn field_inline_string(&mut self, name: &str, s: &[u8]) { self.field_name(name); self.print_string(s); writeln!(self.w).unwrap(); } fn field_enum<T: Eq + fmt::UpperHex>(&mut self, name: &str, value: T, flags: &[Flag<T>]) { for flag in flags { if value == flag.value { self.field_name(name); writeln!(self.w, "{} (0x{:X})", flag.name, value).unwrap(); return; } } self.field_hex(name, value); } fn field_enums<T: Eq + fmt::UpperHex>(&mut self, name: &str, value: T, enums: &[&[Flag<T>]]) { for flags in enums { for flag in *flags { if value == flag.value { self.field_name(name); writeln!(self.w, "{} (0x{:X})", flag.name, value).unwrap(); return; } } } self.field_hex(name, value); } fn flags<T: Into<u64>, U: Copy + Into<u64>>(&mut self, value: T, mask: U, flags: &[Flag<U>]) { let value = value.into(); let mask = mask.into(); self.indent(|p| { if mask != 0 { for flag in flags { if value & mask == flag.value.into() { p.print_indent(); writeln!(p.w, "{} (0x{:X})", flag.name, flag.value.into()).unwrap(); return; } } p.print_indent(); writeln!(p.w, "<unknown> (0x{:X})", value & mask).unwrap(); } else { for flag in flags { if value & flag.value.into() == flag.value.into() { p.print_indent(); writeln!(p.w, "{} (0x{:X})", flag.name, flag.value.into()).unwrap(); } } } }); } } struct Flag<T> { value: T, name: &'static str, } macro_rules! flags { ($($name:ident),+ $(,)?) 
=> ( [ $(Flag { value: $name, name: stringify!($name), }),+ ] ) } fn print_object(p: &mut Printer<'_>, data: &[u8]) { let kind = match object::FileKind::parse(data) { Ok(file) => file, Err(err) => { println!("Failed to parse file: {}", err); return; } }; match kind { object::FileKind::Archive => print_archive(p, data), object::FileKind::Coff => pe::print_coff(p, data), object::FileKind::DyldCache => macho::print_dyld_cache(p, data), object::FileKind::Elf32 => elf::print_elf32(p, data), object::FileKind::Elf64 => elf::print_elf64(p, data), object::FileKind::MachO32 => macho::print_macho32(p, data, 0), object::FileKind::MachO64 => macho::print_macho64(p, data, 0), object::FileKind::MachOFat32 => macho::print_macho_fat32(p, data), object::FileKind::MachOFat64 => macho::print_macho_fat64(p, data), object::FileKind::Pe32 => pe::print_pe32(p, data), object::FileKind::Pe64 => pe::print_pe64(p, data), _ => {} } } fn print_object_at(p: &mut Printer<'_>, data: &[u8], offset: u64) { let kind = match object::FileKind::parse_at(data, offset) { Ok(file) => file, Err(err) => { println!("Failed to parse file: {}", err); return; } }; match kind { object::FileKind::MachO32 => macho::print_macho32(p, data, offset), object::FileKind::MachO64 => macho::print_macho64(p, data, offset), _ => {} } } fn print_archive(p: &mut Printer<'_>, data: &[u8]) { if let Some(archive) = ArchiveFile::parse(data).print_err(p) { p.field("Format", format!("Archive ({:?})", archive.kind())); for member in archive.members() { if let Some(member) = member.print_err(p) { p.blank(); p.field("Member", String::from_utf8_lossy(member.name())); if let Some(data) = member.data(data).print_err(p) { print_object(p, data); } } } } } trait PrintErr<T> { fn print_err(self, p: &mut Printer<'_>) -> Option<T>; } impl<T, E: fmt::Display> PrintErr<T> for Result<T, E> { fn print_err(self, p: &mut Printer<'_>) -> Option<T> { match self { Ok(val) => Some(val), Err(err) => { writeln!(p.e, "Error: {}", err).unwrap(); None } } } } mod elf; mod macho; mod pe;
fn field_name(&mut self, name: &str) { self.print_indent(); if !name.is_empty() { write!(self.w, "{}: ", name).unwrap(); } }
function_block-full_function
[ { "content": "fn dump_parsed_object<W: Write, E: Write>(w: &mut W, e: &mut E, file: &object::File) -> Result<()> {\n\n writeln!(\n\n w,\n\n \"Format: {:?} {:?}-endian {}-bit\",\n\n file.format(),\n\n file.endianness(),\n\n if file.is_64() { \"64\" } else { \"32\" }\n\n )?;\n\n writeln!(w, \"Kind: {:?}\", file.kind())?;\n\n writeln!(w, \"Architecture: {:?}\", file.architecture())?;\n\n writeln!(w, \"Flags: {:x?}\", file.flags())?;\n\n writeln!(\n\n w,\n\n \"Relative Address Base: {:x?}\",\n\n file.relative_address_base()\n\n )?;\n\n writeln!(w, \"Entry Address: {:x?}\", file.entry())?;\n\n\n\n match file.mach_uuid() {\n\n Ok(Some(uuid)) => writeln!(w, \"Mach UUID: {:x?}\", uuid)?,\n", "file_path": "crates/examples/src/objdump.rs", "rank": 1, "score": 398684.58445641876 }, { "content": "fn dump_object<W: Write, E: Write>(w: &mut W, e: &mut E, data: &[u8]) -> Result<()> {\n\n match object::File::parse(data) {\n\n Ok(file) => {\n\n dump_parsed_object(w, e, &file)?;\n\n }\n\n Err(err) => {\n\n writeln!(e, \"Failed to parse file: {}\", err)?;\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/examples/src/objdump.rs", "rank": 2, "score": 390225.5889113902 }, { "content": "pub fn print<W: Write, E: Write>(\n\n w: &mut W,\n\n e: &mut E,\n\n file: &[u8],\n\n member_names: Vec<String>,\n\n) -> Result<()> {\n\n let mut member_names: Vec<_> = member_names.into_iter().map(|name| (name, false)).collect();\n\n\n\n if let Ok(archive) = ArchiveFile::parse(&*file) {\n\n writeln!(w, \"Format: Archive (kind: {:?})\", archive.kind())?;\n\n for member in archive.members() {\n\n match member {\n\n Ok(member) => {\n\n if find_member(&mut member_names, member.name()) {\n\n writeln!(w)?;\n\n writeln!(w, \"{}:\", String::from_utf8_lossy(member.name()))?;\n\n if let Ok(data) = member.data(&*file) {\n\n dump_object(w, e, data)?;\n\n }\n\n }\n", "file_path": "crates/examples/src/objdump.rs", "rank": 3, "score": 334058.58511203434 }, { "content": "// Multi-key quicksort.\n\n//\n\n// Ordering is such that if a string is a suffix of at least one other string,\n\n// then it is placed immediately after one of those strings. 
That is:\n\n// - comparison starts at the end of the string\n\n// - shorter strings come later\n\n//\n\n// Based on the implementation in LLVM.\n\nfn sort(mut ids: &mut [usize], mut pos: usize, strings: &IndexSet<&[u8]>) {\n\n loop {\n\n if ids.len() <= 1 {\n\n return;\n\n }\n\n\n\n let pivot = byte(ids[0], pos, strings);\n\n let mut lower = 0;\n\n let mut upper = ids.len();\n\n let mut i = 1;\n\n while i < upper {\n\n let b = byte(ids[i], pos, strings);\n\n if b > pivot {\n\n ids.swap(lower, i);\n\n lower += 1;\n\n i += 1;\n\n } else if b < pivot {\n\n upper -= 1;\n\n ids.swap(upper, i);\n\n } else {\n", "file_path": "src/write/string.rs", "rank": 4, "score": 275500.36293229123 }, { "content": "/// Calculate the SysV hash for a symbol name.\n\n///\n\n/// Used for `SHT_HASH`.\n\npub fn hash(name: &[u8]) -> u32 {\n\n let mut hash = 0u32;\n\n for byte in name {\n\n hash = hash.wrapping_mul(16).wrapping_add(u32::from(*byte));\n\n hash ^= (hash >> 24) & 0xf0;\n\n }\n\n hash & 0xfff_ffff\n\n}\n\n\n\n/// Header of `SHT_GNU_HASH` section.\n\n#[derive(Debug, Clone, Copy)]\n\n#[repr(C)]\n\npub struct GnuHashHeader<E: Endian> {\n\n /// The number of hash buckets.\n\n pub bucket_count: U32<E>,\n\n /// The symbol table index of the first symbol in the hash.\n\n pub symbol_base: U32<E>,\n\n /// The number of words in the bloom filter.\n\n ///\n\n /// Must be a non-zero power of 2.\n", "file_path": "src/elf.rs", "rank": 5, "score": 269099.16254672105 }, { "content": "#[inline]\n\npub fn from_bytes_mut<T: Pod>(data: &mut [u8]) -> Result<(&mut T, &mut [u8])> {\n\n let size = mem::size_of::<T>();\n\n if size > data.len() {\n\n return Err(());\n\n }\n\n let (data, tail) = data.split_at_mut(size);\n\n let ptr = data.as_mut_ptr();\n\n if (ptr as usize) % mem::align_of::<T>() != 0 {\n\n return Err(());\n\n }\n\n // Safety:\n\n // The alignment and size are checked by this function.\n\n // The Pod trait ensures the type is valid to cast from bytes.\n\n let val = unsafe { &mut *ptr.cast() };\n\n Ok((val, tail))\n\n}\n\n\n\n/// Cast a byte slice to a slice of a `Pod` type.\n\n///\n\n/// Returns the type slice and the tail of the byte slice.\n", "file_path": "src/pod.rs", "rank": 6, "score": 265547.96726003004 }, { "content": "/// Calculate the GNU hash for a symbol name.\n\n///\n\n/// Used for `SHT_GNU_HASH`.\n\npub fn gnu_hash(name: &[u8]) -> u32 {\n\n let mut hash = 5381u32;\n\n for byte in name {\n\n hash = hash.wrapping_mul(33).wrapping_add(u32::from(*byte));\n\n }\n\n hash\n\n}\n\n\n\n// Motorola 68k specific definitions.\n\n\n\n// m68k values for `Rel*::r_type`.\n\n\n\n/// No reloc\n\npub const R_68K_NONE: u32 = 0;\n\n/// Direct 32 bit\n\npub const R_68K_32: u32 = 1;\n\n/// Direct 16 bit\n\npub const R_68K_16: u32 = 2;\n\n/// Direct 8 bit\n\npub const R_68K_8: u32 = 3;\n", "file_path": "src/elf.rs", "rank": 7, "score": 264330.14775179466 }, { "content": "fn testfile<F>(path: &str, data: &[u8], ext: &str, f: F) -> bool\n\nwhere\n\n F: FnOnce(&mut dyn Write, &mut dyn Write, &[u8]),\n\n{\n\n if glob::glob(&format!(\"crates/examples/{}.{}*\", path, ext))\n\n .unwrap()\n\n .find_map(Result::ok)\n\n .is_none()\n\n {\n\n return false;\n\n }\n\n\n\n // TODO: print diffs for mismatches\n\n let mut fail = false;\n\n let mut out = Vec::new();\n\n let mut err = Vec::new();\n\n f(&mut out, &mut err, data);\n\n\n\n // Check exact match of output.\n\n let out_path = &format!(\"crates/examples/{}.{}\", path, ext);\n", "file_path": "crates/examples/tests/testfiles.rs", "rank": 8, "score": 262951.5459115139 }, { "content": 
"#[inline]\n\npub fn bytes_of_mut<T: Pod>(val: &mut T) -> &mut [u8] {\n\n let size = mem::size_of::<T>();\n\n // Safety:\n\n // Any alignment is allowed.\n\n // The size is determined in this function.\n\n // The Pod trait ensures the type is valid to cast to bytes.\n\n unsafe { slice::from_raw_parts_mut(slice::from_mut(val).as_mut_ptr().cast(), size) }\n\n}\n\n\n\n/// Cast a slice of a `Pod` type to a byte slice.\n", "file_path": "src/pod.rs", "rank": 11, "score": 255103.8613870678 }, { "content": "#[inline]\n\npub fn bytes_of_slice_mut<T: Pod>(val: &mut [T]) -> &mut [u8] {\n\n let size = val.len().wrapping_mul(mem::size_of::<T>());\n\n // Safety:\n\n // Any alignment is allowed.\n\n // The size is determined in this function.\n\n // The Pod trait ensures the type is valid to cast to bytes.\n\n unsafe { slice::from_raw_parts_mut(val.as_mut_ptr().cast(), size) }\n\n}\n\n\n\nmacro_rules! unsafe_impl_pod {\n\n ($($struct_name:ident),+ $(,)?) => {\n\n $(\n\n unsafe impl Pod for $struct_name { }\n\n )+\n\n }\n\n}\n\n\n\nunsafe_impl_pod!(u8, u16, u32, u64);\n\n\n\n#[cfg(test)]\n", "file_path": "src/pod.rs", "rank": 12, "score": 251646.38411224203 }, { "content": "fn print_file(p: &mut Printer<'_>, header: &ImageFileHeader) {\n\n p.group(\"ImageFileHeader\", |p| {\n\n p.field_enum(\"Machine\", header.machine.get(LE), FLAGS_IMAGE_FILE_MACHINE);\n\n p.field(\"NumberOfSections\", header.number_of_sections.get(LE));\n\n p.field(\"TimeDateStamp\", header.time_date_stamp.get(LE));\n\n p.field_hex(\n\n \"PointerToSymbolTable\",\n\n header.pointer_to_symbol_table.get(LE),\n\n );\n\n p.field(\"NumberOfSymbols\", header.number_of_symbols.get(LE));\n\n p.field_hex(\n\n \"SizeOfOptionalHeader\",\n\n header.size_of_optional_header.get(LE),\n\n );\n\n p.field_hex(\"Characteristics\", header.characteristics.get(LE));\n\n p.flags(header.characteristics.get(LE), 0, FLAGS_IMAGE_FILE);\n\n });\n\n}\n\n\n", "file_path": "crates/examples/src/readobj/pe.rs", "rank": 13, "score": 245526.9507212098 }, { "content": "fn print_elf<Elf: FileHeader<Endian = Endianness>>(p: &mut Printer<'_>, elf: &Elf, data: &[u8]) {\n\n if let Some(endian) = elf.endian().print_err(p) {\n\n print_file_header(p, endian, elf);\n\n if let Some(segments) = elf.program_headers(endian, data).print_err(p) {\n\n print_program_headers(p, endian, data, elf, segments);\n\n }\n\n if let Some(sections) = elf.sections(endian, data).print_err(p) {\n\n print_section_headers(p, endian, data, elf, &sections);\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 14, "score": 244013.42704011634 }, { "content": "fn find_member(member_names: &mut [(String, bool)], name: &[u8]) -> bool {\n\n if member_names.is_empty() {\n\n return true;\n\n }\n\n match member_names.iter().position(|x| x.0.as_bytes() == name) {\n\n Some(i) => {\n\n member_names[i].1 = true;\n\n true\n\n }\n\n None => false,\n\n }\n\n}\n\n\n", "file_path": "crates/examples/src/objdump.rs", "rank": 16, "score": 237875.83884146146 }, { "content": "fn print_pe<Pe: ImageNtHeaders>(p: &mut Printer<'_>, data: &[u8]) {\n\n if let Some(dos_header) = ImageDosHeader::parse(data).print_err(p) {\n\n p.group(\"ImageDosHeader\", |p| {\n\n p.field_hex(\"Magic\", dos_header.e_magic.get(LE));\n\n p.field_hex(\"CountBytesLastPage\", dos_header.e_cblp.get(LE));\n\n p.field_hex(\"CountPages\", dos_header.e_cp.get(LE));\n\n p.field_hex(\"CountRelocations\", dos_header.e_crlc.get(LE));\n\n p.field_hex(\"CountHeaderParagraphs\", dos_header.e_cparhdr.get(LE));\n\n 
p.field_hex(\"MinAllocParagraphs\", dos_header.e_minalloc.get(LE));\n\n p.field_hex(\"MaxAllocParagraphs\", dos_header.e_maxalloc.get(LE));\n\n p.field_hex(\"StackSegment\", dos_header.e_ss.get(LE));\n\n p.field_hex(\"StackPointer\", dos_header.e_sp.get(LE));\n\n p.field_hex(\"Checksum\", dos_header.e_csum.get(LE));\n\n p.field_hex(\"InstructionPointer\", dos_header.e_ip.get(LE));\n\n p.field_hex(\"CodeSegment\", dos_header.e_cs.get(LE));\n\n p.field_hex(\"AddressOfRelocations\", dos_header.e_lfarlc.get(LE));\n\n p.field_hex(\"OverlayNumber\", dos_header.e_ovno.get(LE));\n\n p.field_hex(\"OemId\", dos_header.e_oemid.get(LE));\n\n p.field_hex(\"OemInfo\", dos_header.e_oeminfo.get(LE));\n\n p.field_hex(\"AddressOfNewHeader\", dos_header.e_lfanew.get(LE));\n", "file_path": "crates/examples/src/readobj/pe.rs", "rank": 17, "score": 235888.34384913818 }, { "content": "fn byte(id: usize, pos: usize, strings: &IndexSet<&[u8]>) -> u8 {\n\n let string = strings.get_index(id).unwrap();\n\n let len = string.len();\n\n if len >= pos {\n\n string[len - pos]\n\n } else {\n\n // We know the strings don't contain null bytes.\n\n 0\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn string_table() {\n\n let mut table = StringTable::default();\n\n let id0 = table.add(b\"\");\n\n let id1 = table.add(b\"foo\");\n", "file_path": "src/write/string.rs", "rank": 18, "score": 220765.7692455441 }, { "content": "#[inline]\n\npub fn slice_from_bytes<T: Pod>(data: &[u8], count: usize) -> Result<(&[T], &[u8])> {\n\n let size = count.checked_mul(mem::size_of::<T>()).ok_or(())?;\n\n let tail = data.get(size..).ok_or(())?;\n\n let ptr = data.as_ptr();\n\n if (ptr as usize) % mem::align_of::<T>() != 0 {\n\n return Err(());\n\n }\n\n // Safety:\n\n // The alignment and size are checked by this function.\n\n // The Pod trait ensures the type is valid to cast from bytes.\n\n let slice = unsafe { slice::from_raw_parts(ptr.cast(), count) };\n\n Ok((slice, tail))\n\n}\n\n\n\n/// Cast a mutable byte slice to a slice of a `Pod` type.\n\n///\n\n/// Returns the type slice and the tail of the byte slice.\n", "file_path": "src/pod.rs", "rank": 19, "score": 217793.49810803216 }, { "content": "fn print_file_header<Elf: FileHeader>(p: &mut Printer<'_>, endian: Elf::Endian, elf: &Elf) {\n\n p.group(\"FileHeader\", |p| {\n\n p.group(\"Ident\", |p| print_ident(p, elf.e_ident()));\n\n p.field_enum(\"Type\", elf.e_type(endian), &FLAGS_ET);\n\n p.field_enum(\"Machine\", elf.e_machine(endian), &FLAGS_EM);\n\n let version = elf.e_version(endian);\n\n if version < 256 {\n\n p.field_enum(\"Version\", version as u8, &FLAGS_EV);\n\n } else {\n\n p.field_hex(\"Version\", version);\n\n }\n\n p.field_enum(\"Type\", elf.e_type(endian), &FLAGS_ET);\n\n p.field_hex(\"Entry\", elf.e_entry(endian).into());\n\n p.field_hex(\"ProgramHeaderOffset\", elf.e_phoff(endian).into());\n\n p.field_hex(\"SectionHeaderOffset\", elf.e_shoff(endian).into());\n\n let flags = elf.e_flags(endian);\n\n p.field_hex(\"Flags\", flags);\n\n match elf.e_machine(endian) {\n\n EM_SPARC => p.flags(flags, 0, &FLAGS_EF_SPARC),\n\n EM_SPARCV9 => p.flags(flags, 0, &FLAGS_EF_SPARCV9),\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 20, "score": 210746.39887378365 }, { "content": "fn print_ident(p: &mut Printer<'_>, ident: &Ident) {\n\n p.field(\"Magic\", format!(\"{:X?}\", ident.magic));\n\n p.field_enum(\"Class\", ident.class, &FLAGS_EI_CLASS);\n\n p.field_enum(\"Data\", ident.data, &FLAGS_EI_DATA);\n\n p.field_enum(\"Version\", 
ident.version, &FLAGS_EV);\n\n p.field_enum(\"OsAbi\", ident.os_abi, &FLAGS_EI_OSABI);\n\n p.field_hex(\"AbiVersion\", ident.abi_version);\n\n p.field(\"Unused\", format!(\"{:X?}\", ident.padding));\n\n}\n\n\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 21, "score": 200940.07519823324 }, { "content": "struct MachO64<E> {\n\n endian: E,\n\n}\n\n\n\nimpl<E: Endian> MachO for MachO64<E> {\n\n fn mach_header_size(&self) -> usize {\n\n mem::size_of::<macho::MachHeader64<E>>()\n\n }\n\n\n\n fn segment_command_size(&self) -> usize {\n\n mem::size_of::<macho::SegmentCommand64<E>>()\n\n }\n\n\n\n fn section_header_size(&self) -> usize {\n\n mem::size_of::<macho::Section64<E>>()\n\n }\n\n\n\n fn nlist_size(&self) -> usize {\n\n mem::size_of::<macho::Nlist64<E>>()\n\n }\n", "file_path": "src/write/macho.rs", "rank": 22, "score": 195975.23401006678 }, { "content": "struct MachO32<E> {\n\n endian: E,\n\n}\n\n\n\nimpl<E: Endian> MachO for MachO32<E> {\n\n fn mach_header_size(&self) -> usize {\n\n mem::size_of::<macho::MachHeader32<E>>()\n\n }\n\n\n\n fn segment_command_size(&self) -> usize {\n\n mem::size_of::<macho::SegmentCommand32<E>>()\n\n }\n\n\n\n fn section_header_size(&self) -> usize {\n\n mem::size_of::<macho::Section32<E>>()\n\n }\n\n\n\n fn nlist_size(&self) -> usize {\n\n mem::size_of::<macho::Nlist32<E>>()\n\n }\n", "file_path": "src/write/macho.rs", "rank": 23, "score": 195975.23401006678 }, { "content": "#[inline]\n\npub fn from_bytes<T: Pod>(data: &[u8]) -> Result<(&T, &[u8])> {\n\n let size = mem::size_of::<T>();\n\n let tail = data.get(size..).ok_or(())?;\n\n let ptr = data.as_ptr();\n\n if (ptr as usize) % mem::align_of::<T>() != 0 {\n\n return Err(());\n\n }\n\n // Safety:\n\n // The alignment and size are checked by this function.\n\n // The Pod trait ensures the type is valid to cast from bytes.\n\n let val = unsafe { &*ptr.cast() };\n\n Ok((val, tail))\n\n}\n\n\n\n/// Cast a mutable byte slice to a `Pod` type.\n\n///\n\n/// Returns the type and the tail of the slice.\n", "file_path": "src/pod.rs", "rank": 24, "score": 194127.36860674055 }, { "content": "fn print_optional(p: &mut Printer<'_>, header: &impl ImageOptionalHeader) {\n\n p.group(\"ImageOptionalHeader\", |p| {\n\n p.field_hex(\"Magic\", header.magic());\n\n p.field(\"MajorLinkerVersion\", header.major_linker_version());\n\n p.field(\"MinorLinkerVersion\", header.minor_linker_version());\n\n p.field_hex(\"SizeOfCode\", header.size_of_code());\n\n p.field_hex(\"SizeOfInitializedData\", header.size_of_initialized_data());\n\n p.field_hex(\n\n \"SizeOfUninitializedData\",\n\n header.size_of_uninitialized_data(),\n\n );\n\n p.field_hex(\"AddressOfEntryPoint\", header.address_of_entry_point());\n\n p.field_hex(\"BaseOfCode\", header.base_of_code());\n\n p.field_hex(\"ImageBase\", header.image_base());\n\n p.field_hex(\"SectionAlignment\", header.section_alignment());\n\n p.field(\n\n \"MajorOperatingSystemVersion\",\n\n header.major_operating_system_version(),\n\n );\n\n p.field(\n", "file_path": "crates/examples/src/readobj/pe.rs", "rank": 25, "score": 187955.2425391192 }, { "content": "fn print_cputype(p: &mut Printer<'_>, cputype: u32, cpusubtype: u32) {\n\n let proc = match cputype {\n\n CPU_TYPE_ANY => FLAGS_CPU_SUBTYPE_ANY,\n\n CPU_TYPE_VAX => FLAGS_CPU_SUBTYPE_VAX,\n\n CPU_TYPE_MC680X0 => FLAGS_CPU_SUBTYPE_MC680X0,\n\n CPU_TYPE_X86 => FLAGS_CPU_SUBTYPE_X86,\n\n CPU_TYPE_X86_64 => FLAGS_CPU_SUBTYPE_X86_64,\n\n CPU_TYPE_MIPS => FLAGS_CPU_SUBTYPE_MIPS,\n\n CPU_TYPE_MC98000 => FLAGS_CPU_SUBTYPE_MC98000,\n\n 
CPU_TYPE_HPPA => FLAGS_CPU_SUBTYPE_HPPA,\n\n CPU_TYPE_ARM => FLAGS_CPU_SUBTYPE_ARM,\n\n CPU_TYPE_ARM64 => FLAGS_CPU_SUBTYPE_ARM64,\n\n CPU_TYPE_ARM64_32 => FLAGS_CPU_SUBTYPE_ARM64_32,\n\n CPU_TYPE_MC88000 => FLAGS_CPU_SUBTYPE_MC88000,\n\n CPU_TYPE_SPARC => FLAGS_CPU_SUBTYPE_SPARC,\n\n CPU_TYPE_I860 => FLAGS_CPU_SUBTYPE_I860,\n\n CPU_TYPE_POWERPC | CPU_TYPE_POWERPC64 => FLAGS_CPU_SUBTYPE_POWERPC,\n\n _ => &[],\n\n };\n\n p.field_enum(\"CpuType\", cputype, FLAGS_CPU_TYPE);\n", "file_path": "crates/examples/src/readobj/macho.rs", "rank": 26, "score": 186429.2299398622 }, { "content": "fn parse_sysv_extended_name<'data>(digits: &[u8], names: &'data [u8]) -> Result<&'data [u8], ()> {\n\n let offset = parse_u64_digits(digits, 10).ok_or(())?;\n\n let offset = offset.try_into().map_err(|_| ())?;\n\n let name_data = names.get(offset..).ok_or(())?;\n\n let name = match memchr::memchr2(b'/', b'\\0', name_data) {\n\n Some(len) => &name_data[..len],\n\n None => name_data,\n\n };\n\n Ok(name)\n\n}\n\n\n", "file_path": "src/read/archive.rs", "rank": 27, "score": 186405.24571294826 }, { "content": "#[inline]\n\npub fn bytes_of<T: Pod>(val: &T) -> &[u8] {\n\n let size = mem::size_of::<T>();\n\n // Safety:\n\n // Any alignment is allowed.\n\n // The size is determined in this function.\n\n // The Pod trait ensures the type is valid to cast to bytes.\n\n unsafe { slice::from_raw_parts(slice::from_ref(val).as_ptr().cast(), size) }\n\n}\n\n\n\n/// Cast a `Pod` type to a mutable byte slice.\n", "file_path": "src/pod.rs", "rank": 28, "score": 185488.74966241745 }, { "content": "#[inline]\n\npub fn bytes_of_slice<T: Pod>(val: &[T]) -> &[u8] {\n\n let size = val.len().wrapping_mul(mem::size_of::<T>());\n\n // Safety:\n\n // Any alignment is allowed.\n\n // The size is determined in this function.\n\n // The Pod trait ensures the type is valid to cast to bytes.\n\n unsafe { slice::from_raw_parts(val.as_ptr().cast(), size) }\n\n}\n\n\n\n/// Cast a slice of a `Pod` type to a mutable byte slice.\n", "file_path": "src/pod.rs", "rank": 29, "score": 182275.69241439167 }, { "content": "// JamCRC\n\nfn checksum(data: &[u8]) -> u32 {\n\n let mut hasher = crc32fast::Hasher::new_with_initial(0xffff_ffff);\n\n hasher.update(data);\n\n !hasher.finalize()\n\n}\n", "file_path": "src/write/coff.rs", "rank": 30, "score": 180237.56767462185 }, { "content": "// Only for Debug impl of `Bytes`.\n\nfn debug_list_bytes(bytes: &[u8], fmt: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n let mut list = fmt.debug_list();\n\n list.entries(bytes.iter().take(8).copied().map(DebugByte));\n\n if bytes.len() > 8 {\n\n list.entry(&DebugLen(bytes.len()));\n\n }\n\n list.finish()\n\n}\n\n\n", "file_path": "src/read/util.rs", "rank": 31, "score": 179776.49809568157 }, { "content": "#[inline]\n\npub fn slice_from_bytes_mut<T: Pod>(\n\n data: &mut [u8],\n\n count: usize,\n\n) -> Result<(&mut [T], &mut [u8])> {\n\n let size = count.checked_mul(mem::size_of::<T>()).ok_or(())?;\n\n if size > data.len() {\n\n return Err(());\n\n }\n\n let (data, tail) = data.split_at_mut(size);\n\n let ptr = data.as_mut_ptr();\n\n if (ptr as usize) % mem::align_of::<T>() != 0 {\n\n return Err(());\n\n }\n\n // Safety:\n\n // The alignment and size are checked by this function.\n\n // The Pod trait ensures the type is valid to cast from bytes.\n\n let slice = unsafe { slice::from_raw_parts_mut(ptr.cast(), count) };\n\n Ok((slice, tail))\n\n}\n\n\n\n/// Cast a `Pod` type to a byte slice.\n", "file_path": "src/pod.rs", "rank": 32, "score": 177003.75362288835 }, { "content": "fn 
print_symbols(p: &mut Printer<'_>, sections: Option<&SectionTable>, symbols: &SymbolTable) {\n\n for (index, symbol) in symbols.iter() {\n\n p.group(\"ImageSymbol\", |p| {\n\n p.field(\"Index\", index);\n\n if let Some(name) = symbol.name(symbols.strings()).print_err(p) {\n\n p.field_inline_string(\"Name\", name);\n\n } else {\n\n p.field(\"Name\", format!(\"{:X?}\", symbol.name));\n\n }\n\n p.field_hex(\"Value\", symbol.value.get(LE));\n\n let section = symbol.section_number.get(LE);\n\n if section == 0 || section >= IMAGE_SYM_SECTION_MAX {\n\n p.field_enum(\"Section\", section, FLAGS_IMAGE_SYM);\n\n } else {\n\n let section_name = sections.and_then(|sections| {\n\n sections\n\n .section(section.into())\n\n .and_then(|section| section.name(symbols.strings()))\n\n .print_err(p)\n\n });\n", "file_path": "crates/examples/src/readobj/pe.rs", "rank": 33, "score": 175125.02965427545 }, { "content": "fn print_mach_header<Mach: MachHeader>(p: &mut Printer<'_>, endian: Mach::Endian, header: &Mach) {\n\n p.group(\"MachHeader\", |p| {\n\n p.field_hex(\"Magic\", header.magic().to_be());\n\n print_cputype(p, header.cputype(endian), header.cpusubtype(endian));\n\n p.field_enum(\"FileType\", header.filetype(endian), FLAGS_MH_FILETYPE);\n\n p.field(\"NumberOfCmds\", header.ncmds(endian));\n\n p.field_hex(\"SizeOfCmds\", header.sizeofcmds(endian));\n\n p.field_enum(\"Flags\", header.flags(endian), FLAGS_MH);\n\n });\n\n}\n\n\n", "file_path": "crates/examples/src/readobj/macho.rs", "rank": 34, "score": 164186.4243053087 }, { "content": "struct DebugByte(u8);\n\n\n\nimpl fmt::Debug for DebugByte {\n\n fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(fmt, \"0x{:02x}\", self.0)\n\n }\n\n}\n\n\n", "file_path": "src/read/util.rs", "rank": 36, "score": 150564.34601490063 }, { "content": "fn find_subslice(haystack: &[u8], needle: &[u8]) -> bool {\n\n haystack\n\n .windows(needle.len())\n\n .any(|window| window == needle)\n\n}\n", "file_path": "crates/examples/tests/testfiles.rs", "rank": 37, "score": 143203.66192565026 }, { "content": "/// An object file.\n\npub trait Object<'data: 'file, 'file>: read::private::Sealed {\n\n /// A segment in the object file.\n\n type Segment: ObjectSegment<'data>;\n\n\n\n /// An iterator over the segments in the object file.\n\n type SegmentIterator: Iterator<Item = Self::Segment>;\n\n\n\n /// A section in the object file.\n\n type Section: ObjectSection<'data>;\n\n\n\n /// An iterator over the sections in the object file.\n\n type SectionIterator: Iterator<Item = Self::Section>;\n\n\n\n /// A COMDAT section group in the object file.\n\n type Comdat: ObjectComdat<'data>;\n\n\n\n /// An iterator over the COMDAT section groups in the object file.\n\n type ComdatIterator: Iterator<Item = Self::Comdat>;\n\n\n\n /// A symbol in the object file.\n", "file_path": "src/read/traits.rs", "rank": 38, "score": 139568.16542258556 }, { "content": "#[allow(missing_docs)]\n\npub trait Dyn: Debug + Pod {\n\n type Word: Into<u64>;\n\n type Endian: endian::Endian;\n\n\n\n fn d_tag(&self, endian: Self::Endian) -> Self::Word;\n\n fn d_val(&self, endian: Self::Endian) -> Self::Word;\n\n\n\n /// Try to convert the tag to a `u32`.\n\n fn tag32(&self, endian: Self::Endian) -> Option<u32> {\n\n self.d_tag(endian).into().try_into().ok()\n\n }\n\n\n\n /// Try to convert the value to a `u32`.\n\n fn val32(&self, endian: Self::Endian) -> Option<u32> {\n\n self.d_val(endian).into().try_into().ok()\n\n }\n\n\n\n /// Return true if the value is an offset in the dynamic string table.\n\n fn 
is_string(&self, endian: Self::Endian) -> bool {\n\n if let Some(tag) = self.tag32(endian) {\n", "file_path": "src/read/elf/dynamic.rs", "rank": 39, "score": 136848.22417634295 }, { "content": "/// Find the optional header and read the `optional_header.magic`.\n\n///\n\n/// It can be useful to know this magic value before trying to\n\n/// fully parse the NT headers.\n\npub fn optional_header_magic<'data, R: ReadRef<'data>>(data: R) -> Result<u16> {\n\n let dos_header = pe::ImageDosHeader::parse(data)?;\n\n // NT headers are at an offset specified in the DOS header.\n\n let offset = dos_header.nt_headers_offset().into();\n\n // It doesn't matter which NT header type is used for the purpose\n\n // of reading the optional header magic.\n\n let nt_headers = data\n\n .read_at::<pe::ImageNtHeaders32>(offset)\n\n .read_error(\"Invalid NT headers offset, size, or alignment\")?;\n\n if nt_headers.signature() != pe::IMAGE_NT_SIGNATURE {\n\n return Err(Error(\"Invalid PE magic\"));\n\n }\n\n Ok(nt_headers.optional_header().magic())\n\n}\n\n\n\n/// A trait for generic access to `ImageNtHeaders32` and `ImageNtHeaders64`.\n", "file_path": "src/read/pe/file.rs", "rank": 40, "score": 136663.41185695204 }, { "content": "#[test]\n\nfn parse_self() {\n\n let exe = env::current_exe().unwrap();\n\n let data = fs::read(exe).unwrap();\n\n let object = File::parse(&*data).unwrap();\n\n assert!(object.entry() != 0);\n\n assert!(object.sections().count() != 0);\n\n}\n\n\n", "file_path": "tests/parse_self.rs", "rank": 41, "score": 134719.4327631728 }, { "content": "#[cfg(feature = \"std\")]\n\n#[test]\n\nfn parse_self_cache() {\n\n use object::read::{ReadCache, ReadRef};\n\n let exe = env::current_exe().unwrap();\n\n let file = fs::File::open(exe).unwrap();\n\n let cache = ReadCache::new(file);\n\n let data = cache.range(0, cache.len().unwrap());\n\n let object = File::parse(data).unwrap();\n\n assert!(object.entry() != 0);\n\n assert!(object.sections().count() != 0);\n\n}\n", "file_path": "tests/parse_self.rs", "rank": 42, "score": 132347.8254631897 }, { "content": "struct Nlist {\n\n n_strx: u32,\n\n n_type: u8,\n\n n_sect: u8,\n\n n_desc: u16,\n\n n_value: u64,\n\n}\n\n\n", "file_path": "src/write/macho.rs", "rank": 43, "score": 128465.92092833004 }, { "content": "fn parse_ordinal(digits: &[u8]) -> Option<u32> {\n\n if digits.is_empty() {\n\n return None;\n\n }\n\n let mut result: u32 = 0;\n\n for &c in digits {\n\n let x = (c as char).to_digit(10)?;\n\n result = result.checked_mul(10)?.checked_add(x)?;\n\n }\n\n Some(result)\n\n}\n", "file_path": "src/read/pe/export.rs", "rank": 44, "score": 126611.41534659904 }, { "content": "struct MachHeader {\n\n cputype: u32,\n\n cpusubtype: u32,\n\n filetype: u32,\n\n ncmds: u32,\n\n sizeofcmds: u32,\n\n flags: u32,\n\n}\n\n\n", "file_path": "src/write/macho.rs", "rank": 45, "score": 125656.876684435 }, { "content": "#[derive(Default, Clone, Copy)]\n\nstruct SectionOffsets {\n\n offset: usize,\n\n str_id: Option<StringId>,\n\n reloc_offset: usize,\n\n selection: u8,\n\n associative_section: u16,\n\n}\n\n\n", "file_path": "src/write/coff.rs", "rank": 46, "score": 125656.876684435 }, { "content": "struct SegmentCommand {\n\n cmdsize: u32,\n\n segname: [u8; 16],\n\n vmaddr: u64,\n\n vmsize: u64,\n\n fileoff: u64,\n\n filesize: u64,\n\n maxprot: u32,\n\n initprot: u32,\n\n nsects: u32,\n\n flags: u32,\n\n}\n\n\n\npub struct SectionHeader {\n\n sectname: [u8; 16],\n\n segname: [u8; 16],\n\n addr: u64,\n\n size: u64,\n\n offset: u32,\n\n align: u32,\n\n reloff: u32,\n\n nreloc: 
u32,\n\n flags: u32,\n\n}\n\n\n", "file_path": "src/write/macho.rs", "rank": 47, "score": 125656.876684435 }, { "content": "#[derive(Default, Clone, Copy)]\n\nstruct SectionOffsets {\n\n index: usize,\n\n offset: usize,\n\n address: u64,\n\n reloc_offset: usize,\n\n}\n\n\n", "file_path": "src/write/macho.rs", "rank": 48, "score": 125656.876684435 }, { "content": "#[derive(Default, Clone, Copy)]\n\nstruct SymbolOffsets {\n\n index: usize,\n\n str_id: Option<StringId>,\n\n aux_count: u8,\n\n}\n\n\n\nimpl<'a> Object<'a> {\n\n pub(crate) fn coff_section_info(\n\n &self,\n\n section: StandardSection,\n\n ) -> (&'static [u8], &'static [u8], SectionKind) {\n\n match section {\n\n StandardSection::Text => (&[], &b\".text\"[..], SectionKind::Text),\n\n StandardSection::Data => (&[], &b\".data\"[..], SectionKind::Data),\n\n StandardSection::ReadOnlyData\n\n | StandardSection::ReadOnlyDataWithRel\n\n | StandardSection::ReadOnlyString => (&[], &b\".rdata\"[..], SectionKind::ReadOnlyData),\n\n StandardSection::UninitializedData => {\n\n (&[], &b\".bss\"[..], SectionKind::UninitializedData)\n\n }\n", "file_path": "src/write/coff.rs", "rank": 49, "score": 125656.876684435 }, { "content": "#[derive(Default, Clone, Copy)]\n\nstruct SymbolOffsets {\n\n emit: bool,\n\n index: usize,\n\n str_id: Option<StringId>,\n\n}\n\n\n\nimpl<'a> Object<'a> {\n\n pub(crate) fn macho_set_subsections_via_symbols(&mut self) {\n\n let flags = match self.flags {\n\n FileFlags::MachO { flags } => flags,\n\n _ => 0,\n\n };\n\n self.flags = FileFlags::MachO {\n\n flags: flags | macho::MH_SUBSECTIONS_VIA_SYMBOLS,\n\n };\n\n }\n\n\n\n pub(crate) fn macho_segment_name(&self, segment: StandardSegment) -> &'static [u8] {\n\n match segment {\n\n StandardSegment::Text => &b\"__TEXT\"[..],\n", "file_path": "src/write/macho.rs", "rank": 50, "score": 125656.876684435 }, { "content": "#[derive(Clone, Copy)]\n\nstruct SectionOffsets {\n\n index: SectionIndex,\n\n offset: usize,\n\n str_id: StringId,\n\n reloc_offset: usize,\n\n reloc_str_id: Option<StringId>,\n\n}\n\n\n", "file_path": "src/write/elf/object.rs", "rank": 51, "score": 123007.99934467851 }, { "content": "#[derive(Clone, Copy)]\n\nstruct ComdatOffsets {\n\n offset: usize,\n\n str_id: StringId,\n\n}\n\n\n", "file_path": "src/write/elf/object.rs", "rank": 52, "score": 123007.99934467851 }, { "content": "#[derive(Default, Clone, Copy)]\n\nstruct SymbolOffsets {\n\n index: SymbolIndex,\n\n str_id: Option<StringId>,\n\n}\n\n\n\nimpl<'a> Object<'a> {\n\n pub(crate) fn elf_section_info(\n\n &self,\n\n section: StandardSection,\n\n ) -> (&'static [u8], &'static [u8], SectionKind) {\n\n match section {\n\n StandardSection::Text => (&[], &b\".text\"[..], SectionKind::Text),\n\n StandardSection::Data => (&[], &b\".data\"[..], SectionKind::Data),\n\n StandardSection::ReadOnlyData | StandardSection::ReadOnlyString => {\n\n (&[], &b\".rodata\"[..], SectionKind::ReadOnlyData)\n\n }\n\n StandardSection::ReadOnlyDataWithRel => (&[], b\".data.rel.ro\", SectionKind::Data),\n\n StandardSection::UninitializedData => {\n\n (&[], &b\".bss\"[..], SectionKind::UninitializedData)\n\n }\n", "file_path": "src/write/elf/object.rs", "rank": 53, "score": 123007.99934467851 }, { "content": "struct DebugLen(usize);\n\n\n\nimpl fmt::Debug for DebugLen {\n\n fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(fmt, \"...; {}\", self.0)\n\n }\n\n}\n\n\n\n/// A newtype for byte strings.\n\n///\n\n/// For byte slices that are strings of an unknown encoding.\n\n///\n\n/// Provides a `Debug` 
implementation that interprets the bytes as UTF-8.\n\n#[derive(Default, Clone, Copy, PartialEq, Eq)]\n\npub(crate) struct ByteString<'data>(pub &'data [u8]);\n\n\n\nimpl<'data> fmt::Debug for ByteString<'data> {\n\n fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(fmt, \"\\\"{}\\\"\", String::from_utf8_lossy(self.0))\n\n }\n", "file_path": "src/read/util.rs", "rank": 54, "score": 120979.4556807818 }, { "content": "#[allow(missing_docs)]\n\npub trait FileHeader: Debug + Pod {\n\n // Ideally this would be a `u64: From<Word>`, but can't express that.\n\n type Word: Into<u64>;\n\n type Sword: Into<i64>;\n\n type Endian: endian::Endian;\n\n type ProgramHeader: ProgramHeader<Elf = Self, Endian = Self::Endian, Word = Self::Word>;\n\n type SectionHeader: SectionHeader<Elf = Self, Endian = Self::Endian, Word = Self::Word>;\n\n type CompressionHeader: CompressionHeader<Endian = Self::Endian, Word = Self::Word>;\n\n type NoteHeader: NoteHeader<Endian = Self::Endian>;\n\n type Dyn: Dyn<Endian = Self::Endian, Word = Self::Word>;\n\n type Sym: Sym<Endian = Self::Endian, Word = Self::Word>;\n\n type Rel: Rel<Endian = Self::Endian, Word = Self::Word>;\n\n type Rela: Rela<Endian = Self::Endian, Word = Self::Word> + From<Self::Rel>;\n\n\n\n /// Return true if this type is a 64-bit header.\n\n ///\n\n /// This is a property of the type, not a value in the header data.\n\n fn is_type_64(&self) -> bool;\n\n\n\n fn e_ident(&self) -> &elf::Ident;\n", "file_path": "src/read/elf/file.rs", "rank": 55, "score": 120417.95447304158 }, { "content": "#[allow(clippy::len_without_is_empty)]\n\npub trait WritableBuffer {\n\n /// Returns position/offset for data to be written at.\n\n ///\n\n /// Should only be used in debug assertions\n\n fn len(&self) -> usize;\n\n\n\n /// Reserves specified number of bytes in the buffer.\n\n ///\n\n /// This will be called exactly once before writing anything to the buffer,\n\n /// and the given size is the exact total number of bytes that will be written.\n\n fn reserve(&mut self, size: usize) -> Result<(), ()>;\n\n\n\n /// Writes zero bytes at the end of the buffer until the buffer\n\n /// has the specified length.\n\n fn resize(&mut self, new_len: usize);\n\n\n\n /// Writes the specified slice of bytes at the end of the buffer.\n\n fn write_bytes(&mut self, val: &[u8]);\n\n\n\n /// Writes the specified `Pod` type at the end of the buffer.\n", "file_path": "src/write/util.rs", "rank": 56, "score": 119401.9828763757 }, { "content": "// Ignores bytes starting from the first space.\n\nfn parse_u64_digits(digits: &[u8], radix: u32) -> Option<u64> {\n\n if let [b' ', ..] 
= digits {\n\n return None;\n\n }\n\n let mut result: u64 = 0;\n\n for &c in digits {\n\n if c == b' ' {\n\n return Some(result);\n\n } else {\n\n let x = (c as char).to_digit(radix)?;\n\n result = result\n\n .checked_mul(u64::from(radix))?\n\n .checked_add(u64::from(x))?;\n\n }\n\n }\n\n Some(result)\n\n}\n\n\n", "file_path": "src/read/archive.rs", "rank": 57, "score": 117164.51614234617 }, { "content": "// Test that segment size is valid when the first section needs alignment.\n\nfn issue_286_segment_file_size() {\n\n let mut object = write::Object::new(\n\n BinaryFormat::MachO,\n\n Architecture::X86_64,\n\n Endianness::Little,\n\n );\n\n\n\n let text = object.section_id(write::StandardSection::Text);\n\n object.append_section_data(text, &[1; 30], 0x1000);\n\n\n\n let bytes = &*object.write().unwrap();\n\n let header = macho::MachHeader64::parse(bytes, 0).unwrap();\n\n let endian: Endianness = header.endian().unwrap();\n\n let mut commands = header.load_commands(endian, bytes, 0).unwrap();\n\n let command = commands.next().unwrap().unwrap();\n\n let (segment, _) = command.segment_64().unwrap().unwrap();\n\n assert_eq!(segment.vmsize.get(endian), 30);\n\n assert_eq!(segment.filesize.get(endian), 30);\n\n}\n", "file_path": "tests/round_trip/macho.rs", "rank": 58, "score": 115353.2903302381 }, { "content": "fn parse_relocation<Elf: FileHeader>(\n\n header: &Elf,\n\n endian: Elf::Endian,\n\n reloc: Elf::Rela,\n\n implicit_addend: bool,\n\n) -> Relocation {\n\n let mut encoding = RelocationEncoding::Generic;\n\n let is_mips64el = header.is_mips64el(endian);\n\n let (kind, size) = match header.e_machine(endian) {\n\n elf::EM_AARCH64 => match reloc.r_type(endian, false) {\n\n elf::R_AARCH64_ABS64 => (RelocationKind::Absolute, 64),\n\n elf::R_AARCH64_ABS32 => (RelocationKind::Absolute, 32),\n\n elf::R_AARCH64_ABS16 => (RelocationKind::Absolute, 16),\n\n elf::R_AARCH64_PREL64 => (RelocationKind::Relative, 64),\n\n elf::R_AARCH64_PREL32 => (RelocationKind::Relative, 32),\n\n elf::R_AARCH64_PREL16 => (RelocationKind::Relative, 16),\n\n elf::R_AARCH64_CALL26 => {\n\n encoding = RelocationEncoding::AArch64Call;\n\n (RelocationKind::PltRelative, 26)\n\n }\n", "file_path": "src/read/elf/relocation.rs", "rank": 59, "score": 108318.4746441135 }, { "content": "#[allow(missing_docs)]\n\npub trait MachHeader: Debug + Pod {\n\n type Word: Into<u64>;\n\n type Endian: endian::Endian;\n\n type Segment: Segment<Endian = Self::Endian, Section = Self::Section>;\n\n type Section: Section<Endian = Self::Endian>;\n\n type Nlist: Nlist<Endian = Self::Endian>;\n\n\n\n /// Return true if this type is a 64-bit header.\n\n ///\n\n /// This is a property of the type, not a value in the header data.\n\n fn is_type_64(&self) -> bool;\n\n\n\n /// Return true if the `magic` field signifies big-endian.\n\n fn is_big_endian(&self) -> bool;\n\n\n\n /// Return true if the `magic` field signifies little-endian.\n\n fn is_little_endian(&self) -> bool;\n\n\n\n fn magic(&self) -> u32;\n\n fn cputype(&self, endian: Self::Endian) -> u32;\n", "file_path": "src/read/macho/file.rs", "rank": 60, "score": 108260.98317195613 }, { "content": "fn print_version<Elf: FileHeader>(\n\n p: &mut Printer<'_>,\n\n versions: Option<&VersionTable<Elf>>,\n\n version_index: VersionIndex,\n\n) {\n\n match versions.and_then(|versions| versions.version(version_index).print_err(p)) {\n\n Some(Some(version)) => {\n\n p.field_string_option(\"Version\", version_index.0, Some(version.name()))\n\n }\n\n _ => p.field_enum(\"Version\", version_index.0, 
FLAGS_VER_NDX),\n\n }\n\n p.flags(version_index.0, 0, FLAGS_VERSYM);\n\n}\n\n\n\nstatic FLAGS_EI_CLASS: &[Flag<u8>] = &flags!(ELFCLASSNONE, ELFCLASS32, ELFCLASS64);\n\nstatic FLAGS_EI_DATA: &[Flag<u8>] = &flags!(ELFDATANONE, ELFDATA2LSB, ELFDATA2MSB);\n\nstatic FLAGS_EV: &[Flag<u8>] = &flags!(EV_NONE, EV_CURRENT);\n\nstatic FLAGS_EI_OSABI: &[Flag<u8>] = &flags!(\n\n ELFOSABI_SYSV,\n\n ELFOSABI_HPUX,\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 61, "score": 106187.54402888087 }, { "content": "fn print_dynamic<Elf: FileHeader>(\n\n p: &mut Printer<'_>,\n\n endian: Elf::Endian,\n\n elf: &Elf,\n\n dynamic: &[Elf::Dyn],\n\n dynstr: StringTable,\n\n) {\n\n let proc = match elf.e_machine(endian) {\n\n EM_SPARC => FLAGS_DT_SPARC,\n\n EM_MIPS => FLAGS_DT_MIPS,\n\n EM_ALPHA => FLAGS_DT_ALPHA,\n\n EM_PPC => FLAGS_DT_PPC,\n\n EM_PPC64 => FLAGS_DT_PPC64,\n\n EM_IA_64 => FLAGS_DT_IA_64,\n\n EM_ALTERA_NIOS2 => FLAGS_DT_NIOS2,\n\n _ => &[],\n\n };\n\n for d in dynamic {\n\n let tag = d.d_tag(endian).into();\n\n let val = d.d_val(endian).into();\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 62, "score": 106187.54402888087 }, { "content": "fn print_notes<Elf: FileHeader>(\n\n p: &mut Printer<'_>,\n\n endian: Elf::Endian,\n\n mut notes: NoteIterator<Elf>,\n\n) {\n\n while let Some(Some(note)) = notes.next().print_err(p) {\n\n p.group(\"Note\", |p| {\n\n let name = note.name();\n\n p.field_string_option(\"Name\", note.n_namesz(endian), Some(name));\n\n let flags = if name == ELF_NOTE_CORE || name == ELF_NOTE_LINUX {\n\n FLAGS_NT_CORE\n\n } else if name == ELF_NOTE_SOLARIS {\n\n FLAGS_NT_SOLARIS\n\n } else if name == ELF_NOTE_GNU {\n\n FLAGS_NT_GNU\n\n } else {\n\n // TODO: NT_VERSION\n\n &[]\n\n };\n\n p.field_enum(\"Type\", note.n_type(endian), flags);\n\n // TODO: interpret desc\n\n p.field_bytes(\"Desc\", note.desc());\n\n });\n\n }\n\n}\n\n\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 63, "score": 106187.54402888087 }, { "content": "fn print_hash<Elf: FileHeader>(\n\n p: &mut Printer<'_>,\n\n endian: Elf::Endian,\n\n data: &[u8],\n\n _elf: &Elf,\n\n _sections: &SectionTable<Elf>,\n\n section: &Elf::SectionHeader,\n\n) {\n\n if let Some(Some(hash)) = section.hash_header(endian, data).print_err(p) {\n\n p.group(\"Hash\", |p| {\n\n p.field(\"BucketCount\", hash.bucket_count.get(endian));\n\n p.field(\"ChainCount\", hash.chain_count.get(endian));\n\n });\n\n }\n\n /* TODO: add this in a test somewhere\n\n if let Ok(Some((hash_table, link))) = section.hash(endian, data) {\n\n if let Ok(symbols) = _sections.symbol_table_by_index(endian, data, link) {\n\n if let Ok(versions) = _sections.versions(endian, data) {\n\n for (index, symbol) in symbols.symbols().iter().enumerate() {\n\n let name = symbols.symbol_name(endian, symbol).unwrap();\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 64, "score": 106187.54402888087 }, { "content": "#[allow(missing_docs)]\n\npub trait ImageOptionalHeader: Debug + Pod {\n\n // Standard fields.\n\n fn magic(&self) -> u16;\n\n fn major_linker_version(&self) -> u8;\n\n fn minor_linker_version(&self) -> u8;\n\n fn size_of_code(&self) -> u32;\n\n fn size_of_initialized_data(&self) -> u32;\n\n fn size_of_uninitialized_data(&self) -> u32;\n\n fn address_of_entry_point(&self) -> u32;\n\n fn base_of_code(&self) -> u32;\n\n\n\n // NT additional fields.\n\n fn image_base(&self) -> u64;\n\n fn section_alignment(&self) -> u32;\n\n fn file_alignment(&self) -> u32;\n\n fn major_operating_system_version(&self) -> u16;\n\n fn 
minor_operating_system_version(&self) -> u16;\n\n fn major_image_version(&self) -> u16;\n\n fn minor_image_version(&self) -> u16;\n\n fn major_subsystem_version(&self) -> u16;\n", "file_path": "src/read/pe/file.rs", "rank": 65, "score": 106126.70308331796 }, { "content": "#[allow(missing_docs)]\n\npub trait ImageNtHeaders: Debug + Pod {\n\n type ImageOptionalHeader: ImageOptionalHeader;\n\n type ImageThunkData: ImageThunkData;\n\n\n\n /// Return true if this type is a 64-bit header.\n\n ///\n\n /// This is a property of the type, not a value in the header data.\n\n fn is_type_64(&self) -> bool;\n\n\n\n /// Return true if the magic field in the optional header is valid.\n\n fn is_valid_optional_magic(&self) -> bool;\n\n\n\n /// Return the signature\n\n fn signature(&self) -> u32;\n\n\n\n /// Return the file header.\n\n fn file_header(&self) -> &pe::ImageFileHeader;\n\n\n\n /// Return the optional header.\n\n fn optional_header(&self) -> &Self::ImageOptionalHeader;\n", "file_path": "src/read/pe/file.rs", "rank": 66, "score": 106126.70308331796 }, { "content": "fn print_section_notes<Elf: FileHeader>(\n\n p: &mut Printer<'_>,\n\n endian: Elf::Endian,\n\n data: &[u8],\n\n _elf: &Elf,\n\n section: &Elf::SectionHeader,\n\n) {\n\n if let Some(Some(notes)) = section.notes(endian, data).print_err(p) {\n\n print_notes(p, endian, notes);\n\n }\n\n}\n\n\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 67, "score": 104162.95666130047 }, { "content": "fn print_gnu_hash<Elf: FileHeader>(\n\n p: &mut Printer<'_>,\n\n endian: Elf::Endian,\n\n data: &[u8],\n\n _elf: &Elf,\n\n _sections: &SectionTable<Elf>,\n\n section: &Elf::SectionHeader,\n\n) {\n\n if let Some(Some(hash)) = section.gnu_hash_header(endian, data).print_err(p) {\n\n p.group(\"GnuHash\", |p| {\n\n p.field(\"BucketCount\", hash.bucket_count.get(endian));\n\n p.field(\"SymbolBase\", hash.symbol_base.get(endian));\n\n p.field(\"BloomCount\", hash.bloom_count.get(endian));\n\n p.field(\"BloomShift\", hash.bloom_shift.get(endian));\n\n });\n\n }\n\n /* TODO: add this in a test somewhere\n\n if let Ok(Some((hash_table, link))) = section.gnu_hash(endian, data) {\n\n if let Ok(symbols) = _sections.symbol_table_by_index(endian, data, link) {\n\n if let Ok(versions) = _sections.versions(endian, data) {\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 68, "score": 104162.95666130047 }, { "content": "fn print_gnu_verdef<Elf: FileHeader>(\n\n p: &mut Printer<'_>,\n\n endian: Elf::Endian,\n\n data: &[u8],\n\n _elf: &Elf,\n\n sections: &SectionTable<Elf>,\n\n section: &Elf::SectionHeader,\n\n) {\n\n if let Some(Some((mut verdefs, link))) = section.gnu_verdef(endian, data).print_err(p) {\n\n let strings = sections.strings(endian, data, link).unwrap_or_default();\n\n while let Some(Some((verdef, mut verdauxs))) = verdefs.next().print_err(p) {\n\n p.group(\"VersionDefinition\", |p| {\n\n p.field(\"Version\", verdef.vd_version.get(endian));\n\n p.field_hex(\"Flags\", verdef.vd_flags.get(endian));\n\n p.flags(verdef.vd_flags.get(endian), 0, FLAGS_VER_FLG);\n\n p.field(\"Index\", verdef.vd_ndx.get(endian));\n\n p.field(\"AuxCount\", verdef.vd_cnt.get(endian));\n\n p.field_hex(\"Hash\", verdef.vd_hash.get(endian));\n\n p.field(\"AuxOffset\", verdef.vd_aux.get(endian));\n\n p.field(\"NextOffset\", verdef.vd_next.get(endian));\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 69, "score": 104162.95666130047 }, { "content": "fn print_section_symbols<Elf: FileHeader>(\n\n p: &mut Printer<'_>,\n\n endian: Elf::Endian,\n\n data: 
&[u8],\n\n elf: &Elf,\n\n sections: &SectionTable<Elf>,\n\n section_index: SectionIndex,\n\n section: &Elf::SectionHeader,\n\n) {\n\n if let Some(Some(symbols)) = section\n\n .symbols(endian, data, sections, section_index)\n\n .print_err(p)\n\n {\n\n let versions = if section.sh_type(endian) == SHT_DYNSYM {\n\n sections.versions(endian, data).print_err(p).flatten()\n\n } else {\n\n None\n\n };\n\n let os_stt = match elf.e_ident().os_abi {\n\n ELFOSABI_GNU => FLAGS_STT_GNU,\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 70, "score": 104162.95666130047 }, { "content": "fn print_segment_dynamic<Elf: FileHeader>(\n\n p: &mut Printer<'_>,\n\n endian: Elf::Endian,\n\n data: &[u8],\n\n elf: &Elf,\n\n segments: &[Elf::ProgramHeader],\n\n segment: &Elf::ProgramHeader,\n\n) {\n\n if let Some(Some(dynamic)) = segment.dynamic(endian, data).print_err(p) {\n\n // TODO: add a helper API for this and the other mandatory tags?\n\n let mut strtab = 0;\n\n let mut strsz = 0;\n\n for d in dynamic {\n\n let tag = d.d_tag(endian).into();\n\n if tag == DT_STRTAB.into() {\n\n strtab = d.d_val(endian).into();\n\n } else if tag == DT_STRSZ.into() {\n\n strsz = d.d_val(endian).into();\n\n }\n\n }\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 71, "score": 104162.95666130047 }, { "content": "fn print_gnu_verneed<Elf: FileHeader>(\n\n p: &mut Printer<'_>,\n\n endian: Elf::Endian,\n\n data: &[u8],\n\n _elf: &Elf,\n\n sections: &SectionTable<Elf>,\n\n section: &Elf::SectionHeader,\n\n) {\n\n if let Some(Some((mut verneeds, link))) = section.gnu_verneed(endian, data).print_err(p) {\n\n let strings = sections.strings(endian, data, link).unwrap_or_default();\n\n while let Some(Some((verneed, mut vernauxs))) = verneeds.next().print_err(p) {\n\n p.group(\"VersionNeed\", |p| {\n\n p.field(\"Version\", verneed.vn_version.get(endian));\n\n p.field(\"AuxCount\", verneed.vn_cnt.get(endian));\n\n p.field_string(\n\n \"Filename\",\n\n verneed.vn_file.get(endian),\n\n verneed.file(endian, strings),\n\n );\n\n p.field(\"AuxOffset\", verneed.vn_aux.get(endian));\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 72, "score": 104162.95666130047 }, { "content": "fn print_section_group<Elf: FileHeader>(\n\n p: &mut Printer<'_>,\n\n endian: Elf::Endian,\n\n data: &[u8],\n\n _elf: &Elf,\n\n sections: &SectionTable<Elf>,\n\n section: &Elf::SectionHeader,\n\n) {\n\n if let Some(Some((flag, members))) = section.group(endian, data).print_err(p) {\n\n p.field_enum(\"GroupFlag\", flag, FLAGS_GRP);\n\n p.group(\"GroupSections\", |p| {\n\n for member in members {\n\n let index = member.get(endian);\n\n p.print_indent();\n\n if let Some(section) = sections.section(SectionIndex(index as usize)).print_err(p) {\n\n if let Some(name) = sections.section_name(endian, section).print_err(p) {\n\n p.print_string(name);\n\n writeln!(p.w, \" ({})\", index).unwrap();\n\n } else {\n\n writeln!(p.w, \"{}\", index).unwrap();\n\n }\n\n } else {\n\n writeln!(p.w, \"{}\", index).unwrap();\n\n }\n\n }\n\n });\n\n }\n\n}\n\n\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 73, "score": 104162.95666130047 }, { "content": "fn print_section_rela<Elf: FileHeader>(\n\n p: &mut Printer<'_>,\n\n endian: Elf::Endian,\n\n data: &[u8],\n\n elf: &Elf,\n\n sections: &SectionTable<Elf>,\n\n section: &Elf::SectionHeader,\n\n) {\n\n if let Some(Some((relocations, link))) = section.rela(endian, data).print_err(p) {\n\n let symbols = sections\n\n .symbol_table_by_index(endian, data, link)\n\n .print_err(p);\n\n let proc = 
rel_flag_type(endian, elf);\n\n for relocation in relocations {\n\n p.group(\"Relocation\", |p| {\n\n p.field_hex(\"Offset\", relocation.r_offset(endian).into());\n\n p.field_enum(\n\n \"Type\",\n\n relocation.r_type(endian, elf.is_mips64el(endian)),\n\n proc,\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 74, "score": 104162.95666130047 }, { "content": "fn print_section_rel<Elf: FileHeader>(\n\n p: &mut Printer<'_>,\n\n endian: Elf::Endian,\n\n data: &[u8],\n\n elf: &Elf,\n\n sections: &SectionTable<Elf>,\n\n section: &Elf::SectionHeader,\n\n) {\n\n if let Some(Some((relocations, link))) = section.rel(endian, data).print_err(p) {\n\n let symbols = sections\n\n .symbol_table_by_index(endian, data, link)\n\n .print_err(p);\n\n let proc = rel_flag_type(endian, elf);\n\n for relocation in relocations {\n\n p.group(\"Relocation\", |p| {\n\n p.field_hex(\"Offset\", relocation.r_offset(endian).into());\n\n p.field_enum(\"Type\", relocation.r_type(endian), proc);\n\n let sym = relocation.r_sym(endian);\n\n print_rel_symbol(p, endian, symbols, sym);\n\n });\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 75, "score": 104162.95666130047 }, { "content": "fn print_gnu_versym<Elf: FileHeader>(\n\n p: &mut Printer<'_>,\n\n endian: Elf::Endian,\n\n data: &[u8],\n\n _elf: &Elf,\n\n sections: &SectionTable<Elf>,\n\n section: &Elf::SectionHeader,\n\n) {\n\n if let Some(Some((syms, _link))) = section.gnu_versym(endian, data).print_err(p) {\n\n let versions = sections.versions(endian, data).print_err(p).flatten();\n\n for (index, sym) in syms.iter().enumerate() {\n\n let version_index = VersionIndex(sym.0.get(endian));\n\n p.group(\"VersionSymbol\", |p| {\n\n p.field(\"Index\", index);\n\n print_version(p, versions.as_ref(), version_index);\n\n });\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 76, "score": 104162.95666130047 }, { "content": "fn print_program_headers<Elf: FileHeader>(\n\n p: &mut Printer<'_>,\n\n endian: Elf::Endian,\n\n data: &[u8],\n\n elf: &Elf,\n\n segments: &[Elf::ProgramHeader],\n\n) {\n\n for segment in segments {\n\n p.group(\"ProgramHeader\", |p| {\n\n let proc = match elf.e_machine(endian) {\n\n EM_MIPS => FLAGS_PT_MIPS,\n\n EM_PARISC => FLAGS_PT_PARISC,\n\n EM_ARM => FLAGS_PT_ARM,\n\n EM_IA_64 => FLAGS_PT_IA_64,\n\n _ => &[],\n\n };\n\n let os = match elf.e_ident().os_abi {\n\n ELFOSABI_HPUX => FLAGS_PT_HP,\n\n _ => &[],\n\n };\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 77, "score": 104162.95666130047 }, { "content": "fn print_section_headers<Elf: FileHeader>(\n\n p: &mut Printer<'_>,\n\n endian: Elf::Endian,\n\n data: &[u8],\n\n elf: &Elf,\n\n sections: &SectionTable<Elf>,\n\n) {\n\n for (index, section) in sections.iter().enumerate() {\n\n let index = SectionIndex(index);\n\n p.group(\"SectionHeader\", |p| {\n\n p.field(\"Index\", index.0);\n\n p.field_string(\n\n \"Name\",\n\n section.sh_name(endian),\n\n sections.section_name(endian, section),\n\n );\n\n\n\n let proc = match elf.e_machine(endian) {\n\n EM_MIPS => FLAGS_SHT_MIPS,\n\n EM_PARISC => FLAGS_SHT_PARISC,\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 78, "score": 104162.95666130047 }, { "content": "fn print_section_dynamic<Elf: FileHeader>(\n\n p: &mut Printer<'_>,\n\n endian: Elf::Endian,\n\n data: &[u8],\n\n elf: &Elf,\n\n sections: &SectionTable<Elf>,\n\n section: &Elf::SectionHeader,\n\n) {\n\n if let Some(Some((dynamic, index))) = section.dynamic(endian, data).print_err(p) {\n\n let strings 
= sections.strings(endian, data, index).unwrap_or_default();\n\n print_dynamic(p, endian, elf, dynamic, strings);\n\n }\n\n}\n\n\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 79, "score": 104162.95666130047 }, { "content": "fn print_segment_notes<Elf: FileHeader>(\n\n p: &mut Printer<'_>,\n\n endian: Elf::Endian,\n\n data: &[u8],\n\n _elf: &Elf,\n\n segment: &Elf::ProgramHeader,\n\n) {\n\n if let Some(Some(notes)) = segment.notes(endian, data).print_err(p) {\n\n print_notes(p, endian, notes);\n\n }\n\n}\n\n\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 80, "score": 104162.95666130047 }, { "content": "fn section_code_to_id(code: wp::SectionCode) -> usize {\n\n match code {\n\n wp::SectionCode::Custom { .. } => SECTION_CUSTOM,\n\n wp::SectionCode::Type => SECTION_TYPE,\n\n wp::SectionCode::Import => SECTION_IMPORT,\n\n wp::SectionCode::Function => SECTION_FUNCTION,\n\n wp::SectionCode::Table => SECTION_TABLE,\n\n wp::SectionCode::Memory => SECTION_MEMORY,\n\n wp::SectionCode::Global => SECTION_GLOBAL,\n\n wp::SectionCode::Export => SECTION_EXPORT,\n\n wp::SectionCode::Start => SECTION_START,\n\n wp::SectionCode::Element => SECTION_ELEMENT,\n\n wp::SectionCode::Code => SECTION_CODE,\n\n wp::SectionCode::Data => SECTION_DATA,\n\n wp::SectionCode::DataCount => SECTION_DATA_COUNT,\n\n }\n\n}\n", "file_path": "src/read/wasm.rs", "rank": 81, "score": 101134.61103989641 }, { "content": "fn print_rel_symbol<'data, Elf: FileHeader>(\n\n p: &mut Printer<'_>,\n\n endian: Elf::Endian,\n\n symbols: Option<SymbolTable<'data, Elf>>,\n\n sym: u32,\n\n) {\n\n let name = symbols.and_then(|symbols| {\n\n symbols\n\n .symbol(sym as usize)\n\n .and_then(|symbol| symbol.name(endian, symbols.strings()))\n\n .print_err(p)\n\n });\n\n p.field_string_option(\"Symbol\", sym, name);\n\n}\n\n\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 82, "score": 100376.11128595192 }, { "content": "/// Modifies `data` to start after the extended name.\n\nfn parse_bsd_extended_name<'data, R: ReadRef<'data>>(\n\n digits: &[u8],\n\n data: R,\n\n offset: &mut u64,\n\n size: &mut u64,\n\n) -> Result<&'data [u8], ()> {\n\n let len = parse_u64_digits(digits, 10).ok_or(())?;\n\n *size = size.checked_sub(len).ok_or(())?;\n\n let name_data = data.read_bytes(offset, len)?;\n\n let name = match memchr::memchr(b'\\0', name_data) {\n\n Some(len) => &name_data[..len],\n\n None => name_data,\n\n };\n\n Ok(name)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n", "file_path": "src/read/archive.rs", "rank": 83, "score": 99203.43654268561 }, { "content": "fn rel_flag_type<Elf: FileHeader>(endian: Elf::Endian, elf: &Elf) -> &'static [Flag<u32>] {\n\n match elf.e_machine(endian) {\n\n EM_68K => FLAGS_R_68K,\n\n EM_386 => FLAGS_R_386,\n\n EM_SPARC => FLAGS_R_SPARC,\n\n EM_MIPS => FLAGS_R_MIPS,\n\n EM_PARISC => FLAGS_R_PARISC,\n\n EM_ALPHA => FLAGS_R_ALPHA,\n\n EM_PPC => FLAGS_R_PPC,\n\n EM_PPC64 => FLAGS_R_PPC64,\n\n EM_AARCH64 => FLAGS_R_AARCH64,\n\n EM_ARM => FLAGS_R_ARM,\n\n EM_CSKY => FLAGS_R_CKCORE,\n\n EM_IA_64 => FLAGS_R_IA64,\n\n EM_SH => FLAGS_R_SH,\n\n EM_S390 => FLAGS_R_390,\n\n EM_CRIS => FLAGS_R_CRIS,\n\n EM_X86_64 => FLAGS_R_X86_64,\n\n EM_MN10300 => FLAGS_R_MN10300,\n\n EM_M32R => FLAGS_R_M32R,\n", "file_path": "crates/examples/src/readobj/elf.rs", "rank": 84, "score": 81204.50334053487 }, { "content": "#[derive(Default)]\n\nstruct MachState {\n\n section_index: usize,\n\n}\n\n\n", "file_path": "crates/examples/src/readobj/macho.rs", "rank": 85, "score": 77008.55386959088 }, 
{ "content": "#[test]\n\nfn testfiles() {\n\n // Move from crates/examples to the workspace root.\n\n env::set_current_dir(\"../..\").unwrap();\n\n\n\n let mut fail = false;\n\n for dir in glob::glob(\"testfiles/*\").unwrap().filter_map(Result::ok) {\n\n let dir = dir.to_str().unwrap();\n\n for path in glob::glob(&format!(\"{}/*\", dir))\n\n .unwrap()\n\n .filter_map(Result::ok)\n\n {\n\n let path = path.to_str().unwrap();\n\n if glob::glob(&format!(\"crates/examples/{}.*\", path))\n\n .unwrap()\n\n .find_map(Result::ok)\n\n .is_none()\n\n {\n\n continue;\n\n }\n\n\n", "file_path": "crates/examples/tests/testfiles.rs", "rank": 86, "score": 76491.54479641121 }, { "content": "#[test]\n\nfn note() {\n\n let endian = Endianness::Little;\n\n let mut object = write::Object::new(BinaryFormat::Elf, Architecture::X86_64, endian);\n\n\n\n // Add note section with align = 4.\n\n let mut buffer = Vec::new();\n\n\n\n buffer\n\n .write(object::bytes_of(&elf::NoteHeader32 {\n\n n_namesz: U32::new(endian, 6),\n\n n_descsz: U32::new(endian, 11),\n\n n_type: U32::new(endian, 1),\n\n }))\n\n .unwrap();\n\n buffer.write(b\"name1\\0\\0\\0\").unwrap();\n\n buffer.write(b\"descriptor\\0\\0\").unwrap();\n\n\n\n buffer\n\n .write(object::bytes_of(&elf::NoteHeader32 {\n\n n_namesz: U32::new(endian, 6),\n", "file_path": "tests/round_trip/elf.rs", "rank": 87, "score": 76491.54479641121 }, { "content": "#[test]\n\nfn elf_any() {\n\n for (arch, endian) in [\n\n (Architecture::Aarch64, Endianness::Little),\n\n (Architecture::Arm, Endianness::Little),\n\n (Architecture::Avr, Endianness::Little),\n\n (Architecture::Bpf, Endianness::Little),\n\n (Architecture::I386, Endianness::Little),\n\n (Architecture::X86_64, Endianness::Little),\n\n (Architecture::X86_64_X32, Endianness::Little),\n\n (Architecture::Hexagon, Endianness::Little),\n\n (Architecture::Mips, Endianness::Little),\n\n (Architecture::Mips64, Endianness::Little),\n\n (Architecture::Msp430, Endianness::Little),\n\n (Architecture::PowerPc, Endianness::Big),\n\n (Architecture::PowerPc64, Endianness::Big),\n\n (Architecture::Riscv32, Endianness::Little),\n\n (Architecture::Riscv64, Endianness::Little),\n\n (Architecture::S390x, Endianness::Big),\n\n (Architecture::Sparc64, Endianness::Big),\n\n ]\n", "file_path": "tests/round_trip/mod.rs", "rank": 88, "score": 76491.54479641121 }, { "content": " pub trait Sealed {}\n\n}\n\n\n\n/// The error type used within the read module.\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\npub struct Error(&'static str);\n\n\n\nimpl fmt::Display for Error {\n\n #[inline]\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n f.write_str(self.0)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"std\")]\n\nimpl std::error::Error for Error {}\n\n\n\n/// The result type used within the read module.\n\npub type Result<T> = result::Result<T, Error>;\n\n\n", "file_path": "src/read/mod.rs", "rank": 89, "score": 75998.70962185824 }, { "content": "#[cfg(feature = \"compression\")]\n\n#[test]\n\nfn compression_zlib() {\n\n use object::read::ObjectSection;\n\n use object::LittleEndian as LE;\n\n\n\n let data = b\"test data data data\";\n\n let len = data.len() as u64;\n\n\n\n let mut ch = object::elf::CompressionHeader64::<LE>::default();\n\n ch.ch_type.set(LE, object::elf::ELFCOMPRESS_ZLIB);\n\n ch.ch_size.set(LE, len);\n\n ch.ch_addralign.set(LE, 1);\n\n\n\n let mut buf = Vec::new();\n\n buf.write(object::bytes_of(&ch)).unwrap();\n\n let mut encoder = flate2::write::ZlibEncoder::new(buf, flate2::Compression::default());\n\n 
encoder.write_all(data).unwrap();\n\n let compressed = encoder.finish().unwrap();\n\n\n\n let mut object =\n\n write::Object::new(BinaryFormat::Elf, Architecture::X86_64, Endianness::Little);\n", "file_path": "tests/round_trip/elf.rs", "rank": 90, "score": 75161.49722807689 }, { "content": "#[cfg(feature = \"compression\")]\n\n#[test]\n\nfn compression_gnu() {\n\n use object::read::ObjectSection;\n\n use std::io::Write;\n\n\n\n let data = b\"test data data data\";\n\n let len = data.len() as u32;\n\n\n\n let mut buf = Vec::new();\n\n buf.write_all(b\"ZLIB\\0\\0\\0\\0\").unwrap();\n\n buf.write_all(&len.to_be_bytes()).unwrap();\n\n let mut encoder = flate2::write::ZlibEncoder::new(buf, flate2::Compression::default());\n\n encoder.write_all(data).unwrap();\n\n let compressed = encoder.finish().unwrap();\n\n\n\n let mut object =\n\n write::Object::new(BinaryFormat::Elf, Architecture::X86_64, Endianness::Little);\n\n let section = object.add_section(\n\n Vec::new(),\n\n b\".zdebug_info\".to_vec(),\n\n object::SectionKind::Other,\n", "file_path": "tests/round_trip/elf.rs", "rank": 91, "score": 75161.49722807689 }, { "content": "#[test]\n\nfn coff_x86_64() {\n\n let mut object =\n\n write::Object::new(BinaryFormat::Coff, Architecture::X86_64, Endianness::Little);\n\n\n\n object.add_file_symbol(b\"file.c\".to_vec());\n\n\n\n let text = object.section_id(write::StandardSection::Text);\n\n object.append_section_data(text, &[1; 30], 4);\n\n\n\n let func1_offset = object.append_section_data(text, &[1; 30], 4);\n\n assert_eq!(func1_offset, 32);\n\n let func1_symbol = object.add_symbol(write::Symbol {\n\n name: b\"func1\".to_vec(),\n\n value: func1_offset,\n\n size: 32,\n\n kind: SymbolKind::Text,\n\n scope: SymbolScope::Linkage,\n\n weak: false,\n\n section: write::SymbolSection::Section(text),\n\n flags: SymbolFlags::None,\n", "file_path": "tests/round_trip/mod.rs", "rank": 92, "score": 75161.49722807689 }, { "content": "#[test]\n\nfn elf_x86_64() {\n\n let mut object =\n\n write::Object::new(BinaryFormat::Elf, Architecture::X86_64, Endianness::Little);\n\n\n\n object.add_file_symbol(b\"file.c\".to_vec());\n\n\n\n let text = object.section_id(write::StandardSection::Text);\n\n object.append_section_data(text, &[1; 30], 4);\n\n\n\n let func1_offset = object.append_section_data(text, &[1; 30], 4);\n\n assert_eq!(func1_offset, 32);\n\n let func1_symbol = object.add_symbol(write::Symbol {\n\n name: b\"func1\".to_vec(),\n\n value: func1_offset,\n\n size: 32,\n\n kind: SymbolKind::Text,\n\n scope: SymbolScope::Linkage,\n\n weak: false,\n\n section: write::SymbolSection::Section(text),\n\n flags: SymbolFlags::None,\n", "file_path": "tests/round_trip/mod.rs", "rank": 93, "score": 75161.49722807689 }, { "content": "#[test]\n\nfn macho_x86_64() {\n\n let mut object = write::Object::new(\n\n BinaryFormat::MachO,\n\n Architecture::X86_64,\n\n Endianness::Little,\n\n );\n\n\n\n object.add_file_symbol(b\"file.c\".to_vec());\n\n\n\n let text = object.section_id(write::StandardSection::Text);\n\n object.append_section_data(text, &[1; 30], 4);\n\n\n\n let func1_offset = object.append_section_data(text, &[1; 30], 4);\n\n assert_eq!(func1_offset, 32);\n\n let func1_symbol = object.add_symbol(write::Symbol {\n\n name: b\"func1\".to_vec(),\n\n value: func1_offset,\n\n size: 32,\n\n kind: SymbolKind::Text,\n\n scope: SymbolScope::Linkage,\n", "file_path": "tests/round_trip/mod.rs", "rank": 94, "score": 75161.49722807689 }, { "content": "#[test]\n\nfn symtab_shndx() {\n\n let mut object =\n\n 
write::Object::new(BinaryFormat::Elf, Architecture::X86_64, Endianness::Little);\n\n\n\n for i in 0..0x10000 {\n\n let name = format!(\"func{}\", i).into_bytes();\n\n let (section, offset) =\n\n object.add_subsection(write::StandardSection::Text, &name, &[0xcc], 1);\n\n object.add_symbol(write::Symbol {\n\n name,\n\n value: offset,\n\n size: 1,\n\n kind: SymbolKind::Text,\n\n scope: SymbolScope::Linkage,\n\n weak: false,\n\n section: write::SymbolSection::Section(section),\n\n flags: SymbolFlags::None,\n\n });\n\n }\n\n let bytes = object.write().unwrap();\n", "file_path": "tests/round_trip/elf.rs", "rank": 95, "score": 75161.49722807689 }, { "content": "#[derive(Clone, Debug)]\n\nstruct WasmSymbolInternal<'data> {\n\n name: &'data str,\n\n address: u64,\n\n size: u64,\n\n kind: SymbolKind,\n\n section: SymbolSection,\n\n scope: SymbolScope,\n\n}\n\n\n\nimpl<'data, 'file> read::private::Sealed for WasmSymbol<'data, 'file> {}\n\n\n\nimpl<'data, 'file> ObjectSymbol<'data> for WasmSymbol<'data, 'file> {\n\n #[inline]\n\n fn index(&self) -> SymbolIndex {\n\n self.index\n\n }\n\n\n\n #[inline]\n\n fn name_bytes(&self) -> read::Result<&'data [u8]> {\n\n Ok(self.symbol.name.as_bytes())\n", "file_path": "src/read/wasm.rs", "rank": 96, "score": 74151.66662663879 }, { "content": "#[test]\n\nfn coff_x86_64_bss() {\n\n let mut object =\n\n write::Object::new(BinaryFormat::Coff, Architecture::X86_64, Endianness::Little);\n\n\n\n let section = object.section_id(write::StandardSection::UninitializedData);\n\n\n\n let symbol = object.add_symbol(write::Symbol {\n\n name: b\"v1\".to_vec(),\n\n value: 0,\n\n size: 0,\n\n kind: SymbolKind::Data,\n\n scope: SymbolScope::Linkage,\n\n weak: false,\n\n section: write::SymbolSection::Undefined,\n\n flags: SymbolFlags::None,\n\n });\n\n object.add_symbol_bss(symbol, section, 18, 4);\n\n\n\n let symbol = object.add_symbol(write::Symbol {\n\n name: b\"v2\".to_vec(),\n", "file_path": "tests/round_trip/bss.rs", "rank": 98, "score": 73905.18458038129 }, { "content": "fn print_sections(\n\n p: &mut Printer<'_>,\n\n data: &[u8],\n\n machine: u16,\n\n symbols: Option<&SymbolTable>,\n\n sections: &SectionTable,\n\n) {\n\n for (index, section) in sections.iter().enumerate() {\n\n p.group(\"ImageSectionHeader\", |p| {\n\n p.field(\"Index\", index + 1);\n\n if let Some(name) =\n\n symbols.and_then(|symbols| section.name(symbols.strings()).print_err(p))\n\n {\n\n p.field_inline_string(\"Name\", name);\n\n } else {\n\n p.field_inline_string(\"Name\", section.raw_name());\n\n }\n\n p.field_hex(\"VirtualSize\", section.virtual_size.get(LE));\n\n p.field_hex(\"VirtualAddress\", section.virtual_address.get(LE));\n\n p.field_hex(\"SizeOfRawData\", section.size_of_raw_data.get(LE));\n", "file_path": "crates/examples/src/readobj/pe.rs", "rank": 99, "score": 73905.18458038129 } ]
Rust
src/move_table.rs
lePerdu/twisted
6f3330fbb594beb9f06d8bfeb307cb60ca8035b9
use std::marker::PhantomData; use crate::coord::{CompositeCoord, Coord}; use crate::puzzle::{PuzzleMove, PuzzlePerm}; use crate::symmetry::{SymCoord, Symmetry}; use crate::util::{EnumCount, IntoEnumIterator}; pub trait MoveTable { type Puzzle: PuzzlePerm; type Coord: Coord<Self::Puzzle>; type Move: PuzzleMove<Puzzle = Self::Puzzle>; fn get_move(&self, coord: Self::Coord, mov: Self::Move) -> Self::Coord; } pub struct BasicMoveTable<C, M> { table: Box<[C]>, _moves: PhantomData<M>, } impl<C, M> BasicMoveTable<C, M> where C: Coord<M::Puzzle>, M: PuzzleMove, { pub fn create() -> Self { let mut table = Vec::with_capacity(C::COUNT * M::COUNT); for coord in C::iter() { let perm = coord.into_perm(); for mov in M::iter() { let new_perm = perm.sequence(mov.permutation()); table.push(C::from_perm(&new_perm)); } } Self { table: table.into_boxed_slice(), _moves: PhantomData::default(), } } } impl<C, M> MoveTable for BasicMoveTable<C, M> where C: Coord<M::Puzzle>, M: PuzzleMove, { type Puzzle = M::Puzzle; type Coord = C; type Move = M; fn get_move(&self, coord: C, mov: M) -> C { self.table[M::COUNT * coord.index() + mov.index()] } } pub struct CompositeMoveTable<'a, C, AT, BT> { table_a: &'a AT, table_b: &'a BT, _coord: PhantomData<C>, } impl<'a, C, AT, BT> CompositeMoveTable<'a, C, AT, BT> { pub fn new(table_a: &'a AT, table_b: &'a BT) -> Self { CompositeMoveTable { table_a, table_b, _coord: PhantomData::default(), } } } impl<'a, P, A, B, C, M, AT, BT> CompositeMoveTable<'a, C, AT, BT> where P: PuzzlePerm, M: PuzzleMove<Puzzle = P>, AT: MoveTable<Puzzle = P, Coord = A, Move = M>, BT: MoveTable<Puzzle = P, Coord = B, Move = M>, A: Coord<P>, B: Coord<P>, C: Coord<P> + CompositeCoord<P, CoordA = A, CoordB = B>, { pub fn to_basic(&self) -> BasicMoveTable<C, M> { let mut table = Vec::with_capacity(C::COUNT * M::COUNT); for coord in C::iter() { for mov in M::iter() { table.push(self.get_move(coord, mov)); } } BasicMoveTable { table: table.into_boxed_slice(), _moves: PhantomData::default(), } } } impl<'a, P, A, B, C, M, AT, BT> MoveTable for CompositeMoveTable<'a, C, AT, BT> where P: PuzzlePerm, M: PuzzleMove<Puzzle = P>, AT: MoveTable<Puzzle = P, Coord = A, Move = M>, BT: MoveTable<Puzzle = P, Coord = B, Move = M>, A: Coord<P>, B: Coord<P>, C: Coord<P> + CompositeCoord<P, CoordA = A, CoordB = B>, { type Puzzle = P; type Coord = C; type Move = M; fn get_move(&self, coord: C, mov: M) -> C { let (a, b) = coord.into_coords(); C::from_coords(self.table_a.get_move(a, mov), self.table_b.get_move(b, mov)) } } pub struct SymMoveTable<C, M> { coord_table: Box<[C]>, move_table: Box<[M]>, } impl<C, M> SymMoveTable<C, M> where C: SymCoord<M::Puzzle> + Coord<M::Puzzle>, M: PuzzleMove, { pub fn create() -> Self { let representants = C::representants(); let mut coord_table = Vec::with_capacity(representants.len() * M::COUNT); for representant in representants.iter() { let perm = representant.into_perm(); for mov in M::iter() { let new_perm = perm.sequence(mov.permutation()); coord_table.push(C::from_perm(&new_perm)); } } let mut move_table = Vec::with_capacity(M::COUNT * C::Symmetry::COUNT); for mov in M::iter() { let perm = mov.permutation(); for sym in C::Symmetry::iter() { let transformed = perm.sequence(sym.permutation()); if let Some(transformed_move) = M::iter().find(|m| *m.permutation() == transformed) { move_table.push(transformed_move); } else { panic!("Transformed move coult not be found."); } } } Self { coord_table: coord_table.into_boxed_slice(), move_table: move_table.into_boxed_slice(), } } } impl<C, M> 
MoveTable for SymMoveTable<C, M> where C: SymCoord<M::Puzzle> + Coord<M::Puzzle>, M: PuzzleMove, { type Puzzle = M::Puzzle; type Coord = C; type Move = M; fn get_move(&self, coord: C, mov: M) -> C { } } /* TODO Move into cube-specific mod #[cfg(test)] pub(crate) mod test { use super::*; use std::fmt::Debug; use crate::coord::{CornerOrient7Coord, CornerPos7Coord}; use crate::cube::moves::UrfTurn; fn coordinates_correct_after_move<C: Coord + Debug, M: PuzzleMove>( table: &impl MoveTable<C, M>, ) { let mut perm = PuzzlePerm::default(); // Run through a series of moves and make sure the coordinates match up for turn in M::iter() { let orig_coord = C::from(&perm); perm += turn.permutation(); let table_coord = table.get_move(orig_coord, turn); let perm_coord = C::from(&perm); assert_eq!(table_coord, perm_coord); } } // Do the tests for each coordinate macro_rules! make_tests { ($name:ident, $coord:ty) => { pub mod $name { use super::*; lazy_static! { pub static ref TABLE: BasicMoveTable<$coord, UrfTurn> = BasicMoveTable::create(); } #[test] fn coordinates_correct_after_move() { super::coordinates_correct_after_move(&*TABLE); } } }; } make_tests!(corner_orient, CornerOrient7Coord); make_tests!(corner_pos, CornerPos7Coord); lazy_static! { pub static ref CORNER_MOVE_TABLE: CompositeMoveTable< 'static, CornerOrient7Coord, CornerPos7Coord, UrfTurn, BasicMoveTable<CornerOrient7Coord, UrfTurn>, BasicMoveTable<CornerPos7Coord, UrfTurn>, > = CompositeMoveTable::new(&*corner_orient::TABLE, &*corner_pos::TABLE); } } */
use std::marker::PhantomData; use crate::coord::{CompositeCoord, Coord}; use crate::puzzle::{PuzzleMove, PuzzlePerm}; use crate::symmetry::{SymCoord, Symmetry}; use crate::util::{EnumCount, IntoEnumIterator}; pub trait MoveTable { type Puzzle: PuzzlePerm; type Coord: Coord<Self::Puzzle>; type Move: PuzzleMove<Puzzle = Self::Puzzle>; fn get_move(&self, coord: Self::Coord, mov: Self::Move) -> Self::Coord; } pub struct BasicMoveTable<C, M> { table: Box<[C]>, _moves: PhantomData<M>, } impl<C, M> BasicMoveTable<C, M> where C: Coord<M::Puzzle>, M: PuzzleMove, { pub fn create() -> Self { let mut table = Vec::with_capacity(C::COUNT * M::COUNT); for coord in C::iter() { let perm = coord.into_perm(); for mov in M::iter() { let new_perm = perm.sequence(mov.permutation()); table.push(C::from_perm(&new_perm)); } } Self { table: table.into_boxed_slice(), _moves: PhantomData::default(), } } } impl<C, M> MoveTable for BasicMoveTable<C, M> where C: Coord<M::Puzzle>, M: PuzzleMove, { type Puzzle = M::Puzzle; type Coord = C; type Move = M; fn get_move(&self, coord: C, mov: M) -> C { self.table[M::COUNT * coord.index() + mov.index()] } } pub struct CompositeMoveTable<'a, C, AT, BT> { table_a: &'a AT, table_b: &'a BT, _coord: PhantomData<C>, } impl<'a, C, AT, BT> CompositeMoveTable<'a, C, AT, BT> { pub fn new(table_a: &'a AT, table_b: &'a BT) -> Self { CompositeMoveTable { table_a, table_b, _coord: PhantomData::default(), } } } impl<'a, P, A, B, C, M, AT, BT> CompositeMoveTable<'a, C, AT, BT> where P: PuzzlePerm, M: PuzzleMove<Puzzle = P>, AT: MoveTable<Puzzle = P, Coord = A, Move = M>, BT: MoveTable<Puzzle = P, Coord = B, Move = M>, A: Coord<P>, B: Coord<P>, C: Coord<P> + CompositeCoord<P, CoordA = A, CoordB = B>, { pub fn to_basic(&self) -> BasicMoveTable<C, M> { let mut table = Vec::with_capacity(C::COUNT * M::COUNT); for coord in C::iter() { for mov in M::iter() { table.push(self.get_move(coord, mov)); } } BasicMoveTable { table: table.into_boxed_slice(), _moves: PhantomData::default(), } } } impl<'a, P, A, B, C, M, AT, BT> MoveTable for CompositeMoveTable<'a, C, AT, BT> where P: PuzzlePerm, M: PuzzleMove<Puzzle = P>, AT: MoveTable<Puzzle = P, Coord = A, Move = M>, BT: MoveTable<Puzzle = P, Coord = B, Move = M>, A: Coord<P>, B: Coord<P>, C: Coord<P> + CompositeCoord<P, CoordA = A, CoordB = B>, { type Puzzle = P; type Coord = C; type Move = M; fn get_move(&self, coord: C, mov: M) -> C { let (a, b) = coord.into_coords(); C::from_coords(self.table_a.get_move(a, mov), self.table_b.get_move(b, mov)) } } pub struct SymMoveTable<C, M> { coord_table: Box<[C]>, move_table: Box<[M]>, } impl<C, M> SymMoveTable<C, M> where C: SymCoord<M::Puzzle> + Coord<M::Puzzle>, M: PuzzleMove, { pub fn create() -> Self { let representants = C::representants(); let mut coord_table = Vec::with_capacity(representants.len() * M::COUNT); for representant in representants.iter() { let perm = representant.into_perm(); for mov in M::iter() { let new_perm = perm.sequence(mov.permutation()); coord_table.push(C::from_perm(&new_perm)); } } let mut move_table = Vec::with_capacity(M::COUNT * C::Symmetry::COUNT); for mov in M::iter() { let perm = mov.permutation(); for sym in C::Symmetry::iter() {
ble_coord = table.get_move(orig_coord, turn); let perm_coord = C::from(&perm); assert_eq!(table_coord, perm_coord); } } // Do the tests for each coordinate macro_rules! make_tests { ($name:ident, $coord:ty) => { pub mod $name { use super::*; lazy_static! { pub static ref TABLE: BasicMoveTable<$coord, UrfTurn> = BasicMoveTable::create(); } #[test] fn coordinates_correct_after_move() { super::coordinates_correct_after_move(&*TABLE); } } }; } make_tests!(corner_orient, CornerOrient7Coord); make_tests!(corner_pos, CornerPos7Coord); lazy_static! { pub static ref CORNER_MOVE_TABLE: CompositeMoveTable< 'static, CornerOrient7Coord, CornerPos7Coord, UrfTurn, BasicMoveTable<CornerOrient7Coord, UrfTurn>, BasicMoveTable<CornerPos7Coord, UrfTurn>, > = CompositeMoveTable::new(&*corner_orient::TABLE, &*corner_pos::TABLE); } } */
let transformed = perm.sequence(sym.permutation()); if let Some(transformed_move) = M::iter().find(|m| *m.permutation() == transformed) { move_table.push(transformed_move); } else { panic!("Transformed move coult not be found."); } } } Self { coord_table: coord_table.into_boxed_slice(), move_table: move_table.into_boxed_slice(), } } } impl<C, M> MoveTable for SymMoveTable<C, M> where C: SymCoord<M::Puzzle> + Coord<M::Puzzle>, M: PuzzleMove, { type Puzzle = M::Puzzle; type Coord = C; type Move = M; fn get_move(&self, coord: C, mov: M) -> C { } } /* TODO Move into cube-specific mod #[cfg(test)] pub(crate) mod test { use super::*; use std::fmt::Debug; use crate::coord::{CornerOrient7Coord, CornerPos7Coord}; use crate::cube::moves::UrfTurn; fn coordinates_correct_after_move<C: Coord + Debug, M: PuzzleMove>( table: &impl MoveTable<C, M>, ) { let mut perm = PuzzlePerm::default(); // Run through a series of moves and make sure the coordinates match up for turn in M::iter() { let orig_coord = C::from(&perm); perm += turn.permutation(); let ta
random
[ { "content": "/// Symmetry-reduced coordinate, constructed from a regular coordinate and a symmetry.\n\npub trait SymCoord<P: PuzzlePerm>: EnumIndex {\n\n type BaseCoord: Coord<P>;\n\n type Symmetry: Symmetry<Puzzle = P>;\n\n type EquivClass: EnumIndex;\n\n\n\n fn from_sym_and_class(sym: Self::Symmetry, equiv_class: Self::EquivClass) -> Self;\n\n\n\n fn symmetry(&self) -> Self::Symmetry;\n\n\n\n fn equiv_class(&self) -> Self::EquivClass;\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! make_symmetry_coord {\n\n (\n\n $( #[ $attrs:meta ] )*\n\n $v:vis struct $newtype:ident < $sym:ty, $coord:ty $(,)? >\n\n ( $equivclass:ident ( $inner:ty ) ) : $count:expr ;\n\n ) => {\n\n make_newtype_enum_index! {\n", "file_path": "src/symmetry.rs", "rank": 0, "score": 281239.7508641175 }, { "content": "/// A subgorup of puzzle permutations which separate a puzzle permutation into a set of equivalence\n\n/// classes.\n\n///\n\n/// This will usually correspond to rotations or reflections of a puzzle.\n\n///\n\n/// Note: Symmetry groups are restricted to having no more than 64 elements (including the identity)\n\n/// because they are stored as a bitmask in some scenarios.\n\npub trait Symmetry: PuzzlePerm + PuzzleMove {}\n\n\n", "file_path": "src/symmetry.rs", "rank": 1, "score": 244312.02564481436 }, { "content": "pub trait CompositeCoord<P: PuzzlePerm>: Copy {\n\n type CoordA: Coord<P>;\n\n type CoordB: Coord<P>;\n\n\n\n fn from_coords(a: Self::CoordA, b: Self::CoordB) -> Self;\n\n\n\n fn into_coords(self) -> (Self::CoordA, Self::CoordB);\n\n}\n\n\n\n// TODO Make this a derive macro instead? Right now this can't impl Coord for sub-coordinates\n\n// outside of this crate without making a new type wrapper which implements a bunch of other traits.\n\n\n\n#[macro_export]\n\nmacro_rules! 
make_composite_coord {\n\n (\n\n $( #[ $attrs:meta ] )*\n\n $v:vis struct $newtype:ident < $puzzle:ty > (\n\n $inner:ty\n\n ) {\n\n $a:ident : $a_type:ty ,\n", "file_path": "src/coord/mod.rs", "rank": 2, "score": 239937.9741855972 }, { "content": "pub fn apply_coord<C, T, P>(coord: C, items_in_order: impl Iterator<Item = T>, items: &mut [P])\n\nwhere\n\n C: PrimInt,\n\n T: Copy + Eq,\n\n P: Copy,\n\n{\n\n let mut coord = coord;\n\n\n\n for (index, _) in items_in_order.enumerate().skip(1) {\n\n let base = C::from(index).unwrap() + C::one();\n\n let rotations = (coord % base).to_usize().unwrap();\n\n coord = coord / base;\n\n\n\n // Rotate right the extracted number of times\n\n for _ in 0..rotations {\n\n rotate_right(&mut items[..=index]);\n\n }\n\n }\n\n}\n", "file_path": "src/coord/permutation.rs", "rank": 3, "score": 229611.75765895552 }, { "content": "/// Checks whether a permutation is in an equivalence class.\n\nfn sym_equivalent_perm<S: Symmetry, C: Coord<S::Puzzle>>(a: C, b_perm: &S::Puzzle) -> Option<S> {\n\n S::iter().find(|sym| {\n\n let conj_perm = sym\n\n .permutation()\n\n .sequence(b_perm)\n\n .sequence(sym.invert().permutation());\n\n C::from_perm(&conj_perm) == a\n\n })\n\n}\n\n\n", "file_path": "src/symmetry.rs", "rank": 4, "score": 222454.07566181477 }, { "content": "/// Checks whether 2 coordinate values are equivalent under a symmetry.\n\nfn sym_equivalent<S: Symmetry, C: Coord<S::Puzzle>>(a: C, b: C) -> Option<S> {\n\n sym_equivalent_perm(a, &b.into_perm())\n\n}\n\n\n\nimpl<P: PuzzlePerm, S: SymCoord<P>> RepresentantTable<P, S> {\n\n pub fn create() -> Self {\n\n assert!(\n\n S::COUNT <= 64,\n\n \"Cannot calculate representant table for symmetries with more than 64 elements\"\n\n );\n\n\n\n // Get a list of all coordinate values\n\n // TODO Use an invalid value instead of Option to save space?\n\n let mut coord_values = S::BaseCoord::iter()\n\n .map(|c| Some(c))\n\n .collect::<Vec<_>>()\n\n .into_boxed_slice();\n\n\n\n // Will be slightly larger, but this is a good starting point\n\n let mut table = Vec::with_capacity(S::BaseCoord::COUNT / S::COUNT);\n", "file_path": "src/symmetry.rs", "rank": 5, "score": 220286.61163588884 }, { "content": "/// A puzzle coordinate\n\n///\n\n/// This trait requires bounds for valid coordinate values and convertions\n\n/// to/from integers/permutations.\n\npub trait Coord<P>: EnumIndex\n\nwhere\n\n P: PuzzlePerm,\n\n{\n\n fn from_perm(perm: &P) -> Self;\n\n\n\n fn into_perm(self) -> P;\n\n}\n\n\n", "file_path": "src/coord/mod.rs", "rank": 6, "score": 202428.16707644955 }, { "content": "/// A \"twisty puzzle\".\n\n///\n\n/// This trait essentially defines a Group, where \"sequence\" is the group operation.\n\n///\n\n/// TODO Use operator overloads?\n\npub trait PuzzlePerm: 'static + Clone + Eq {\n\n /// Identity (no-op) permutation on the puzzle.\n\n fn identity() -> &'static Self;\n\n\n\n /// Combine 2 permutations by sequencing them.\n\n fn sequence(&self, other: &Self) -> Self;\n\n\n\n /// Invert a permutation.\n\n fn invert(&self) -> Self;\n\n\n\n /// Tests whether a permutation is the identity.\n\n fn is_identity(&self) -> bool {\n\n self == Self::identity()\n\n }\n\n\n\n /// Sequence a permutation multiple times with itself.\n\n ///\n\n /// Can be given a custom implementation if there is an efficient, puzzle-specific way to\n\n /// sequence a permutation multiple times.\n\n fn ntimes<T: PrimInt>(&self, n: T) -> Self {\n", "file_path": "src/puzzle.rs", "rank": 8, "score": 176393.45821239665 }, { "content": "/// Calculates a 
coordinate from a set of independent values, all in the range from `[0, base)`.\n\n///\n\n/// It is assumed that the whole puzzle has a fixed parity, so the last item is excluded from the\n\n/// coordinate calculation.\n\npub fn calculate_coord<C, T, I>(items: I) -> C\n\nwhere\n\n C: PrimInt,\n\n T: EnumIndex,\n\n I: Iterator<Item = T> + DoubleEndedIterator,\n\n{\n\n let mut c = 0;\n\n for item in items.rev().skip(1) {\n\n c = c * T::COUNT + item.index();\n\n }\n\n\n\n C::from(c).unwrap()\n\n}\n\n\n", "file_path": "src/coord/parity.rs", "rank": 9, "score": 169495.23153499077 }, { "content": "/// Representation of a set of generators to use in a `MoveTable`\n\n///\n\n/// TODO Make the type parameter an associated type instead? It would is theoretically possible that\n\n/// multiple puzzles have the same move sets (e.g. 3x3 and 2x2), but it might not be that\n\n/// beneficial to unify the move types.\n\npub trait PuzzleMove: EnumIndex {\n\n type Puzzle: PuzzlePerm;\n\n\n\n /// Permutation associated with this move\n\n fn permutation(&self) -> &Self::Puzzle;\n\n\n\n /// Returns whether two moves can combine with each other to produde another move.\n\n ///\n\n /// This is used to trim down search trees.\n\n fn combines_with(&self, other: &Self) -> bool;\n\n}\n\n\n\n/// Implement `PuzzlePerm` for a type by making lazily-evaluated tables for combining and inverting\n\n/// permutations.\n\n///\n\n/// Takes a closure which returns the identity and a function which returns some other type which\n\n/// implements `PuzzlePerm` associawith the implementing type.\n\n///\n\n/// Note: this should only be used when there are not very many values for the type, as the sequence\n\n/// tables containts `COUNT * COUNT` elements.\n", "file_path": "src/puzzle.rs", "rank": 10, "score": 165265.43267373194 }, { "content": "// Helper function since lifetype annotations don't work in closures\n\nfn sym_perm(sym: &SymmetryE) -> &Cube3Perm {\n\n sym.permutation()\n\n}\n\n\n\nimpl_puzzle_perm_with_tables!(\n\n SymmetryE,\n\n || {\n\n const IDENTITY: SymmetryE = Self(0);\n\n &IDENTITY\n\n },\n\n sym_perm\n\n);\n\n\n\nmod primitives {\n\n use super::*;\n\n use crate::cube::corner::{Corner, CornerOrient, CornerPerm, CornerPos::*};\n\n use crate::cube::edge::{Edge, EdgeOrient, EdgePerm, EdgePos::*};\n\n\n\n /// 90 degree rotation about the Y axis (U face)\n\n pub const Y_ROT: Cube3Perm = Cube3Perm::new(\n", "file_path": "src/cube/cube3/symmetry.rs", "rank": 11, "score": 164043.78283191187 }, { "content": "/// Find a symmetry transformation which solves the DBL corner\n\npub fn fix_dbl_corner(perm: &CornerPerm) -> (Symmetry, CornerPerm) {\n\n for sym in Symmetry::all() {\n\n let transformed = sym.permutation().sequence(perm);\n\n\n\n const SOLVED_DBL: Corner = Corner::new(CornerPos::DBL, CornerOrient::Oriented);\n\n if transformed[CornerPos::DBL] == SOLVED_DBL {\n\n return (sym, transformed);\n\n }\n\n }\n\n\n\n // TODO Return Option instead?\n\n // There are no valid cases in which the DBL corner cannot be solved by rotations\n\n panic!(\"Unable to find symmetry transformation\");\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n use super::super::primitives;\n", "file_path": "src/cube/cube2/symmetry.rs", "rank": 12, "score": 160804.7339032502 }, { "content": "/// Extracts values from a coordinate built with `coord()`.\n\n///\n\n/// Values are returned in order, with the last one set so that the parity of all of them is equal\n\n/// to `parity`.\n\npub fn extract_from_coord<C, T, Idx, I, M>(\n\n coord: C,\n\n 
parity: C,\n\n all_items: I,\n\n mapper: M,\n\n) -> impl Iterator<Item = (Idx, T)>\n\nwhere\n\n C: PrimInt,\n\n T: EnumIndex,\n\n Idx: EnumIndex,\n\n I: Iterator<Item = Idx> + ExactSizeIterator,\n\n M: Fn(C) -> T,\n\n{\n\n let base = C::from(T::COUNT).unwrap();\n\n let mut coord = coord;\n\n let mut sum = C::zero();\n\n let mut all_items = all_items;\n\n\n\n std::iter::from_fn(move || {\n\n all_items.next().map(|item| {\n", "file_path": "src/coord/parity.rs", "rank": 13, "score": 154627.5682849892 }, { "content": "pub fn calculate_coord<C, T, I1, I2>(items_in_order: I1, items: I2) -> C\n\nwhere\n\n C: PrimInt,\n\n T: Copy + Eq,\n\n I1: Iterator<Item = T> + DoubleEndedIterator + ExactSizeIterator,\n\n I2: Iterator<Item = T>,\n\n{\n\n let mut c = C::zero();\n\n let mut items: Vec<T> = items.collect();\n\n\n\n // Go in reverse order because it's easier to compute with factorial\n\n // base as (l_1 + (l_2 + (... ) * 2) * 1\n\n for (index, item) in items_in_order.enumerate().skip(1).rev() {\n\n // Rotate left until the correct corner\n\n let mut rot_count = C::zero();\n\n while items[index] != item {\n\n rotate_left(&mut items[..=index]);\n\n rot_count = rot_count + C::one();\n\n\n\n assert!(\n", "file_path": "src/coord/permutation.rs", "rank": 14, "score": 145931.85991947158 }, { "content": "#[derive(Clone, Debug)]\n\nstruct RepresentantEntry<C> {\n\n coord: C,\n\n // TODO Use a wider / generic type that is guarunteed to fit all symmetries.\n\n symmetry_bitmask: u64,\n\n}\n\n\n\n/// Table of equivalent class representants under a symmetry.\n\n///\n\n/// This is used for creating and using symmetry-reduced coordinates.\n\n#[derive(Clone, Debug)]\n\npub struct RepresentantTable<P: PuzzlePerm, S: SymCoord<P>> {\n\n table: Box<[RepresentantEntry<S::BaseCoord>]>,\n\n _sym: PhantomData<S>,\n\n}\n\n\n", "file_path": "src/symmetry.rs", "rank": 15, "score": 144650.75802134495 }, { "content": "pub trait PruneTable {\n\n /// Puzzle this pruning table is for.\n\n type Puzzle: PuzzlePerm;\n\n\n\n /// Coordinate or composite coordinate this pruning table is indexed by.\n\n type Coord: Coord<Self::Puzzle>;\n\n\n\n /// Move set used to count minimum bounds.\n\n type Move: PuzzleMove<Puzzle = Self::Puzzle>;\n\n\n\n // TODO Make u8? Will it every be larger than that?\n\n /// Returns a lower bound for the number of moves it will take to get to a particular state.\n\n fn get_min_moves(&self, coord: Self::Coord) -> u32;\n\n}\n\n\n\n/// Pruning table which always returns 0\n\n#[derive(Clone, Debug)]\n\npub struct ZeroPruneTable<C, M>(PhantomData<(C, M)>);\n\n\n\nimpl<C, M> Default for ZeroPruneTable<C, M> {\n", "file_path": "src/prune_table.rs", "rank": 16, "score": 143202.45676689828 }, { "content": "fn do_phase_solve<C, M, P>(\n\n move_table: &M,\n\n prune_table: &P,\n\n perm: &Cube3Perm,\n\n target: C,\n\n) -> Option<Notation>\n\nwhere\n\n C: Coord<Cube3Perm>,\n\n M: MoveTable<Puzzle = Cube3Perm, Coord = C>,\n\n P: PruneTable<Puzzle = Cube3Perm, Coord = C, Move = M::Move>,\n\n M::Move: Into<NotationMove<Cube3Notation>>,\n\n{\n\n SolutionIter::new(move_table, prune_table, target, perm)\n\n .next()\n\n .map(|sol| {\n\n Notation::from(\n\n sol.iter()\n\n .map(|m| (*m).into())\n\n .collect::<Vec<NotationMove<Cube3Notation>>>(),\n\n )\n\n })\n\n}\n\n\n", "file_path": "cube3x3x3/src/main.rs", "rank": 17, "score": 141317.06090339753 }, { "content": "pub fn b() -> &'static Cube3Perm {\n\n lazy_static! 
{\n\n static ref B: Cube3Perm = Cube3Perm {\n\n corners: corner_prim::b().clone(),\n\n edges: edge_prim::B.clone(),\n\n };\n\n }\n\n\n\n &B\n\n}\n\n\n", "file_path": "src/cube/cube3/primitives.rs", "rank": 18, "score": 135302.53736094688 }, { "content": "pub fn b() -> &'static CornerPerm {\n\n &B\n\n}\n\n\n", "file_path": "src/cube/cube2/primitives.rs", "rank": 19, "score": 135302.53736094688 }, { "content": "type FinalMoveTable = CompositeMoveTable<\n\n 'static,\n\n Corner7Coord,\n\n BasicMoveTable<Cube2Perm, CornerOrient7Coord, UrfTurn>,\n\n BasicMoveTable<Cube2Perm, CornerPos7Coord, UrfTurn>,\n\n>;\n\n\n\nlazy_static! {\n\n static ref ORIENT_TABLE: BasicMoveTable<Cube2Perm, CornerOrient7Coord, UrfTurn> = BasicMoveTable::create();\n\n static ref POS_TABLE: BasicMoveTable<Cube2Perm, CornerPos7Coord, UrfTurn> = BasicMoveTable::create();\n\n static ref MOVE_TABLE: FinalMoveTable = CompositeMoveTable::new(&*ORIENT_TABLE, &*POS_TABLE);\n\n static ref PRUNE_TABLE: FullPruneTable<Corner7Coord, UrfTurn> =\n\n FullPruneTable::create(&*MOVE_TABLE, Corner7Coord::default());\n\n}\n\n\n", "file_path": "cube2x2x2/src/main.rs", "rank": 20, "score": 124246.75934023043 }, { "content": "pub fn rotate_left<T: Copy>(slice: &mut [T]) {\n\n let len = slice.len();\n\n let first = slice[0];\n\n for i in 1..len {\n\n slice[i - 1] = slice[i];\n\n }\n\n\n\n slice[len - 1] = first;\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 21, "score": 120863.15506646282 }, { "content": "pub fn rotate_right<T: Copy>(slice: &mut [T]) {\n\n let len = slice.len();\n\n let last = slice[len - 1];\n\n for i in (1..len).rev() {\n\n slice[i] = slice[i - 1];\n\n }\n\n\n\n slice[0] = last;\n\n}\n\n\n\n// TODO Make this a derive macro instead?\n\n#[macro_export]\n\nmacro_rules! make_newtype_enum_index {\n\n (\n\n $( #[ $attrs:meta ] )*\n\n $v:vis struct $newtype:ident ( $inner:ty ) : $count:expr ;\n\n ) => {\n\n #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, FromPrimitive)]\n\n $( #[$attrs] )*\n\n $v struct $newtype($inner);\n", "file_path": "src/util.rs", "rank": 22, "score": 120863.15506646282 }, { "content": "pub fn f() -> &'static Cube3Perm {\n\n lazy_static! {\n\n static ref F: Cube3Perm = Cube3Perm {\n\n corners: corner_prim::f().clone(),\n\n edges: edge_prim::F.clone(),\n\n };\n\n }\n\n\n\n &F\n\n}\n\n\n", "file_path": "src/cube/cube3/primitives.rs", "rank": 23, "score": 112621.54417603304 }, { "content": "pub fn r() -> &'static Cube3Perm {\n\n lazy_static! {\n\n static ref R: Cube3Perm = Cube3Perm {\n\n corners: corner_prim::r().clone(),\n\n edges: edge_prim::R.clone(),\n\n };\n\n }\n\n\n\n &R\n\n}\n\n\n", "file_path": "src/cube/cube3/primitives.rs", "rank": 24, "score": 112621.54417603304 }, { "content": "pub fn d() -> &'static CornerPerm {\n\n &D\n\n}\n\n\n", "file_path": "src/cube/cube2/primitives.rs", "rank": 25, "score": 112621.54417603304 }, { "content": "pub fn u() -> &'static CornerPerm {\n\n &U\n\n}\n\n\n", "file_path": "src/cube/cube2/primitives.rs", "rank": 26, "score": 112621.54417603304 }, { "content": "pub fn l() -> &'static CornerPerm {\n\n &L\n\n}\n\n\n", "file_path": "src/cube/cube2/primitives.rs", "rank": 27, "score": 112621.54417603304 }, { "content": "pub fn l() -> &'static Cube3Perm {\n\n lazy_static! 
{\n\n static ref L: Cube3Perm = Cube3Perm {\n\n corners: corner_prim::l().clone(),\n\n edges: edge_prim::L.clone(),\n\n };\n\n }\n\n\n\n &L\n\n}\n\n\n", "file_path": "src/cube/cube3/primitives.rs", "rank": 28, "score": 112621.54417603304 }, { "content": "pub fn f() -> &'static CornerPerm {\n\n &F\n\n}\n\n\n", "file_path": "src/cube/cube2/primitives.rs", "rank": 29, "score": 112621.54417603304 }, { "content": "pub fn u() -> &'static Cube3Perm {\n\n lazy_static! {\n\n static ref U: Cube3Perm = Cube3Perm {\n\n corners: corner_prim::u().clone(),\n\n edges: edge_prim::U.clone(),\n\n };\n\n }\n\n\n\n &U\n\n}\n\n\n", "file_path": "src/cube/cube3/primitives.rs", "rank": 30, "score": 112621.54417603304 }, { "content": "pub fn d() -> &'static Cube3Perm {\n\n lazy_static! {\n\n static ref D: Cube3Perm = Cube3Perm {\n\n corners: corner_prim::d().clone(),\n\n edges: edge_prim::D.clone(),\n\n };\n\n }\n\n\n\n &D\n\n}\n\n\n", "file_path": "src/cube/cube3/primitives.rs", "rank": 31, "score": 112621.54417603304 }, { "content": "pub fn r() -> &'static CornerPerm {\n\n &R\n\n}\n\n\n", "file_path": "src/cube/cube2/primitives.rs", "rank": 32, "score": 112621.54417603304 }, { "content": "pub fn b2() -> &'static CornerPerm {\n\n lazy_static! {\n\n static ref B2: CornerPerm = b().ntimes(2);\n\n }\n\n\n\n &B2\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn default_is_own_inverse() {\n\n let def = CornerPerm::identity().invert();\n\n assert_eq!(&def, CornerPerm::identity());\n\n }\n\n\n\n /// Checks that the order of a permutation is exactly as specified\n\n fn has_order(perm: &CornerPerm, order: u32) {\n", "file_path": "src/cube/cube2/primitives.rs", "rank": 33, "score": 109713.83970527604 }, { "content": "pub fn b_prime() -> &'static CornerPerm {\n\n lazy_static! {\n\n static ref B_PRIME: CornerPerm = b().invert();\n\n }\n\n\n\n &B_PRIME\n\n}\n\n\n", "file_path": "src/cube/cube2/primitives.rs", "rank": 34, "score": 109713.83970527604 }, { "content": "pub fn l_prime() -> &'static CornerPerm {\n\n lazy_static! {\n\n static ref L_PRIME: CornerPerm = l().invert();\n\n }\n\n\n\n &L_PRIME\n\n}\n\n\n", "file_path": "src/cube/cube2/primitives.rs", "rank": 35, "score": 109713.83970527604 }, { "content": "pub fn d_prime() -> &'static CornerPerm {\n\n lazy_static! {\n\n static ref D_PRIME: CornerPerm = d().invert();\n\n }\n\n\n\n &D_PRIME\n\n}\n\n\n", "file_path": "src/cube/cube2/primitives.rs", "rank": 36, "score": 109713.83970527604 }, { "content": "pub fn r2() -> &'static Cube3Perm {\n\n lazy_static! {\n\n static ref R2: Cube3Perm = r().ntimes(2);\n\n }\n\n\n\n &R2\n\n}\n\n\n", "file_path": "src/cube/cube3/primitives.rs", "rank": 37, "score": 109713.83970527604 }, { "content": "pub fn r_prime() -> &'static CornerPerm {\n\n lazy_static! {\n\n static ref R_PRIME: CornerPerm = r().invert();\n\n }\n\n\n\n &R_PRIME\n\n}\n\n\n", "file_path": "src/cube/cube2/primitives.rs", "rank": 38, "score": 109713.83970527604 }, { "content": "pub fn b2() -> &'static Cube3Perm {\n\n lazy_static! {\n\n static ref B2: Cube3Perm = b().ntimes(2);\n\n }\n\n\n\n &B2\n\n}\n\n\n\n// TODO These tests are identical to the 2x2x2 ones\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn default_is_own_inverse() {\n\n let def = Cube3Perm::identity().invert();\n\n assert_eq!(&def, Cube3Perm::identity());\n\n }\n\n\n", "file_path": "src/cube/cube3/primitives.rs", "rank": 39, "score": 109713.83970527604 }, { "content": "pub fn l2() -> &'static CornerPerm {\n\n lazy_static! 
{\n\n static ref L2: CornerPerm = l().ntimes(2);\n\n }\n\n\n\n &L2\n\n}\n\n\n", "file_path": "src/cube/cube2/primitives.rs", "rank": 40, "score": 109713.83970527604 }, { "content": "pub fn r_prime() -> &'static Cube3Perm {\n\n lazy_static! {\n\n static ref R_PRIME: Cube3Perm = r().invert();\n\n }\n\n\n\n &R_PRIME\n\n}\n\n\n", "file_path": "src/cube/cube3/primitives.rs", "rank": 41, "score": 109713.83970527604 }, { "content": "pub fn u_prime() -> &'static CornerPerm {\n\n lazy_static! {\n\n static ref U_PRIME: CornerPerm = u().invert();\n\n }\n\n\n\n &U_PRIME\n\n}\n\n\n", "file_path": "src/cube/cube2/primitives.rs", "rank": 42, "score": 109713.83970527604 }, { "content": "pub fn b_prime() -> &'static Cube3Perm {\n\n lazy_static! {\n\n static ref B_PRIME: Cube3Perm = b().invert();\n\n }\n\n\n\n &B_PRIME\n\n}\n\n\n", "file_path": "src/cube/cube3/primitives.rs", "rank": 43, "score": 109713.83970527604 }, { "content": "pub fn l_prime() -> &'static Cube3Perm {\n\n lazy_static! {\n\n static ref L_PRIME: Cube3Perm = l().invert();\n\n }\n\n\n\n &L_PRIME\n\n}\n\n\n", "file_path": "src/cube/cube3/primitives.rs", "rank": 44, "score": 109713.83970527604 }, { "content": "pub fn r2() -> &'static CornerPerm {\n\n lazy_static! {\n\n static ref R2: CornerPerm = r().ntimes(2);\n\n }\n\n\n\n &R2\n\n}\n\n\n", "file_path": "src/cube/cube2/primitives.rs", "rank": 45, "score": 109713.83970527604 }, { "content": "pub fn f_prime() -> &'static Cube3Perm {\n\n lazy_static! {\n\n static ref F_PRIME: Cube3Perm = f().invert();\n\n }\n\n\n\n &F_PRIME\n\n}\n\n\n", "file_path": "src/cube/cube3/primitives.rs", "rank": 46, "score": 109713.83970527604 }, { "content": "pub fn l2() -> &'static Cube3Perm {\n\n lazy_static! {\n\n static ref L2: Cube3Perm = l().ntimes(2);\n\n }\n\n\n\n &L2\n\n}\n\n\n", "file_path": "src/cube/cube3/primitives.rs", "rank": 47, "score": 109713.83970527604 }, { "content": "pub fn d2() -> &'static Cube3Perm {\n\n lazy_static! {\n\n static ref D2: Cube3Perm = d().ntimes(2);\n\n }\n\n\n\n &D2\n\n}\n\n\n", "file_path": "src/cube/cube3/primitives.rs", "rank": 48, "score": 109713.83970527604 }, { "content": "pub fn d_prime() -> &'static Cube3Perm {\n\n lazy_static! {\n\n static ref D_PRIME: Cube3Perm = d().invert();\n\n }\n\n\n\n &D_PRIME\n\n}\n\n\n", "file_path": "src/cube/cube3/primitives.rs", "rank": 49, "score": 109713.83970527604 }, { "content": "pub fn f_prime() -> &'static CornerPerm {\n\n lazy_static! {\n\n static ref F_PRIME: CornerPerm = f().invert();\n\n }\n\n\n\n &F_PRIME\n\n}\n\n\n", "file_path": "src/cube/cube2/primitives.rs", "rank": 50, "score": 109713.83970527604 }, { "content": "pub fn d2() -> &'static CornerPerm {\n\n lazy_static! {\n\n static ref D2: CornerPerm = d().ntimes(2);\n\n }\n\n\n\n &D2\n\n}\n\n\n", "file_path": "src/cube/cube2/primitives.rs", "rank": 51, "score": 109713.83970527604 }, { "content": "pub fn u2() -> &'static CornerPerm {\n\n lazy_static! {\n\n static ref U2: CornerPerm = u().ntimes(2);\n\n }\n\n\n\n &U2\n\n}\n\n\n", "file_path": "src/cube/cube2/primitives.rs", "rank": 52, "score": 109713.83970527604 }, { "content": "pub fn f2() -> &'static CornerPerm {\n\n lazy_static! {\n\n static ref F2: CornerPerm = f().ntimes(2);\n\n }\n\n\n\n &F2\n\n}\n\n\n", "file_path": "src/cube/cube2/primitives.rs", "rank": 53, "score": 109713.83970527604 }, { "content": "pub fn f2() -> &'static Cube3Perm {\n\n lazy_static! 
{\n\n static ref F2: Cube3Perm = f().ntimes(2);\n\n }\n\n\n\n &F2\n\n}\n\n\n", "file_path": "src/cube/cube3/primitives.rs", "rank": 54, "score": 109713.83970527604 }, { "content": "pub fn u_prime() -> &'static Cube3Perm {\n\n lazy_static! {\n\n static ref U_PRIME: Cube3Perm = u().invert();\n\n }\n\n\n\n &U_PRIME\n\n}\n\n\n", "file_path": "src/cube/cube3/primitives.rs", "rank": 55, "score": 109713.83970527604 }, { "content": "pub fn u2() -> &'static Cube3Perm {\n\n lazy_static! {\n\n static ref U2: Cube3Perm = u().ntimes(2);\n\n }\n\n\n\n &U2\n\n}\n\n\n", "file_path": "src/cube/cube3/primitives.rs", "rank": 56, "score": 109713.83970527604 }, { "content": "pub fn print_cube(cube: &CornerPerm) {\n\n use CornerOrient::*;\n\n use CornerPos::*;\n\n\n\n println!();\n\n\n\n // Up\n\n print!(\" \");\n\n print_facelet(cube, ULB, Oriented);\n\n print_facelet(cube, UBR, Oriented);\n\n println_reset();\n\n print!(\" \");\n\n print_facelet(cube, UFL, Oriented);\n\n print_facelet(cube, URF, Oriented);\n\n\n\n println_reset();\n\n println!();\n\n\n\n // Left/Up\n\n print_facelet(cube, ULB, Clockwise);\n", "file_path": "src/cube/cube2/render.rs", "rank": 57, "score": 107010.54794070317 }, { "content": "pub fn in_e_slice(pos: EdgePos) -> bool {\n\n // E edges are last, DL is the last D edge\n\n pos > EdgePos::DL\n\n}\n\n\n", "file_path": "src/cube/cube3/coord/util.rs", "rank": 58, "score": 102125.6566967941 }, { "content": "#[derive(Clone, Debug)]\n\nstruct StackState<C, M: IntoEnumIterator> {\n\n coord: C,\n\n mov: M,\n\n move_iter: M::Iterator,\n\n}\n\n\n\nimpl<C, M: IntoEnumIterator> StackState<C, M> {\n\n fn new(coord: C, mov: M) -> Self {\n\n Self {\n\n coord,\n\n mov,\n\n move_iter: M::iter(),\n\n }\n\n }\n\n}\n\n\n\n/// Iterator producing puzzle solutions using IDA*.\n\n///\n\n/// Solutions are returned in order by length, shortest first (however solutions of equal length\n\n/// have no specified order).\n", "file_path": "src/solver.rs", "rank": 59, "score": 99959.56258272854 }, { "content": "pub fn solve_cube<MT, PT>(\n\n move_table: &MT,\n\n prune_table: &PT,\n\n perm: &MT::Puzzle,\n\n target: MT::Coord,\n\n) -> Option<Vec<MT::Move>>\n\nwhere\n\n MT: MoveTable,\n\n PT: PruneTable<Puzzle = MT::Puzzle, Coord = MT::Coord, Move = MT::Move>,\n\n{\n\n fn depth_search<MT, PT>(\n\n move_table: &MT,\n\n prune_table: &PT,\n\n depth: u32,\n\n path: Vec<MT::Move>,\n\n coord: MT::Coord,\n\n target: MT::Coord,\n\n ) -> (bool, Vec<MT::Move>)\n\n where\n\n MT: MoveTable,\n", "file_path": "src/solver.rs", "rank": 60, "score": 87867.41869491944 }, { "content": "fn consume_iter<I: Iterator>(mut iter: I) -> I {\n\n iter.find(|_| false);\n\n iter\n\n}\n\n\n\nimpl<'a, MT, PT> SolutionIter<'a, MT, PT>\n\nwhere\n\n MT: MoveTable,\n\n PT: PruneTable<Puzzle = MT::Puzzle, Coord = MT::Coord, Move = MT::Move>,\n\n{\n\n pub fn new(\n\n move_table: &'a MT,\n\n prune_table: &'a PT,\n\n target: MT::Coord,\n\n perm: &MT::Puzzle,\n\n ) -> Self {\n\n Self {\n\n move_table,\n\n prune_table,\n\n target,\n", "file_path": "src/solver.rs", "rank": 61, "score": 87421.3787637577 }, { "content": "/// Trait for primitive moves in a puzzle notation.\n\npub trait NotationPrim: Copy + Eq + FromStr + ToString {\n\n type Puzzle: PuzzlePerm;\n\n\n\n fn permutation(&self) -> &Self::Puzzle;\n\n}\n\n\n\n#[derive(Clone, Copy, Debug, PartialEq, Eq)]\n\npub struct NotationMove<M: NotationPrim> {\n\n prim: M,\n\n count: i8,\n\n}\n\n\n\nimpl<M: NotationPrim> NotationMove<M> {\n\n pub fn ntimes<T: PrimInt>(prim: M, n: T) -> Self {\n\n NotationMove {\n\n 
prim,\n\n count: n.to_i8().unwrap(),\n\n }\n\n }\n\n\n", "file_path": "src/notation/mod.rs", "rank": 62, "score": 85305.15192933308 }, { "content": "pub fn e_slice_edges() -> impl Iterator<Item = EdgePos> + DoubleEndedIterator + ExactSizeIterator {\n\n // EdgePos is organized to have the UD edges first\n\n EdgePos::iter().skip(8)\n\n}\n", "file_path": "src/cube/cube3/coord/util.rs", "rank": 63, "score": 81944.81566483562 }, { "content": "pub fn ud_edges() -> impl Iterator<Item = EdgePos> + DoubleEndedIterator + ExactSizeIterator {\n\n // EdgePos is organized to have the UD edges first\n\n EdgePos::iter().take(8)\n\n}\n\n\n", "file_path": "src/cube/cube3/coord/util.rs", "rank": 64, "score": 81944.81566483562 }, { "content": "/// Trait for types which act as bounds-restricted indices into tables.\n\n///\n\n/// Provides an iterator over all values, conversion into index (usize), and an element count.\n\npub trait EnumIndex: 'static + Copy + Eq + EnumCount + IntoEnumIterator {\n\n /// Returns the index of this element.\n\n ///\n\n /// The index MUST be the same as the order of the item in its `IntoEnumIterator` implementation\n\n /// (and hence must start from 0 and end at `IntoEnumIterator::VALUE_COUNT`).\n\n fn index(&self) -> usize;\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 65, "score": 80024.07539277992 }, { "content": "fn parse_move_full<M: NotationPrim>(s: ParseState) -> Result<NotationMove<M>> {\n\n let (m, s) = parse_move(s)?;\n\n if s.is_empty() {\n\n Ok(m)\n\n } else {\n\n Err(())\n\n }\n\n}\n\n\n", "file_path": "src/notation/parser.rs", "rank": 79, "score": 64105.8140745769 }, { "content": "fn parse_move<M: NotationPrim>(s: ParseState) -> Result<(NotationMove<M>, ParseState)> {\n\n let (prim, s) = parse_prim(s)?;\n\n let (count, s) = parse_count(s)?;\n\n\n\n Ok((NotationMove::ntimes(prim, count), s))\n\n}\n\n\n", "file_path": "src/notation/parser.rs", "rank": 80, "score": 62229.22958591647 }, { "content": "fn print_term_color(c: impl color::Color) {\n\n print!(\"{} \", color::Bg(c));\n\n}\n\n\n", "file_path": "src/cube/cube2/render.rs", "rank": 81, "score": 59597.32701211118 }, { "content": "// TODO Implement actual errors\n\ntype ParseErr = ();\n\n\n", "file_path": "src/notation/parser.rs", "rank": 82, "score": 59256.291647210746 }, { "content": "fn print_facelet(cube: &CornerPerm, pos: CornerPos, orient: CornerOrient) {\n\n print_face_color(cube.get_face(Corner::new(pos, orient)));\n\n}\n\n\n", "file_path": "src/cube/cube2/render.rs", "rank": 83, "score": 54709.727208330936 }, { "content": "type ParseState<'a> = &'a str;\n\n\n", "file_path": "src/notation/parser.rs", "rank": 84, "score": 52222.373010385265 }, { "content": "fn important_corners() -> impl Iterator<Item = CornerPos> + DoubleEndedIterator + ExactSizeIterator\n\n{\n\n CornerPos::iter().take(7)\n\n}\n\n\n\nimpl Coord<CornerPerm> for CornerOrient7Coord {\n\n fn from_perm(perm: &CornerPerm) -> Self {\n\n let c = parity_coord::calculate_coord(important_corners().map(|p| perm[p].orient));\n\n CornerOrient7Coord(c)\n\n }\n\n\n\n fn into_perm(self) -> CornerPerm {\n\n let mut res = CornerPerm::default();\n\n let orients = parity_coord::extract_from_coord(self.0, 0, important_corners(), |o| {\n\n CornerOrient::from_i8_unsafe(o as i8)\n\n });\n\n for (p, o) in orients {\n\n res[p].orient = o;\n\n }\n\n\n", "file_path": "src/cube/cube2/coord.rs", "rank": 85, "score": 52106.31411131941 }, { "content": "type Notation = NotationStr<Cube2Notation>;\n\n\n", "file_path": "cube2x2x2/src/main.rs", "rank": 86, "score": 
51616.44686145838 }, { "content": "type Notation = NotationStr<Cube3Notation>;\n\n\n", "file_path": "cube3x3x3/src/main.rs", "rank": 87, "score": 51616.44686145838 }, { "content": "fn main() {\n\n pretty_env_logger::init();\n\n\n\n let mut stdout = io::stdout();\n\n let stdin = io::stdin();\n\n let mut input_buf = String::new();\n\n\n\n println!(\"Initializing tables...\");\n\n\n\n // Phase 1\n\n\n\n println!(\"Corner orient...\");\n\n let co_table: BasicMoveTable<Cube3Perm, CornerOrientCoord, CubeTurn> = BasicMoveTable::create();\n\n\n\n println!(\"Edge orient...\");\n\n let eo_table: BasicMoveTable<Cube3Perm, EdgeOrientCoord, CubeTurn> = BasicMoveTable::create();\n\n\n\n println!(\"E edge location...\");\n\n let phase1_eslice_table: BasicMoveTable<Cube3Perm, EEdgePosCoord, CubeTurn> =\n\n BasicMoveTable::create();\n", "file_path": "cube3x3x3/src/main.rs", "rank": 88, "score": 50185.03913823218 }, { "content": "fn main() {\n\n let mut stdout = io::stdout();\n\n let stdin = io::stdin();\n\n let mut input_buf = String::new();\n\n\n\n println!(\"Initializing tables...\");\n\n lazy_static::initialize(&ORIENT_TABLE);\n\n println!(\"Orient\");\n\n lazy_static::initialize(&POS_TABLE);\n\n println!(\"Pos\");\n\n lazy_static::initialize(&MOVE_TABLE);\n\n println!(\"Move\");\n\n lazy_static::initialize(&PRUNE_TABLE);\n\n println!(\"Prune\");\n\n println!(\"Done\");\n\n\n\n loop {\n\n input_buf.clear();\n\n\n\n print!(\"Scramble: \");\n", "file_path": "cube2x2x2/src/main.rs", "rank": 89, "score": 50185.03913823218 }, { "content": "fn print_space() {\n\n print!(\"{} \", color::Bg(color::Reset));\n\n}\n\n\n", "file_path": "src/cube/cube2/render.rs", "rank": 90, "score": 47642.964496903296 }, { "content": "fn println_reset() {\n\n println!(\"{}\", color::Bg(color::Reset));\n\n}\n\n\n", "file_path": "src/cube/cube2/render.rs", "rank": 91, "score": 47642.964496903296 }, { "content": "fn do_solve(notation: Notation) {\n\n let perm = notation.permutation();\n\n print_cube(&perm);\n\n\n\n let (_sym, perm) = fix_dbl_corner(&perm);\n\n match solve_cube(&*MOVE_TABLE, &*PRUNE_TABLE, &perm, Corner7Coord::default()) {\n\n Some(sol) => {\n\n let sol_notation = Notation::from(\n\n sol.iter()\n\n .map(|m| NotationMove::<Cube2Notation>::from(*m))\n\n .collect::<Vec<_>>(),\n\n );\n\n println!(\"Solution: {}\", sol_notation);\n\n }\n\n None => {\n\n println!(\"No solution found\");\n\n }\n\n }\n\n}\n\n\n", "file_path": "cube2x2x2/src/main.rs", "rank": 92, "score": 44854.422016920886 }, { "content": "type Result<T> = std::result::Result<T, ParseErr>;\n\n\n", "file_path": "src/notation/parser.rs", "rank": 93, "score": 43363.61526768787 }, { "content": "fn print_face_color(face: Face) {\n\n use FaceColor::*;\n\n match face.into() {\n\n White => print_term_color(color::White),\n\n Red => print_term_color(color::Red),\n\n Green => print_term_color(color::Green),\n\n Yellow => print_term_color(color::Yellow),\n\n Orange => print_term_color(color::Magenta),\n\n Blue => print_term_color(color::Blue),\n\n }\n\n}\n\n\n", "file_path": "src/cube/cube2/render.rs", "rank": 94, "score": 41735.88237545971 }, { "content": "fn do_solve<M1, P1, M2, P2>(\n\n phase1_move_table: &M1,\n\n phase1_prune_table: &P1,\n\n phase2_move_table: &M2,\n\n phase2_prune_table: &P2,\n\n notation: Notation,\n\n) where\n\n M1: MoveTable<Puzzle = Cube3Perm, Coord = Phase1Coord, Move = CubeTurn>,\n\n P1: PruneTable<Puzzle = Cube3Perm, Coord = Phase1Coord, Move = CubeTurn>,\n\n M2: MoveTable<Puzzle = Cube3Perm, Coord = Phase2Coord, Move = G1CubeTurn>,\n\n P2: 
PruneTable<Puzzle = Cube3Perm, Coord = Phase2Coord, Move = G1CubeTurn>,\n\n{\n\n let perm = notation.permutation();\n\n\n\n let phase1_sol_generator = SolutionIter::new(\n\n phase1_move_table,\n\n phase1_prune_table,\n\n Phase1Coord::default(),\n\n &perm,\n\n );\n", "file_path": "cube3x3x3/src/main.rs", "rank": 95, "score": 40618.46288878783 }, { "content": "fn parse_count(s: ParseState) -> Result<(i8, ParseState)> {\n\n let (opt_n, s) = parse_num(s)?;\n\n // TODO Check overflow\n\n let n = opt_n.unwrap_or(1) as i8;\n\n\n\n let (inverse, s) = parse_prime(s)?;\n\n\n\n let n = if inverse { -n } else { n };\n\n\n\n Ok((n, s))\n\n}\n\n\n", "file_path": "src/notation/parser.rs", "rank": 96, "score": 37078.96262071752 }, { "content": "fn parse_prime(s: ParseState) -> Result<(bool, ParseState)> {\n\n if s.chars().next() == Some('\\'') {\n\n Ok((true, &s[1..]))\n\n } else {\n\n Ok((false, s))\n\n }\n\n}\n\n\n", "file_path": "src/notation/parser.rs", "rank": 97, "score": 37078.96262071752 }, { "content": "fn parse_num(s: ParseState) -> Result<(Option<u8>, ParseState)> {\n\n let (n, len) = s\n\n .chars()\n\n .take_while(|c| c.is_digit(10))\n\n .fold((0, 0), |(n, count), digit| {\n\n // Return the number and how many digits were consumed\n\n // Digit will always exist because of the take_while\n\n let digit_val = digit.to_digit(10).unwrap();\n\n (n * 10 + digit_val, count + 1)\n\n });\n\n\n\n if len == 0 {\n\n Ok((None, s))\n\n } else if n > 0 {\n\n // TODO Check overflow\n\n Ok((Some(n as u8), &s[len..]))\n\n } else {\n\n Err(())\n\n }\n\n}\n\n\n", "file_path": "src/notation/parser.rs", "rank": 98, "score": 35544.725491102756 }, { "content": "#[macro_export]\n\nmacro_rules! impl_puzzle_perm_with_tables {\n\n ($impl_ty:ty, $identity_closure:expr, $prem_closure:expr) => {\n\n impl $crate::puzzle::PuzzlePerm for $impl_ty {\n\n fn identity() -> &'static Self {\n\n $identity_closure()\n\n }\n\n\n\n fn invert(&self) -> Self {\n\n use $crate::util::{EnumCount, EnumIndex, IntoEnumIterator};\n\n lazy_static! {\n\n static ref TABLE: [$impl_ty; <$impl_ty>::COUNT] = {\n\n let mut table: [$impl_ty; <$impl_ty>::COUNT] = Default::default();\n\n\n\n for sym in <$impl_ty>::iter() {\n\n let perm = ($prem_closure)(&sym);\n\n for other in <$impl_ty>::iter() {\n\n if perm.sequence(($prem_closure)(&other)).is_identity() {\n\n table[sym.index()] = other;\n\n }\n", "file_path": "src/puzzle.rs", "rank": 99, "score": 35433.60116084952 } ]
Rust
compiler/src/llvm.rs
kowaalczyk/instant
95ae0bcb58ec829828e68bc8b09016514c6e08f5
use instant_parser::ast; use crate::common::CompilationError; use std::collections::HashSet; use instant_parser::ast::Stmt; pub trait FormatLLVM { fn format_llvm(&self) -> String; } enum CompilationResult { Register { id: u32 }, Constant { val: i32 }, None, } impl FormatLLVM for CompilationResult { fn format_llvm(&self) -> String { match self { CompilationResult::Constant { val } => val.to_string(), CompilationResult::Register { id } => format!("%r{}", id), CompilationResult::None => String::from(""), } } } pub trait CompileLLVM { fn compile_llvm( &self, available_reg: &mut u32, variables: &mut HashSet<String>, ) -> Result<CompiledCode, CompilationError>; } pub struct CompiledCode { instructions: Vec<String>, result: CompilationResult, } impl CompileLLVM for ast::Prog { fn compile_llvm( &self, available_reg: &mut u32, variables: &mut HashSet<String> ) -> Result<CompiledCode, CompilationError> { let mut instructions: Vec<String> = vec![]; for stmt in self.stmts.iter() { let mut compiled_stmt = stmt.compile_llvm( available_reg, variables, )?; instructions.append(&mut compiled_stmt.instructions); } let compiled_program = CompiledCode { instructions, result: CompilationResult::None }; Ok(compiled_program) } } impl CompileLLVM for ast::Stmt { fn compile_llvm( &self, available_reg: &mut u32, variables: &mut HashSet<String> ) -> Result<CompiledCode, CompilationError> { match self { Stmt::Expr { expr } => { let mut compiled_expr = expr.compile_llvm(available_reg, variables)?; let print_instr = format!( "call void @printInt(i32 {})", compiled_expr.result.format_llvm(), ); compiled_expr.instructions.push(print_instr); compiled_expr.result = CompilationResult::None; Ok(compiled_expr) }, Stmt::Decl { var, expr } => { let mut compiled_expr = expr.compile_llvm(available_reg, variables)?; if !variables.contains(var) { let alloc_instr = format!( "%{}ptr = alloca i32", var ); compiled_expr.instructions.push(alloc_instr); variables.insert(var.clone()); } let store_instr = format!( "store i32 {}, i32* %{}ptr", compiled_expr.result.format_llvm(), var ); compiled_expr.instructions.push(store_instr); compiled_expr.result = CompilationResult::None; Ok(compiled_expr) }, } } } impl CompileLLVM for ast::Expr { fn compile_llvm( &self, available_reg: &mut u32, variables: &mut HashSet<String> ) -> Result<CompiledCode, CompilationError> { match self { ast::Expr::Binary { left, op, right } => { let mut compiled_instructions: Vec<String> = vec![]; let mut lhs = left.compile_llvm(available_reg, variables)?; compiled_instructions.append(&mut lhs.instructions); let mut rhs = right.compile_llvm(available_reg, variables)?; compiled_instructions.append(&mut rhs.instructions); let current_reg = CompilationResult::Register { id: available_reg.clone() }; let current_instr = format!( "{} = {} {}, {}", current_reg.format_llvm(), op.format_llvm(), lhs.result.format_llvm(), rhs.result.format_llvm(), ); compiled_instructions.push(current_instr); *available_reg += 1; let compiled_code = CompiledCode { instructions: compiled_instructions, result: current_reg }; Ok(compiled_code) }, ast::Expr::Number { val } => { let compiled_code = CompiledCode { instructions: vec![], result: CompilationResult::Constant { val: val.clone() } }; Ok(compiled_code) }, ast::Expr::Variable { var } => { if variables.contains(var) { let current_reg = CompilationResult::Register { id: available_reg.clone() }; let current_instr = format!( "{} = load i32, i32* %{}ptr", current_reg.format_llvm(), var ); *available_reg += 1; let compiled_code = CompiledCode { 
instructions: vec![current_instr], result: current_reg, }; Ok(compiled_code) } else { Err(CompilationError::UnidentifiedVariable { identifier: var.clone() }) } }, } } } impl FormatLLVM for ast::Opcode { fn format_llvm(&self) -> String { let op_str = match self { ast::Opcode::Add => {"add i32"}, ast::Opcode::Sub => {"sub i32"}, ast::Opcode::Mul => {"mul i32"}, ast::Opcode::Div => {"sdiv i32"}, }; String::from(op_str) } } pub fn compile_llvm(program: &ast::Prog) -> Result<Vec<String>, CompilationError> { let mut instructions = vec![ String::from("declare void @printInt(i32)"), String::from("define i32 @main() {"), ]; let mut available_reg = 0 as u32; let mut used_variables: HashSet<String> = HashSet::new(); let mut compilation_result = program.compile_llvm( &mut available_reg, &mut used_variables )?; instructions.append(&mut compilation_result.instructions); instructions.append(&mut vec![ String::from("ret i32 0"), String::from("}"), ]); Ok(instructions) }
use instant_parser::ast; use crate::common::CompilationError; use std::collections::HashSet; use instant_parser::ast::Stmt; pub trait FormatLLVM { fn format_llvm(&self) -> String; } enum CompilationResult { Register { id: u32 }, Constant { val: i32 }, None, } impl FormatLLVM for CompilationResult { fn format_llvm(&self) -> String { match self { CompilationResult::Constant { val } => val.to_string(), CompilationResult::Register { id } => format!("%r{}", id), CompilationResult::None => String::from(""), } } } pub trait CompileLLVM { fn compile_llvm( &self, available_reg: &mut u32, variables: &mut HashSet<String>, ) -> Result<CompiledCode, CompilationError>; } pub struct CompiledCode { instructions: Vec<String>, result: CompilationResult, } impl CompileLLVM for ast::Prog {
} impl CompileLLVM for ast::Stmt { fn compile_llvm( &self, available_reg: &mut u32, variables: &mut HashSet<String> ) -> Result<CompiledCode, CompilationError> { match self { Stmt::Expr { expr } => { let mut compiled_expr = expr.compile_llvm(available_reg, variables)?; let print_instr = format!( "call void @printInt(i32 {})", compiled_expr.result.format_llvm(), ); compiled_expr.instructions.push(print_instr); compiled_expr.result = CompilationResult::None; Ok(compiled_expr) }, Stmt::Decl { var, expr } => { let mut compiled_expr = expr.compile_llvm(available_reg, variables)?; if !variables.contains(var) { let alloc_instr = format!( "%{}ptr = alloca i32", var ); compiled_expr.instructions.push(alloc_instr); variables.insert(var.clone()); } let store_instr = format!( "store i32 {}, i32* %{}ptr", compiled_expr.result.format_llvm(), var ); compiled_expr.instructions.push(store_instr); compiled_expr.result = CompilationResult::None; Ok(compiled_expr) }, } } } impl CompileLLVM for ast::Expr { fn compile_llvm( &self, available_reg: &mut u32, variables: &mut HashSet<String> ) -> Result<CompiledCode, CompilationError> { match self { ast::Expr::Binary { left, op, right } => { let mut compiled_instructions: Vec<String> = vec![]; let mut lhs = left.compile_llvm(available_reg, variables)?; compiled_instructions.append(&mut lhs.instructions); let mut rhs = right.compile_llvm(available_reg, variables)?; compiled_instructions.append(&mut rhs.instructions); let current_reg = CompilationResult::Register { id: available_reg.clone() }; let current_instr = format!( "{} = {} {}, {}", current_reg.format_llvm(), op.format_llvm(), lhs.result.format_llvm(), rhs.result.format_llvm(), ); compiled_instructions.push(current_instr); *available_reg += 1; let compiled_code = CompiledCode { instructions: compiled_instructions, result: current_reg }; Ok(compiled_code) }, ast::Expr::Number { val } => { let compiled_code = CompiledCode { instructions: vec![], result: CompilationResult::Constant { val: val.clone() } }; Ok(compiled_code) }, ast::Expr::Variable { var } => { if variables.contains(var) { let current_reg = CompilationResult::Register { id: available_reg.clone() }; let current_instr = format!( "{} = load i32, i32* %{}ptr", current_reg.format_llvm(), var ); *available_reg += 1; let compiled_code = CompiledCode { instructions: vec![current_instr], result: current_reg, }; Ok(compiled_code) } else { Err(CompilationError::UnidentifiedVariable { identifier: var.clone() }) } }, } } } impl FormatLLVM for ast::Opcode { fn format_llvm(&self) -> String { let op_str = match self { ast::Opcode::Add => {"add i32"}, ast::Opcode::Sub => {"sub i32"}, ast::Opcode::Mul => {"mul i32"}, ast::Opcode::Div => {"sdiv i32"}, }; String::from(op_str) } } pub fn compile_llvm(program: &ast::Prog) -> Result<Vec<String>, CompilationError> { let mut instructions = vec![ String::from("declare void @printInt(i32)"), String::from("define i32 @main() {"), ]; let mut available_reg = 0 as u32; let mut used_variables: HashSet<String> = HashSet::new(); let mut compilation_result = program.compile_llvm( &mut available_reg, &mut used_variables )?; instructions.append(&mut compilation_result.instructions); instructions.append(&mut vec![ String::from("ret i32 0"), String::from("}"), ]); Ok(instructions) }
fn compile_llvm( &self, available_reg: &mut u32, variables: &mut HashSet<String> ) -> Result<CompiledCode, CompilationError> { let mut instructions: Vec<String> = vec![]; for stmt in self.stmts.iter() { let mut compiled_stmt = stmt.compile_llvm( available_reg, variables, )?; instructions.append(&mut compiled_stmt.instructions); } let compiled_program = CompiledCode { instructions, result: CompilationResult::None }; Ok(compiled_program) }
function_block-full_function
[ { "content": "pub fn parse_arg() -> String {\n\n let args: Vec<String> = env::args().collect();\n\n match args.get(1) {\n\n Some(input_filename) => {\n\n String::from(input_filename)\n\n },\n\n None => {\n\n println!(\"Usage: {} {}\", &args[0], \"[input_filename]\");\n\n exit(2)\n\n },\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 0, "score": 94961.80877975468 }, { "content": "pub fn write_file(file: &mut fs::File, compiled_code: &Vec<String>) {\n\n for line in compiled_code.iter() {\n\n match writeln!(file, \"{}\", line) {\n\n Ok(res) => res,\n\n Err(e) => {\n\n println!(\"Failed to write file: {:?}\", e);\n\n exit(1);\n\n },\n\n };\n\n };\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 2, "score": 81271.90561741058 }, { "content": "pub fn translate(compiled_program: &CompiledCode, name: &String) -> Vec<String> {\n\n let mut jasmin_representation = vec![\n\n String::from(\".bytecode 47.0\"),\n\n format!(\".class public {}\", name),\n\n String::from(\".super java/lang/Object\"),\n\n String::from(\".method public <init>()V\"),\n\n String::from(\"aload_0\"),\n\n String::from(\"invokenonvirtual java/lang/Object/<init>()V\"),\n\n String::from(\"return\"),\n\n String::from(\".end method\"),\n\n ];\n\n jasmin_representation.append(&mut compiled_program.translate());\n\n jasmin_representation\n\n}\n", "file_path": "compiler/src/jasmin.rs", "rank": 6, "score": 74765.12022155801 }, { "content": "pub fn parse_program(input_path: &String) -> ast::Prog {\n\n let source_code = match fs::read_to_string(input_path) {\n\n Ok(source_code) => source_code,\n\n Err(e) => {\n\n println!(\"Error reading file: {:?}\", e);\n\n exit(1);\n\n }\n\n };\n\n\n\n let parser = instant::ProgParser::new();\n\n match parser.parse(&source_code) {\n\n Ok(parsed_program) => {\n\n parsed_program\n\n },\n\n Err(parsing_error) => {\n\n println!(\"Parsing error: {:?}\", parsing_error);\n\n exit(1);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 7, "score": 72164.27264697228 }, { "content": "pub fn parse_env(key: &str, default: &str) -> String {\n\n match env::var_os(key) {\n\n Some(llvm_as) => llvm_as.into_string().unwrap(),\n\n None => String::from(default),\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 8, "score": 70186.81782216956 }, { "content": "pub trait CompileStack {\n\n fn compile_stack(&self, env: &mut HashMap<String, i32>) -> Result<CompiledCode, CompilationError>;\n\n}\n\n\n\nimpl CompileStack for ast::Prog {\n\n fn compile_stack(&self, env: &mut HashMap<String, i32>) -> Result<CompiledCode, CompilationError> {\n\n let mut instructions: Vec<Instruction> = vec![];\n\n let mut stack_limit = 0;\n\n for stmt in self.stmts.iter() {\n\n let mut compiled_stmt = stmt.compile_stack(env)?;\n\n instructions.append(&mut compiled_stmt.instructions);\n\n stack_limit = max(stack_limit, compiled_stmt.stack_limit);\n\n }\n\n let locals_limit = env.len() as u32;\n\n let compiled_program = CompiledCode { instructions, stack_limit, locals_limit };\n\n Ok(compiled_program)\n\n }\n\n}\n\n\n\nimpl CompileStack for ast::Stmt {\n", "file_path": "compiler/src/stack.rs", "rank": 9, "score": 68616.04490561124 }, { "content": "/// compiles the program to a list of instructions on abstract stack-based machine\n\npub fn compile_stack(program: &ast::Prog) -> Result<CompiledCode, CompilationError> {\n\n let mut env: HashMap<String, i32> = HashMap::new();\n\n let compiled_program = program.compile_stack(&mut env)?;\n\n Ok(compiled_program)\n\n}\n", "file_path": "compiler/src/stack.rs", "rank": 10, "score": 
65193.47440649602 }, { "content": "pub trait __ToTriple<'input, > {\n\n fn to_triple(value: Self) -> Result<(usize,Token<'input>,usize), __lalrpop_util::ParseError<usize, Token<'input>, &'static str>>;\n\n}\n\n\n\nimpl<'input, > __ToTriple<'input, > for (usize, Token<'input>, usize) {\n\n fn to_triple(value: Self) -> Result<(usize,Token<'input>,usize), __lalrpop_util::ParseError<usize, Token<'input>, &'static str>> {\n\n Ok(value)\n\n }\n\n}\n\nimpl<'input, > __ToTriple<'input, > for Result<(usize, Token<'input>, usize), &'static str> {\n\n fn to_triple(value: Self) -> Result<(usize,Token<'input>,usize), __lalrpop_util::ParseError<usize, Token<'input>, &'static str>> {\n\n match value {\n\n Ok(v) => Ok(v),\n\n Err(error) => Err(__lalrpop_util::ParseError::User { error }),\n\n }\n\n }\n\n}\n", "file_path": "parser/src/instant.rs", "rank": 11, "score": 64070.95607218967 }, { "content": "pub fn check_exit_code(command_name: &str, status: &Result<ExitStatus>) {\n\n match status {\n\n Ok(status) => {\n\n if !status.success() {\n\n println!(\"{} exited with error code: {:?}\", command_name, status);\n\n exit(1);\n\n }\n\n },\n\n Err(e) => {\n\n println!(\"{} failed to execute: {:?}\", command_name, e);\n\n exit(1);\n\n }\n\n };\n\n}", "file_path": "src/lib.rs", "rank": 12, "score": 63615.540744002334 }, { "content": "fn main() {\n\n lalrpop::Configuration::new()\n\n .generate_in_source_tree()\n\n .process();\n\n}\n", "file_path": "parser/build.rs", "rank": 13, "score": 41111.801655259005 }, { "content": "trait JVMOutput {\n\n fn translate(&self) -> Vec<String>;\n\n}\n\n\n\nimpl JVMOutput for CompiledCode {\n\n fn translate(&self) -> Vec<String> {\n\n let mut instruction_vec: Vec<String> = vec![\n\n String::from(\".method public static main([Ljava/lang/String;)V\"),\n\n format!(\".limit stack {}\", self.stack_limit),\n\n format!(\".limit locals {}\", self.locals_limit + 1), // +1 local for main argument\n\n ];\n\n\n\n for instr in self.instructions.iter() {\n\n let mut instr_jvm_format = instr.translate();\n\n instruction_vec.append(&mut instr_jvm_format);\n\n }\n\n\n\n instruction_vec.push(String::from(\"return\"));\n\n instruction_vec.push(String::from(\".end method\"));\n\n instruction_vec\n", "file_path": "compiler/src/jasmin.rs", "rank": 14, "score": 39456.782171306724 }, { "content": "#[allow(unused_variables)]\n\nfn __action3<\n\n 'input,\n\n>(\n\n input: &'input str,\n\n (_, __0, _): (usize, Vec<Box<Stmt>>, usize),\n\n) -> Vec<Box<Stmt>>\n\n{\n\n (__0)\n\n}\n\n\n", "file_path": "parser/src/instant.rs", "rank": 15, "score": 39422.38182483725 }, { "content": "#[allow(unused_variables)]\n\nfn __action14<\n\n 'input,\n\n>(\n\n input: &'input str,\n\n (_, i, _): (usize, String, usize),\n\n) -> Box<Expr>\n\n{\n\n Box::new(Expr::Variable { var: i })\n\n}\n\n\n", "file_path": "parser/src/instant.rs", "rank": 16, "score": 39422.38182483725 }, { "content": "#[allow(unused_variables)]\n\nfn __action17<\n\n 'input,\n\n>(\n\n input: &'input str,\n\n (_, __0, _): (usize, &'input str, usize),\n\n) -> i32\n\n{\n\n i32::from_str(__0).unwrap()\n\n}\n\n\n", "file_path": "parser/src/instant.rs", "rank": 17, "score": 39422.38182483725 }, { "content": "#[allow(unused_variables)]\n\nfn __action2<\n\n 'input,\n\n>(\n\n input: &'input str,\n\n (_, statements, _): (usize, Vec<Box<Stmt>>, usize),\n\n) -> Prog\n\n{\n\n {\n\n Prog {\n\n stmts: Vec::from_iter(statements.into_iter()),\n\n }\n\n }\n\n}\n\n\n", "file_path": "parser/src/instant.rs", "rank": 18, "score": 39422.38182483725 }, { "content": 
"#[allow(unused_variables)]\n\nfn __action9<\n\n 'input,\n\n>(\n\n input: &'input str,\n\n (_, e, _): (usize, Box<Expr>, usize),\n\n) -> Box<Expr>\n\n{\n\n (e)\n\n}\n\n\n", "file_path": "parser/src/instant.rs", "rank": 19, "score": 39422.38182483725 }, { "content": "#[allow(unused_variables)]\n\nfn __action8<\n\n 'input,\n\n>(\n\n input: &'input str,\n\n (_, l, _): (usize, Box<Expr>, usize),\n\n (_, _, _): (usize, &'input str, usize),\n\n (_, r, _): (usize, Box<Expr>, usize),\n\n) -> Box<Expr>\n\n{\n\n {\n\n Box::new(Expr::Binary {\n\n left: l,\n\n op: Opcode::Sub,\n\n right:r,\n\n })\n\n }\n\n}\n\n\n", "file_path": "parser/src/instant.rs", "rank": 20, "score": 39422.38182483725 }, { "content": "#[allow(unused_variables)]\n\nfn __action6<\n\n 'input,\n\n>(\n\n input: &'input str,\n\n (_, l, _): (usize, Box<Expr>, usize),\n\n (_, _, _): (usize, &'input str, usize),\n\n (_, r, _): (usize, Box<Expr>, usize),\n\n) -> Box<Expr>\n\n{\n\n {\n\n Box::new(Expr::Binary {\n\n left: l,\n\n op: Opcode::Add,\n\n right:r,\n\n })\n\n }\n\n}\n\n\n", "file_path": "parser/src/instant.rs", "rank": 21, "score": 39422.38182483725 }, { "content": "#[allow(unused_variables)]\n\nfn __action11<\n\n 'input,\n\n>(\n\n input: &'input str,\n\n (_, l, _): (usize, Box<Expr>, usize),\n\n (_, _, _): (usize, &'input str, usize),\n\n (_, r, _): (usize, Box<Expr>, usize),\n\n) -> Box<Expr>\n\n{\n\n {\n\n Box::new(Expr::Binary {\n\n left: l,\n\n op: Opcode::Div,\n\n right:r,\n\n })\n\n }\n\n}\n\n\n", "file_path": "parser/src/instant.rs", "rank": 22, "score": 39422.38182483725 }, { "content": "#[allow(unused_variables)]\n\nfn __action13<\n\n 'input,\n\n>(\n\n input: &'input str,\n\n (_, n, _): (usize, i32, usize),\n\n) -> Box<Expr>\n\n{\n\n Box::new(Expr::Number { val: n })\n\n}\n\n\n", "file_path": "parser/src/instant.rs", "rank": 23, "score": 39422.38182483725 }, { "content": "#[allow(unused_variables)]\n\nfn __action15<\n\n 'input,\n\n>(\n\n input: &'input str,\n\n (_, _, _): (usize, &'input str, usize),\n\n (_, e, _): (usize, Box<Expr>, usize),\n\n (_, _, _): (usize, &'input str, usize),\n\n) -> Box<Expr>\n\n{\n\n (e)\n\n}\n\n\n", "file_path": "parser/src/instant.rs", "rank": 24, "score": 39422.38182483725 }, { "content": "#[allow(unused_variables)]\n\nfn __action24<\n\n 'input,\n\n>(\n\n input: &'input str,\n\n (_, __0, _): (usize, Box<Stmt>, usize),\n\n) -> ::std::vec::Vec<Box<Stmt>>\n\n{\n\n vec![__0]\n\n}\n\n\n", "file_path": "parser/src/instant.rs", "rank": 25, "score": 39422.38182483725 }, { "content": "#[allow(unused_variables)]\n\nfn __action21<\n\n 'input,\n\n>(\n\n input: &'input str,\n\n __lookbehind: &usize,\n\n __lookahead: &usize,\n\n) -> ::std::vec::Vec<Box<Stmt>>\n\n{\n\n vec![]\n\n}\n\n\n", "file_path": "parser/src/instant.rs", "rank": 26, "score": 39422.38182483725 }, { "content": "#[allow(unused_variables)]\n\nfn __action25<\n\n 'input,\n\n>(\n\n input: &'input str,\n\n (_, v, _): (usize, ::std::vec::Vec<Box<Stmt>>, usize),\n\n (_, e, _): (usize, Box<Stmt>, usize),\n\n) -> ::std::vec::Vec<Box<Stmt>>\n\n{\n\n { let mut v = v; v.push(e); v }\n\n}\n\n\n", "file_path": "parser/src/instant.rs", "rank": 27, "score": 39422.38182483725 }, { "content": "#[allow(unused_variables)]\n\nfn __action27<\n\n 'input,\n\n>(\n\n input: &'input str,\n\n __0: (usize, ::std::vec::Vec<Box<Stmt>>, usize),\n\n __1: (usize, Box<Stmt>, usize),\n\n __2: (usize, &'input str, usize),\n\n) -> ::std::vec::Vec<Box<Stmt>>\n\n{\n\n let __start0 = __1.0.clone();\n\n let __end0 = __2.2.clone();\n\n let __temp0 = __action23(\n\n input,\n\n 
__1,\n\n __2,\n\n );\n\n let __temp0 = (__start0, __temp0, __end0);\n\n __action25(\n\n input,\n\n __0,\n\n __temp0,\n\n )\n\n}\n\n\n", "file_path": "parser/src/instant.rs", "rank": 28, "score": 39422.38182483725 }, { "content": "#[allow(unused_variables)]\n\nfn __action29<\n\n 'input,\n\n>(\n\n input: &'input str,\n\n __0: (usize, ::std::vec::Vec<Box<Stmt>>, usize),\n\n __1: (usize, ::std::option::Option<Box<Stmt>>, usize),\n\n) -> Vec<Box<Stmt>>\n\n{\n\n let __start0 = __0.0.clone();\n\n let __end0 = __0.2.clone();\n\n let __temp0 = __action22(\n\n input,\n\n __0,\n\n );\n\n let __temp0 = (__start0, __temp0, __end0);\n\n __action18(\n\n input,\n\n __temp0,\n\n __1,\n\n )\n\n}\n\n\n", "file_path": "parser/src/instant.rs", "rank": 29, "score": 39422.38182483725 }, { "content": "#[allow(unused_variables)]\n\nfn __action1<\n\n 'input,\n\n>(\n\n input: &'input str,\n\n (_, __0, _): (usize, Box<Stmt>, usize),\n\n) -> Box<Stmt>\n\n{\n\n (__0)\n\n}\n\n\n", "file_path": "parser/src/instant.rs", "rank": 30, "score": 39422.38182483725 }, { "content": "#[allow(unused_variables)]\n\nfn __action28<\n\n 'input,\n\n>(\n\n input: &'input str,\n\n __0: (usize, ::std::option::Option<Box<Stmt>>, usize),\n\n) -> Vec<Box<Stmt>>\n\n{\n\n let __start0 = __0.0.clone();\n\n let __end0 = __0.0.clone();\n\n let __temp0 = __action21(\n\n input,\n\n &__start0,\n\n &__end0,\n\n );\n\n let __temp0 = (__start0, __temp0, __end0);\n\n __action18(\n\n input,\n\n __temp0,\n\n __0,\n\n )\n\n}\n\n\n", "file_path": "parser/src/instant.rs", "rank": 31, "score": 39422.38182483725 }, { "content": "#[allow(unused_variables)]\n\nfn __action32<\n\n 'input,\n\n>(\n\n input: &'input str,\n\n __0: (usize, ::std::vec::Vec<Box<Stmt>>, usize),\n\n __1: (usize, Box<Stmt>, usize),\n\n) -> Vec<Box<Stmt>>\n\n{\n\n let __start0 = __1.0.clone();\n\n let __end0 = __1.2.clone();\n\n let __temp0 = __action19(\n\n input,\n\n __1,\n\n );\n\n let __temp0 = (__start0, __temp0, __end0);\n\n __action29(\n\n input,\n\n __0,\n\n __temp0,\n\n )\n\n}\n\n\n", "file_path": "parser/src/instant.rs", "rank": 32, "score": 39422.38182483725 }, { "content": "#[allow(unused_variables)]\n\nfn __action22<\n\n 'input,\n\n>(\n\n input: &'input str,\n\n (_, v, _): (usize, ::std::vec::Vec<Box<Stmt>>, usize),\n\n) -> ::std::vec::Vec<Box<Stmt>>\n\n{\n\n v\n\n}\n\n\n", "file_path": "parser/src/instant.rs", "rank": 33, "score": 39422.38182483725 }, { "content": "#[allow(unused_variables)]\n\nfn __action18<\n\n 'input,\n\n>(\n\n input: &'input str,\n\n (_, v, _): (usize, ::std::vec::Vec<Box<Stmt>>, usize),\n\n (_, e, _): (usize, ::std::option::Option<Box<Stmt>>, usize),\n\n) -> Vec<Box<Stmt>>\n\n{\n\n match e {\n\n None => v,\n\n Some(e) => {\n\n let mut v = v;\n\n v.push(e);\n\n v\n\n }\n\n }\n\n}\n\n\n", "file_path": "parser/src/instant.rs", "rank": 34, "score": 39422.38182483725 }, { "content": "#[allow(unused_variables)]\n\nfn __action0<\n\n 'input,\n\n>(\n\n input: &'input str,\n\n (_, __0, _): (usize, Prog, usize),\n\n) -> Prog\n\n{\n\n (__0)\n\n}\n\n\n", "file_path": "parser/src/instant.rs", "rank": 35, "score": 39422.38182483725 }, { "content": "#[allow(unused_variables)]\n\nfn __action10<\n\n 'input,\n\n>(\n\n input: &'input str,\n\n (_, l, _): (usize, Box<Expr>, usize),\n\n (_, _, _): (usize, &'input str, usize),\n\n (_, r, _): (usize, Box<Expr>, usize),\n\n) -> Box<Expr>\n\n{\n\n {\n\n Box::new(Expr::Binary {\n\n left: l,\n\n op: Opcode::Mul,\n\n right:r,\n\n })\n\n }\n\n}\n\n\n", "file_path": "parser/src/instant.rs", "rank": 36, "score": 39422.38182483725 }, { 
"content": "#[allow(unused_variables)]\n\nfn __action23<\n\n 'input,\n\n>(\n\n input: &'input str,\n\n (_, __0, _): (usize, Box<Stmt>, usize),\n\n (_, _, _): (usize, &'input str, usize),\n\n) -> Box<Stmt>\n\n{\n\n (__0)\n\n}\n\n\n", "file_path": "parser/src/instant.rs", "rank": 37, "score": 39422.38182483725 }, { "content": "#[allow(unused_variables)]\n\nfn __action20<\n\n 'input,\n\n>(\n\n input: &'input str,\n\n __lookbehind: &usize,\n\n __lookahead: &usize,\n\n) -> ::std::option::Option<Box<Stmt>>\n\n{\n\n None\n\n}\n\n\n", "file_path": "parser/src/instant.rs", "rank": 38, "score": 39422.38182483725 }, { "content": "#[allow(unused_variables)]\n\nfn __action33<\n\n 'input,\n\n>(\n\n input: &'input str,\n\n __0: (usize, ::std::vec::Vec<Box<Stmt>>, usize),\n\n) -> Vec<Box<Stmt>>\n\n{\n\n let __start0 = __0.2.clone();\n\n let __end0 = __0.2.clone();\n\n let __temp0 = __action20(\n\n input,\n\n &__start0,\n\n &__end0,\n\n );\n\n let __temp0 = (__start0, __temp0, __end0);\n\n __action29(\n\n input,\n\n __0,\n\n __temp0,\n\n )\n\n}\n\n\n", "file_path": "parser/src/instant.rs", "rank": 39, "score": 39422.38182483725 }, { "content": "#[allow(unused_variables)]\n\nfn __action31<\n\n 'input,\n\n>(\n\n input: &'input str,\n\n __lookbehind: &usize,\n\n __lookahead: &usize,\n\n) -> Vec<Box<Stmt>>\n\n{\n\n let __start0 = __lookbehind.clone();\n\n let __end0 = __lookahead.clone();\n\n let __temp0 = __action20(\n\n input,\n\n &__start0,\n\n &__end0,\n\n );\n\n let __temp0 = (__start0, __temp0, __end0);\n\n __action28(\n\n input,\n\n __temp0,\n\n )\n\n}\n\n\n", "file_path": "parser/src/instant.rs", "rank": 40, "score": 39422.38182483725 }, { "content": "#[allow(unused_variables)]\n\nfn __action19<\n\n 'input,\n\n>(\n\n input: &'input str,\n\n (_, __0, _): (usize, Box<Stmt>, usize),\n\n) -> ::std::option::Option<Box<Stmt>>\n\n{\n\n Some(__0)\n\n}\n\n\n", "file_path": "parser/src/instant.rs", "rank": 41, "score": 39422.38182483725 }, { "content": "#[allow(unused_variables)]\n\nfn __action5<\n\n 'input,\n\n>(\n\n input: &'input str,\n\n (_, i, _): (usize, String, usize),\n\n (_, _, _): (usize, &'input str, usize),\n\n (_, e, _): (usize, Box<Expr>, usize),\n\n) -> Box<Stmt>\n\n{\n\n {\n\n Box::new(Stmt::Decl { var: i, expr: e })\n\n }\n\n}\n\n\n", "file_path": "parser/src/instant.rs", "rank": 42, "score": 39422.38182483725 }, { "content": "#[allow(unused_variables)]\n\nfn __action30<\n\n 'input,\n\n>(\n\n input: &'input str,\n\n __0: (usize, Box<Stmt>, usize),\n\n) -> Vec<Box<Stmt>>\n\n{\n\n let __start0 = __0.0.clone();\n\n let __end0 = __0.2.clone();\n\n let __temp0 = __action19(\n\n input,\n\n __0,\n\n );\n\n let __temp0 = (__start0, __temp0, __end0);\n\n __action28(\n\n input,\n\n __temp0,\n\n )\n\n}\n\n\n", "file_path": "parser/src/instant.rs", "rank": 43, "score": 39422.38182483725 }, { "content": "#[allow(unused_variables)]\n\nfn __action4<\n\n 'input,\n\n>(\n\n input: &'input str,\n\n (_, e, _): (usize, Box<Expr>, usize),\n\n) -> Box<Stmt>\n\n{\n\n {\n\n Box::new(Stmt::Expr { expr: e })\n\n }\n\n}\n\n\n", "file_path": "parser/src/instant.rs", "rank": 44, "score": 39422.38182483725 }, { "content": "#[allow(unused_variables)]\n\nfn __action26<\n\n 'input,\n\n>(\n\n input: &'input str,\n\n __0: (usize, Box<Stmt>, usize),\n\n __1: (usize, &'input str, usize),\n\n) -> ::std::vec::Vec<Box<Stmt>>\n\n{\n\n let __start0 = __0.0.clone();\n\n let __end0 = __1.2.clone();\n\n let __temp0 = __action23(\n\n input,\n\n __0,\n\n __1,\n\n );\n\n let __temp0 = (__start0, __temp0, __end0);\n\n __action24(\n\n input,\n\n 
__temp0,\n\n )\n\n}\n\n\n", "file_path": "parser/src/instant.rs", "rank": 45, "score": 39422.38182483725 }, { "content": "#[allow(unused_variables)]\n\nfn __action16<\n\n 'input,\n\n>(\n\n input: &'input str,\n\n (_, __0, _): (usize, &'input str, usize),\n\n) -> String\n\n{\n\n String::from(__0)\n\n}\n\n\n", "file_path": "parser/src/instant.rs", "rank": 46, "score": 39422.38182483725 }, { "content": "#[allow(unused_variables)]\n\nfn __action7<\n\n 'input,\n\n>(\n\n input: &'input str,\n\n (_, e, _): (usize, Box<Expr>, usize),\n\n) -> Box<Expr>\n\n{\n\n (e)\n\n}\n\n\n", "file_path": "parser/src/instant.rs", "rank": 47, "score": 39422.38182483725 }, { "content": "#[allow(unused_variables)]\n\nfn __action12<\n\n 'input,\n\n>(\n\n input: &'input str,\n\n (_, e, _): (usize, Box<Expr>, usize),\n\n) -> Box<Expr>\n\n{\n\n (e)\n\n}\n\n\n", "file_path": "parser/src/instant.rs", "rank": 48, "score": 39422.38182483725 }, { "content": " }\n\n}\n\n\n\nimpl CompileStack for ast::Expr {\n\n fn compile_stack(&self, env: &mut HashMap<String, i32>) -> Result<CompiledCode, CompilationError> {\n\n match self {\n\n ast::Expr::Number {val} => {\n\n let instruction = Instruction::PUSH {val: *val};\n\n let compiled_code = CompiledCode {\n\n instructions: vec![instruction],\n\n stack_limit: 1,\n\n locals_limit: 0\n\n };\n\n Ok(compiled_code)\n\n },\n\n ast::Expr::Variable {var} => {\n\n match env.get(var) {\n\n Option::None => {\n\n Err(CompilationError::UnidentifiedVariable { identifier: var.to_string() })\n\n },\n", "file_path": "compiler/src/stack.rs", "rank": 49, "score": 26.115480785770465 }, { "content": "#### LLVM Compiler\n\n\n\nParts of abstract syntax tree that are compiled implement trait `CompileLLVM`, in `compiler/src/llvm.rs`.\n\n\n\nCompilation of expression results in a vector of LLVM instructions and a result: either register or constant.\n\nThis way, Instant constants are never translated into single instruction storing them in LLVM register.\n\n\n\nAll variables are allocated exactly once, to track this we pass mutable HashMap (`variables`) argument,\n\nwhich contains names of all allocated variables and allows to prevent accessing undefined variable at compile time.\n\n\n\nIn order to track which registers are free, we also keep passing a mutable id of next free register number.\n\nInteger register names are formatted using \"%r{register_id}\", while registers containing pointers to variables\n\nare formatted using \"%{variable_name}ptr\" to prevent name collisions.\n\n\n\nResult of compiling syntax tree to llvm is a vector of strings, which is then saved to `.ll` file - that action\n\nis performed in the `insc_llvm.rs` executable. 
After that, the executable calls `llvm-as` to translate the text\n\nfile into binary one, and `llvm-link` to include `dist/runtime.bc` which contains `printInt` function.\n\n\n\n\n", "file_path": "README.md", "rank": 50, "score": 20.861460493917285 }, { "content": " }\n\n}\n\n\n\nimpl JVMOutput for Instruction {\n\n fn translate(&self) -> Vec<String> {\n\n let mut instruction_vec: Vec<String> = vec![];\n\n match self {\n\n Instruction::PUSH { val } => {\n\n let instr = match *val {\n\n -1 => String::from(\"iconst_m1\"),\n\n 0..=5 => format!(\"iconst_{}\", val),\n\n 5..=127 => format!(\"bipush {}\", val),\n\n _ => format!(\"sipush {} {}\", ((val >> 16) / (2 << 8)) as u8, (val >> 24) as u8),\n\n // larger are truncated without any warning TODO: fix this\n\n };\n\n instruction_vec.push(instr);\n\n },\n\n Instruction::MUL => {\n\n instruction_vec.push(String::from(\"imul\"));\n\n },\n", "file_path": "compiler/src/jasmin.rs", "rank": 53, "score": 19.58554977944889 }, { "content": "use instant_parser::ast;\n\nuse crate::common::CompilationError;\n\n\n\nuse std::collections::HashMap;\n\nuse std::cmp::{min, max};\n\nuse std::fmt::Debug;\n\n\n\n#[derive(Debug)]\n\npub enum Instruction {\n\n PUSH { val: i32 },\n\n ADD,\n\n SUB,\n\n MUL,\n\n DIV,\n\n PRINT,\n\n STORE { addr: i32 },\n\n LOAD { addr: i32 },\n\n SWAP,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct CompiledCode {\n\n pub instructions: Vec<Instruction>,\n\n pub stack_limit: u32,\n\n pub locals_limit: u32,\n\n}\n\n\n", "file_path": "compiler/src/stack.rs", "rank": 55, "score": 19.541483502879714 }, { "content": " } else {\n\n let mut __longest_match = 0;\n\n let mut __index = 0;\n\n for __i in 0 .. 10 {\n\n if __matches.matched(__i) {\n\n let __match = self.regex_vec[__i].find(__text).unwrap();\n\n let __len = __match.end();\n\n if __len >= __longest_match {\n\n __longest_match = __len;\n\n __index = __i;\n\n }\n\n }\n\n }\n\n let __result = &__text[..__longest_match];\n\n let __remaining = &__text[__longest_match..];\n\n let __end_offset = __start_offset + __longest_match;\n\n self.text = __remaining;\n\n self.consumed = __end_offset;\n\n Some(Ok((__start_offset, Token(__index, __result), __end_offset)))\n\n }\n\n }\n\n }\n\n }\n\n}\n\npub use self::__intern_token::Token;\n\n\n\n#[allow(unused_variables)]\n", "file_path": "parser/src/instant.rs", "rank": 56, "score": 16.101948041076366 }, { "content": " fn compile_stack(&self, env: &mut HashMap<String, i32>) -> Result<CompiledCode, CompilationError> {\n\n match self {\n\n ast::Stmt::Expr {expr} => {\n\n let mut compiled_expr = expr.compile_stack(env)?;\n\n compiled_expr.instructions.push(Instruction::PRINT);\n\n\n\n // stack limit is increased by 1 to account for the 1st argument to print\n\n let compiled_stmt = CompiledCode {\n\n instructions: compiled_expr.instructions,\n\n stack_limit: 1 + compiled_expr.stack_limit,\n\n locals_limit: compiled_expr.locals_limit,\n\n };\n\n Ok(compiled_stmt)\n\n },\n\n ast::Stmt::Decl {var, expr} => {\n\n let mut compiled_expr = expr.compile_stack(env)?;\n\n let variable_location = match env.get(var) {\n\n Some(existing_location) => *existing_location,\n\n None => {\n\n let new_location = match env.values().max() {\n", "file_path": "compiler/src/stack.rs", "rank": 57, "score": 15.565935021096587 }, { "content": "#[derive(Debug, PartialEq)]\n\npub enum Opcode {\n\n Add,\n\n Sub,\n\n Mul,\n\n Div,\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum Expr {\n\n Binary {\n\n left: Box<Expr>,\n\n op: Opcode,\n\n right: Box<Expr>,\n\n },\n\n Number { val: i32 },\n\n Variable 
{ var: String },\n\n}\n\n\n\n#[derive(Debug)]\n", "file_path": "parser/src/ast.rs", "rank": 60, "score": 13.976418565965584 }, { "content": " Instruction::ADD => {\n\n instruction_vec.push(String::from(\"iadd\"));\n\n },\n\n Instruction::SUB => {\n\n instruction_vec.push(String::from(\"isub\"));\n\n },\n\n Instruction::DIV => {\n\n instruction_vec.push(String::from(\"idiv\"));\n\n },\n\n Instruction::PRINT => {\n\n let mut print_instructions = vec![\n\n String::from(\"getstatic java/lang/System/out Ljava/io/PrintStream;\"),\n\n String::from(\"swap\"),\n\n String::from(\"invokevirtual java/io/PrintStream/println(I)V\"),\n\n ];\n\n instruction_vec.append(&mut print_instructions);\n\n },\n\n Instruction::STORE { addr } => {\n\n let instr = match *addr {\n\n 0..=3 => format!(\"istore_{}\", addr),\n", "file_path": "compiler/src/jasmin.rs", "rank": 61, "score": 13.27718208847708 }, { "content": " use self::__lalrpop_util::state_machine as __state_machine;\n\n extern crate regex as __regex;\n\n use std::fmt as __fmt;\n\n\n\n #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]\n\n pub struct Token<'input>(pub usize, pub &'input str);\n\n impl<'a> __fmt::Display for Token<'a> {\n\n fn fmt<'f>(&self, formatter: &mut __fmt::Formatter<'f>) -> Result<(), __fmt::Error> {\n\n __fmt::Display::fmt(self.1, formatter)\n\n }\n\n }\n\n\n\n pub struct __MatcherBuilder {\n\n regex_set: __regex::RegexSet,\n\n regex_vec: Vec<__regex::Regex>,\n\n }\n\n\n\n impl __MatcherBuilder {\n\n pub fn new() -> __MatcherBuilder {\n\n let __strs: &[&str] = &[\n", "file_path": "parser/src/instant.rs", "rank": 63, "score": 12.597959907660293 }, { "content": " regex_vec: &'builder Vec<__regex::Regex>,\n\n }\n\n\n\n impl<'input, 'builder> Iterator for __Matcher<'input, 'builder> {\n\n type Item = Result<(usize, Token<'input>, usize), __lalrpop_util::ParseError<usize,Token<'input>,&'static str>>;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n let __text = self.text.trim_start();\n\n let __whitespace = self.text.len() - __text.len();\n\n let __start_offset = self.consumed + __whitespace;\n\n if __text.is_empty() {\n\n self.text = __text;\n\n self.consumed = __start_offset;\n\n None\n\n } else {\n\n let __matches = self.regex_set.matches(__text);\n\n if !__matches.matched_any() {\n\n Some(Err(__lalrpop_util::ParseError::InvalidToken {\n\n location: __start_offset,\n\n }))\n", "file_path": "parser/src/instant.rs", "rank": 64, "score": 12.565927524001562 }, { "content": " {\n\n // __Stmt = Stmt => ActionFn(1);\n\n let __sym0 = __pop_Variant1(__symbols);\n\n let __start = __sym0.0.clone();\n\n let __end = __sym0.2.clone();\n\n let __nt = super::__action1::<>(input, __sym0);\n\n __symbols.push((__start, __Symbol::Variant1(__nt), __end));\n\n (1, 15)\n\n }\n\n}\n\npub use self::__parse__Prog::ProgParser;\n\n\n\n#[cfg_attr(rustfmt, rustfmt_skip)]\n\nmod __parse__Stmt {\n\n #![allow(non_snake_case, non_camel_case_types, unused_mut, unused_variables, unused_imports, unused_parens)]\n\n\n\n use std::str::FromStr;\n\n use std::iter::FromIterator;\n\n use crate::ast::{Opcode, Expr, Stmt, Prog};\n\n #[allow(unused_extern_crates)]\n", "file_path": "parser/src/instant.rs", "rank": 66, "score": 10.239417147567512 }, { "content": "#[derive(Debug)]\n\npub enum CompilationError {\n\n UnidentifiedVariable { identifier: String },\n\n}\n", "file_path": "compiler/src/common.rs", "rank": 67, "score": 10.17288890744485 }, { "content": " ) -> Self::Symbol {\n\n panic!(\"error recovery not enabled for this grammar\")\n\n }\n\n\n\n fn reduce(\n\n 
&mut self,\n\n action: i8,\n\n start_location: Option<&Self::Location>,\n\n states: &mut Vec<i8>,\n\n symbols: &mut Vec<__state_machine::SymbolTriple<Self>>,\n\n ) -> Option<__state_machine::ParseResult<Self>> {\n\n __reduce(\n\n self.input,\n\n action,\n\n start_location,\n\n states,\n\n symbols,\n\n ::std::marker::PhantomData::<(&())>,\n\n )\n\n }\n", "file_path": "parser/src/instant.rs", "rank": 68, "score": 9.675957153207836 }, { "content": " ) -> Self::Symbol {\n\n panic!(\"error recovery not enabled for this grammar\")\n\n }\n\n\n\n fn reduce(\n\n &mut self,\n\n action: i8,\n\n start_location: Option<&Self::Location>,\n\n states: &mut Vec<i8>,\n\n symbols: &mut Vec<__state_machine::SymbolTriple<Self>>,\n\n ) -> Option<__state_machine::ParseResult<Self>> {\n\n __reduce(\n\n self.input,\n\n action,\n\n start_location,\n\n states,\n\n symbols,\n\n ::std::marker::PhantomData::<(&())>,\n\n )\n\n }\n", "file_path": "parser/src/instant.rs", "rank": 69, "score": 9.675957153207836 }, { "content": " instructions.append(&mut rhs.instructions);\n\n instructions.append(&mut lhs.instructions);\n\n\n\n if *op == ast::Opcode::Sub || *op == ast::Opcode::Div {\n\n instructions.push(Instruction::SWAP);\n\n }\n\n }\n\n\n\n match op {\n\n ast::Opcode::Add => {\n\n instructions.push(Instruction::ADD);\n\n },\n\n ast::Opcode::Sub => {\n\n instructions.push(Instruction::SUB);\n\n },\n\n ast::Opcode::Mul => {\n\n instructions.push(Instruction::MUL);\n\n },\n\n ast::Opcode::Div => {\n\n instructions.push(Instruction::DIV)\n", "file_path": "compiler/src/stack.rs", "rank": 70, "score": 9.675304512262159 }, { "content": " _ => format!(\"istore {}\", addr),\n\n };\n\n instruction_vec.push(instr);\n\n },\n\n Instruction::LOAD { addr } => {\n\n let instr = match *addr {\n\n 0..=3 => format!(\"iload_{}\", addr),\n\n _ => format!(\"iload {}\", addr),\n\n };\n\n instruction_vec.push(instr);\n\n },\n\n Instruction::SWAP => {\n\n instruction_vec.push(String::from(\"swap\"));\n\n },\n\n };\n\n instruction_vec\n\n }\n\n}\n\n\n", "file_path": "compiler/src/jasmin.rs", "rank": 71, "score": 9.674129929743708 }, { "content": " use self::__lalrpop_util::state_machine as __state_machine;\n\n use super::__intern_token::Token;\n\n #[allow(dead_code)]\n\n pub enum __Symbol<'input>\n\n {\n\n Variant0(&'input str),\n\n Variant1(Box<Stmt>),\n\n Variant2(::std::vec::Vec<Box<Stmt>>),\n\n Variant3(Box<Expr>),\n\n Variant4(String),\n\n Variant5(i32),\n\n Variant6(Prog),\n\n Variant7(Vec<Box<Stmt>>),\n\n Variant8(::std::option::Option<Box<Stmt>>),\n\n }\n\n const __ACTION: &'static [i8] = &[\n\n // State 0\n\n 13, 0, 0, 0, 0, 0, 0, 0, 14, 15,\n\n // State 1\n\n 13, 0, 0, 0, 0, 0, 0, 0, 14, 15,\n", "file_path": "parser/src/instant.rs", "rank": 72, "score": 9.184111879790661 }, { "content": " __ACTION[(__state * 10)..].iter().zip(__TERMINAL).filter_map(|(&state, terminal)| {\n\n if state == 0 {\n\n None\n\n } else {\n\n Some(terminal.to_string())\n\n }\n\n }).collect()\n\n }\n\n pub struct __StateMachine<'input>\n\n where \n\n {\n\n input: &'input str,\n\n __phantom: ::std::marker::PhantomData<(&'input ())>,\n\n }\n\n impl<'input> __state_machine::ParserDefinition for __StateMachine<'input>\n\n where \n\n {\n\n type Location = usize;\n\n type Error = &'static str;\n\n type Token = Token<'input>;\n", "file_path": "parser/src/instant.rs", "rank": 73, "score": 9.106421556924888 }, { "content": " __ACTION[(__state * 10)..].iter().zip(__TERMINAL).filter_map(|(&state, terminal)| {\n\n if state == 0 {\n\n None\n\n } else {\n\n 
Some(terminal.to_string())\n\n }\n\n }).collect()\n\n }\n\n pub struct __StateMachine<'input>\n\n where \n\n {\n\n input: &'input str,\n\n __phantom: ::std::marker::PhantomData<(&'input ())>,\n\n }\n\n impl<'input> __state_machine::ParserDefinition for __StateMachine<'input>\n\n where \n\n {\n\n type Location = usize;\n\n type Error = &'static str;\n\n type Token = Token<'input>;\n", "file_path": "parser/src/instant.rs", "rank": 74, "score": 9.106421556924888 }, { "content": " Some(last_used_location) => last_used_location + 1,\n\n None => 0,\n\n };\n\n env.insert(var.clone(), new_location);\n\n compiled_expr.locals_limit += 1;\n\n new_location\n\n }\n\n };\n\n\n\n let store_command = Instruction::STORE { addr: variable_location };\n\n compiled_expr.instructions.push(store_command);\n\n\n\n let compiled_stmt = CompiledCode {\n\n instructions: compiled_expr.instructions,\n\n stack_limit: compiled_expr.stack_limit,\n\n locals_limit: compiled_expr.locals_limit\n\n };\n\n Ok(compiled_stmt)\n\n }\n\n }\n", "file_path": "compiler/src/stack.rs", "rank": 75, "score": 8.938475304497487 }, { "content": " extern crate lalrpop_util as __lalrpop_util;\n\n #[allow(unused_imports)]\n\n use self::__lalrpop_util::state_machine as __state_machine;\n\n use super::__intern_token::Token;\n\n #[allow(dead_code)]\n\n pub enum __Symbol<'input>\n\n {\n\n Variant0(&'input str),\n\n Variant1(Box<Stmt>),\n\n Variant2(::std::vec::Vec<Box<Stmt>>),\n\n Variant3(Box<Expr>),\n\n Variant4(String),\n\n Variant5(i32),\n\n Variant6(Prog),\n\n Variant7(Vec<Box<Stmt>>),\n\n Variant8(::std::option::Option<Box<Stmt>>),\n\n }\n\n const __ACTION: &'static [i8] = &[\n\n // State 0\n\n 9, 0, 0, 0, 0, 0, 0, 0, 10, 11,\n", "file_path": "parser/src/instant.rs", "rank": 76, "score": 8.629377241156988 }, { "content": " Option::Some(variable_address) => {\n\n let instruction = Instruction::LOAD {addr: *variable_address};\n\n let compiled_code = CompiledCode {\n\n instructions: vec![instruction],\n\n stack_limit: 1,\n\n locals_limit: 1,\n\n };\n\n Ok(compiled_code)\n\n }\n\n }\n\n },\n\n ast::Expr::Binary {left, op, right} => {\n\n let mut lhs = left.compile_stack(env)?;\n\n let mut rhs = right.compile_stack(env)?;\n\n\n\n let mut instructions: Vec<Instruction> = vec![];\n\n if lhs.stack_limit >= rhs.stack_limit {\n\n instructions.append(&mut lhs.instructions);\n\n instructions.append(&mut rhs.instructions);\n\n } else {\n", "file_path": "compiler/src/stack.rs", "rank": 77, "score": 8.129677278797221 }, { "content": " __symbols: &mut ::std::vec::Vec<(usize,__Symbol<'input>,usize)>\n\n ) -> (usize, String, usize)\n\n {\n\n match __symbols.pop().unwrap() {\n\n (__l, __Symbol::Variant4(__v), __r) => (__l, __v, __r),\n\n _ => panic!(\"symbol type mismatch\")\n\n }\n\n }\n\n fn __pop_Variant7<\n\n 'input,\n\n >(\n\n __symbols: &mut ::std::vec::Vec<(usize,__Symbol<'input>,usize)>\n\n ) -> (usize, Vec<Box<Stmt>>, usize)\n\n {\n\n match __symbols.pop().unwrap() {\n\n (__l, __Symbol::Variant7(__v), __r) => (__l, __v, __r),\n\n _ => panic!(\"symbol type mismatch\")\n\n }\n\n }\n\n fn __pop_Variant5<\n", "file_path": "parser/src/instant.rs", "rank": 78, "score": 8.118094157458678 }, { "content": " __symbols: &mut ::std::vec::Vec<(usize,__Symbol<'input>,usize)>\n\n ) -> (usize, String, usize)\n\n {\n\n match __symbols.pop().unwrap() {\n\n (__l, __Symbol::Variant4(__v), __r) => (__l, __v, __r),\n\n _ => panic!(\"symbol type mismatch\")\n\n }\n\n }\n\n fn __pop_Variant7<\n\n 'input,\n\n >(\n\n __symbols: &mut 
::std::vec::Vec<(usize,__Symbol<'input>,usize)>\n\n ) -> (usize, Vec<Box<Stmt>>, usize)\n\n {\n\n match __symbols.pop().unwrap() {\n\n (__l, __Symbol::Variant7(__v), __r) => (__l, __v, __r),\n\n _ => panic!(\"symbol type mismatch\")\n\n }\n\n }\n\n fn __pop_Variant5<\n", "file_path": "parser/src/instant.rs", "rank": 79, "score": 8.118094157458678 }, { "content": " 'input,\n\n >(\n\n __symbols: &mut ::std::vec::Vec<(usize,__Symbol<'input>,usize)>\n\n ) -> (usize, i32, usize)\n\n {\n\n match __symbols.pop().unwrap() {\n\n (__l, __Symbol::Variant5(__v), __r) => (__l, __v, __r),\n\n _ => panic!(\"symbol type mismatch\")\n\n }\n\n }\n\n fn __pop_Variant8<\n\n 'input,\n\n >(\n\n __symbols: &mut ::std::vec::Vec<(usize,__Symbol<'input>,usize)>\n\n ) -> (usize, ::std::option::Option<Box<Stmt>>, usize)\n\n {\n\n match __symbols.pop().unwrap() {\n\n (__l, __Symbol::Variant8(__v), __r) => (__l, __v, __r),\n\n _ => panic!(\"symbol type mismatch\")\n\n }\n", "file_path": "parser/src/instant.rs", "rank": 80, "score": 8.080930966928442 }, { "content": " 'input,\n\n >(\n\n __symbols: &mut ::std::vec::Vec<(usize,__Symbol<'input>,usize)>\n\n ) -> (usize, i32, usize)\n\n {\n\n match __symbols.pop().unwrap() {\n\n (__l, __Symbol::Variant5(__v), __r) => (__l, __v, __r),\n\n _ => panic!(\"symbol type mismatch\")\n\n }\n\n }\n\n fn __pop_Variant8<\n\n 'input,\n\n >(\n\n __symbols: &mut ::std::vec::Vec<(usize,__Symbol<'input>,usize)>\n\n ) -> (usize, ::std::option::Option<Box<Stmt>>, usize)\n\n {\n\n match __symbols.pop().unwrap() {\n\n (__l, __Symbol::Variant8(__v), __r) => (__l, __v, __r),\n\n _ => panic!(\"symbol type mismatch\")\n\n }\n", "file_path": "parser/src/instant.rs", "rank": 81, "score": 8.080930966928442 }, { "content": " __GOTO[(state as usize) * 16 + nt] - 1\n\n }\n\n\n\n fn token_to_symbol(&self, token_index: usize, token: Self::Token) -> Self::Symbol {\n\n __token_to_symbol(token_index, token, ::std::marker::PhantomData::<(&())>)\n\n }\n\n\n\n fn expected_tokens(&self, state: i8) -> Vec<String> {\n\n __expected_tokens(state as usize)\n\n }\n\n\n\n #[inline]\n\n fn uses_error_recovery(&self) -> bool {\n\n false\n\n }\n\n\n\n #[inline]\n\n fn error_recovery_symbol(\n\n &self,\n\n recovery: __state_machine::ErrorRecovery<Self>,\n", "file_path": "parser/src/instant.rs", "rank": 82, "score": 8.023651822388498 }, { "content": " __GOTO[(state as usize) * 16 + nt] - 1\n\n }\n\n\n\n fn token_to_symbol(&self, token_index: usize, token: Self::Token) -> Self::Symbol {\n\n __token_to_symbol(token_index, token, ::std::marker::PhantomData::<(&())>)\n\n }\n\n\n\n fn expected_tokens(&self, state: i8) -> Vec<String> {\n\n __expected_tokens(state as usize)\n\n }\n\n\n\n #[inline]\n\n fn uses_error_recovery(&self) -> bool {\n\n false\n\n }\n\n\n\n #[inline]\n\n fn error_recovery_symbol(\n\n &self,\n\n recovery: __state_machine::ErrorRecovery<Self>,\n", "file_path": "parser/src/instant.rs", "rank": 83, "score": 8.023651822388498 }, { "content": " _priv: (),\n\n }\n\n\n\n impl ProgParser {\n\n pub fn new() -> ProgParser {\n\n let __builder = super::__intern_token::__MatcherBuilder::new();\n\n ProgParser {\n\n builder: __builder,\n\n _priv: (),\n\n }\n\n }\n\n\n\n #[allow(dead_code)]\n\n pub fn parse<\n\n 'input,\n\n >(\n\n &self,\n\n input: &'input str,\n\n ) -> Result<Prog, __lalrpop_util::ParseError<usize, Token<'input>, &'static str>>\n\n {\n", "file_path": "parser/src/instant.rs", "rank": 84, "score": 7.899511998422655 }, { "content": " _priv: (),\n\n }\n\n\n\n impl StmtParser {\n\n pub fn new() -> StmtParser 
{\n\n let __builder = super::__intern_token::__MatcherBuilder::new();\n\n StmtParser {\n\n builder: __builder,\n\n _priv: (),\n\n }\n\n }\n\n\n\n #[allow(dead_code)]\n\n pub fn parse<\n\n 'input,\n\n >(\n\n &self,\n\n input: &'input str,\n\n ) -> Result<Box<Stmt>, __lalrpop_util::ParseError<usize, Token<'input>, &'static str>>\n\n {\n", "file_path": "parser/src/instant.rs", "rank": 85, "score": 7.829872099748634 }, { "content": "// use lalrpop_util::lalrpop_mod; // use with new lalrpop behaviour instead of pub mod instant\n\n\n\npub mod ast;\n\npub mod instant;\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n fn it_works() {\n\n assert_eq!(2 + 2, 4);\n\n }\n\n}\n", "file_path": "parser/src/lib.rs", "rank": 86, "score": 7.399203741651284 }, { "content": "use crate::stack::{CompiledCode, Instruction};\n\n\n", "file_path": "compiler/src/jasmin.rs", "rank": 87, "score": 7.327081457289196 }, { "content": "use std::{env, fs, io::Result, io::Write};\n\nuse instant_parser::{ast, instant};\n\nuse std::process::{exit, ExitStatus};\n\n\n", "file_path": "src/lib.rs", "rank": 88, "score": 7.283272404785951 }, { "content": " pub(crate) fn __reduce16<\n\n 'input,\n\n >(\n\n input: &'input str,\n\n __action: i8,\n\n __lookahead_start: Option<&usize>,\n\n __states: &mut ::std::vec::Vec<i8>,\n\n __symbols: &mut ::std::vec::Vec<(usize,__Symbol<'input>,usize)>,\n\n _: ::std::marker::PhantomData<(&'input ())>,\n\n ) -> (usize, usize)\n\n {\n\n // Num = r#\"[0-9]+\"# => ActionFn(17);\n\n let __sym0 = __pop_Variant0(__symbols);\n\n let __start = __sym0.0.clone();\n\n let __end = __sym0.2.clone();\n\n let __nt = super::__action17::<>(input, __sym0);\n\n __symbols.push((__start, __Symbol::Variant5(__nt), __end));\n\n (1, 8)\n\n }\n\n pub(crate) fn __reduce17<\n", "file_path": "parser/src/instant.rs", "rank": 89, "score": 7.142845784824014 }, { "content": " pub(crate) fn __reduce16<\n\n 'input,\n\n >(\n\n input: &'input str,\n\n __action: i8,\n\n __lookahead_start: Option<&usize>,\n\n __states: &mut ::std::vec::Vec<i8>,\n\n __symbols: &mut ::std::vec::Vec<(usize,__Symbol<'input>,usize)>,\n\n _: ::std::marker::PhantomData<(&'input ())>,\n\n ) -> (usize, usize)\n\n {\n\n // Num = r#\"[0-9]+\"# => ActionFn(17);\n\n let __sym0 = __pop_Variant0(__symbols);\n\n let __start = __sym0.0.clone();\n\n let __end = __sym0.2.clone();\n\n let __nt = super::__action17::<>(input, __sym0);\n\n __symbols.push((__start, __Symbol::Variant5(__nt), __end));\n\n (1, 8)\n\n }\n\n pub(crate) fn __reduce17<\n", "file_path": "parser/src/instant.rs", "rank": 90, "score": 7.142845784824015 }, { "content": " let __next_state = __GOTO[__state * 16 + __nonterminal] - 1;\n\n __states.push(__next_state);\n\n None\n\n }\n\n fn __pop_Variant3<\n\n 'input,\n\n >(\n\n __symbols: &mut ::std::vec::Vec<(usize,__Symbol<'input>,usize)>\n\n ) -> (usize, Box<Expr>, usize)\n\n {\n\n match __symbols.pop().unwrap() {\n\n (__l, __Symbol::Variant3(__v), __r) => (__l, __v, __r),\n\n _ => panic!(\"symbol type mismatch\")\n\n }\n\n }\n\n fn __pop_Variant1<\n\n 'input,\n\n >(\n\n __symbols: &mut ::std::vec::Vec<(usize,__Symbol<'input>,usize)>\n\n ) -> (usize, Box<Stmt>, usize)\n", "file_path": "parser/src/instant.rs", "rank": 91, "score": 7.132719157437742 }, { "content": " let __next_state = __GOTO[__state * 16 + __nonterminal] - 1;\n\n __states.push(__next_state);\n\n None\n\n }\n\n fn __pop_Variant3<\n\n 'input,\n\n >(\n\n __symbols: &mut ::std::vec::Vec<(usize,__Symbol<'input>,usize)>\n\n ) -> (usize, Box<Expr>, usize)\n\n {\n\n match __symbols.pop().unwrap() 
{\n\n (__l, __Symbol::Variant3(__v), __r) => (__l, __v, __r),\n\n _ => panic!(\"symbol type mismatch\")\n\n }\n\n }\n\n fn __pop_Variant1<\n\n 'input,\n\n >(\n\n __symbols: &mut ::std::vec::Vec<(usize,__Symbol<'input>,usize)>\n\n ) -> (usize, Box<Stmt>, usize)\n", "file_path": "parser/src/instant.rs", "rank": 92, "score": 7.132719157437742 }, { "content": " {\n\n // __Prog = Prog => ActionFn(0);\n\n let __sym0 = __pop_Variant6(__symbols);\n\n let __start = __sym0.0.clone();\n\n let __end = __sym0.2.clone();\n\n let __nt = super::__action0::<>(input, __sym0);\n\n __symbols.push((__start, __Symbol::Variant6(__nt), __end));\n\n (1, 14)\n\n }\n\n}\n\npub use self::__parse__Stmt::StmtParser;\n\n#[cfg_attr(rustfmt, rustfmt_skip)]\n\nmod __intern_token {\n\n #![allow(unused_imports)]\n\n use std::str::FromStr;\n\n use std::iter::FromIterator;\n\n use crate::ast::{Opcode, Expr, Stmt, Prog};\n\n #[allow(unused_extern_crates)]\n\n extern crate lalrpop_util as __lalrpop_util;\n\n #[allow(unused_imports)]\n", "file_path": "parser/src/instant.rs", "rank": 93, "score": 7.032307403269689 }, { "content": "pub enum Stmt {\n\n Expr { expr: Box<Expr> },\n\n Decl { var: String, expr: Box<Expr> },\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Prog {\n\n pub stmts: Vec<Box<Stmt>>,\n\n}\n", "file_path": "parser/src/ast.rs", "rank": 94, "score": 6.935197207264752 }, { "content": "// auto-generated: \"lalrpop 0.17.2\"\n\n// sha256: e7926053cbb6967122a3dde33b24adf8aec0dbd7d8c316d2ce573d109964c\n\nuse std::str::FromStr;\n\nuse std::iter::FromIterator;\n\nuse crate::ast::{Opcode, Expr, Stmt, Prog};\n\n#[allow(unused_extern_crates)]\n\nextern crate lalrpop_util as __lalrpop_util;\n\n#[allow(unused_imports)]\n\nuse self::__lalrpop_util::state_machine as __state_machine;\n\n\n\n#[cfg_attr(rustfmt, rustfmt_skip)]\n\nmod __parse__Prog {\n\n #![allow(non_snake_case, non_camel_case_types, unused_mut, unused_variables, unused_imports, unused_parens)]\n\n\n\n use std::str::FromStr;\n\n use std::iter::FromIterator;\n\n use crate::ast::{Opcode, Expr, Stmt, Prog};\n\n #[allow(unused_extern_crates)]\n\n extern crate lalrpop_util as __lalrpop_util;\n\n #[allow(unused_imports)]\n", "file_path": "parser/src/instant.rs", "rank": 95, "score": 6.921050240349949 }, { "content": "#### JVM Compiler\n\n\n\nI started learning to use Rust and Lalrpop before this assignment, by writing a simple calculator which translated\n\nexpressions into a set of stack-based vm commands. The result only needed few changes to be compatible with Instant\n\nlanguage, so I decided to build them on top of it - this is the reason why `compiler/src/stack.rs` exists.\n\n\n\nParts of abstract syntax tree that are compiled implement trait `CompileStack`. During compilation,\n\nnecessary stack depth is returned together with the vector of compiled instructions. This allows for optimization\n\nof evaluation order in the case of binary expressions. Stack limit is also necessary for the final jasmin output.\n\n\n\nProgram statements are evaluated one by one, mapping between instant variables and jvm variable ids is stored\n\nin a mutable HashMap (`env` argument). 
Compiler also prevents access to undefined variables.\n\n\n\nThe JVM compilation process, first translates the parsed abstract syntax tree into abstract stack representation\n\n(implemented in `stack.rs`), which is later translated to Jasmin representation (implemented in `jasmin.rs`).\n\n\n\nThe executable `insc_jvm.rs` saves this representation, and runs `jasmin.jar` distributed in `dist` folder on the\n\ncreated jasmin file in order to translate it to JVM bytecode, that is also saved. \n\n\n\n\n\n### Executables\n\n\n\nThe root-directory executables (`isnc_jvm` and `insc_llvm`) are just bash scripts wrapping compiled rust programs\n\n(also setting environment variables, which I added to make local development easier).\n\n\n\nSource code for the executables is contained within `src` directory:\n\n\n\n```\n\nsrc\n\n├── bin\n\n│   ├── insc_jvm.rs\n\n│   └── insc_llvm.rs\n\n└── lib.rs\n\n```\n\n\n\nCommon utility methods for these executables are grouped in `src/lib.rs`.\n\n\n\n\n\n### External resources\n\n\n\nTogether with my compiler, I also packaged `e2e_tests` for testing and demonstration purposes, as well as\n\nthe `dist` folder containing utilities necessary to translate compiled code to final executable representation.\n\n\n\nFiles contained within these two folders were not created by me.\n", "file_path": "README.md", "rank": 96, "score": 6.741924864560657 }, { "content": " __states: &mut ::std::vec::Vec<i8>,\n\n __symbols: &mut ::std::vec::Vec<(usize,__Symbol<'input>,usize)>,\n\n _: ::std::marker::PhantomData<(&'input ())>,\n\n ) -> (usize, usize)\n\n {\n\n // Stmt? = Stmt => ActionFn(19);\n\n let __sym0 = __pop_Variant1(__symbols);\n\n let __start = __sym0.0.clone();\n\n let __end = __sym0.2.clone();\n\n let __nt = super::__action19::<>(input, __sym0);\n\n __symbols.push((__start, __Symbol::Variant8(__nt), __end));\n\n (1, 12)\n\n }\n\n pub(crate) fn __reduce25<\n\n 'input,\n\n >(\n\n input: &'input str,\n\n __action: i8,\n\n __lookahead_start: Option<&usize>,\n\n __states: &mut ::std::vec::Vec<i8>,\n", "file_path": "parser/src/instant.rs", "rank": 97, "score": 6.607430798465788 }, { "content": " __states: &mut ::std::vec::Vec<i8>,\n\n __symbols: &mut ::std::vec::Vec<(usize,__Symbol<'input>,usize)>,\n\n _: ::std::marker::PhantomData<(&'input ())>,\n\n ) -> (usize, usize)\n\n {\n\n // Stmt = Expr1 => ActionFn(4);\n\n let __sym0 = __pop_Variant3(__symbols);\n\n let __start = __sym0.0.clone();\n\n let __end = __sym0.2.clone();\n\n let __nt = super::__action4::<>(input, __sym0);\n\n __symbols.push((__start, __Symbol::Variant1(__nt), __end));\n\n (1, 11)\n\n }\n\n pub(crate) fn __reduce23<\n\n 'input,\n\n >(\n\n input: &'input str,\n\n __action: i8,\n\n __lookahead_start: Option<&usize>,\n\n __states: &mut ::std::vec::Vec<i8>,\n", "file_path": "parser/src/instant.rs", "rank": 98, "score": 6.607430798465787 }, { "content": " __states: &mut ::std::vec::Vec<i8>,\n\n __symbols: &mut ::std::vec::Vec<(usize,__Symbol<'input>,usize)>,\n\n _: ::std::marker::PhantomData<(&'input ())>,\n\n ) -> (usize, usize)\n\n {\n\n // Stmt? 
= Stmt => ActionFn(19);\n\n let __sym0 = __pop_Variant1(__symbols);\n\n let __start = __sym0.0.clone();\n\n let __end = __sym0.2.clone();\n\n let __nt = super::__action19::<>(input, __sym0);\n\n __symbols.push((__start, __Symbol::Variant8(__nt), __end));\n\n (1, 12)\n\n }\n\n pub(crate) fn __reduce25<\n\n 'input,\n\n >(\n\n input: &'input str,\n\n __action: i8,\n\n __lookahead_start: Option<&usize>,\n\n __states: &mut ::std::vec::Vec<i8>,\n", "file_path": "parser/src/instant.rs", "rank": 99, "score": 6.607430798465787 } ]
Rust
testsuite/cluster-test/src/experiments/client_compatibility_test.rs
chouette254/libra
1eaefa60d29e1df72ba6c4f9cf1867964821b586
#![forbid(unsafe_code)] use crate::{ cluster::Cluster, cluster_swarm::cluster_swarm_kube::CFG_SEED, experiments::{Context, Experiment, ExperimentParam}, instance::Instance, }; use async_trait::async_trait; use libra_logger::prelude::*; use libra_types::chain_id::ChainId; use std::{collections::HashSet, fmt, time::Duration}; use structopt::StructOpt; use tokio::time; #[derive(StructOpt, Debug)] pub struct ClientCompatiblityTestParams { #[structopt(long, help = "Image tag of old client to test")] pub old_image_tag: String, } pub struct ClientCompatibilityTest { old_image_tag: String, faucet_node: Instance, cli_node: Instance, } impl ExperimentParam for ClientCompatiblityTestParams { type E = ClientCompatibilityTest; fn build(self, cluster: &Cluster) -> Self::E { let (test_nodes, _) = cluster.split_n_fullnodes_random(2); let mut test_nodes = test_nodes.into_fullnode_instances(); let faucet_node = test_nodes.pop().expect("Requires at least one faucet node"); let cli_node = test_nodes.pop().expect("Requires at least one test node"); Self::E { old_image_tag: self.old_image_tag, faucet_node, cli_node, } } } #[async_trait] impl Experiment for ClientCompatibilityTest { fn affected_validators(&self) -> HashSet<String> { HashSet::new() } async fn run(&mut self, context: &mut Context<'_>) -> anyhow::Result<()> { context.report.report_text(format!( "Client compatibility test results for {} ==> {} (PR)", self.old_image_tag, context.current_tag )); let test_image = format!( "853397791086.dkr.ecr.us-west-2.amazonaws.com/libra_faucet:{}", self.old_image_tag ); let faucet_port: &str = "9999"; let num_validators = context.cluster.validator_instances().len(); let config_cmd = format!( "/opt/libra/bin/config-builder faucet -o /opt/libra/etc --chain-id {chain_id} -s {seed} -n {num_validators}; echo $?; cat /opt/libra/etc/waypoint.txt", chain_id=ChainId::test(), seed=CFG_SEED, num_validators=num_validators ); let env_cmd = format!( "CFG_CHAIN_ID={chain_id} AC_HOST={ac_host} AC_PORT={ac_port}", chain_id = ChainId::test(), ac_host = self.faucet_node.ip(), ac_port = self.faucet_node.ac_port() ); let run_cmd = format!("gunicorn --bind 0.0.0.0:{faucet_port} --access-logfile - --error-logfile - --log-level debug --pythonpath /opt/libra/bin server", faucet_port=faucet_port); let full_faucet_cmd = format!( "{config_cmd}; {env_cmd} {run_cmd}", config_cmd = config_cmd, env_cmd = env_cmd, run_cmd = run_cmd ); let msg = format!("1. 
Starting faucet on node {}", self.faucet_node); info!("{}", msg); context.report.report_text(msg); let faucet_job_name = self .faucet_node .spawn_job(&test_image, &full_faucet_cmd, "run-faucet") .await .map_err(|err| anyhow::format_err!("Failed to spawn faucet job: {}", err))?; info!( "Job {} started for node {}:{} faucet command: {}", faucet_job_name, self.faucet_node, self.faucet_node.peer_name(), full_faucet_cmd ); info!("Waiting for faucet job to spin up completely"); time::delay_for(Duration::from_secs(20)).await; let run_cli_cmd = format!( "/opt/libra/bin/cli --url {fn_url} --chain-id {chain_id} -f http://{faucet_host}:{faucet_port} --waypoint $(cat /opt/libra/etc/waypoint.txt)", fn_url = self.cli_node.json_rpc_url(), chain_id = ChainId::test(), faucet_host = self.faucet_node.ip(), faucet_port = faucet_port ); let mut build_cli_cmd = String::new(); let cli_cmd_file = "/opt/libra/etc/cmds.txt"; let cmds = include_str!("client_compatibility_cmds.txt"); for cmd in cmds.split('\n') { build_cli_cmd.push_str(&format!( "echo {cmd} >> {cmd_file};", cmd = cmd, cmd_file = cli_cmd_file )); } let full_cli_cmd = format!( "{config_cmd}; {build_cli_cmd} {run_cli_cmd} < {cli_cmd_file} && echo SUCCESS", config_cmd = config_cmd, build_cli_cmd = build_cli_cmd, run_cli_cmd = run_cli_cmd, cli_cmd_file = cli_cmd_file ); let msg = format!("2. Running CLI mint from node {}", self.cli_node); info!("{}", msg); context.report.report_text(msg); info!( "Job starting for node {}:{} CLI command: {}", self.cli_node, self.cli_node.peer_name(), full_cli_cmd ); self.cli_node .cmd(&test_image, &full_cli_cmd, "run-cli-commands") .await .map_err(|err| anyhow::format_err!("Failed to run CLI: {}", err))?; let msg = format!("3. CLI success from node {}", self.cli_node); info!("{}", msg); context.report.report_text(msg); context .cluster_builder .cluster_swarm .kill_job(&faucet_job_name) .await .map_err(|err| anyhow::format_err!("Failed to kill faucet: {}", err))?; Ok(()) } fn deadline(&self) -> Duration { Duration::from_secs(5 * 60) } } impl fmt::Display for ClientCompatibilityTest { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, "Client compatibility test {}, faucet {}, CLI on {}", self.old_image_tag, self.faucet_node, self.cli_node ) } }
#![forbid(unsafe_code)] use crate::{ cluster::Cluster, cluster_swarm::cluster_swarm_kube::CFG_SEED, experiments::{Context, Experiment, ExperimentParam}, instance::Instance, }; use async_trait::async_trait; use libra_logger::prelude::*; use libra_types::chain_id::ChainId; use std::{collections::HashSet, fmt, time::Duration}; use structopt::StructOpt; use tokio::time; #[derive(StructOpt, Debug)] pub struct ClientCompatiblityTestParams { #[structopt(long, help = "Image tag of old client to test")] pub old_image_tag: String, } pub struct ClientCompatibilityTest { old_image_tag: String, faucet_node: Instance, cli_node: Instance, } impl ExperimentParam for ClientCompatiblityTestParams { type E = ClientCompatibilityTest; fn build(self, cluster: &Cluster) -> Self::E { let (test_nodes, _) = cluster.split_n_fullnodes_random(2); let mut test_nodes = test_nodes.into_fullnode_instances(); let faucet_node = test_nodes.pop().expect("Requires at least one faucet node"); let cli_node = test_nodes.pop().expect("Requires at least one test node"); Self::E { old_image_tag: self.old_image_tag, faucet_node, cli_node, } } } #[async_trait] impl Experiment for ClientCompatibilityTest { fn affected_validators(&self) -> HashSet<String> { HashSet::new() } async fn run(&mut self, context: &mut Context<'_>) -> anyhow::Result<()> { context.report.report_text(format!( "Client compatibility test results for {} ==> {} (PR)", self.old_image_tag, context.current_tag )); let test_image = format!( "853397791086.dkr.ecr.us-west-2.amazonaws.com/libra_faucet:{}", self.old_image_tag ); let faucet_port: &str = "9999"; let num_validators = context.cluster.validator_instances().len(); let config_cmd = format!( "/opt/libra/bin/config-builder faucet -o /opt/libra/etc --chain-id {chain_id} -s {seed} -n {num_validators}; echo $?; cat /opt/libra/etc/waypoint.txt", chain_id=ChainId::test(), seed=CFG_SEED, num_validators=num_validators ); let env_cmd = format!( "CFG_CHAIN_ID={chain_id} AC_HOST={ac_host} AC_PORT={ac_port}", chain_id = ChainId::test(), ac_host = self.faucet_node.ip(), ac_port = self.faucet_node.ac_port() ); let run_cmd = format!("gunicorn --bind 0.0.0.0:{faucet_port} --access-logfile - --error-logfile - --log-level debug --pythonpath /opt/libra/bin server", faucet_port=faucet_port); let full_faucet_cmd = format!( "{config_cmd}; {env_cmd} {run_cmd}", config_cmd = config_cmd, env_cmd = env_cmd, run_cmd = run_cmd ); let msg = format!("1. Starting faucet on node {}", self.faucet_node); info!("{}", msg); context.report.report_text(msg);
self.faucet_node.peer_name(), full_faucet_cmd ); info!("Waiting for faucet job to spin up completely"); time::delay_for(Duration::from_secs(20)).await; let run_cli_cmd = format!( "/opt/libra/bin/cli --url {fn_url} --chain-id {chain_id} -f http://{faucet_host}:{faucet_port} --waypoint $(cat /opt/libra/etc/waypoint.txt)", fn_url = self.cli_node.json_rpc_url(), chain_id = ChainId::test(), faucet_host = self.faucet_node.ip(), faucet_port = faucet_port ); let mut build_cli_cmd = String::new(); let cli_cmd_file = "/opt/libra/etc/cmds.txt"; let cmds = include_str!("client_compatibility_cmds.txt"); for cmd in cmds.split('\n') { build_cli_cmd.push_str(&format!( "echo {cmd} >> {cmd_file};", cmd = cmd, cmd_file = cli_cmd_file )); } let full_cli_cmd = format!( "{config_cmd}; {build_cli_cmd} {run_cli_cmd} < {cli_cmd_file} && echo SUCCESS", config_cmd = config_cmd, build_cli_cmd = build_cli_cmd, run_cli_cmd = run_cli_cmd, cli_cmd_file = cli_cmd_file ); let msg = format!("2. Running CLI mint from node {}", self.cli_node); info!("{}", msg); context.report.report_text(msg); info!( "Job starting for node {}:{} CLI command: {}", self.cli_node, self.cli_node.peer_name(), full_cli_cmd ); self.cli_node .cmd(&test_image, &full_cli_cmd, "run-cli-commands") .await .map_err(|err| anyhow::format_err!("Failed to run CLI: {}", err))?; let msg = format!("3. CLI success from node {}", self.cli_node); info!("{}", msg); context.report.report_text(msg); context .cluster_builder .cluster_swarm .kill_job(&faucet_job_name) .await .map_err(|err| anyhow::format_err!("Failed to kill faucet: {}", err))?; Ok(()) } fn deadline(&self) -> Duration { Duration::from_secs(5 * 60) } } impl fmt::Display for ClientCompatibilityTest { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, "Client compatibility test {}, faucet {}, CLI on {}", self.old_image_tag, self.faucet_node, self.cli_node ) } }
let faucet_job_name = self .faucet_node .spawn_job(&test_image, &full_faucet_cmd, "run-faucet") .await .map_err(|err| anyhow::format_err!("Failed to spawn faucet job: {}", err))?; info!( "Job {} started for node {}:{} faucet command: {}", faucet_job_name, self.faucet_node,
random
[ { "content": "pub fn get_instance_list_str(batch: &[Instance]) -> String {\n\n let mut nodes_list = String::from(\"\");\n\n for instance in batch.iter() {\n\n nodes_list.push_str(&instance.to_string());\n\n nodes_list.push_str(\", \")\n\n }\n\n nodes_list\n\n}\n\n\n\n#[derive(StructOpt, Debug)]\n\npub struct CompatiblityTestParams {\n\n #[structopt(\n\n long,\n\n default_value = \"15\",\n\n help = \"Number of nodes to update in the first batch\"\n\n )]\n\n pub count: usize,\n\n #[structopt(long, help = \"Image tag of newer validator software\")]\n\n pub updated_image_tag: String,\n\n}\n", "file_path": "testsuite/cluster-test/src/experiments/compatibility_test.rs", "rank": 0, "score": 617028.3327156748 }, { "content": "fn types<'a>(context: &mut Context, tys: impl IntoIterator<Item = &'a N::Type>) {\n\n tys.into_iter().for_each(|ty| type_(context, ty))\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 1, "score": 589103.3780190678 }, { "content": "fn type_(context: &mut Context, sp!(_, ty_): &N::Type) {\n\n use N::Type_ as T;\n\n match ty_ {\n\n T::Apply(_, tn, tys) => {\n\n type_name(context, tn);\n\n types(context, tys);\n\n }\n\n T::Ref(_, t) => type_(context, t),\n\n T::Param(_) | T::Unit | T::Anything | T::UnresolvedError | T::Var(_) => (),\n\n }\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 2, "score": 572440.725637687 }, { "content": "/// Given an experiment name and its flags, it constructs an instance of that experiment\n\n/// and returns it as a `Box<dyn Experiment>`\n\npub fn get_experiment(name: &str, args: &[String], cluster: &Cluster) -> Box<dyn Experiment> {\n\n fn f<P: ExperimentParam + StructOpt + 'static>(\n\n ) -> Box<dyn Fn(&[String], &Cluster) -> Box<dyn Experiment>> {\n\n Box::new(from_args::<P>)\n\n }\n\n\n\n let mut known_experiments = HashMap::new();\n\n\n\n known_experiments.insert(\"recovery_time\", f::<RecoveryTimeParams>());\n\n known_experiments.insert(\n\n \"packet_loss_random_validators\",\n\n f::<PacketLossRandomValidatorsParams>(),\n\n );\n\n known_experiments.insert(\"bench\", f::<PerformanceBenchmarkParams>());\n\n known_experiments.insert(\n\n \"bench_three_region\",\n\n f::<PerformanceBenchmarkThreeRegionSimulationParams>(),\n\n );\n\n known_experiments.insert(\n\n \"reboot_random_validators\",\n", "file_path": "testsuite/cluster-test/src/experiments/mod.rs", "rank": 3, "score": 566012.4507499702 }, { "content": "fn types(context: &mut Context, tys: Vec<E::Type>) -> Vec<N::Type> {\n\n tys.into_iter().map(|t| type_(context, t)).collect()\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/translate.rs", "rank": 4, "score": 562360.5868320419 }, { "content": "fn type_(context: &mut Context, sp!(loc, ety_): E::Type) -> N::Type {\n\n use ResolvedType as RT;\n\n use E::{ModuleAccess_ as EN, Type_ as ET};\n\n use N::{TypeName_ as NN, Type_ as NT};\n\n let ty_ = match ety_ {\n\n ET::Unit => NT::Unit,\n\n ET::Multiple(tys) => {\n\n NT::multiple_(loc, tys.into_iter().map(|t| type_(context, t)).collect())\n\n }\n\n ET::Ref(mut_, inner) => NT::Ref(mut_, Box::new(type_(context, *inner))),\n\n ET::UnresolvedError => {\n\n assert!(context.has_errors());\n\n NT::UnresolvedError\n\n }\n\n ET::Apply(sp!(_, EN::Name(n)), tys) => match context.resolve_unscoped_type(&n) {\n\n None => {\n\n assert!(context.has_errors());\n\n NT::UnresolvedError\n\n }\n\n Some(RT::BuiltinType) => {\n", "file_path": "language/move-lang/src/naming/translate.rs", "rank": 5, "score": 562360.586832042 }, { "content": "// Extracts lines 
out of some text file where each line starts with `start` which can be a regular\n\n// expressions. Returns the list of such lines with `start` stripped. Use as in\n\n// `extract_test_directives(file, \"// dep:\")`.\n\npub fn extract_test_directives(path: &Path, start: &str) -> anyhow::Result<Vec<String>> {\n\n let rex = Regex::new(&format!(\"(?m)^{}(?P<ann>.*?)$\", start)).unwrap();\n\n let mut content = String::new();\n\n let mut file = File::open(path)?;\n\n file.read_to_string(&mut content)?;\n\n let mut at = 0;\n\n let mut res = vec![];\n\n while let Some(cap) = rex.captures(&content[at..]) {\n\n res.push(cap.name(\"ann\").unwrap().as_str().trim().to_string());\n\n at += cap.get(0).unwrap().end();\n\n }\n\n Ok(res)\n\n}\n", "file_path": "language/move-prover/test-utils/src/lib.rs", "rank": 6, "score": 555493.3364191647 }, { "content": "fn solve_copyable_constraint(context: &mut Context, loc: Loc, msg: String, s: Type) {\n\n let s = unfold_type(&context.subst, s);\n\n let sloc = s.loc;\n\n let kind = match infer_kind(&context, &context.subst, s.clone()) {\n\n // Anything => None\n\n // Unbound TVar or Anything satisfies any constraint. Will fail later in expansion\n\n None => return,\n\n Some(k) => k,\n\n };\n\n match kind {\n\n sp!(_, Kind_::Copyable) | sp!(_, Kind_::Affine) => (),\n\n sp!(rloc, Kind_::Unknown) | sp!(rloc, Kind_::Resource) => {\n\n let ty_str = error_format(&s, &context.subst);\n\n context.error(vec![\n\n (loc, msg),\n\n (sloc, format!(\"The type: {}\", ty_str)),\n\n (rloc, \"Is found to be a non-copyable type here\".into()),\n\n ])\n\n }\n\n }\n\n}\n\n\n", "file_path": "language/move-lang/src/typing/core.rs", "rank": 7, "score": 548346.4362995961 }, { "content": "fn solve_base_type_constraint(context: &mut Context, loc: Loc, msg: String, ty: &Type) {\n\n use TypeName_::*;\n\n use Type_::*;\n\n let sp!(tyloc, unfolded_) = unfold_type(&context.subst, ty.clone());\n\n match unfolded_ {\n\n Var(_) => unreachable!(),\n\n Unit | Ref(_, _) | Apply(_, sp!(_, Multiple(_)), _) => {\n\n let tystr = error_format(ty, &context.subst);\n\n let tmsg = format!(\"Expected a single non-reference type, but found: {}\", tystr);\n\n context.error(vec![(loc, msg), (tyloc, tmsg)])\n\n }\n\n UnresolvedError | Anything | Param(_) | Apply(_, _, _) => (),\n\n }\n\n}\n\n\n", "file_path": "language/move-lang/src/typing/core.rs", "rank": 8, "score": 546755.8602027171 }, { "content": "fn solve_single_type_constraint(context: &mut Context, loc: Loc, msg: String, ty: &Type) {\n\n use TypeName_::*;\n\n use Type_::*;\n\n let sp!(tyloc, unfolded_) = unfold_type(&context.subst, ty.clone());\n\n match unfolded_ {\n\n Var(_) => unreachable!(),\n\n Unit | Apply(_, sp!(_, Multiple(_)), _) => {\n\n let tystr = error_format(ty, &context.subst);\n\n let tmsg = format!(\n\n \"Expected a single type, but found expression list type: {}\",\n\n tystr\n\n );\n\n context.error(vec![(loc, msg), (tyloc, tmsg)])\n\n }\n\n UnresolvedError | Anything | Ref(_, _) | Param(_) | Apply(_, _, _) => (),\n\n }\n\n}\n\n\n\n//**************************************************************************************************\n\n// Subst\n\n//**************************************************************************************************\n\n\n", "file_path": "language/move-lang/src/typing/core.rs", "rank": 9, "score": 546755.8602027171 }, { "content": "fn check_restricted_self_name(context: &mut Context, case: &str, n: &Name) -> Result<(), ()> {\n\n check_restricted_name(context, case, n, ModuleName::SELF_NAME)\n\n}\n\n\n", 
"file_path": "language/move-lang/src/expansion/translate.rs", "rank": 10, "score": 541431.2187780319 }, { "content": "fn type_opt(context: &mut Context, t_opt: &Option<N::Type>) {\n\n t_opt.iter().for_each(|t| type_(context, t))\n\n}\n\n\n\n//**************************************************************************************************\n\n// Expressions\n\n//**************************************************************************************************\n\n\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 11, "score": 537737.5638191033 }, { "content": "fn type_(context: &mut Context, sp!(loc, ty_): &N::Type) {\n\n use N::Type_::*;\n\n match ty_ {\n\n Var(_) => panic!(\"ICE tvar in struct field type\"),\n\n Unit | Anything | UnresolvedError | Param(_) => (),\n\n Ref(_, t) => type_(context, t),\n\n Apply(_, sp!(_, tn_), tys) => {\n\n if let TypeName_::ModuleType(m, s) = tn_ {\n\n context.add_usage(*loc, m, s)\n\n }\n\n tys.iter().for_each(|t| type_(context, t))\n\n }\n\n }\n\n}\n\n\n", "file_path": "language/move-lang/src/typing/recursive_structs.rs", "rank": 12, "score": 532436.9284231921 }, { "content": "pub fn type_(context: &mut Context, ty: &mut Type) {\n\n use Type_::*;\n\n match &mut ty.value {\n\n Anything | UnresolvedError | Param(_) | Unit => (),\n\n Ref(_, b) => type_(context, b),\n\n Var(tvar) => {\n\n let ty_tvar = sp(ty.loc, Var(*tvar));\n\n let replacement = core::unfold_type(&context.subst, ty_tvar);\n\n let replacement = match replacement {\n\n sp!(_, Var(_)) => panic!(\"ICE unfold_type_base failed to expand\"),\n\n sp!(loc, Anything) => {\n\n context.error(vec![(\n\n ty.loc,\n\n \"Could not infer this type. Try adding an annotation\",\n\n )]);\n\n sp(loc, UnresolvedError)\n\n }\n\n t => t,\n\n };\n\n *ty = replacement;\n", "file_path": "language/move-lang/src/typing/expand.rs", "rank": 13, "score": 525523.5702634334 }, { "content": "fn struct_fields(context: &mut Context, efields: E::StructFields) -> N::StructFields {\n\n match efields {\n\n E::StructFields::Native(loc) => N::StructFields::Native(loc),\n\n E::StructFields::Defined(em) => {\n\n N::StructFields::Defined(em.map(|_f, (idx, t)| (idx, type_(context, t))))\n\n }\n\n }\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/translate.rs", "rank": 14, "score": 523926.4630057452 }, { "content": "fn types_opt(context: &mut Context, tys_opt: &Option<Vec<N::Type>>) {\n\n tys_opt.iter().for_each(|tys| types(context, tys))\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 15, "score": 519974.1747671275 }, { "content": "pub fn parse_type_tag(s: &str) -> Result<TypeTag> {\n\n parse(s, |parser| parser.parse_type_tag())\n\n}\n\n\n", "file_path": "language/move-core/types/src/parser.rs", "rank": 16, "score": 511261.9962613904 }, { "content": "fn struct_def(context: &mut Context, _name: StructName, s: &mut N::StructDefinition) {\n\n assert!(context.constraints.is_empty());\n\n context.reset_for_module_item();\n\n\n\n let field_map = match &mut s.fields {\n\n N::StructFields::Native(_) => return,\n\n N::StructFields::Defined(m) => m,\n\n };\n\n\n\n for (_field, idx_ty) in field_map.iter() {\n\n let inst_ty = core::instantiate(context, idx_ty.1.clone());\n\n context.add_base_type_constraint(inst_ty.loc, \"Invalid field type\", inst_ty);\n\n }\n\n core::solve_constraints(context);\n\n\n\n for (_field, idx_ty) in field_map.iter_mut() {\n\n expand::type_(context, &mut idx_ty.1);\n\n 
}\n\n}\n\n\n\n//**************************************************************************************************\n\n// Types\n\n//**************************************************************************************************\n\n\n", "file_path": "language/move-lang/src/typing/translate.rs", "rank": 17, "score": 510687.32790051465 }, { "content": "fn struct_def(context: &mut Context, sdef: &N::StructDefinition) {\n\n if let N::StructFields::Defined(fields) = &sdef.fields {\n\n fields.iter().for_each(|(_, (_, bt))| type_(context, bt));\n\n }\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 18, "score": 506580.50375692465 }, { "content": "pub fn parse_type_tags(s: &str) -> Result<Vec<TypeTag>> {\n\n parse(s, |parser| {\n\n parser.parse_comma_list(|parser| parser.parse_type_tag(), Token::EOF, true)\n\n })\n\n}\n\n\n", "file_path": "language/move-core/types/src/parser.rs", "rank": 19, "score": 498840.50407438166 }, { "content": "pub fn exp(context: &mut Context, e: &mut T::Exp) {\n\n use T::UnannotatedExp_ as E;\n\n match &e.exp.value {\n\n // dont expand the type for return, abort, break, or continue\n\n E::Break | E::Continue | E::Return(_) | E::Abort(_) => {\n\n let t = e.ty.clone();\n\n match core::unfold_type(&context.subst, t) {\n\n sp!(_, Type_::Anything) => (),\n\n mut t => {\n\n // report errors if there is an uninferred type argument somewhere\n\n type_(context, &mut t);\n\n }\n\n }\n\n e.ty = sp(e.ty.loc, Type_::Anything)\n\n }\n\n // Loop's default type is ()\n\n E::Loop {\n\n has_break: false, ..\n\n } => {\n\n let t = e.ty.clone();\n", "file_path": "language/move-lang/src/typing/expand.rs", "rank": 20, "score": 495488.2275250694 }, { "content": "pub fn instantiate(context: &mut Context, sp!(loc, t_): Type) -> Type {\n\n use Type_::*;\n\n let it_ = match t_ {\n\n Unit => Unit,\n\n UnresolvedError => UnresolvedError,\n\n Anything => make_tvar(context, loc).value,\n\n Ref(mut_, b) => Ref(mut_, Box::new(instantiate(context, *b))),\n\n Apply(kopt, n, ty_args) => instantiate_apply(context, loc, kopt, n, ty_args),\n\n x @ Param(_) => x,\n\n Var(_) => panic!(\"ICE instantiate type variable\"),\n\n };\n\n sp(loc, it_)\n\n}\n\n\n", "file_path": "language/move-lang/src/typing/core.rs", "rank": 21, "score": 491535.26372626465 }, { "content": "fn struct_def(context: &mut Context, sname: StructName, sdef: &N::StructDefinition) {\n\n assert!(context.current_struct == None, \"ICE struct name not unset\");\n\n context.current_struct = Some(sname);\n\n match &sdef.fields {\n\n N::StructFields::Native(_) => (),\n\n N::StructFields::Defined(fields) => {\n\n fields.iter().for_each(|(_, (_, ty))| type_(context, ty))\n\n }\n\n };\n\n context.current_struct = None;\n\n}\n\n\n", "file_path": "language/move-lang/src/typing/recursive_structs.rs", "rank": 22, "score": 486520.8768231013 }, { "content": "fn lvalues<'a>(context: &mut Context, al: impl IntoIterator<Item = &'a N::LValue>) {\n\n al.into_iter().for_each(|a| lvalue(context, a))\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 23, "score": 485424.530782658 }, { "content": "fn type_name(context: &mut Context, sp!(loc, tn_): &N::TypeName) {\n\n use N::TypeName_ as TN;\n\n if let TN::ModuleType(m, _) = tn_ {\n\n context.add_usage(m, *loc)\n\n }\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 24, "score": 483123.51464892155 }, { "content": "fn exp_(context: &mut Context, e: E::Exp) -> N::Exp {\n\n use E::Exp_ as EE;\n\n use N::Exp_ as NE;\n\n let sp!(eloc, e_) = e;\n\n 
let ne_ = match e_ {\n\n EE::Unit { trailing } => NE::Unit { trailing },\n\n EE::InferredNum(u) => NE::InferredNum(u),\n\n EE::Value(val) => NE::Value(val),\n\n EE::Move(v) => NE::Move(v),\n\n EE::Copy(v) => NE::Copy(v),\n\n EE::Name(sp!(aloc, E::ModuleAccess_::Name(v)), None) => {\n\n if is_constant_name(&v.value) {\n\n access_constant(context, sp(aloc, E::ModuleAccess_::Name(v)))\n\n } else {\n\n NE::Use(Var(v))\n\n }\n\n }\n\n EE::Name(ma, None) => access_constant(context, ma),\n\n\n\n EE::IfElse(eb, et, ef) => {\n", "file_path": "language/move-lang/src/naming/translate.rs", "rank": 25, "score": 482277.2704811427 }, { "content": "fn check_no_nominal_resources(context: &mut Context, s: &StructName, field: &Field, ty: &N::Type) {\n\n use N::Type_ as T;\n\n let sp!(tloc, ty_) = ty;\n\n match ty_ {\n\n T::Apply(Some(sp!(kloc, Kind_::Resource)), _, _) => {\n\n let field_msg = format!(\n\n \"Invalid resource field '{}' for struct '{}'. Structs cannot contain resource \\\n\n types, except through type parameters\",\n\n field, s\n\n );\n\n let tmsg = format!(\n\n \"Field '{}' is a resource due to the type: {}\",\n\n field,\n\n core::error_format(ty, &Subst::empty()),\n\n );\n\n let kmsg = format!(\n\n \"Type {} was declared as a resource here\",\n\n core::error_format(ty, &Subst::empty()),\n\n );\n\n context.error(vec![\n", "file_path": "language/move-lang/src/naming/translate.rs", "rank": 26, "score": 480822.7184710035 }, { "content": "pub fn make_num_tvar(context: &mut Context, loc: Loc) -> Type {\n\n let tvar = context.subst.new_num_var(loc);\n\n sp(loc, Type_::Var(tvar))\n\n}\n\n\n", "file_path": "language/move-lang/src/typing/core.rs", "rank": 27, "score": 480246.4095899365 }, { "content": "fn bind_list(context: &mut Context, ls: E::LValueList) -> Option<N::LValueList> {\n\n lvalue_list(context, LValueCase::Bind, ls)\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/translate.rs", "rank": 28, "score": 479094.00084365567 }, { "content": "fn freeze(context: &mut Context, result: &mut Block, expected_type: &H::Type, e: H::Exp) -> H::Exp {\n\n use H::{Type_ as T, UnannotatedExp_ as E};\n\n\n\n match needs_freeze(&e.ty, expected_type) {\n\n Freeze::NotNeeded => e,\n\n Freeze::Point => freeze_point(e),\n\n\n\n Freeze::Sub(points) => {\n\n let loc = e.exp.loc;\n\n let actual_tys = match &e.ty.value {\n\n T::Multiple(v) => v.clone(),\n\n _ => unreachable!(\"ICE needs_freeze failed\"),\n\n };\n\n assert!(actual_tys.len() == points.len());\n\n let new_temps = actual_tys\n\n .into_iter()\n\n .map(|ty| (context.new_temp(loc, ty.clone()), ty))\n\n .collect::<Vec<_>>();\n\n\n\n let lvalues = new_temps\n", "file_path": "language/move-lang/src/hlir/translate.rs", "rank": 29, "score": 478311.98614958534 }, { "content": "fn type_(context: &mut Context, sp!(loc, pt_): P::Type) -> E::Type {\n\n use E::Type_ as ET;\n\n use P::Type_ as PT;\n\n let t_ = match pt_ {\n\n PT::Unit => ET::Unit,\n\n PT::Multiple(ts) => ET::Multiple(types(context, ts)),\n\n PT::Apply(pn, ptyargs) => {\n\n let tyargs = types(context, ptyargs);\n\n match module_access(context, Access::Type, *pn) {\n\n None => {\n\n assert!(context.has_errors());\n\n ET::UnresolvedError\n\n }\n\n Some(n) => ET::Apply(n, tyargs),\n\n }\n\n }\n\n PT::Ref(mut_, inner) => ET::Ref(mut_, Box::new(type_(context, *inner))),\n\n PT::Fun(args, result) => {\n\n if context\n\n .require_spec_context(loc, \"`|_|_` function type only allowed in specifications\")\n", "file_path": "language/move-lang/src/expansion/translate.rs", "rank": 30, "score": 
478212.43711411697 }, { "content": "fn types(context: &mut Context, pts: Vec<P::Type>) -> Vec<E::Type> {\n\n pts.into_iter().map(|pt| type_(context, pt)).collect()\n\n}\n\n\n", "file_path": "language/move-lang/src/expansion/translate.rs", "rank": 31, "score": 478212.43711411697 }, { "content": "pub fn from_currency_code_string(currency_code_string: &str) -> Result<Identifier> {\n\n Identifier::new(currency_code_string)\n\n}\n", "file_path": "types/src/account_config/constants/libra.rs", "rank": 32, "score": 477752.87063445756 }, { "content": "fn exp(context: &mut Context, e: E::Exp) -> Box<N::Exp> {\n\n Box::new(exp_(context, e))\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/translate.rs", "rank": 33, "score": 470754.66151378106 }, { "content": "pub fn make_tvar(_context: &mut Context, loc: Loc) -> Type {\n\n sp(loc, Type_::Var(TVar::next()))\n\n}\n\n\n\n//**************************************************************************************************\n\n// Structs\n\n//**************************************************************************************************\n\n\n", "file_path": "language/move-lang/src/typing/core.rs", "rank": 34, "score": 470560.04009996087 }, { "content": "pub fn run(mut args: Args, xctx: XContext) -> Result<()> {\n\n args.args.extend(args.testname.clone());\n\n let config = xctx.config();\n\n\n\n let env_vars: &[(&str, &str)] = if args.html_cov_dir.is_some() {\n\n info!(\"Running \\\"cargo clean\\\" before collecting coverage\");\n\n let mut clean_cmd = Command::new(\"cargo\");\n\n clean_cmd.arg(\"clean\");\n\n clean_cmd.output()?;\n\n &[\n\n // A way to use -Z (unstable) flags with the stable compiler. See below.\n\n (\"RUSTC_BOOTSTRAP\", \"1\"),\n\n // Recommend setting for grcov, avoids using the cargo cache.\n\n (\"CARGO_INCREMENTAL\", \"0\"),\n\n // Recommend flags for use with grcov, with these flags removed: -Copt-level=0, -Clink-dead-code.\n\n // for more info see: https://github.com/mozilla/grcov#example-how-to-generate-gcda-fiels-for-a-rust-project\n\n (\n\n \"RUSTFLAGS\",\n\n \"-Zprofile -Ccodegen-units=1 -Coverflow-checks=off\",\n\n ),\n", "file_path": "devtools/x/src/test.rs", "rank": 35, "score": 469221.1826023073 }, { "content": "pub fn compile_script_string(code: &str) -> Result<CompiledScript> {\n\n compile_script_string_and_assert_no_error(code, vec![])\n\n}\n\n\n", "file_path": "language/compiler/src/unit_tests/testutils.rs", "rank": 36, "score": 466528.7816713045 }, { "content": "pub fn compile_module_string(code: &str) -> Result<CompiledModule> {\n\n compile_module_string_and_assert_no_error(code, vec![])\n\n}\n\n\n", "file_path": "language/compiler/src/unit_tests/testutils.rs", "rank": 37, "score": 466528.7816713045 }, { "content": "pub fn solve_constraints(context: &mut Context) {\n\n use BuiltinTypeName_ as BT;\n\n let num_vars = context.subst.num_vars.clone();\n\n let mut subst = std::mem::replace(&mut context.subst, Subst::empty());\n\n for (num_var, loc) in num_vars {\n\n let tvar = sp(loc, Type_::Var(num_var));\n\n match unfold_type(&subst, tvar.clone()).value {\n\n Type_::UnresolvedError | Type_::Anything => {\n\n let next_subst = join(subst, &Type_::u64(loc), &tvar).unwrap().0;\n\n subst = next_subst;\n\n }\n\n _ => (),\n\n }\n\n }\n\n context.subst = subst;\n\n\n\n let constraints = std::mem::replace(&mut context.constraints, vec![]);\n\n for constraint in constraints {\n\n match constraint {\n\n Constraint::IsCopyable(loc, msg, s) => solve_copyable_constraint(context, loc, msg, s),\n", "file_path": 
"language/move-lang/src/typing/core.rs", "rank": 38, "score": 465719.7631193304 }, { "content": "fn script(context: &mut Context, escript: E::Script) -> N::Script {\n\n let E::Script {\n\n loc,\n\n constants: econstants,\n\n function_name,\n\n function: efunction,\n\n specs: _specs,\n\n } = escript;\n\n let outer_unscoped = context.save_unscoped();\n\n for (n, _) in &econstants {\n\n let sp!(loc, s) = n.0;\n\n context.bind_constant(s, loc)\n\n }\n\n let inner_unscoped = context.save_unscoped();\n\n let constants = econstants.map(|name, c| {\n\n context.restore_unscoped(inner_unscoped.clone());\n\n constant(context, name, c)\n\n });\n\n context.restore_unscoped(inner_unscoped);\n\n let function = function(context, function_name.clone(), efunction);\n", "file_path": "language/move-lang/src/naming/translate.rs", "rank": 39, "score": 463652.22396552586 }, { "content": "fn sequence(context: &mut Context, seq: E::Sequence) -> N::Sequence {\n\n seq.into_iter().map(|s| sequence_item(context, s)).collect()\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/translate.rs", "rank": 40, "score": 463652.2239655259 }, { "content": "pub fn compile_module_string_with_stdlib(code: &str) -> Result<CompiledModule> {\n\n compile_module_string_and_assert_no_error(code, stdlib())\n\n}\n\n\n", "file_path": "language/compiler/src/unit_tests/testutils.rs", "rank": 41, "score": 461113.46084094385 }, { "content": "pub fn compile_script_string_with_stdlib(code: &str) -> Result<CompiledScript> {\n\n compile_script_string_and_assert_no_error(code, stdlib())\n\n}\n\n\n", "file_path": "language/compiler/src/unit_tests/testutils.rs", "rank": 42, "score": 461113.46084094385 }, { "content": "fn bind_exp(context: &mut Context, result: &mut Block, e: H::Exp) -> H::Exp {\n\n if let H::UnannotatedExp_::Unreachable = &e.exp.value {\n\n return e;\n\n }\n\n let loc = e.exp.loc;\n\n let ty = e.ty.clone();\n\n let tmps = make_temps(context, loc, ty.clone());\n\n H::exp(ty, sp(loc, bind_exp_(result, loc, tmps, e)))\n\n}\n\n\n", "file_path": "language/move-lang/src/hlir/translate.rs", "rank": 43, "score": 460850.5619319094 }, { "content": "/// Converts a record into a string representation:\n\n/// UNIX_TIMESTAMP LOG_LEVEL FILE:LINE MESSAGE\n\n/// Example:\n\n/// 2020-03-07 05:03:03 INFO common/libra-logger/src/lib.rs:261 Hello\n\nfn format(entry: &LogEntry) -> Result<String, fmt::Error> {\n\n use std::fmt::Write;\n\n\n\n let mut w = String::new();\n\n write!(\n\n w,\n\n \"{} {} {}\",\n\n entry.metadata.level(),\n\n entry.timestamp,\n\n entry.metadata.location()\n\n )?;\n\n\n\n if let Some(message) = &entry.message {\n\n write!(w, \" {}\", message)?;\n\n }\n\n\n\n if !entry.data.is_empty() {\n\n write!(w, \" {}\", serde_json::to_string(&entry.data).unwrap())?;\n\n }\n\n\n", "file_path": "common/logger/src/libra_logger.rs", "rank": 44, "score": 460543.1498023151 }, { "content": "pub fn coin_tag_parser(coin_tag: &str) -> TypeTag {\n\n type_tag_for_currency_code(\n\n from_currency_code_string(&coin_tag)\n\n .map_err(|err| {\n\n exit_with_error(format!(\"Failed to parse coin_tag {} : {}\", coin_tag, err))\n\n })\n\n .unwrap(),\n\n )\n\n}\n\n\n", "file_path": "client/swiss-knife/src/helpers.rs", "rank": 45, "score": 459996.61558556795 }, { "content": "/// Serializes a string (identifier or user string).\n\n///\n\n/// A `String` gets serialized as follows:\n\n/// - `String` size as a ULEB128\n\n/// - `String` bytes - *exact format to be defined, Rust utf8 right now*\n\nfn serialize_identifier(binary: &mut BinaryData, string: &str) -> 
Result<()> {\n\n let bytes = string.as_bytes();\n\n serialize_identifier_size(binary, bytes.len())?;\n\n for byte in bytes {\n\n binary.push(*byte)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "language/vm/src/serializer.rs", "rank": 46, "score": 459586.36385259 }, { "content": "fn parse_host_port(s: &str) -> Result<(String, u32, Option<u32>)> {\n\n let v = s.split(':').collect::<Vec<&str>>();\n\n if v.len() == 1 {\n\n let default_port = DEFAULT_JSON_RPC_PORT as u32;\n\n return Ok((v[0].to_string(), default_port, None));\n\n }\n\n if v.len() != 2 && v.len() != 3 {\n\n return Err(format_err!(\n\n \"Failed to parse {:?} in host:port or host:port:debug_interface_port format\",\n\n s\n\n ));\n\n }\n\n let host = v[0].to_string();\n\n let port = v[1].parse::<u32>()?;\n\n if v.len() == 3 {\n\n let debug_interface_port = v[2].parse::<u32>()?;\n\n return Ok((host, port, Some(debug_interface_port)));\n\n }\n\n Ok((host, port, None))\n\n}\n", "file_path": "testsuite/cluster-test/src/main.rs", "rank": 47, "score": 459469.65419022494 }, { "content": "fn builtin_function(context: &mut Context, sp!(_, bf_): &N::BuiltinFunction) {\n\n use N::BuiltinFunction_ as B;\n\n match bf_ {\n\n B::MoveTo(bt_opt)\n\n | B::MoveFrom(bt_opt)\n\n | B::BorrowGlobal(_, bt_opt)\n\n | B::Exists(bt_opt)\n\n | B::Freeze(bt_opt) => type_opt(context, bt_opt),\n\n B::Assert => (),\n\n }\n\n}\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 48, "score": 459295.7058391094 }, { "content": "fn exp_dotted(context: &mut Context, sp!(_, ed_): &N::ExpDotted) {\n\n use N::ExpDotted_ as D;\n\n match ed_ {\n\n D::Exp(e) => exp(context, e),\n\n D::Dot(edotted, _) => exp_dotted(context, edotted),\n\n }\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 49, "score": 459295.7058391094 }, { "content": "pub fn instancelist_to_set(instances: &[Instance]) -> HashSet<String> {\n\n let mut r = HashSet::new();\n\n for instance in instances {\n\n r.insert(instance.peer_name().clone());\n\n }\n\n r\n\n}\n\n\n", "file_path": "testsuite/cluster-test/src/instance.rs", "rank": 50, "score": 458852.1114779683 }, { "content": "fn bind(context: &mut Context, sp!(loc, pb_): P::Bind) -> Option<E::LValue> {\n\n use E::LValue_ as EL;\n\n use P::Bind_ as PB;\n\n let b_ = match pb_ {\n\n PB::Var(v) => {\n\n check_valid_local_name(context, &v);\n\n EL::Var(sp(loc, E::ModuleAccess_::Name(v.0)), None)\n\n }\n\n PB::Unpack(ptn, ptys_opt, pfields) => {\n\n let tn = module_access(context, Access::ApplyNamed, ptn)?;\n\n let tys_opt = optional_types(context, ptys_opt);\n\n let vfields: Option<Vec<(Field, E::LValue)>> = pfields\n\n .into_iter()\n\n .map(|(f, pb)| Some((f, bind(context, pb)?)))\n\n .collect();\n\n let fields = fields(context, loc, \"deconstruction binding\", \"binding\", vfields?);\n\n EL::Unpack(tn, tys_opt, fields)\n\n }\n\n };\n\n Some(sp(loc, b_))\n\n}\n\n\n", "file_path": "language/move-lang/src/expansion/translate.rs", "rank": 51, "score": 457701.7062680723 }, { "content": "pub fn function_signature(context: &mut Context, sig: &mut FunctionSignature) {\n\n for (_, st) in &mut sig.parameters {\n\n type_(context, st);\n\n }\n\n type_(context, &mut sig.return_type);\n\n}\n\n\n\n//**************************************************************************************************\n\n// Types\n\n//**************************************************************************************************\n\n\n", "file_path": "language/move-lang/src/typing/expand.rs", "rank": 52, "score": 455781.42752287496 }, { "content": "pub fn 
invariant(cond: bool, msg: String) -> Result<(), Error> {\n\n if !cond {\n\n Err(Error::InvariantViolation(msg))\n\n } else {\n\n Ok(())\n\n }\n\n}\n", "file_path": "config/src/config/error.rs", "rank": 53, "score": 455746.60919646686 }, { "content": "fn access_constant(context: &mut Context, ma: E::ModuleAccess) -> N::Exp_ {\n\n match context.resolve_constant(ma) {\n\n None => {\n\n assert!(context.has_errors());\n\n N::Exp_::UnresolvedError\n\n }\n\n Some((m, c)) => N::Exp_::Constant(m, c),\n\n }\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/translate.rs", "rank": 54, "score": 455001.2257199136 }, { "content": "pub fn parse_address(s: &str) -> Result<Address, String> {\n\n Address::parse_str(s).map_err(|msg| format!(\"Invalid argument to '{}': {}\", SENDER, msg))\n\n}\n", "file_path": "language/move-lang/src/command_line/mod.rs", "rank": 55, "score": 453930.0428524964 }, { "content": "pub fn ir_tests() -> impl Iterator<Item = (String, String)> {\n\n macro_rules! comp_to_string {\n\n ($comp_opt:expr) => {{\n\n $comp_opt.as_os_str().to_str()?\n\n }};\n\n }\n\n let num_root_components = Path::new(PATH_TO_IR_TESTS)\n\n .canonicalize()\n\n .unwrap()\n\n .components()\n\n .map(|_| 1)\n\n .sum();\n\n datatest_stable::utils::iterate_directory(Path::new(PATH_TO_IR_TESTS)).flat_map(move |path| {\n\n if path.extension()?.to_str()? != IR_EXTENSION {\n\n return None;\n\n }\n\n let pathbuf = path.canonicalize().ok()?;\n\n let mut components = pathbuf.components();\n\n // skip over the components pointing to the IR test dir\n\n for _ in 0..num_root_components {\n", "file_path": "language/move-lang/src/test_utils/mod.rs", "rank": 56, "score": 452637.92643905676 }, { "content": "fn display_list_of_items<T, I>(items: I, f: &mut fmt::Formatter) -> fmt::Result\n\nwhere\n\n T: Display,\n\n I: IntoIterator<Item = T>,\n\n{\n\n write!(f, \"[\")?;\n\n let mut items = items.into_iter();\n\n if let Some(x) = items.next() {\n\n write!(f, \"{}\", x)?;\n\n for x in items {\n\n write!(f, \", {}\", x)?;\n\n }\n\n }\n\n write!(f, \"]\")\n\n}\n\n\n\nimpl Display for ContainerRef {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n Self::Local(c) => write!(f, \"({}, {})\", c.rc_count(), c),\n", "file_path": "language/move-vm/types/src/values/values_impl.rs", "rank": 57, "score": 452140.90287265356 }, { "content": "fn optional_types(context: &mut Context, pts_opt: Option<Vec<P::Type>>) -> Option<Vec<E::Type>> {\n\n pts_opt.map(|pts| pts.into_iter().map(|pt| type_(context, pt)).collect())\n\n}\n\n\n", "file_path": "language/move-lang/src/expansion/translate.rs", "rank": 58, "score": 451572.7411154655 }, { "content": "fn function_signature(context: &mut Context, sig: E::FunctionSignature) -> N::FunctionSignature {\n\n let type_parameters = type_parameters(context, sig.type_parameters);\n\n let parameters = sig\n\n .parameters\n\n .into_iter()\n\n .map(|(v, ty)| (v, type_(context, ty)))\n\n .collect();\n\n let return_type = type_(context, sig.return_type);\n\n N::FunctionSignature {\n\n type_parameters,\n\n parameters,\n\n return_type,\n\n }\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/translate.rs", "rank": 59, "score": 450883.08744904236 }, { "content": "pub fn function_body_(context: &mut Context, b_: &mut T::FunctionBody_) {\n\n match b_ {\n\n T::FunctionBody_::Native => (),\n\n T::FunctionBody_::Defined(es) => sequence(context, es),\n\n }\n\n}\n\n\n", "file_path": "language/move-lang/src/typing/expand.rs", "rank": 60, "score": 449931.5769481368 }, { "content": "fn 
function(context: &mut Context, fdef: &N::Function) {\n\n function_signature(context, &fdef.signature);\n\n function_acquires(context, &fdef.acquires);\n\n if let N::FunctionBody_::Defined(seq) = &fdef.body.value {\n\n sequence(context, seq)\n\n }\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 61, "score": 448459.67639096803 }, { "content": "fn sequence(context: &mut Context, sequence: &N::Sequence) {\n\n use N::SequenceItem_ as SI;\n\n for sp!(_, item_) in sequence {\n\n match item_ {\n\n SI::Seq(e) => exp(context, e),\n\n SI::Declare(bl, ty_opt) => {\n\n lvalues(context, &bl.value);\n\n type_opt(context, ty_opt);\n\n }\n\n SI::Bind(bl, e) => {\n\n lvalues(context, &bl.value);\n\n exp(context, e)\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 62, "score": 448459.67639096803 }, { "content": "pub fn validator_pod_name(index: u32) -> String {\n\n format!(\"val-{}\", index)\n\n}\n\n\n", "file_path": "testsuite/cluster-test/src/instance.rs", "rank": 63, "score": 448057.62845006643 }, { "content": "pub fn lsr_pod_name(index: u32) -> String {\n\n format!(\"lsr-{}\", index)\n\n}\n\n\n", "file_path": "testsuite/cluster-test/src/instance.rs", "rank": 64, "score": 448057.62845006643 }, { "content": "pub fn vault_pod_name(index: u32) -> String {\n\n format!(\"vault-{}\", index)\n\n}\n\n\n", "file_path": "testsuite/cluster-test/src/instance.rs", "rank": 65, "score": 448057.62845006643 }, { "content": "pub fn ident_str(s: &str) -> Result<&IdentStr> {\n\n IdentStr::new(s)\n\n}\n\n\n", "file_path": "language/compiler/ir-to-bytecode/src/context.rs", "rank": 66, "score": 448054.24434591015 }, { "content": "fn parse_one<'a, T>(args: &mut impl Iterator<Item = &'a str>) -> Result<T, ParseError>\n\nwhere\n\n T: FromStr,\n\n T::Err: Into<ParseError>,\n\n{\n\n let next_arg = args.next().ok_or(ParseError::UnexpectedEnd)?;\n\n next_arg.parse().map_err(Into::into)\n\n}\n\n\n\nimpl Protocol {\n\n fn parse<'a>(\n\n protocol_type: &str,\n\n args: &mut impl Iterator<Item = &'a str>,\n\n ) -> Result<Protocol, ParseError> {\n\n let protocol = match protocol_type {\n\n \"ip4\" => Protocol::Ip4(parse_one(args)?),\n\n \"ip6\" => Protocol::Ip6(parse_one(args)?),\n\n \"dns\" => Protocol::Dns(parse_one(args)?),\n\n \"dns4\" => Protocol::Dns4(parse_one(args)?),\n\n \"dns6\" => Protocol::Dns6(parse_one(args)?),\n", "file_path": "network/network-address/src/lib.rs", "rank": 67, "score": 444904.35456843325 }, { "content": "fn from_args<P: ExperimentParam>(args: &[String], cluster: &Cluster) -> Box<dyn Experiment>\n\nwhere\n\n P: StructOpt + 'static,\n\n{\n\n let params = P::from_clap(\n\n &P::clap()\n\n .global_setting(AppSettings::NoBinaryName)\n\n .get_matches_from(args),\n\n );\n\n Box::new(params.build(cluster))\n\n}\n\n\n", "file_path": "testsuite/cluster-test/src/experiments/mod.rs", "rank": 68, "score": 443861.4886825487 }, { "content": "fn dotted(context: &mut Context, edot: E::ExpDotted) -> Option<N::ExpDotted> {\n\n let sp!(loc, edot_) = edot;\n\n let nedot_ = match edot_ {\n\n E::ExpDotted_::Exp(e) => {\n\n let ne = exp(context, e);\n\n match &ne.value {\n\n N::Exp_::UnresolvedError => return None,\n\n _ => N::ExpDotted_::Exp(ne),\n\n }\n\n }\n\n E::ExpDotted_::Dot(d, f) => N::ExpDotted_::Dot(Box::new(dotted(context, *d)?), Field(f)),\n\n };\n\n Some(sp(loc, nedot_))\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/translate.rs", "rank": 69, "score": 443857.2129945565 }, { "content": "fn acquires_type(context: &mut Context, sp!(loc, en_): 
E::ModuleAccess) -> Option<StructName> {\n\n use ResolvedType as RT;\n\n use E::ModuleAccess_ as EN;\n\n match en_ {\n\n EN::Name(n) => {\n\n let case = match context.resolve_unscoped_type(&n)? {\n\n RT::BuiltinType => \"builtin type\",\n\n RT::TParam(_, _) => \"type parameter\",\n\n };\n\n context.error(vec![(\n\n loc,\n\n format!(\n\n \"Invalid acquires item. Expected a resource name, but got a {}\",\n\n case\n\n ),\n\n )]);\n\n None\n\n }\n\n EN::ModuleAccess(m, n) => {\n\n let (decl_loc, _, resource_opt) = context.resolve_module_type(loc, &m, &n)?;\n\n acquires_type_struct(context, loc, decl_loc, m, StructName(n), resource_opt)\n\n }\n\n }\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/translate.rs", "rank": 70, "score": 442167.4674371342 }, { "content": "fn function(context: &mut Context, _name: FunctionName, f: E::Function) -> N::Function {\n\n let visibility = f.visibility;\n\n let signature = function_signature(context, f.signature);\n\n let acquires = function_acquires(context, f.acquires);\n\n let body = function_body(context, f.body);\n\n N::Function {\n\n visibility,\n\n signature,\n\n acquires,\n\n body,\n\n }\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/translate.rs", "rank": 71, "score": 441458.8927335076 }, { "content": "fn exps(context: &mut Context, es: Vec<E::Exp>) -> Vec<N::Exp> {\n\n es.into_iter().map(|e| exp_(context, e)).collect()\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/translate.rs", "rank": 72, "score": 441458.89273350756 }, { "content": "fn function_signature(context: &mut Context, sig: &N::FunctionSignature) {\n\n types(context, sig.parameters.iter().map(|(_, st)| st));\n\n type_(context, &sig.return_type)\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 73, "score": 439206.4661863918 }, { "content": "fn function_signature(context: &mut Context, sig: &N::FunctionSignature) {\n\n assert!(context.constraints.is_empty());\n\n\n\n let mut declared = UniqueMap::new();\n\n for (param, param_ty) in &sig.parameters {\n\n let param_ty = core::instantiate(context, param_ty.clone());\n\n context.add_single_type_constraint(\n\n param_ty.loc,\n\n \"Invalid parameter type\",\n\n param_ty.clone(),\n\n );\n\n if let Err(prev_loc) = declared.add(param.clone(), ()) {\n\n context.error(vec![\n\n (\n\n param.loc(),\n\n format!(\"Duplicate parameter with name '{}'\", param),\n\n ),\n\n (prev_loc, \"Previously declared here\".into()),\n\n ]);\n\n }\n\n context.declare_local(param.clone(), Some(param_ty));\n\n }\n\n context.return_type = Some(core::instantiate(context, sig.return_type.clone()));\n\n core::solve_constraints(context);\n\n}\n\n\n", "file_path": "language/move-lang/src/typing/translate.rs", "rank": 74, "score": 438435.5585983043 }, { "content": "fn struct_fields(context: &mut Context, tfields: N::StructFields) -> H::StructFields {\n\n let tfields_map = match tfields {\n\n N::StructFields::Native(loc) => return H::StructFields::Native(loc),\n\n N::StructFields::Defined(m) => m,\n\n };\n\n let mut indexed_fields = tfields_map\n\n .into_iter()\n\n .map(|(f, (idx, t))| (idx, (f, base_type(context, t))))\n\n .collect::<Vec<_>>();\n\n indexed_fields.sort_by(|(idx1, _), (idx2, _)| idx1.cmp(idx2));\n\n H::StructFields::Defined(indexed_fields.into_iter().map(|(_, f_ty)| f_ty).collect())\n\n}\n\n\n\n//**************************************************************************************************\n\n// 
Types\n\n//**************************************************************************************************\n\n\n", "file_path": "language/move-lang/src/hlir/translate.rs", "rank": 75, "score": 437780.63826978236 }, { "content": "fn constant(context: &mut Context, _name: ConstantName, econstant: E::Constant) -> N::Constant {\n\n let E::Constant {\n\n loc,\n\n signature: esignature,\n\n value: evalue,\n\n } = econstant;\n\n let signature = type_(context, esignature);\n\n let value = exp_(context, evalue);\n\n N::Constant {\n\n loc,\n\n signature,\n\n value,\n\n }\n\n}\n\n\n\n//**************************************************************************************************\n\n// Types\n\n//**************************************************************************************************\n\n\n", "file_path": "language/move-lang/src/naming/translate.rs", "rank": 76, "score": 437340.7544626363 }, { "content": "fn types(context: &mut Context, ss: &mut Vec<Type>) {\n\n for st in ss {\n\n type_(context, st);\n\n }\n\n}\n\n\n", "file_path": "language/move-lang/src/typing/expand.rs", "rank": 77, "score": 435567.12822317943 }, { "content": "pub fn format_delim<T: fmt::Display, I: IntoIterator<Item = T>>(items: I, delim: &str) -> String {\n\n items\n\n .into_iter()\n\n .map(|item| format!(\"{}\", item))\n\n .collect::<Vec<_>>()\n\n .join(delim)\n\n}\n\n\n", "file_path": "language/move-lang/src/shared/mod.rs", "rank": 78, "score": 434499.88359057176 }, { "content": "fn assign_list(context: &mut Context, ls: N::LValueList, rvalue_ty: Type) -> T::LValueList {\n\n lvalue_list(context, LValueCase::Assign, ls, Some(rvalue_ty)).1\n\n}\n\n\n", "file_path": "language/move-lang/src/typing/translate.rs", "rank": 79, "score": 434023.1973947643 }, { "content": "fn statement(context: &mut Context, result: &mut Block, e: T::Exp) {\n\n use H::Statement_ as S;\n\n use T::UnannotatedExp_ as TE;\n\n\n\n let ty = e.ty;\n\n let sp!(eloc, e_) = e.exp;\n\n let stmt_ = match e_ {\n\n TE::IfElse(tb, tt, tf) => {\n\n let cond = exp(context, result, None, *tb);\n\n\n\n let mut if_block = Block::new();\n\n let et = exp_(context, &mut if_block, None, *tt);\n\n ignore_and_pop(&mut if_block, et);\n\n\n\n let mut else_block = Block::new();\n\n let ef = exp_(context, &mut else_block, None, *tf);\n\n ignore_and_pop(&mut else_block, ef);\n\n\n\n S::IfElse {\n\n cond,\n", "file_path": "language/move-lang/src/hlir/translate.rs", "rank": 80, "score": 433941.4141059782 }, { "content": "/// Parses each line in the given input as an entry and build global config.\n\npub fn parse_and_build_config(s: &str) -> Result<Config> {\n\n Config::build(&parse_each_line_as::<Entry>(s)?, false)\n\n}\n\n\n", "file_path": "language/testing-infra/functional-tests/src/tests/global_config_tests.rs", "rank": 81, "score": 433456.9733807385 }, { "content": "pub fn error_format(b: &Type, subst: &Subst) -> String {\n\n error_format_impl(b, subst, false)\n\n}\n\n\n", "file_path": "language/move-lang/src/typing/core.rs", "rank": 82, "score": 433379.81772965245 }, { "content": "fn function_body(context: &mut Context, sp!(loc, b_): E::FunctionBody) -> N::FunctionBody {\n\n match b_ {\n\n E::FunctionBody_::Native => sp(loc, N::FunctionBody_::Native),\n\n E::FunctionBody_::Defined(es) => sp(loc, N::FunctionBody_::Defined(sequence(context, es))),\n\n }\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/translate.rs", "rank": 83, "score": 433352.0020581461 }, { "content": "fn lvalue_expected_types(_context: &mut Context, sp!(loc, b_): &T::LValue) -> 
Option<N::Type> {\n\n use N::Type_::*;\n\n use T::LValue_ as L;\n\n let loc = *loc;\n\n match b_ {\n\n L::Ignore => None,\n\n L::Var(_, ty) => Some(*ty.clone()),\n\n L::BorrowUnpack(mut_, m, s, tys, _) => {\n\n let tn = sp(loc, N::TypeName_::ModuleType(m.clone(), s.clone()));\n\n Some(sp(\n\n loc,\n\n Ref(*mut_, Box::new(sp(loc, Apply(None, tn, tys.clone())))),\n\n ))\n\n }\n\n L::Unpack(m, s, tys, _) => {\n\n let tn = sp(loc, N::TypeName_::ModuleType(m.clone(), s.clone()));\n\n Some(sp(loc, Apply(None, tn, tys.clone())))\n\n }\n\n }\n\n}\n\n\n", "file_path": "language/move-lang/src/typing/translate.rs", "rank": 84, "score": 432648.0074971989 }, { "content": "pub fn read_env_var(v: &str) -> String {\n\n std::env::var(v).unwrap_or_else(|_| \"\".into())\n\n}\n\n\n", "file_path": "language/move-lang/src/test_utils/mod.rs", "rank": 85, "score": 432531.7043078762 }, { "content": "pub fn read_env_var(v: &str) -> String {\n\n std::env::var(v).unwrap_or_else(|_| \"\".into())\n\n}\n\n\n", "file_path": "language/move-prover/test-utils/src/lib.rs", "rank": 86, "score": 432531.7043078762 }, { "content": "fn assign_list(context: &mut Context, ls: E::LValueList) -> Option<N::LValueList> {\n\n lvalue_list(context, LValueCase::Assign, ls)\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/translate.rs", "rank": 87, "score": 432255.05310032656 }, { "content": "pub fn struct_ref_instantiation(state: &mut AbstractState) -> Result<Vec<SignatureToken>, VMError> {\n\n let token = state.register_move().unwrap().token;\n\n if let Some(type_actuals) = get_type_actuals_from_reference(&token) {\n\n Ok(type_actuals)\n\n } else {\n\n Err(VMError::new(\"Invalid field borrow\".to_string()))\n\n }\n\n}\n\n\n", "file_path": "language/testing-infra/test-generation/src/transitions.rs", "rank": 88, "score": 432047.7436752972 }, { "content": "fn exp(context: &mut Context, sp!(loc, e_): &N::Exp) {\n\n use N::Exp_ as E;\n\n match e_ {\n\n E::Unit { .. 
}\n\n | E::UnresolvedError\n\n | E::Break\n\n | E::Continue\n\n | E::Spec(_, _)\n\n | E::InferredNum(_)\n\n | E::Value(_)\n\n | E::Constant(None, _)\n\n | E::Move(_)\n\n | E::Copy(_)\n\n | E::Use(_) => (),\n\n\n\n E::Constant(Some(m), _c) => context.add_usage(m, *loc),\n\n E::ModuleCall(m, _, bs_opt, sp!(_, es_)) => {\n\n context.add_usage(m, *loc);\n\n types_opt(context, bs_opt);\n\n es_.iter().for_each(|e| exp(context, e))\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 89, "score": 431274.5908938154 }, { "content": "pub fn postorder_to_inorder(mut node: u64) -> u64 {\n\n // The number of nodes in a full binary tree with height `n` is `2^n - 1`.\n\n let mut full_binary_size = !0u64;\n\n let mut bitmap = 0u64;\n\n for i in (0..64).rev() {\n\n if node >= full_binary_size {\n\n node -= full_binary_size;\n\n bitmap |= 1 << i;\n\n }\n\n full_binary_size >>= 1;\n\n }\n\n let level = node as u32;\n\n let pos = bitmap >> level;\n\n Position::from_level_and_pos(level, pos).to_inorder_index()\n\n}\n", "file_path": "types/src/proof/position/mod.rs", "rank": 90, "score": 430116.2289363985 }, { "content": "fn sequence_item(context: &mut Context, sp!(loc, ns_): E::SequenceItem) -> N::SequenceItem {\n\n use E::SequenceItem_ as ES;\n\n use N::SequenceItem_ as NS;\n\n\n\n let s_ = match ns_ {\n\n ES::Seq(e) => NS::Seq(exp_(context, e)),\n\n ES::Declare(b, ty_opt) => {\n\n let bind_opt = bind_list(context, b);\n\n let tys = ty_opt.map(|t| type_(context, t));\n\n match bind_opt {\n\n None => {\n\n assert!(context.has_errors());\n\n NS::Seq(sp(loc, N::Exp_::UnresolvedError))\n\n }\n\n Some(bind) => NS::Declare(bind, tys),\n\n }\n\n }\n\n ES::Bind(b, e) => {\n\n let bind_opt = bind_list(context, b);\n\n let e = exp_(context, e);\n", "file_path": "language/move-lang/src/naming/translate.rs", "rank": 91, "score": 429486.5245252529 }, { "content": "pub fn error_format_nested(b: &Type, subst: &Subst) -> String {\n\n error_format_impl(b, subst, true)\n\n}\n\n\n", "file_path": "language/move-lang/src/typing/core.rs", "rank": 92, "score": 428682.788711175 }, { "content": "fn compile_constant(_context: &mut Context, ty: Type, value: MoveValue) -> Result<Constant> {\n\n fn type_layout(ty: Type) -> Result<MoveTypeLayout> {\n\n Ok(match ty {\n\n Type::Address => MoveTypeLayout::Address,\n\n Type::Signer => MoveTypeLayout::Signer,\n\n Type::U8 => MoveTypeLayout::U8,\n\n Type::U64 => MoveTypeLayout::U64,\n\n Type::U128 => MoveTypeLayout::U128,\n\n Type::Bool => MoveTypeLayout::Bool,\n\n Type::Vector(inner_type) => MoveTypeLayout::Vector(Box::new(type_layout(*inner_type)?)),\n\n Type::Reference(_, _) => bail!(\"References are not supported in constant type layouts\"),\n\n Type::TypeParameter(_) => {\n\n bail!(\"Type parameters are not supported in constant type layouts\")\n\n }\n\n Type::Struct(_ident, _tys) => {\n\n bail!(\"TODO Structs are not *yet* supported in constant type layouts\")\n\n }\n\n })\n\n }\n\n\n\n Constant::serialize_constant(&type_layout(ty)?, &value)\n\n .ok_or_else(|| format_err!(\"Could not serialize constant\"))\n\n}\n\n\n\n//**************************************************************************************************\n\n// Bytecode\n\n//**************************************************************************************************\n\n\n", "file_path": "language/compiler/ir-to-bytecode/src/compiler.rs", "rank": 93, "score": 428550.5983586189 }, { "content": "/// Given an operation retries it successfully sleeping everytime it fails\n\n/// If the operation succeeds before 
the iterator runs out, it returns success\n\npub fn retry<I, O, T, E>(iterable: I, mut operation: O) -> Result<T, E>\n\nwhere\n\n I: IntoIterator<Item = Duration>,\n\n O: FnMut() -> Result<T, E>,\n\n{\n\n let mut iterator = iterable.into_iter();\n\n loop {\n\n match operation() {\n\n Ok(value) => return Ok(value),\n\n Err(err) => {\n\n if let Some(delay) = iterator.next() {\n\n thread::sleep(delay);\n\n } else {\n\n return Err(err);\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "common/retrier/src/lib.rs", "rank": 94, "score": 427951.29500645486 }, { "content": "/// Print the error and bump up error counter.\n\npub fn report_error(msg: &str, e: Error) {\n\n println!(\"[ERROR] {}: {}\", msg, e);\n\n COUNTER_CLIENT_ERRORS.inc();\n\n}\n\n\n", "file_path": "testsuite/cli/src/commands.rs", "rank": 95, "score": 427509.4537694456 }, { "content": "fn lvalue(context: &mut Context, sp!(loc, a_): &N::LValue) {\n\n use N::LValue_ as L;\n\n if let L::Unpack(m, _, bs_opt, f) = a_ {\n\n context.add_usage(m, *loc);\n\n types_opt(context, bs_opt);\n\n lvalues(context, f.iter().map(|(_, (_, b))| b));\n\n }\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 96, "score": 426727.1788010074 }, { "content": "fn script(context: &mut Context, nscript: N::Script) -> T::Script {\n\n assert!(context.current_script_constants.is_none());\n\n context.current_module = None;\n\n let N::Script {\n\n loc,\n\n constants: nconstants,\n\n function_name,\n\n function: nfunction,\n\n } = nscript;\n\n context.bind_script_constants(&nconstants);\n\n let constants = nconstants.map(|name, c| constant(context, name, c));\n\n let function = function(context, function_name.clone(), nfunction, true);\n\n context.current_script_constants = None;\n\n T::Script {\n\n loc,\n\n function_name,\n\n function,\n\n constants,\n\n }\n\n}\n\n\n", "file_path": "language/move-lang/src/typing/translate.rs", "rank": 97, "score": 425956.2712129198 }, { "content": "fn sequence(context: &mut Context, seq: N::Sequence) -> T::Sequence {\n\n use N::SequenceItem_ as NS;\n\n use T::SequenceItem_ as TS;\n\n\n\n let mut work_queue = VecDeque::new();\n\n let mut resulting_sequence = T::Sequence::new();\n\n\n\n let len = seq.len();\n\n for (idx, sp!(loc, ns_)) in seq.into_iter().enumerate() {\n\n match ns_ {\n\n NS::Seq(ne) => {\n\n let e = exp_(context, ne);\n\n // If it is not the last element\n\n if idx < len - 1 {\n\n context.add_copyable_constraint(\n\n loc,\n\n \"Cannot ignore resource values. The value must be used\",\n\n e.ty.clone(),\n\n )\n\n }\n", "file_path": "language/move-lang/src/typing/translate.rs", "rank": 98, "score": 425956.27121291985 }, { "content": "fn types(context: &mut Context, sp!(_, t_): H::Type) -> Vec<IR::Type> {\n\n use H::Type_ as T;\n\n match t_ {\n\n T::Unit => vec![],\n\n T::Single(st) => vec![single_type(context, st)],\n\n T::Multiple(ss) => ss.into_iter().map(|st| single_type(context, st)).collect(),\n\n }\n\n}\n\n\n\n//**************************************************************************************************\n\n// Commands\n\n//**************************************************************************************************\n\n\n", "file_path": "language/move-lang/src/to_bytecode/translate.rs", "rank": 99, "score": 425220.9454257741 } ]
Rust
src/unwrap.rs
jkfritcher/aes-keywrap-rs
76abb49644769a8a48228227ff3ebd56336f6ddf
use crate::{ types::{Aes128Ecb, Aes192Ecb, Aes256Ecb, AES_BLOCK_LEN, BLOCK_LEN}, }; use block_modes::BlockMode; use thiserror::Error; #[derive(Error, Debug)] pub enum UnwrapKeyError { #[error("Key length must be 16, 24 or 32 octets")] KeyLengthInvalid, #[error("Ciphertext length must be a multiple of {0} octets")] CipherTextInvalidLength(usize), #[error("Ciphertext length can not be longer than {0} octets")] CipherTextLengthTooLong(u32), #[error("Ciphertext length must be atleast {0} octet(s)")] CipherTextLengthTooShort(usize), #[error("Failed to successfully unwrap key")] CipherTextValidationFailure, } pub fn aes_unwrap_with_nopadding(ct: &[u8], key: &[u8]) -> Result<Vec<u8>, UnwrapKeyError> { let mut pt: Vec<u8> = Vec::new(); let ct_len = match ct.len() { ct_len if (ct_len % BLOCK_LEN) > 0 => { return Err(UnwrapKeyError::CipherTextInvalidLength(BLOCK_LEN)); }, ct_len => ct_len, }; let n = match (ct_len / BLOCK_LEN) - 1 { 0 | 1 => { return Err(UnwrapKeyError::CipherTextLengthTooShort(24)); }, n => n, }; pt.resize(ct_len, 0); pt.as_mut_slice().copy_from_slice(ct); let aes_func = match key.len() { 16 => aes128_ecb_decrypt, 24 => aes192_ecb_decrypt, 32 => aes256_ecb_decrypt, _ => return Err(UnwrapKeyError::KeyLengthInvalid), }; unwrap_core(key, n, pt.as_mut_slice(), aes_func); #[allow(non_snake_case)] let A: [u8; BLOCK_LEN] = [0xa6; BLOCK_LEN]; if !constant_time_eq(&pt[0..BLOCK_LEN], &A[0..BLOCK_LEN]) { return Err(UnwrapKeyError::CipherTextValidationFailure); } Ok(pt[BLOCK_LEN..].to_vec()) } pub fn aes_unwrap_with_padding(ct: &[u8], key: &[u8]) -> Result<Vec<u8>, UnwrapKeyError> { let mut pt: Vec<u8> = Vec::new(); let ct_len = match ct.len() { ct_len if (ct_len % BLOCK_LEN) > 0 => { return Err(UnwrapKeyError::CipherTextInvalidLength(BLOCK_LEN)); }, ct_len => ct_len, }; let n = match (ct_len / BLOCK_LEN) - 1 { 0 => { return Err(UnwrapKeyError::CipherTextLengthTooShort(16)); }, n => n, }; pt.resize(ct_len, 0); pt.as_mut_slice().copy_from_slice(ct); let aes_func = match key.len() { 16 => aes128_ecb_decrypt, 24 => aes192_ecb_decrypt, 32 => aes256_ecb_decrypt, _ => return Err(UnwrapKeyError::KeyLengthInvalid), }; unwrap_core(key, n, pt.as_mut_slice(), aes_func); #[allow(non_snake_case)] let A: [u8; 4] = [0xa6, 0x59, 0x59, 0xa6]; if !constant_time_eq(&pt[0..4], &A) { return Err(UnwrapKeyError::CipherTextValidationFailure); } let mli = { let mut mli_bytes: [u8; 4] = Default::default(); mli_bytes[..].copy_from_slice(&pt[4..8]); u32::from_be_bytes(mli_bytes) as usize }; if !(mli > (8 * (n - 1)) && mli <= (8 * n)) { return Err(UnwrapKeyError::CipherTextValidationFailure); } let pad_len = ct_len - mli - BLOCK_LEN; let padding = &pt[(ct_len - pad_len)..]; for pad_byte in padding { if *pad_byte != 0 { return Err(UnwrapKeyError::CipherTextValidationFailure); } } Ok(pt[BLOCK_LEN..(BLOCK_LEN + mli)].to_vec()) } fn constant_time_eq(a: &[u8], b: &[u8]) -> bool { if a.len() != b.len() { return false; } let c = a.iter().zip(b.iter()).fold(0, |acc, (a, b)| acc | (a ^ b)); c == 0 } fn aes128_ecb_decrypt(key: &[u8], data: &mut [u8]) { let cipher = Aes128Ecb::new_from_slices(key, Default::default()).expect("Failed to create AES context"); cipher.decrypt(data).expect("Failed to decrypt data block"); } fn aes192_ecb_decrypt(key: &[u8], data: &mut [u8]) { let cipher = Aes192Ecb::new_from_slices(key, Default::default()).expect("Failed to create AES context"); cipher.decrypt(data).expect("Failed to decrypt data block"); } fn aes256_ecb_decrypt(key: &[u8], data: &mut [u8]) { let cipher = Aes256Ecb::new_from_slices(key, 
Default::default()).expect("Failed to create AES context"); cipher.decrypt(data).expect("Failed to decrypt data block"); } fn unwrap_core<AesEcb>(key: &[u8], n: usize, pt: &mut [u8], aes_ecb_decrypt: AesEcb) where AesEcb: Fn(&[u8], &mut [u8]), { if pt.len() > AES_BLOCK_LEN { let mut tmp: Vec<u8> = vec![0u8; AES_BLOCK_LEN]; tmp[0..BLOCK_LEN].copy_from_slice(&pt[0..BLOCK_LEN]); for j in (0..6).rev() { for i in (1..=n).rev() { let idx = i * BLOCK_LEN; tmp[BLOCK_LEN..].copy_from_slice(&pt[idx..idx + BLOCK_LEN]); let t = ((n * j) + i) as u64; tmp[0..BLOCK_LEN] .iter_mut() .zip(t.to_be_bytes().iter()) .for_each(|(x1, x2)| *x1 ^= *x2); aes_ecb_decrypt(key, &mut tmp); pt[idx..idx + BLOCK_LEN].copy_from_slice(&tmp[BLOCK_LEN..]); } } pt[0..BLOCK_LEN].copy_from_slice(&tmp[0..BLOCK_LEN]); } else { aes_ecb_decrypt(key, pt); } } #[cfg(test)] mod tests { use super::{aes_unwrap_with_nopadding, aes_unwrap_with_padding}; #[test] fn test_unwrap_nopad_invalid_key_length() { let ct = hex!("000102030405060708090a0b0c0d0e0f").to_vec(); let key = hex!("000102030405060708090a0b0c0d0e").to_vec(); let pt = aes_unwrap_with_nopadding(&ct, &key); assert!(pt.is_err(), "Invalid key length erroneously passed"); } #[test] fn test_wrap_pad_invalid_key_length() { let pt = hex!("000102030405060708090a0b0c0d0e0f").to_vec(); let key = hex!("000102030405060708090a0b0c0d0e").to_vec(); let ct = aes_unwrap_with_padding(&pt, &key); assert!(ct.is_err(), "Invalid key length erroneously passed"); } #[test] fn test_unwrap_nopad_16_byte_key_16_byte_data() { let ct = hex!("1FA68B0A8112B447AEF34BD8FB5A7B829D3E862371D2CFE5").to_vec(); let key = hex!("000102030405060708090A0B0C0D0E0F").to_vec(); let pt = aes_unwrap_with_nopadding(&ct, &key); assert!(pt.is_ok(), "Test unexpectantly failed: {:?}", pt); assert_eq!(pt.unwrap(), hex!("00112233445566778899AABBCCDDEEFF").to_vec()); } #[test] fn test_unwrap_nopad_24_byte_key_16_byte_data() { let ct = hex!("96778B25AE6CA435F92B5B97C050AED2468AB8A17AD84E5D").to_vec(); let key = hex!("000102030405060708090A0B0C0D0E0F1011121314151617").to_vec(); let pt = aes_unwrap_with_nopadding(&ct, &key); assert!(pt.is_ok(), "Test unexpectantly failed: {:?}", pt); assert_eq!(pt.unwrap(), hex!("00112233445566778899AABBCCDDEEFF").to_vec()); } #[test] fn test_unwrap_nopad_32_byte_key_16_byte_data() { let ct = hex!("64E8C3F9CE0F5BA263E9777905818A2A93C8191E7D6E8AE7").to_vec(); let key = hex!("000102030405060708090A0B0C0D0E0F101112131415161718191A1B1C1D1E1F").to_vec(); let pt = aes_unwrap_with_nopadding(&ct, &key); assert!(pt.is_ok(), "Test unexpectantly failed: {:?}", pt); assert_eq!(pt.unwrap(), hex!("00112233445566778899AABBCCDDEEFF").to_vec()); } #[test] fn test_unwrap_nopad_24_byte_key_24_byte_data() { let ct = hex!("031D33264E15D33268F24EC260743EDCE1C6C7DDEE725A936BA814915C6762D2").to_vec(); let key = hex!("000102030405060708090A0B0C0D0E0F1011121314151617").to_vec(); let pt = aes_unwrap_with_nopadding(&ct, &key); assert!(pt.is_ok(), "Test unexpectantly failed: {:?}", pt); assert_eq!( pt.unwrap(), hex!("00112233445566778899AABBCCDDEEFF0001020304050607").to_vec() ); } #[test] fn test_unwrap_nopad_32_byte_key_24_byte_data() { let ct = hex!("A8F9BC1612C68B3FF6E6F4FBE30E71E4769C8B80A32CB8958CD5D17D6B254DA1").to_vec(); let key = hex!("000102030405060708090A0B0C0D0E0F101112131415161718191A1B1C1D1E1F").to_vec(); let pt = aes_unwrap_with_nopadding(&ct, &key); assert!(pt.is_ok(), "Test unexpectantly failed: {:?}", pt); assert_eq!( pt.unwrap(), hex!("00112233445566778899AABBCCDDEEFF0001020304050607").to_vec() ); } #[test] fn 
test_unwrap_nopad_32_byte_key_32_byte_data() { let ct = hex!("28C9F404C4B810F4CBCCB35CFB87F8263F5786E2D80ED326CBC7F0E71A99F43BFB988B9B7A02DD21").to_vec(); let key = hex!("000102030405060708090A0B0C0D0E0F101112131415161718191A1B1C1D1E1F").to_vec(); let pt = aes_unwrap_with_nopadding(&ct, &key); assert!(pt.is_ok(), "Test unexpectantly failed: {:?}", pt); assert_eq!( pt.unwrap(), hex!("00112233445566778899AABBCCDDEEFF000102030405060708090A0B0C0D0E0F").to_vec() ); } #[test] fn test_unwrap_pad_24_byte_key_20_byte_data() { let ct = hex!("138bdeaa9b8fa7fc61f97742e72248ee5ae6ae5360d1ae6a5f54f373fa543b6a").to_vec(); let key = hex!("5840df6e29b02af1ab493b705bf16ea1ae8338f4dcc176a8").to_vec(); let pt = aes_unwrap_with_padding(&ct, &key); assert!(pt.is_ok(), "Test unexpectantly failed: {:?}", pt); assert_eq!( pt.unwrap(), hex!("c37b7e6492584340bed12207808941155068f738").to_vec() ); } #[test] fn test_unwrap_pad_24_byte_key_7_byte_data() { let ct = hex!("afbeb0f07dfbf5419200f2ccb50bb24f").to_vec(); let key = hex!("5840df6e29b02af1ab493b705bf16ea1ae8338f4dcc176a8").to_vec(); let pt = aes_unwrap_with_padding(&ct, &key); assert!(pt.is_ok(), "Test unexpectantly failed: {:?}", pt); assert_eq!(pt.unwrap(), hex!("466f7250617369").to_vec()); } }
use crate::{ types::{Aes128Ecb, Aes192Ecb, Aes256Ecb, AES_BLOCK_LEN, BLOCK_LEN}, }; use block_modes::BlockMode; use thiserror::Error; #[derive(Error, Debug)] pub enum UnwrapKeyError { #[error("Key length must be 16, 24 or 32 octets")] KeyLengthInvalid, #[error("Ciphertext length must be a multiple of {0} octets")] CipherTextInvalidLength(usize), #[error("Ciphertext length can not be longer than {0} octets")] CipherTextLengthTooLong(u32), #[error("Ciphertext length must be atleast {0} octet(s)")] CipherTextLengthTooShort(usize), #[error("Failed to successfully unwrap key")] CipherTextValidationFailure, } pub fn aes_unwrap_with_nopadding(ct: &[u8], key: &[u8]) -> Result<Vec<u8>, UnwrapKeyError> { let mut pt: Vec<u8> = Vec::new(); let ct_len = match ct.len() { ct_len if (ct_len % BLOCK_LEN) > 0 => { return Err(UnwrapKeyError::CipherTextInvalidLength(BLOCK_LEN)); }, ct_len => ct_len, }; let n = match (ct_len / BLOCK_LEN) - 1 { 0 | 1 => { return Err(UnwrapKeyError::CipherTextLengthTooShort(24)); }, n => n, }; pt.resize(ct_len, 0); pt.as_mut_slice().copy_from_slice(ct); let aes_func = match key.len() { 16 => aes128_ecb_decrypt, 24 => aes192_ecb_decrypt, 32 => aes256_ecb_decrypt, _ => return Err(UnwrapKeyError::KeyLengthInvalid), }; unwrap_core(key, n, pt.as_mut_slice(), aes_func); #[allow(non_snake_case)] let A: [u8; BLOCK_LEN] = [0xa6; BLOCK_LEN]; if !constant_time_eq(&pt[0..BLOCK_LEN], &A[0..BLOCK_LEN]) { return Err(UnwrapKeyError::CipherTextValidationFailure); } Ok(pt[BLOCK_LEN..].to_vec()) } pub fn aes_unwrap_with_padding(ct: &[u8], key: &[u8]) -> Result<Vec<u8>, UnwrapKeyError> { let mut pt: Vec<u8> = Vec::new(); let ct_len = match ct.len() { ct_len if (ct_len % BLOCK_LEN) > 0 => { return Err(UnwrapKeyError::CipherTextInvalidLength(BLOCK_LEN)); }, ct_len => ct_len, }; let n = match (ct_len / BLOCK_LEN) - 1 { 0 => { return Err(UnwrapKeyError::CipherTextLengthTooShort(16)); }, n => n, }; pt.resize(ct_len, 0); pt.as_mut_slice().copy_from_slice(ct); let aes_func = match key.len() { 16 => aes128_ecb_decrypt, 24 => aes192_ecb_decrypt, 32 => aes256_ecb_decrypt, _ => return Err(UnwrapKeyError::KeyLengthInvalid), }; unwrap_core(key, n, pt.as_mut_slice(), aes_func); #[allow(non_snake_case)] let A: [u8; 4] = [0xa6, 0x59, 0x59, 0xa6]; if !constant_time_eq(&pt[0..4], &A) { return Err(UnwrapKeyError::CipherTextValidationFailure); } let mli = { let mut mli_bytes: [u8; 4] = Default::default(); mli_bytes[..].copy_from_slice(&pt[4..8]); u32::from_be_bytes(mli_bytes) as usize }; if !(mli > (8 * (n - 1)) && mli <= (8 * n)) { return Err(UnwrapKeyError::CipherTextValidationFailure); } let pad_len = ct_len - mli - BLOCK_LEN; let padding = &pt[(ct_len - pad_len)..]; for pad_byte in padding { if *pad_byte != 0 { return Err(UnwrapKeyError::CipherTextValidationFailure); } } Ok(pt[BLOCK_LEN..(BLOCK_LEN + mli)].to_vec()) } fn constant_time_eq(a: &[u8], b: &[u8]) -> bool { if a.len() != b.len() { return false; } let c = a.iter().zip(b.iter()).fold(0, |acc, (a, b)| acc | (a ^ b)); c == 0 } fn aes128_ecb_decrypt(key: &[u8], data: &mut [u8]) { let cipher = Aes128Ecb::new_from_slices(key, Default::default()).expect("Failed to create AES context"); cipher.decrypt(data).expect("Failed to decrypt data block"); } fn aes192_ecb_decrypt(key: &[u8], data: &mut [u8]) { let cipher = Aes192Ecb::new_from_slices(key, Default::default()).expect("Failed to create AES context"); cipher.decrypt(data).expect("Failed to decrypt data block"); } fn aes256_ecb_decrypt(key: &[u8], data: &mut [u8]) { let cipher = Aes256Ecb::new_from_slices(key, 
Default::default()).expect("Failed to create AES context"); cipher.decrypt(data).expect("Failed to decrypt data block"); } fn unwrap_core<AesEcb>(key: &[u8], n: usize, pt: &mut [u8], aes_ecb_decrypt: AesEcb) where AesEcb: Fn(&[u8], &mut [u8]), { if pt.len() > AES_BLOCK_LEN { let mut tmp: Vec<u8> = vec![0u8; AES_BLOCK_LEN]; tmp[0..BLOCK_LEN].copy_from_slice(&pt[0..BLOCK_LEN]); for j in (0..6).rev() { for i in (1..=n).rev() { let idx = i * BLOCK_LEN; tmp[BLOCK_LEN..].copy_from_slice(&pt[idx..idx + BLOCK_LEN]); let t = ((n * j) + i) as u64; tmp[0..BLOCK_LEN] .iter_mut() .zip(t.to_be_bytes().iter()) .for_each(|(x1, x2)| *x1 ^= *x2); aes_ecb_decrypt(key, &mut tmp); pt[idx..idx + BLOCK_LEN].copy_from_slice(&tmp[BLOCK_LEN..]); } } pt[0..BLOCK_LEN].copy_from_slice(&tmp[0..BLOCK_LEN]); } else { aes_ecb_decrypt(key, pt); } } #[cfg(test)] mod tests { use super::{aes_unwrap_with_nopadding, aes_unwrap_with_padding}; #[test] fn test_unwrap_nopad_invalid_key_length() { let ct = hex!("000102030405060708090a0b0c0d0e0f").to_vec(); let key = hex!("000102030405060708090a0b0c0d0e").to_vec(); let pt = aes_unwrap_with_nopadding(&ct, &key); assert!(pt.is_err(), "Invalid key length erroneously passed"); } #[test] fn test_wrap_pad_invalid_key_length() { let pt = hex!("000102030405060708090a0b0c0d0e0f").to_vec(); let key = hex!("000102030405060708090a0b0c0d0e").to_vec(); let ct = aes_unwrap_with_padding(&pt, &key); assert!(ct.is_err(), "Invalid key length erroneously passed"); } #[test] fn test_unwrap_nopad_16_byte_key_16_byte_data() { let ct = hex!("1FA68B0A8112B447AEF34BD8FB5A7B829D3E862371D2CFE5").to_vec(); let key = hex!("000102030405060708090A0B0C0D0E0F").to_vec(); let pt = aes_unwrap_with_nopadding(&ct, &key); assert!(pt.is_ok(), "Test unexpectantly failed: {:?}", pt); assert_eq!(pt.unwrap(), hex!("00112233445566778899AABBCCDDEEFF").to_vec()); } #[test] fn test_unwrap_nopad_24_byte_key_16_byte_data() { let ct = hex!("96778B25AE6CA435F92B5B97C050AED2468AB8A17AD84E5D").to_vec(); let key = hex!("000102030405060708090A0B0C0D0E0F1011121314151617").to_vec(); let pt = aes_unwrap_with_nopadding(&ct, &key); assert!(pt.is_ok(), "Test unexpectantly failed: {:?}", pt); assert_eq!(pt.unwrap(), hex!("00112233445566778899AABBCCDDEEFF").to_vec()); } #[test] fn test_unwrap_nopad_32_byte_key_16_byte_data() { let ct = hex!("64E8C3F9CE0F5BA263E9777905818A2A93C8191E7D6E8AE7").to_vec(); let key = hex!("000102030405060708090A0B0C0D0E0F101112131415161718191A1B1C1D1E1F").to_vec(); let pt = aes_unwrap_with_nopadding(&ct, &key); assert!(pt.is_ok(), "Test unexpectantly failed: {:?}", pt); assert_eq!(pt.unwrap(), hex!("00112233445566778899AABBCCDDEEFF").to_vec()); } #[test] fn test_unwrap_nopad_24_byte_key_24_byte_data() { let ct = hex!("031D33264E15D33268F24EC260743
#[test] fn test_unwrap_nopad_32_byte_key_24_byte_data() { let ct = hex!("A8F9BC1612C68B3FF6E6F4FBE30E71E4769C8B80A32CB8958CD5D17D6B254DA1").to_vec(); let key = hex!("000102030405060708090A0B0C0D0E0F101112131415161718191A1B1C1D1E1F").to_vec(); let pt = aes_unwrap_with_nopadding(&ct, &key); assert!(pt.is_ok(), "Test unexpectantly failed: {:?}", pt); assert_eq!( pt.unwrap(), hex!("00112233445566778899AABBCCDDEEFF0001020304050607").to_vec() ); } #[test] fn test_unwrap_nopad_32_byte_key_32_byte_data() { let ct = hex!("28C9F404C4B810F4CBCCB35CFB87F8263F5786E2D80ED326CBC7F0E71A99F43BFB988B9B7A02DD21").to_vec(); let key = hex!("000102030405060708090A0B0C0D0E0F101112131415161718191A1B1C1D1E1F").to_vec(); let pt = aes_unwrap_with_nopadding(&ct, &key); assert!(pt.is_ok(), "Test unexpectantly failed: {:?}", pt); assert_eq!( pt.unwrap(), hex!("00112233445566778899AABBCCDDEEFF000102030405060708090A0B0C0D0E0F").to_vec() ); } #[test] fn test_unwrap_pad_24_byte_key_20_byte_data() { let ct = hex!("138bdeaa9b8fa7fc61f97742e72248ee5ae6ae5360d1ae6a5f54f373fa543b6a").to_vec(); let key = hex!("5840df6e29b02af1ab493b705bf16ea1ae8338f4dcc176a8").to_vec(); let pt = aes_unwrap_with_padding(&ct, &key); assert!(pt.is_ok(), "Test unexpectantly failed: {:?}", pt); assert_eq!( pt.unwrap(), hex!("c37b7e6492584340bed12207808941155068f738").to_vec() ); } #[test] fn test_unwrap_pad_24_byte_key_7_byte_data() { let ct = hex!("afbeb0f07dfbf5419200f2ccb50bb24f").to_vec(); let key = hex!("5840df6e29b02af1ab493b705bf16ea1ae8338f4dcc176a8").to_vec(); let pt = aes_unwrap_with_padding(&ct, &key); assert!(pt.is_ok(), "Test unexpectantly failed: {:?}", pt); assert_eq!(pt.unwrap(), hex!("466f7250617369").to_vec()); } }
EDCE1C6C7DDEE725A936BA814915C6762D2").to_vec(); let key = hex!("000102030405060708090A0B0C0D0E0F1011121314151617").to_vec(); let pt = aes_unwrap_with_nopadding(&ct, &key); assert!(pt.is_ok(), "Test unexpectantly failed: {:?}", pt); assert_eq!( pt.unwrap(), hex!("00112233445566778899AABBCCDDEEFF0001020304050607").to_vec() ); }
function_block-function_prefixed
[ { "content": "fn wrap_core<AesEcb>(key: &[u8], n: usize, ct: &mut [u8], aes_ecb_encrypt: AesEcb)\n\nwhere\n\n AesEcb: Fn(&[u8], &mut [u8]),\n\n{\n\n if ct.len() > AES_BLOCK_LEN {\n\n // Allocate buffer for operations in loop\n\n // tmp = A | R[i]\n\n let mut tmp: Vec<u8> = vec![0u8; AES_BLOCK_LEN];\n\n\n\n // Copy A into buffer\n\n tmp[0..BLOCK_LEN].copy_from_slice(&ct[0..BLOCK_LEN]);\n\n\n\n for j in 0..6 {\n\n for i in 1..=n {\n\n let idx = i * BLOCK_LEN;\n\n tmp[BLOCK_LEN..].copy_from_slice(&ct[idx..idx + BLOCK_LEN]);\n\n\n\n // B = AES(K, A | R[i])\n\n aes_ecb_encrypt(key, &mut tmp);\n\n\n", "file_path": "src/wrap.rs", "rank": 5, "score": 131383.85951358164 }, { "content": "pub fn aes_wrap_with_padding(pt: &[u8], key: &[u8]) -> Result<Vec<u8>, WrapKeyError> {\n\n #[allow(non_snake_case)]\n\n let mut A: [u8; BLOCK_LEN] = [0xa6, 0x59, 0x59, 0xa6, 0, 0, 0, 0];\n\n let mut ct: Vec<u8> = Vec::new();\n\n let pt_len = match pt.len() {\n\n 0 => { return Err(WrapKeyError::PlainTextLengthTooShort(1)); },\n\n pt_len if pt_len > u32::MAX as usize => {\n\n // The MLI restricts pt.len() to a u32\n\n return Err(WrapKeyError::PlainTextLengthTooLong(u32::MAX));\n\n },\n\n pt_len => pt_len, // Need atleast one octet of plaintext.\n\n };\n\n\n\n // Check for valid key lengths and get func pointer\n\n let aes_func = match key.len() {\n\n 16 => aes128_ecb_encrypt,\n\n 24 => aes192_ecb_encrypt,\n\n 32 => aes256_ecb_encrypt,\n\n _ => return Err(WrapKeyError::KeyLengthInvalid),\n\n };\n", "file_path": "src/wrap.rs", "rank": 7, "score": 115116.56744461448 }, { "content": "pub fn aes_wrap_with_nopadding(pt: &[u8], key: &[u8]) -> Result<Vec<u8>, WrapKeyError> {\n\n #[allow(non_snake_case)]\n\n let A: [u8; BLOCK_LEN] = [0xa6; BLOCK_LEN];\n\n let mut ct: Vec<u8> = Vec::new();\n\n let pt_len = match pt.len() {\n\n pt_len if (pt_len % BLOCK_LEN) > 0 => {\n\n return Err(WrapKeyError::PlainTextInvalidLength(BLOCK_LEN));\n\n },\n\n pt_len => pt_len, // pt should be a multiple of BLOCK_LEN\n\n };\n\n\n\n let n = match pt_len / BLOCK_LEN {\n\n 0 | 1 => { return Err(WrapKeyError::PlainTextLengthTooShort(16)); },\n\n n => n, // pt must be at least 2 blocks in size\n\n };\n\n\n\n // Check for valid key lengths and get func pointer\n\n let aes_func = match key.len() {\n\n 16 => aes128_ecb_encrypt,\n\n 24 => aes192_ecb_encrypt,\n", "file_path": "src/wrap.rs", "rank": 8, "score": 104967.6200336785 }, { "content": "fn aes128_ecb_encrypt(key: &[u8], data: &mut [u8]) {\n\n let cipher = Aes128Ecb::new_from_slices(key, Default::default()).expect(\"Failed to create AES context\");\n\n // Modifies data in place\n\n cipher.encrypt(data, AES_BLOCK_LEN).expect(\"Failed to encrypt data block\");\n\n}\n\n\n", "file_path": "src/wrap.rs", "rank": 9, "score": 102598.25619079672 }, { "content": "fn aes192_ecb_encrypt(key: &[u8], data: &mut [u8]) {\n\n let cipher = Aes192Ecb::new_from_slices(key, Default::default()).expect(\"Failed to create AES context\");\n\n // Modifies data in place\n\n cipher.encrypt(data, AES_BLOCK_LEN).expect(\"Failed to encrypt data block\");\n\n}\n\n\n", "file_path": "src/wrap.rs", "rank": 10, "score": 102598.25619079672 }, { "content": "fn aes256_ecb_encrypt(key: &[u8], data: &mut [u8]) {\n\n let cipher = Aes256Ecb::new_from_slices(key, Default::default()).expect(\"Failed to create AES context\");\n\n // Modifies data in place\n\n cipher.encrypt(data, AES_BLOCK_LEN).expect(\"Failed to encrypt data block\");\n\n}\n\n\n", "file_path": "src/wrap.rs", "rank": 11, "score": 102598.25619079672 }, { "content": "# 
Changelog\n\nAll notable changes to this project will be documented in this file.\n\n\n\nThe format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),\n\nand this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).\n\n\n\n## 0.1.0 (2021-10-02)\n\nInitial release of aes keywrap crate with padded and unpadded wrapping schemes\n", "file_path": "CHANGELOG.md", "rank": 13, "score": 21484.42319386017 }, { "content": "// Copyright (c) 2020,2021, Jason Fritcher <[email protected]>\n\n// All rights reserved.\n\n\n\nuse crate::{\n\n types::{Aes128Ecb, Aes192Ecb, Aes256Ecb, AES_BLOCK_LEN, BLOCK_LEN},\n\n};\n\nuse block_modes::BlockMode;\n\nuse thiserror::Error;\n\n\n\n#[derive(Error, Debug)]\n\npub enum WrapKeyError {\n\n #[error(\"Key length must be 16, 24 or 32 octets\")]\n\n KeyLengthInvalid,\n\n\n\n #[error(\"Plaintext length must be a multiple of {0} octets\")]\n\n PlainTextInvalidLength(usize),\n\n\n\n #[error(\"Plaintext length can not be longer than {0} octets\")]\n\n PlainTextLengthTooLong(u32),\n\n\n\n #[error(\"Plaintext length must be atleast {0} octet(s)\")]\n\n PlainTextLengthTooShort(usize),\n\n}\n\n\n", "file_path": "src/wrap.rs", "rank": 25, "score": 18.347831107578465 }, { "content": " // A = MSB(64, B) ^ t where t = (n*j)+i\n\n let t = ((n * j) + i) as u64;\n\n tmp[0..BLOCK_LEN]\n\n .iter_mut()\n\n .zip(t.to_be_bytes().iter())\n\n .for_each(|(x1, x2)| *x1 ^= *x2);\n\n\n\n // R[i] = LSB(64, B)\n\n ct[idx..idx + BLOCK_LEN].copy_from_slice(&tmp[BLOCK_LEN..]);\n\n }\n\n\n\n // Put A into output\n\n ct[0..BLOCK_LEN].copy_from_slice(&tmp[0..BLOCK_LEN]);\n\n }\n\n } else {\n\n aes_ecb_encrypt(key, ct);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "src/wrap.rs", "rank": 26, "score": 17.12459647803933 }, { "content": "mod tests {\n\n use super::{aes_wrap_with_nopadding, aes_wrap_with_padding};\n\n\n\n #[test]\n\n fn test_wrap_nopad_invalid_key_length() {\n\n let pt = hex!(\"000102030405060708090a0b0c0d0e0f\").to_vec();\n\n let key = hex!(\"000102030405060708090a0b0c0d0e\").to_vec();\n\n let ct = aes_wrap_with_nopadding(&pt, &key);\n\n assert!(ct.is_err(), \"Invalid key length erroneously passed\");\n\n }\n\n\n\n #[test]\n\n fn test_wrap_pad_invalid_key_length() {\n\n let pt = hex!(\"000102030405060708090a0b0c0d0e0f\").to_vec();\n\n let key = hex!(\"000102030405060708090a0b0c0d0e\").to_vec();\n\n let ct = aes_wrap_with_padding(&pt, &key);\n\n assert!(ct.is_err(), \"Invalid key length erroneously passed\");\n\n }\n\n\n\n //\n", "file_path": "src/wrap.rs", "rank": 27, "score": 16.387430858685473 }, { "content": "// Copyright (c) 2020,2021, Jason Fritcher <[email protected]>\n\n// All rights reserved.\n\n\n\npub(crate) use aes::{Aes128, Aes192, Aes256};\n\npub(crate) use block_modes::{block_padding::NoPadding, Ecb};\n\n\n\n// create aliases for convenience\n\npub(crate) type Aes128Ecb = Ecb<Aes128, NoPadding>;\n\npub(crate) type Aes192Ecb = Ecb<Aes192, NoPadding>;\n\npub(crate) type Aes256Ecb = Ecb<Aes256, NoPadding>;\n\n\n\npub(crate) const BLOCK_LEN: usize = 8;\n\npub(crate) const AES_BLOCK_LEN: usize = 16;\n", "file_path": "src/types.rs", "rank": 28, "score": 13.295644470690101 }, { "content": "// Copyright (c) 2020,2021, Jason Fritcher <[email protected]>\n\n// All rights reserved.\n\n\n\n#[cfg(test)]\n\n#[macro_use]\n\nextern crate hex_literal;\n\n\n\nmod types;\n\nmod unwrap;\n\nmod wrap;\n\n\n\npub use unwrap::{UnwrapKeyError, aes_unwrap_with_nopadding, aes_unwrap_with_padding};\n\npub use wrap::{WrapKeyError, aes_wrap_with_nopadding, 
aes_wrap_with_padding};\n", "file_path": "src/lib.rs", "rank": 29, "score": 12.65907072587951 }, { "content": " let key = hex!(\"5840df6e29b02af1ab493b705bf16ea1ae8338f4dcc176a8\").to_vec();\n\n let ct = aes_wrap_with_padding(&pt, &key);\n\n assert!(ct.is_ok(), \"Test unexpectantly failed: {:?}\", ct);\n\n assert_eq!(\n\n ct.unwrap(),\n\n hex!(\"138bdeaa9b8fa7fc61f97742e72248ee5ae6ae5360d1ae6a5f54f373fa543b6a\").to_vec()\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_wrap_pad_24_byte_key_7_byte_data() {\n\n let pt = hex!(\"466f7250617369\").to_vec();\n\n let key = hex!(\"5840df6e29b02af1ab493b705bf16ea1ae8338f4dcc176a8\").to_vec();\n\n let ct = aes_wrap_with_padding(&pt, &key);\n\n assert!(ct.is_ok(), \"Test unexpectantly failed: {:?}\", ct);\n\n assert_eq!(\n\n ct.unwrap(),\n\n hex!(\"afbeb0f07dfbf5419200f2ccb50bb24f\").to_vec()\n\n );\n\n }\n\n}\n", "file_path": "src/wrap.rs", "rank": 30, "score": 12.627384862292368 }, { "content": "\n\n // Set MLI in A\n\n let pt_len_u32 = pt_len as u32;\n\n A[4..].copy_from_slice(&pt_len_u32.to_be_bytes());\n\n\n\n // Calculate padded length\n\n let padded_len = pt_len\n\n + match pt_len % BLOCK_LEN {\n\n 0 => 0,\n\n n => BLOCK_LEN - n,\n\n };\n\n let n = padded_len / BLOCK_LEN;\n\n\n\n // Allocate ct to the required size\n\n ct.resize(A.len() + padded_len, 0);\n\n\n\n // Because we're encrypting in place, copy A and pt into ct\n\n // Padding happens automatically if pt isn't a block length\n\n ct[..BLOCK_LEN].copy_from_slice(&A);\n\n ct[BLOCK_LEN..BLOCK_LEN + pt_len].copy_from_slice(pt);\n\n\n\n // Wrap the key into ct\n\n wrap_core(key, n, ct.as_mut_slice(), aes_func);\n\n\n\n Ok(ct)\n\n}\n\n\n", "file_path": "src/wrap.rs", "rank": 31, "score": 11.872248019266465 }, { "content": " // RFC3394 Test Vectors\n\n //\n\n #[test]\n\n fn test_wrap_nopad_16_byte_key_16_byte_data() {\n\n let pt = hex!(\"00112233445566778899AABBCCDDEEFF\").to_vec();\n\n let key = hex!(\"000102030405060708090A0B0C0D0E0F\").to_vec();\n\n let ct = aes_wrap_with_nopadding(&pt, &key);\n\n assert!(ct.is_ok(), \"Test unexpectantly failed: {:?}\", ct);\n\n assert_eq!(\n\n ct.unwrap(),\n\n hex!(\"1FA68B0A8112B447AEF34BD8FB5A7B829D3E862371D2CFE5\").to_vec()\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_wrap_nopad_24_byte_key_16_byte_data() {\n\n let pt = hex!(\"00112233445566778899AABBCCDDEEFF\").to_vec();\n\n let key = hex!(\"000102030405060708090A0B0C0D0E0F1011121314151617\").to_vec();\n\n let ct = aes_wrap_with_nopadding(&pt, &key);\n\n assert!(ct.is_ok(), \"Test unexpectantly failed: {:?}\", ct);\n", "file_path": "src/wrap.rs", "rank": 32, "score": 10.480750333006647 }, { "content": " }\n\n\n\n #[test]\n\n fn test_wrap_nopad_32_byte_key_32_byte_data() {\n\n let pt = hex!(\"00112233445566778899AABBCCDDEEFF000102030405060708090A0B0C0D0E0F\").to_vec();\n\n let key = hex!(\"000102030405060708090A0B0C0D0E0F101112131415161718191A1B1C1D1E1F\").to_vec();\n\n let ct = aes_wrap_with_nopadding(&pt, &key);\n\n assert!(ct.is_ok(), \"Test unexpectantly failed: {:?}\", ct);\n\n assert_eq!(\n\n ct.unwrap(),\n\n hex!(\"28C9F404C4B810F4CBCCB35CFB87F8263F5786E2D80ED326CBC7F0E71A99F43BFB988B9B7A02DD21\").to_vec()\n\n );\n\n }\n\n\n\n //\n\n // RFC5649 Test Vectors\n\n //\n\n #[test]\n\n fn test_wrap_pad_24_byte_key_20_byte_data() {\n\n let pt = hex!(\"c37b7e6492584340bed12207808941155068f738\").to_vec();\n", "file_path": "src/wrap.rs", "rank": 33, "score": 10.235940704133284 }, { "content": " let pt = hex!(\"00112233445566778899AABBCCDDEEFF0001020304050607\").to_vec();\n\n let key = 
hex!(\"000102030405060708090A0B0C0D0E0F1011121314151617\").to_vec();\n\n let ct = aes_wrap_with_nopadding(&pt, &key);\n\n assert!(ct.is_ok(), \"Test unexpectantly failed: {:?}\", ct);\n\n assert_eq!(\n\n ct.unwrap(),\n\n hex!(\"031D33264E15D33268F24EC260743EDCE1C6C7DDEE725A936BA814915C6762D2\").to_vec()\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_wrap_nopad_32_byte_key_24_byte_data() {\n\n let pt = hex!(\"00112233445566778899AABBCCDDEEFF0001020304050607\").to_vec();\n\n let key = hex!(\"000102030405060708090A0B0C0D0E0F101112131415161718191A1B1C1D1E1F\").to_vec();\n\n let ct = aes_wrap_with_nopadding(&pt, &key);\n\n assert!(ct.is_ok(), \"Test unexpectantly failed: {:?}\", ct);\n\n assert_eq!(\n\n ct.unwrap(),\n\n hex!(\"A8F9BC1612C68B3FF6E6F4FBE30E71E4769C8B80A32CB8958CD5D17D6B254DA1\").to_vec()\n\n );\n", "file_path": "src/wrap.rs", "rank": 34, "score": 9.722262693556461 }, { "content": " assert_eq!(\n\n ct.unwrap(),\n\n hex!(\"96778B25AE6CA435F92B5B97C050AED2468AB8A17AD84E5D\").to_vec()\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_wrap_nopad_32_byte_key_16_byte_data() {\n\n let pt = hex!(\"00112233445566778899AABBCCDDEEFF\").to_vec();\n\n let key = hex!(\"000102030405060708090A0B0C0D0E0F101112131415161718191A1B1C1D1E1F\").to_vec();\n\n let ct = aes_wrap_with_nopadding(&pt, &key);\n\n assert!(ct.is_ok(), \"Test unexpectantly failed: {:?}\", ct);\n\n assert_eq!(\n\n ct.unwrap(),\n\n hex!(\"64E8C3F9CE0F5BA263E9777905818A2A93C8191E7D6E8AE7\").to_vec()\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_wrap_nopad_24_byte_key_24_byte_data() {\n", "file_path": "src/wrap.rs", "rank": 35, "score": 9.644885472764212 }, { "content": " 32 => aes256_ecb_encrypt,\n\n _ => return Err(WrapKeyError::KeyLengthInvalid),\n\n };\n\n\n\n // Allocate ct to the proper size\n\n ct.resize(A.len() + pt_len, 0);\n\n\n\n // Because we're encrypting in place, copy A and pt into ct\n\n ct[..BLOCK_LEN].copy_from_slice(&A);\n\n ct[BLOCK_LEN..].copy_from_slice(pt);\n\n\n\n // Wrap the key into ct\n\n wrap_core(key, n, ct.as_mut_slice(), aes_func);\n\n\n\n Ok(ct)\n\n}\n\n\n", "file_path": "src/wrap.rs", "rank": 36, "score": 9.385312776443747 } ]
Rust
lang/src/compiler/path.rs
leops/hatchet
4f788737551b3cf74c06c8fae24b5b20be420ef4
use std::collections::VecDeque; use std::borrow::Borrow; use hct::ast::Path; use super::builder::*; use super::function::*; use super::scope::Scope; use super::types::*; use atom::*; pub fn resolve_path<'a, P: Borrow<Path>>(path: P, scope: &Scope<'a>, builder: &mut Builder) -> ValueRef { let path = path.borrow(); match *path { Path::Deref(ref obj, ref prop) => { let res = resolve_path(obj.borrow(), scope, builder); if res.ty == TypeId::Entity { res } else if let TypeId::Object { ref items } = res.ty { let idx = { items.binary_search_by(|&(ref item, _)| item.cmp(prop)) .expect(&format!("key \"{}\" not found in object", prop)) }; let (_, ref ty) = items[idx]; let zero = builder.build_const_i32(0i32); let idx = builder.build_const_i32(idx as i32); let gep = builder.build_in_bounds_gep( &res, &[ zero, idx, ], ); let res = builder.build_load(&gep); ValueRef { ty: ty.clone(), ptr: res.ptr } } else { panic!("trying to deref a non-map value {}", *path) } }, Path::Instance(ref obj) => resolve_path(obj.borrow(), scope, builder), Path::Binding(ref name) => { match scope.binding(builder, name) { Some(val) => val, None => if name.starts_with('@') { builder.build_const_entity(name) } else { panic!("entity {} not found", *path) } } }, } } type Trigger<'a> = (Option<ValueRef>, Option<ValueRef>, Option<Atom>); fn break_trigger<'a>(path: Path, scope: &Scope<'a>, builder: &mut Builder) -> Trigger<'a> { match path { Path::Deref(obj, prop) => match break_trigger(*obj, scope, builder) { (a, Some(b), None) => (a, Some(b), Some(prop)), (a, None, None) => (a, Some(builder.build_const_entity(prop)), None), path => panic!("invalid path {:?}", path), }, Path::Instance(pat) => (Some(resolve_path(pat, scope, builder)), None, None), Path::Binding(_) => (None, Some(resolve_path(&path, scope, builder)), None), } } pub fn event<'a>(path: Path, scope: &Scope<'a>, builder: &mut Builder) -> (ValueRef, ValueRef) { match break_trigger(path, scope, builder) { (Some(inst), Some(ent), Some(method)) => { let method = builder.build_const_atom(method); let method = call_stl( builder, hct_atom!("get_instance"), vec![ &ent, &method ], ); (inst, method) }, (None, Some(ent), Some(method)) => ( ent, builder.build_const_atom(method), ), (Some(inst), Some(ent), None) => { let method = builder.build_const_atom(hct_atom!("Trigger")); let method = call_stl( builder, hct_atom!("get_instance"), vec![ &ent, &method ], ); (inst, method) }, (None, Some(ent), None) => ( ent, builder.build_const_atom(hct_atom!("Trigger")), ), path => panic!("invalid path {:?}", path), } } pub fn unwind_path(path: Path) -> VecDeque<Atom> { match path { Path::Deref(obj, prop) => { let mut res = unwind_path(*obj); res.push_back(prop); res }, Path::Binding(name) => { let mut res = VecDeque::new(); res.push_back(name); res }, Path::Instance(_) => unimplemented!(), } }
use std::collections::VecDeque; use std::borrow::Borrow; use hct::ast::Path; use super::builder::*; use super::function::*; use super::scope::Scope; use super::types::*; use atom::*; pub fn resolve_path<'a, P: Borrow<Path>>(path: P, scope: &Scope<'a>, builder: &mut Builder) -> ValueRef { let path = path.borrow(); match *path { Path::Deref(ref obj, ref prop) => { let res = resolve_path(obj.borrow(), scope, builder); if res.ty == TypeId::Entity { res } else if let TypeId::Object { ref items } = res.ty { let idx = { items.binary_search_by(|&(ref item, _)| item.cmp(prop)) .expect(&format!("key \"{}\" not found in object", prop)) }; let (_, ref ty) = items[idx]; let zero = builder.build_const_i32(0i32); let idx = builder.build_const_i32(idx as i32); let gep = builder.build_in_bounds_gep( &res, &[ zero, idx, ], ); let res = builder.build_load(&gep); ValueRef { ty: ty.clone(), ptr: res.ptr } } else { panic!("trying to deref a non-map value {}", *path) } }, Path::Instance(ref obj) => resolve_path(obj.borrow(), scope, builder), Path::Binding(ref name) => { match scope.binding(builder, name) { Some(val) => val, None => if name.starts_with('@') { builder.build_const_entity(name) } else { panic!("entity {} not found", *path) } } }, } } type Trigger<'a> = (Option<ValueRef>, Option<ValueRef>, Option<Atom>); fn break_trigger<'a>(path: Path, scope: &Scope<'a>, builder: &mut Builder) -> Trigger<'a> { match path { Path::Deref(obj, prop) =>
, Path::Instance(pat) => (Some(resolve_path(pat, scope, builder)), None, None), Path::Binding(_) => (None, Some(resolve_path(&path, scope, builder)), None), } } pub fn event<'a>(path: Path, scope: &Scope<'a>, builder: &mut Builder) -> (ValueRef, ValueRef) { match break_trigger(path, scope, builder) { (Some(inst), Some(ent), Some(method)) => { let method = builder.build_const_atom(method); let method = call_stl( builder, hct_atom!("get_instance"), vec![ &ent, &method ], ); (inst, method) }, (None, Some(ent), Some(method)) => ( ent, builder.build_const_atom(method), ), (Some(inst), Some(ent), None) => { let method = builder.build_const_atom(hct_atom!("Trigger")); let method = call_stl( builder, hct_atom!("get_instance"), vec![ &ent, &method ], ); (inst, method) }, (None, Some(ent), None) => ( ent, builder.build_const_atom(hct_atom!("Trigger")), ), path => panic!("invalid path {:?}", path), } } pub fn unwind_path(path: Path) -> VecDeque<Atom> { match path { Path::Deref(obj, prop) => { let mut res = unwind_path(*obj); res.push_back(prop); res }, Path::Binding(name) => { let mut res = VecDeque::new(); res.push_back(name); res }, Path::Instance(_) => unimplemented!(), } }
match break_trigger(*obj, scope, builder) { (a, Some(b), None) => (a, Some(b), Some(prop)), (a, None, None) => (a, Some(builder.build_const_entity(prop)), None), path => panic!("invalid path {:?}", path), }
if_condition
[ { "content": "/// Create a call to an STL function\n\npub fn call_stl<'a, A>(builder: &mut Builder, name: Atom, args: A) -> ValueRef\n\n where A: IntoIterator<Item=&'a ValueRef>, Type: 'a, Value: 'a {\n\n let mut args = args.into_iter().peekable();\n\n\n\n match name {\n\n hct_atom!(\"length\") => {\n\n let sum = {\n\n args.fold(None, |prev: Option<ValueRef>, val| {\n\n let two = builder.build_const_f64(2.0);\n\n let pow = call_stl(\n\n builder,\n\n hct_atom!(\"pow\"),\n\n vec![ val, &two ],\n\n );\n\n\n\n if let Some(prev) = prev {\n\n Some(builder.build_fadd(&prev, &pow))\n\n } else {\n\n Some(pow)\n\n }\n", "file_path": "lang/src/compiler/function.rs", "rank": 2, "score": 268611.44924542174 }, { "content": "/// Execute an AST Call node\n\npub fn call<'a>(Call { path, args }: Call, scope: &Scope<'a>, builder: &mut Builder) -> ValueRef {\n\n let args = {\n\n args.into_iter()\n\n .map(|arg| expression(arg, scope, builder))\n\n .collect::<Vec<_>>()\n\n };\n\n\n\n if let Some((from, trigger)) = scope.event() {\n\n if from.ty == TypeId::Entity {\n\n let (entity, method) = event(path, scope, builder);\n\n let delay = scope.delay().unwrap_or_else(|| builder.build_const_f64(0.0));\n\n let arg = {\n\n args.get(0)\n\n .map(|val| match val.ty {\n\n TypeId::String => val.clone(),\n\n TypeId::f64 => call_stl(\n\n builder,\n\n hct_atom!(\"to_string\"),\n\n vec![ val ],\n\n ),\n", "file_path": "lang/src/compiler/function.rs", "rank": 3, "score": 267549.18798743497 }, { "content": "/// Compute the return value of an expression\n\npub fn expression<'a>(exp: Expression, scope: &Scope<'a>, builder: &mut Builder) -> ValueRef {\n\n match exp {\n\n Expression::Call(val) => call(val, scope, builder),\n\n\n\n Expression::Reference(Path::Binding(name)) => {\n\n scope.binding(builder, &name)\n\n .expect(&format!(\"value \\\"{}\\\" not found\", name))\n\n },\n\n Expression::Reference(Path::Deref(box obj, prop)) => {\n\n let obj = expression(Expression::Reference(obj), scope, builder);\n\n match &obj.ty {\n\n &TypeId::Object { ref items } => {\n\n let idx = {\n\n items.binary_search_by(|&(ref item, _)| item.cmp(&prop))\n\n .expect(&format!(\"key \\\"{}\\\" not found in object\", prop))\n\n };\n\n\n\n let (_, ref ty) = items[idx];\n\n let zero = builder.build_const_i32(0i32);\n\n let idx = builder.build_const_i32(idx as i32);\n", "file_path": "lang/src/compiler/expression.rs", "rank": 4, "score": 249493.2348423067 }, { "content": "/// Execute a list of script statements in order\n\npub fn statements<'a>(list: Vec<Statement>, mut scope: Scope<'a>, builder: &mut Builder) {\n\n // Hoist the entity declarations to the top of the block\n\n for stmt in &list {\n\n match *stmt {\n\n // Register the new Relay entities\n\n Statement::Relay { ref name, .. } => {\n\n let value = builder.build_const_entity(name);\n\n scope.set_binding(builder, name.clone(), &value);\n\n\n\n builder.add_entity(name.clone(), Entity {\n\n classname: hct_atom!(\"logic_relay\"),\n\n targetname: Some(name.clone()),\n\n .. Default::default()\n\n });\n\n },\n\n\n\n // Insert the logic_auto entity if needed\n\n // The empty string can be used as a name without collision risk,\n\n // as empty buildernames are filtered out when the AST is built\n\n Statement::Auto { .. 
} => {\n", "file_path": "lang/src/compiler/statements.rs", "rank": 6, "score": 206990.5664010885 }, { "content": "/// Parses a script file, returning its AST\n\npub fn parse_file<P>(path: P) -> Script where P: AsRef<Path> {\n\n let mut f = File::open(path).expect(\"could not open file\");\n\n let mut s = String::new();\n\n f.read_to_string(&mut s).expect(\"could not read file\");\n\n\n\n let parse_start = timer_start!();\n\n\n\n let res = match script(&s) {\n\n IResult::Done(rem, script) => {\n\n if log_enabled!(Trace) {\n\n trace!(\"Parser output:\");\n\n print_script(&script).unwrap();\n\n }\n\n\n\n assert_eq!(skip_whitespace(rem), \"\");\n\n script\n\n },\n\n IResult::Error => panic!(\"hatchet parser error\"),\n\n };\n\n\n\n timer_end!(parse_start, time, \"Parsing time: {}\", time);\n\n\n\n res\n\n}\n", "file_path": "lang/src/hct/mod.rs", "rank": 7, "score": 197620.28749048282 }, { "content": "pub fn path_fmt<P: AsRef<StdPath>>(path: P) -> String {\n\n if log_enabled!(Trace) {\n\n path.as_ref().display().to_string()\n\n } else {\n\n path.as_ref().file_name().unwrap().to_string_lossy().to_string()\n\n }\n\n}\n\n\n", "file_path": "lang/src/logging.rs", "rank": 8, "score": 196070.89882842213 }, { "content": "// Parse a VMF file, returning an IR representation\n\npub fn parse_file<P>(path: P) -> MapFile where P: AsRef<Path> {\n\n let mut f = File::open(path).expect(\"could not open file\");\n\n let mut s = String::new();\n\n f.read_to_string(&mut s).expect(\"could not read file\");\n\n\n\n let parse_start = timer_start!();\n\n let ast = parse(&s).unwrap();\n\n\n\n let trans_start = timer_chain!(parse_start, time, \"Parsing time: {}\", time);\n\n let res = MapFile::from_ast(ast);\n\n\n\n timer_end!(trans_start, time, \"Loading time: {}\", time);\n\n res\n\n}\n", "file_path": "lang/src/vmf/mod.rs", "rank": 9, "score": 194433.68987554684 }, { "content": "pub fn global_name<T: Debug, N: Deref<Target=str>>(ty: &T, name: &N) -> String {\n\n format!(\n\n \"{:?}_{}\", ty,\n\n name.chars()\n\n .filter(|chr| match *chr {\n\n 'a'...'z' | 'A'...'Z' | '0'...'9' | '_' | '-' => true,\n\n _ => false,\n\n })\n\n .collect::<String>()\n\n )\n\n}\n\n\n\nimpl From<Atom> for Global {\n\n fn from(value: Atom) -> Global {\n\n Global::Atom(value)\n\n }\n\n}\n\n\n\nimpl From<String> for Global {\n\n fn from(value: String) -> Global {\n\n Global::String(value)\n\n }\n\n}\n", "file_path": "lang/src/compiler/types.rs", "rank": 11, "score": 183458.67587854736 }, { "content": "#[inline]\n\nfn fold_path(path: Option<Path>, name: Atom) -> Option<Path> {\n\n let name = Atom::from(name);\n\n Some(match path {\n\n Some(p) => Path::Deref(box p, name),\n\n None => Path::Binding(name),\n\n })\n\n}\n\n\n\nnamed!(\n\n path -> Path,\n\n do_parse!(\n\n inst: option!(terminated!(name_chain, punct!(\":\"))) >>\n\n pat: name_chain >>\n\n ({\n\n pat.into_iter()\n\n .fold(\n\n inst.into_iter()\n\n .flat_map(|v| v)\n\n .fold(None, fold_path)\n\n .map(|p| Path::Instance(box p)),\n", "file_path": "lang/src/hct/parser.rs", "rank": 13, "score": 172552.96865144567 }, { "content": "/// Find an instance file relative to a base file by walking up the fs tree\n\n/// Slightly differs from the algorithm used in VBSP, but should work for most cases\n\n/// cf. 
https://github.com/ValveSoftware/source-sdk-2013/blob/master/mp/src/utils/vbsp/map.cpp#L1904\n\nfn find_instance<P: Copy + AsRef<Path>>(mut base: PathBuf, target: P) -> Option<(PathBuf, PathBuf)> {\n\n while let Some(dir) = base.clone().parent() {\n\n let file = dir.join(target);\n\n if file.exists() {\n\n return Some((base, file));\n\n } else {\n\n base = PathBuf::from(dir);\n\n }\n\n }\n\n\n\n None\n\n}\n\n\n", "file_path": "lang/src/main.rs", "rank": 14, "score": 168878.89735361608 }, { "content": "pub fn name(input: &str) -> IResult<&str, Atom> {\n\n let input = skip_whitespace(input);\n\n let input_length = input.len();\n\n if input_length == 0 {\n\n return IResult::Error\n\n }\n\n\n\n for (idx, item) in input.chars().enumerate() {\n\n match item {\n\n 'a'...'z' | 'A'...'Z' | '0'...'9' | '_' | '-' | '$' | '@' => {},\n\n _ => {\n\n if idx == 0 {\n\n return IResult::Error\n\n } else {\n\n return IResult::Done(&input[idx..], Atom::from(&input[..idx]))\n\n }\n\n },\n\n }\n\n }\n\n\n", "file_path": "lang/src/hct/parser.rs", "rank": 15, "score": 153231.21574584115 }, { "content": "type LinkResult = (LLVMExecutionEngineRef, extern \"C\" fn(&mut Context) -> (), Context);\n", "file_path": "lang/src/compiler/mod.rs", "rank": 16, "score": 120112.27971197337 }, { "content": "fn codegen(name: &str, script: ast::Script, entities: HashMap<Atom, Entity>) -> BuilderResult {\n\n let mut builder = Builder::new(name, entities);\n\n\n\n let scope = Scope::root(&mut builder);\n\n statements(\n\n script.body,\n\n scope,\n\n &mut builder,\n\n );\n\n\n\n builder.finalize()\n\n}\n\n\n", "file_path": "lang/src/compiler/mod.rs", "rank": 17, "score": 113871.981018469 }, { "content": "fn print_path(fmt: &mut Terminal<Output=io::Stdout>, path: &Path, is_func: bool) -> TermResult<()> {\n\n match *path {\n\n Path::Deref(ref obj, ref prop) => {\n\n print_path(fmt, obj, false)?;\n\n if let Path::Instance(_) = **obj {} else {\n\n write!(fmt, \".\")?;\n\n }\n\n print_colored(fmt, prop, if is_func { ColorType::Function } else { ColorType::Name })\n\n },\n\n Path::Instance(ref pat) => write_col!(fmt, Path(pat, false), \":\"),\n\n Path::Binding(ref name) => {\n\n print_colored(fmt, name, if is_func { ColorType::Function } else { ColorType::Name })\n\n },\n\n }\n\n}\n\n\n", "file_path": "lang/src/logging.rs", "rank": 18, "score": 112414.82158480925 }, { "content": "fn print_colored<T: Display>(fmt: &mut Terminal<Output=io::Stdout>, val: T, color: ColorType) -> TermResult<()> {\n\n fmt.fg(match color {\n\n ColorType::Number => YELLOW,\n\n ColorType::String => GREEN,\n\n ColorType::Function => CYAN,\n\n ColorType::Name => RED,\n\n ColorType::Keyword => MAGENTA,\n\n })?;\n\n\n\n write!(fmt, \"{}\", val)?;\n\n\n\n fmt.fg(BRIGHT_BLACK)\n\n}\n\n\n\nmacro_rules! 
write_col {\n\n ( $fmt:expr, Path( $val:expr, $is_func:expr ) $( $rest:tt )* ) => {{\n\n print_path($fmt, $val, $is_func)?;\n\n write_col!( $fmt $( $rest )* )\n\n }};\n\n ( $fmt:expr, Expression( $val:expr ) $( $rest:tt )* ) => {{\n", "file_path": "lang/src/logging.rs", "rank": 19, "score": 103544.08242679946 }, { "content": "fn loop_<EC, PB, LC, R>(builder: &mut Builder, entry_cond: EC, print_body: PB, loop_cond: LC)\n\n where EC: FnOnce(&mut Builder) -> ValueRef,\n\n PB: FnOnce(&mut Builder) -> R, LC: FnOnce(&mut Builder, &R) -> ValueRef {\n\n let entry_block = builder.get_insert_block();\n\n\n\n let loop_block = builder.append_basic_block();\n\n builder.position_at_end(loop_block);\n\n let r = print_body(builder);\n\n\n\n let end_block = builder.append_basic_block();\n\n let loop_cond = loop_cond(builder, &r);\n\n builder.build_cond_br(\n\n &loop_cond,\n\n loop_block,\n\n end_block,\n\n );\n\n\n\n builder.position_at_end(entry_block);\n\n let entry_cond = entry_cond(builder);\n\n builder.build_cond_br(\n\n &entry_cond,\n\n loop_block,\n\n end_block,\n\n );\n\n\n\n builder.position_at_end(end_block);\n\n}\n\n\n", "file_path": "lang/src/compiler/statements.rs", "rank": 20, "score": 98349.47641832501 }, { "content": "/// Compiles and run a script on a map\n\npub fn apply(ent: ir::Script, script: ast::Script, entities: HashMap<Atom, Entity>) -> HashMap<Atom, Entity> {\n\n let codegen_start = timer_start!();\n\n\n\n let (module, entities, globals, externals) = codegen(&ent.script, script, entities);\n\n\n\n let opt_start = timer_chain!(codegen_start, time, \"Codegen time: {}\", time);\n\n\n\n let pre_opt: Option<String> = if log_enabled!(Trace) {\n\n let pre_opt = module_to_string(module.ptr);\n\n trace!(\"Codegen output:\\n{}\", pre_opt);\n\n\n\n Some(pre_opt)\n\n } else {\n\n None\n\n };\n\n\n\n optimize(module.ptr);\n\n\n\n if let Some(pre_opt) = pre_opt {\n\n trace!(\"Optimizer output:\");\n", "file_path": "lang/src/compiler/mod.rs", "rank": 21, "score": 97654.93691707889 }, { "content": "fn position<P>(input: &str, predicate: P) -> Option<usize> where P: Fn(&str) -> IResult<&str, &str> {\n\n for o in 0..input.len() {\n\n if let IResult::Done(_, _) = predicate(&input[o..]) {\n\n return Some(o);\n\n }\n\n }\n\n\n\n None\n\n}\n\n\n\nmacro_rules! 
take_till {\n\n ($i:expr, $submac:ident!( $($args:tt)* )) => {\n\n take_till!($i, |c| $submac!(c, $($args)*))\n\n };\n\n ($i:expr, $f:expr) => {\n\n match position($i, $f) {\n\n Some(0) | None => IResult::Error,\n\n Some(n) => IResult::Done(&$i[n..], &$i[..n]),\n\n }\n\n };\n", "file_path": "lang/src/hct/parser.rs", "rank": 22, "score": 93525.04535612752 }, { "content": "fn link(ent: ir::Script, module: LLVMModuleRef, entities: HashMap<Atom, Entity>, globals: HashMap<Global, LLVMValueRef>, externals: &Externals) -> LinkResult {\n\n unsafe {\n\n LLVMLinkInMCJIT();\n\n LLVM_InitializeNativeTarget();\n\n LLVM_InitializeNativeAsmPrinter();\n\n }\n\n\n\n // Register external functions\n\n externals.register_symbols();\n\n\n\n let engine = unsafe {\n\n let mut engine = mem::uninitialized();\n\n let mut out = mem::zeroed();\n\n\n\n let res = LLVMCreateExecutionEngineForModule(&mut engine, module, &mut out);\n\n if res == 0 {\n\n engine\n\n } else {\n\n panic!()\n\n }\n", "file_path": "lang/src/compiler/mod.rs", "rank": 23, "score": 85174.04345071728 }, { "content": "pub fn duration_fmt(duration: Duration) -> String {\n\n let mut res = String::new();\n\n\n\n let secs = duration.as_secs();\n\n if secs > 0 {\n\n write!(res, \"{}s \", secs).unwrap();\n\n }\n\n\n\n let nano = duration.subsec_nanos();\n\n let milli = nano / 1_000_000;\n\n if milli > 0 {\n\n write!(res, \"{}ms \", milli).unwrap();\n\n }\n\n\n\n let micro = nano.checked_rem(milli * 1_000_000).unwrap_or(nano) / 1_000;\n\n if micro > 0 {\n\n write!(res, \"{}μs \", micro).unwrap();\n\n }\n\n\n\n if log_enabled!(Trace) {\n\n let nanos = nano.checked_rem(((milli * 1_000) + micro) * 1_000).unwrap_or(nano);\n\n if nanos > 0 {\n\n write!(res, \"{}ns\", nanos).unwrap();\n\n }\n\n }\n\n\n\n res\n\n}\n\n\n", "file_path": "lang/src/logging.rs", "rank": 24, "score": 77463.322261366 }, { "content": "pub fn print_diff(then: &str, now: &str) {\n\n let lines = diff::lines(then, now);\n\n\n\n let (l_max, r_max) = {\n\n lines.par_iter()\n\n .map(|line| match *line {\n\n diff::Result::Left(l) => (l.len(), 0),\n\n diff::Result::Both(l, r) => (l.len(), r.len()),\n\n diff::Result::Right(r) => (0, r.len()),\n\n })\n\n .reduce(\n\n || (0, 0),\n\n |(a_l, a_r), (b_l, b_r)| (\n\n cmp::max(a_l, b_l),\n\n cmp::max(a_r, b_r),\n\n ),\n\n )\n\n };\n\n\n\n let mut t = stdout().unwrap_or_else(|| box BasicTerm::new());\n", "file_path": "lang/src/logging.rs", "rank": 25, "score": 76042.97566740258 }, { "content": "pub fn print_script(script: &Script) -> TermResult<()> {\n\n use std::ops::DerefMut;\n\n\n\n let mut fmt = stdout().unwrap_or_else(|| box BasicTerm::new());\n\n for stmt in &script.body {\n\n writeln_col!(fmt.deref_mut(), Statement(stmt))?;\n\n }\n\n\n\n fmt.reset()?;\n\n Ok(())\n\n}\n\n\n\n/// Utility struct for pretty-printing blocks\n\n/// from https://github.com/rust-lang/rust/blob/master/src/libcore/fmt/builders.rs\n\npub struct PadAdapter<'a> {\n\n fmt: &'a mut FMTWrite,\n\n on_newline: bool,\n\n}\n\n\n\nimpl<'a> PadAdapter<'a> {\n", "file_path": "lang/src/logging.rs", "rank": 26, "score": 75705.95599321168 }, { "content": "pub fn digits(input: &str) -> IResult<&str, &str> {\n\n let input = skip_whitespace(input);\n\n let input_length = input.len();\n\n if input_length == 0 {\n\n return IResult::Error\n\n }\n\n\n\n for (idx, item) in input.chars().enumerate() {\n\n match item {\n\n '0'...'9' => {},\n\n _ => {\n\n if idx == 0 {\n\n return IResult::Error\n\n } else {\n\n return IResult::Done(&input[idx..], &input[..idx])\n\n }\n\n },\n\n }\n\n }\n\n\n", 
"file_path": "lang/src/hct/parser.rs", "rank": 27, "score": 71365.82254556807 }, { "content": "type Parser = Box<Fn(&str) -> IResult<&str, Expression> + Sync + 'static>;\n", "file_path": "lang/src/hct/expression.rs", "rank": 28, "score": 67492.87055775107 }, { "content": "type PunctParser = Box<Fn(&str) -> IResult<&str, &str> + Sync + 'static>;\n", "file_path": "lang/src/hct/expression.rs", "rank": 29, "score": 66152.26239940399 }, { "content": "/// Main compiler driver function\n\n/// Runs the build for a map file, recursively spawning threads for all instances\n\n/// Returns the newly created file if one or more script was applied to it,\n\n/// or the original unmodified file otherwise\n\nfn build(input: &PathBuf) -> Result<PathBuf, Error> {\n\n let vmf::MapFile {\n\n nodes, entities,\n\n scripts, instances\n\n } = vmf::parse_file(input);\n\n\n\n if scripts.is_empty() {\n\n return Ok(input.clone());\n\n }\n\n\n\n let vmf_dir = input.parent().ok_or_else(\n\n || Error::new(ErrorKind::InvalidInput, \"Not a directory\")\n\n )?;\n\n\n\n // Progressively fold each script on the map AST\n\n let entities = Arc::new(Mutex::new(\n\n scripts.into_iter()\n\n .map(|ent| {\n\n let ast = hct::parse_file(vmf_dir.join(&ent.script));\n\n (ent, ast)\n", "file_path": "lang/src/main.rs", "rank": 30, "score": 65185.71774086147 }, { "content": "fn print_call(fmt: &mut Terminal<Output=io::Stdout>, call: &Call) -> TermResult<()> {\n\n write_col!(fmt, Path(&call.path, true), \"(\")?;\n\n for (i, arg) in call.args.iter().enumerate() {\n\n print_expression(fmt, arg)?;\n\n if i < call.args.len() - 1 {\n\n write!(fmt, \", \")?;\n\n }\n\n }\n\n\n\n Ok(write!(fmt, \")\")?)\n\n}\n\n\n", "file_path": "lang/src/logging.rs", "rank": 31, "score": 62757.57159941229 }, { "content": "fn print_statement(fmt: &mut Terminal<Output=io::Stdout>, stmt: &Statement) -> TermResult<()> {\n\n use hct::ast::Statement::*;\n\n match *stmt {\n\n Auto { ref body } => {\n\n {\n\n let mut fmt = PadAdapterTerm::new(fmt);\n\n writeln_col!(&mut fmt, Keyword(\"auto\"), \" {\")?;\n\n for stmt in body {\n\n writeln_col!(&mut fmt, Statement(stmt))?;\n\n }\n\n }\n\n Ok(write!(fmt, \"}}\")?)\n\n },\n\n\n\n Relay { ref name, ref body } => {\n\n {\n\n let mut fmt = PadAdapterTerm::new(fmt);\n\n writeln_col!(&mut fmt, Keyword(\"relay\"), \" \", name, \" {\")?;\n\n for stmt in body {\n\n writeln_col!(&mut fmt, Statement(stmt))?;\n", "file_path": "lang/src/logging.rs", "rank": 32, "score": 62757.57159941229 }, { "content": "fn print_expression(fmt: &mut Terminal<Output=io::Stdout>, exp: &Expression) -> TermResult<()> {\n\n use hct::ast::Expression::*;\n\n match *exp {\n\n Call(ref call) => print_call(fmt, call),\n\n Reference(ref path) => print_path(fmt, path, false),\n\n Binary { ref lhs, ref op, ref rhs } => {\n\n write_col!(fmt, \"(\", Expression(lhs), \" \", op, \" \", Expression(rhs), \")\")\n\n },\n\n Literal(ref lit) => print_literal(fmt, lit),\n\n\n\n Array(ref items) => {\n\n {\n\n let mut fmt = PadAdapterTerm::new(fmt);\n\n writeln!(fmt, \"[\")?;\n\n for item in items {\n\n writeln_col!(&mut fmt, Expression(item), \",\")?;\n\n }\n\n }\n\n Ok(write!(fmt, \"]\")?)\n\n },\n", "file_path": "lang/src/logging.rs", "rank": 33, "score": 62757.57159941229 }, { "content": "fn print_literal(fmt: &mut Terminal<Output=io::Stdout>, lit: &Literal) -> TermResult<()> {\n\n match *lit {\n\n Literal::Number(ref val) => write_col!(fmt, Number(val)),\n\n Literal::String(ref val) => {\n\n write_col!(fmt, String(\"\\\"\"))?;\n\n for part in val {\n\n match *part {\n\n 
StringPart::Expression(ref e) => {\n\n write_col!(fmt, String(\"${\"))?;\n\n print_expression(fmt, e)?;\n\n write_col!(fmt, String(\"}\"))?;\n\n },\n\n StringPart::String(ref s) => write_col!(fmt, String(s))?,\n\n }\n\n }\n\n write_col!(fmt, String(\"\\\"\"))\n\n },\n\n }\n\n}\n\n\n", "file_path": "lang/src/logging.rs", "rank": 34, "score": 62757.57159941229 }, { "content": "fn optimize(module: LLVMModuleRef) {\n\n unsafe {\n\n let pm = LLVMCreatePassManager();\n\n\n\n let builder = LLVMPassManagerBuilderCreate();\n\n LLVMPassManagerBuilderSetOptLevel(builder, 2);\n\n LLVMPassManagerBuilderPopulateFunctionPassManager(builder, pm);\n\n LLVMPassManagerBuilderPopulateModulePassManager(builder, pm);\n\n LLVMPassManagerBuilderDispose(builder);\n\n\n\n LLVMRunPassManager(pm, module);\n\n }\n\n}\n\n\n", "file_path": "lang/src/compiler/mod.rs", "rank": 35, "score": 60988.99169115888 }, { "content": "fn module_to_string(module: LLVMModuleRef) -> String {\n\n unsafe {\n\n let c_str = LLVMPrintModuleToString(module);\n\n let val = CStr::from_ptr(c_str).to_string_lossy().into_owned();\n\n LLVMDisposeMessage(c_str);\n\n val\n\n }\n\n}\n\n\n", "file_path": "lang/src/compiler/mod.rs", "rank": 36, "score": 56945.85783389797 }, { "content": "type Op = (Operator, usize, PunctParser);\n\n\n\npub struct OpParser {\n\n op_parsers: Vec<(Operator, PunctParser)>,\n\n sorted_ops: Vec<(usize, Vec<Operator>)>,\n\n atom_parser: Parser,\n\n}\n\n\n\nimpl OpParser {\n\n pub fn new(atom_parser: Parser, operators: Vec<Op>) -> Self {\n\n let sorted_ops: Vec<(usize, Vec<Operator>)> = {\n\n operators.iter()\n\n .fold(\n\n Vec::new(),\n\n |mut list, &(op, precedence, _)| {\n\n match list.binary_search_by_key(&precedence, |&(p, _)| p) {\n\n Ok(i) => {\n\n let &mut (_, ref mut vec) = &mut list[i];\n\n vec.push(op);\n\n },\n", "file_path": "lang/src/hct/expression.rs", "rank": 37, "score": 45456.38741528653 }, { "content": "fn main() {\n\n let out_path = Path::new(&env::var(\"OUT_DIR\").unwrap()).join(\"atom.rs\");\n\n string_cache_codegen::AtomType::new(\"atom::Atom\", \"hct_atom!\")\n\n .atoms(\n\n NAMES.into_iter()\n\n .chain(\n\n FUNCTIONS.into_iter()\n\n )\n\n .map(|n| String::from(*n))\n\n .chain(\n\n GENERICS.into_iter()\n\n .flat_map(|n| {\n\n TYPES.into_iter()\n\n .map(|t| format!(\"{}.{}\", n, t))\n\n .collect::<Vec<_>>()\n\n })\n\n )\n\n )\n\n .write_to_file(&out_path)\n\n .unwrap();\n", "file_path": "lang/build.rs", "rank": 38, "score": 44421.40395512471 }, { "content": "fn main() {\n\n let args = {\n\n App::new(\"Hatchet Compiler\")\n\n .version(crate_version!())\n\n .author(crate_authors!(\"\\n\"))\n\n .about(crate_description!())\n\n .arg(\n\n Arg::with_name(\"INPUT\")\n\n .help(\"Input files to process\")\n\n .required(true)\n\n .multiple(true)\n\n )\n\n .arg(\n\n Arg::with_name(\"d\")\n\n .short(\"d\")\n\n .multiple(true)\n\n .help(\"Show debug informations\")\n\n )\n\n .arg(\n\n Arg::with_name(\"trace\")\n", "file_path": "lang/src/main.rs", "rank": 39, "score": 43002.81511710107 }, { "content": "type Stack<'a> = Vec<(&'a str, Either<Expression, Operator>)>;\n", "file_path": "lang/src/hct/expression.rs", "rank": 40, "score": 40786.57420589868 }, { "content": "fn expression(input: &str) -> IResult<&str, Expression> {\n\n EXPRESSION_PARSER.parse(input)\n\n}\n\n\n\nnamed!(\n\n block -> Vec<Statement>,\n\n delimited!(\n\n punct!(\"{\"),\n\n many0!(statement),\n\n punct!(\"}\")\n\n )\n\n);\n\n\n\nnamed!(\n\n auto -> Statement,\n\n do_parse!(\n\n keyword!(\"auto\") >>\n\n body: block >>\n\n (Statement::Auto { body 
})\n\n )\n", "file_path": "lang/src/hct/parser.rs", "rank": 41, "score": 33313.3417212805 }, { "content": "#[cfg_attr(feature=\"clippy\", allow(too_many_arguments))]\n\nfn iterator<EC, BP, PB, NV, LC>(\n\n builder: &mut Builder, scope: &Scope,\n\n start_val: ValueRef,\n\n entry_cmp: EC, build_phi: BP, print_body: PB, next_val: NV, loop_cmp: LC)\n\n where EC: FnOnce(&mut Builder) -> ValueRef,\n\n BP: FnOnce(&mut Builder) -> ValueRef,\n\n PB: FnOnce(&mut Builder, &Scope, &ValueRef),\n\n NV: FnOnce(&mut Builder, &ValueRef) -> ValueRef,\n\n LC: FnOnce(&mut Builder, &ValueRef) -> ValueRef {\n\n\n\n let prev_block = builder.get_insert_block();\n\n loop_(\n\n builder,\n\n entry_cmp,\n\n |builder| {\n\n let it = build_phi(builder);\n\n builder.add_incoming(&it, start_val, prev_block);\n\n\n\n print_body(builder, scope, &it);\n\n\n", "file_path": "lang/src/compiler/statements.rs", "rank": 42, "score": 33313.3417212805 }, { "content": "\n\n event: None,\n\n delay: None,\n\n }\n\n }\n\n\n\n fn store_binding(builder: &mut Builder, value: &ValueRef) -> (bool, ValueRef) {\n\n let ptr = builder.build_alloca(&value.ty);\n\n builder.build_store(value, &ptr);\n\n (false, ptr)\n\n }\n\n\n\n /// Creates a new scope with an additional value binding\n\n pub fn with_binding(&'a self, builder: &mut Builder, key: Atom, value: &ValueRef) -> Scope<'a> {\n\n Scope {\n\n parent: Some(self),\n\n bindings: {\n\n let mut res = HashMap::new();\n\n let value = Scope::store_binding(builder, value);\n\n res.insert(key, value);\n", "file_path": "lang/src/compiler/scope.rs", "rank": 43, "score": 30718.75777473216 }, { "content": " res\n\n },\n\n\n\n event: None,\n\n delay: None,\n\n }\n\n }\n\n\n\n /// Mutate this scope to add a new value binding\n\n pub fn set_binding(&mut self, builder: &mut Builder, key: Atom, value: &ValueRef) {\n\n let value = Scope::store_binding(builder, value);\n\n self.bindings.insert(key, value);\n\n }\n\n\n\n fn binding_ptr(&self, key: &Atom) -> Option<(bool, ValueRef)> {\n\n self.bindings.get(key).cloned()\n\n .or_else(|| {\n\n self.parent.and_then(|parent| parent.binding_ptr(key))\n\n })\n\n }\n", "file_path": "lang/src/compiler/scope.rs", "rank": 44, "score": 30716.04132707819 }, { "content": "use std::collections::HashMap;\n\n\n\nuse super::builder::*;\n\nuse super::types::*;\n\nuse atom::*;\n\n\n\n/// Represent a scope (group of bindings in a block) in a script\n\npub struct Scope<'a> {\n\n parent: Option<&'a Scope<'a>>,\n\n bindings: HashMap<Atom, (bool, ValueRef)>,\n\n\n\n event: Option<(ValueRef, ValueRef)>,\n\n delay: Option<ValueRef>,\n\n}\n\n\n\nimpl<'a> Scope<'a> {\n\n /// Creates the root scope for a script using a map's entity list\n\n pub fn root(builder: &mut Builder) -> Scope<'a> {\n\n let keys = builder.get_entities();\n\n Scope {\n", "file_path": "lang/src/compiler/scope.rs", "rank": 45, "score": 30715.288493682147 }, { "content": "\n\n /// Get the value of a binding\n\n pub fn binding(&self, builder: &mut Builder, key: &Atom) -> Option<ValueRef> {\n\n self.binding_ptr(key)\n\n .map(|(is_const, value)| {\n\n if is_const {\n\n value\n\n } else {\n\n builder.build_load(&value)\n\n }\n\n })\n\n }\n\n\n\n /// Get the mutable address of a binding\n\n pub fn binding_mut(&self, key: &Atom) -> Option<ValueRef> {\n\n self.binding_ptr(key)\n\n .and_then(|(is_const, value)| {\n\n if is_const {\n\n None\n\n } else {\n", "file_path": "lang/src/compiler/scope.rs", "rank": 46, "score": 30714.612411249924 }, { "content": "\n\n event: Some((entity, method)),\n\n delay: None,\n\n }\n\n }\n\n\n\n 
/// Creates a new scope for the execution of an inner delay block\n\n pub fn with_delay(&'a self, builder: &mut Builder, time: ValueRef) -> Scope<'a> {\n\n assert_eq!(time.ty, TypeId::f64, \"Delay is not a number\");\n\n\n\n Scope {\n\n parent: Some(self),\n\n bindings: Default::default(),\n\n\n\n event: None,\n\n delay: Some(\n\n self.delay.clone()\n\n .map_or(\n\n time.clone(),\n\n |val| builder.build_fadd(&val, &time),\n\n )\n\n ),\n\n }\n\n }\n\n}\n", "file_path": "lang/src/compiler/scope.rs", "rank": 47, "score": 30714.265503848463 }, { "content": " builder.build_const_entity(ent),\n\n builder.build_const_atom(hct_atom!(\"OnTrigger\")),\n\n )),\n\n delay: None,\n\n }\n\n }\n\n\n\n /// Creates a new scope for the execution of a subscriber block\n\n pub fn subscriber(&'a self, entity: ValueRef, method: ValueRef) -> Scope<'a> {\n\n if let Some((ref self_entity, ref self_method)) = self.event {\n\n warn!(\n\n \"Nested subscriber blocks: {:?}.{:?} -> {:?}.{:?}\",\n\n self_entity, self_method,\n\n entity, method,\n\n );\n\n }\n\n\n\n Scope {\n\n parent: Some(self),\n\n bindings: Default::default(),\n", "file_path": "lang/src/compiler/scope.rs", "rank": 49, "score": 30710.102279842013 }, { "content": " pub fn auto(&'a self, builder: &mut Builder) -> Scope<'a> {\n\n Scope {\n\n parent: Some(self),\n\n bindings: Default::default(),\n\n\n\n event: Some((\n\n builder.build_const_entity(hct_atom!(\"\")),\n\n builder.build_const_atom(hct_atom!(\"OnMapSpawn\")),\n\n )),\n\n delay: None,\n\n }\n\n }\n\n\n\n /// Creates a new scope for the execution of a relay block\n\n pub fn relay(&'a self, builder: &mut Builder, ent: &Atom) -> Scope<'a> {\n\n Scope {\n\n parent: Some(self),\n\n bindings: Default::default(),\n\n\n\n event: Some((\n", "file_path": "lang/src/compiler/scope.rs", "rank": 50, "score": 30709.899671073443 }, { "content": " parent: None,\n\n bindings: {\n\n keys.into_iter()\n\n .map(|k| {\n\n let val = builder.build_const_entity(&k);\n\n (k, (true, val))\n\n })\n\n .collect()\n\n },\n\n\n\n event: None,\n\n delay: None,\n\n }\n\n }\n\n\n\n /// Create a new child scope\n\n pub fn fork(&'a self) -> Scope<'a> {\n\n Scope {\n\n parent: Some(self),\n\n bindings: Default::default(),\n", "file_path": "lang/src/compiler/scope.rs", "rank": 51, "score": 30706.88695156091 }, { "content": " Some(value)\n\n }\n\n })\n\n }\n\n\n\n /// Get the event of this scope\n\n pub fn event(&self) -> Option<(ValueRef, ValueRef)> {\n\n self.event.clone().or_else(|| {\n\n self.parent.and_then(|parent| parent.event())\n\n })\n\n }\n\n\n\n /// Get the delay of this scope\n\n pub fn delay(&self) -> Option<ValueRef> {\n\n self.delay.clone().or_else(|| {\n\n self.parent.and_then(|parent| parent.delay())\n\n })\n\n }\n\n\n\n /// Creates a new scope for the execution of an auto block\n", "file_path": "lang/src/compiler/scope.rs", "rank": 52, "score": 30704.90723113007 }, { "content": "#[derive(Clone)]\n\npub struct ValueRef {\n\n pub ty: TypeId,\n\n pub ptr: Value,\n\n}\n\n\n\nimpl Debug for ValueRef {\n\n fn fmt(&self, fmt: &mut Formatter) -> Result {\n\n use llvm_sys::core::*;\n\n\n\n let mut ty_ptr = None;\n\n let ty_str = if let TypeId::Other(ptr) = self.ty {\n\n unsafe {\n\n let ptr = LLVMPrintTypeToString(ptr);\n\n ty_ptr = Some(ptr);\n\n\n\n CStr::from_ptr(ptr).to_string_lossy().into_owned()\n\n }\n\n } else {\n\n format!(\"{:?}\", self.ty)\n", "file_path": "lang/src/compiler/types.rs", "rank": 55, "score": 30621.79986938807 }, { "content": "use std::ops::Deref;\n\nuse std::ffi::CStr;\n\nuse std::fmt::*;\n\n\n\nuse 
compiler::builder::{Type, Value};\n\nuse atom::Atom;\n\n\n\n#[allow(non_camel_case_types)]\n\n#[derive(Clone, Debug, PartialEq, Eq, Ord, PartialOrd, Hash)]\n\npub enum TypeId {\n\n Void,\n\n f64,\n\n bool,\n\n i64,\n\n\n\n Context,\n\n Atom,\n\n Entity,\n\n String,\n\n\n", "file_path": "lang/src/compiler/types.rs", "rank": 56, "score": 30613.685229749797 }, { "content": " pub args: Vec<TypeId>,\n\n pub ret: TypeId,\n\n}\n\n\n\npub struct Arenas {\n\n pub atoms: Arena<Atom>,\n\n pub strings: Arena<String>,\n\n pub ent_vec: Arena<Vec<*const Atom>>\n\n}\n\n\n\nimpl Arenas {\n\n #[cfg_attr(feature=\"clippy\", allow(not_unsafe_ptr_arg_deref))]\n\n pub fn with_globals(module: *mut LLVMModule, engine: LLVMExecutionEngineRef, globals: HashMap<Global, LLVMValueRef>) -> Arenas {\n\n let atoms = Arena::new();\n\n let strings = Arena::new();\n\n\n\n let globals: HashMap<_, _> = {\n\n globals.into_iter()\n\n .map(|(k, v)| (v, k))\n\n .collect()\n", "file_path": "lang/src/runtime/types.rs", "rank": 57, "score": 30613.51828653787 }, { "content": " Array {\n\n len: u32,\n\n ty: Box<TypeId>,\n\n },\n\n Vec {\n\n ty: Box<TypeId>,\n\n },\n\n\n\n Object {\n\n items: Vec<(Atom, TypeId)>,\n\n },\n\n /* TODO: HashMap {\n\n key: TypeId,\n\n value: TypeId,\n\n },*/\n\n\n\n Other(Type),\n\n}\n\n\n\n/// Wrapper for LLVM values with type metadata\n", "file_path": "lang/src/compiler/types.rs", "rank": 58, "score": 30613.122769541347 }, { "content": " };\n\n\n\n let val_ptr = unsafe {\n\n LLVMPrintValueToString(self.ptr)\n\n };\n\n let val_str = unsafe {\n\n CStr::from_ptr(val_ptr).to_string_lossy().into_owned()\n\n };\n\n\n\n let res = {\n\n fmt.debug_struct(\"ValueRef\")\n\n .field(\"ty\", &ty_str)\n\n .field(\"ptr\", &val_str)\n\n .finish()\n\n };\n\n\n\n if let Some(ptr) = ty_ptr {\n\n unsafe {\n\n LLVMDisposeMessage(ptr);\n\n }\n", "file_path": "lang/src/compiler/types.rs", "rank": 59, "score": 30611.957849012182 }, { "content": "use std::collections::HashMap;\n\nuse std::fmt::*;\n\nuse libc::c_void;\n\n\n\nuse llvm_sys::LLVMModule;\n\nuse llvm_sys::execution_engine::*;\n\nuse llvm_sys::prelude::*;\n\nuse llvm_sys::core::*;\n\nuse rand::{Rng, SeedableRng, StdRng};\n\nuse typed_arena::Arena;\n\n\n\nuse compiler::builder::*;\n\nuse compiler::types::{TypeId, Global};\n\nuse vmf::ir::{Script, Entity};\n\nuse atom::Atom;\n\n\n\n/// Pointer and type metadata for a function\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct Function {\n\n pub ptr: Value,\n", "file_path": "lang/src/runtime/types.rs", "rank": 60, "score": 30610.830339203338 }, { "content": " };\n\n\n\n let mut global = unsafe { LLVMGetFirstGlobal(module) };\n\n while !global.is_null() {\n\n let val = &globals[&global];\n\n trace!(\"Register global {:?}\", val);\n\n\n\n let val = match *val {\n\n Global::Atom(ref val) => atoms.alloc(val.clone()) as *mut _ as *mut c_void,\n\n Global::String(ref val) => strings.alloc(val.clone()) as *mut _ as *mut c_void,\n\n };\n\n\n\n unsafe {\n\n LLVMAddGlobalMapping(engine, global, val);\n\n }\n\n\n\n global = unsafe { LLVMGetNextGlobal(global) };\n\n }\n\n\n\n Arenas {\n", "file_path": "lang/src/runtime/types.rs", "rank": 61, "score": 30610.24168246545 }, { "content": " }\n\n unsafe {\n\n LLVMDisposeMessage(val_ptr);\n\n }\n\n\n\n res\n\n }\n\n}\n\n\n\n#[derive(Debug, Hash, PartialEq, Eq)]\n\npub enum Global {\n\n Atom(Atom),\n\n String(String),\n\n}\n\n\n", "file_path": "lang/src/compiler/types.rs", "rank": 62, "score": 30607.42677302068 }, { "content": " atoms, strings,\n\n ent_vec: Arena::new(),\n\n }\n\n 
}\n\n}\n\n\n\n/// Execution context of a script\n\npub struct Context {\n\n pub entities: HashMap<Atom, Entity>,\n\n pub arenas: Arenas,\n\n pub rng: Box<Rng>,\n\n}\n\n\n\nimpl Context {\n\n pub fn new(ent: Script, arenas: Arenas, entities: HashMap<Atom, Entity>) -> Context {\n\n Context {\n\n arenas, entities,\n\n rng: box StdRng::from_seed(&[ent.seed]),\n\n }\n\n }\n", "file_path": "lang/src/runtime/types.rs", "rank": 63, "score": 30602.65977765284 }, { "content": "}\n\n\n\nimpl Debug for Context {\n\n fn fmt(&self, fmt: &mut Formatter) -> Result {\n\n fmt.debug_struct(\"Context\")\n\n .field(\"entities\", &self.entities)\n\n .finish()\n\n }\n\n}\n", "file_path": "lang/src/runtime/types.rs", "rank": 64, "score": 30598.639891517145 }, { "content": "\n\n unsafe {\n\n LLVMPositionBuilderAtEnd(self.builder, current_block);\n\n }\n\n\n\n ValueRef { ty: val.clone(), ptr }\n\n }\n\n pub fn build_load(&mut self, val: &ValueRef) -> ValueRef {\n\n let ptr = unsafe {\n\n LLVMBuildLoad(self.builder, val.ptr, EMPTY_STRING.as_ptr())\n\n };\n\n\n\n ValueRef { ty: val.ty.clone(), ptr }\n\n }\n\n pub fn build_store(&mut self, val: &ValueRef, ptr: &ValueRef) {\n\n unsafe {\n\n LLVMBuildStore(self.builder, val.ptr, ptr.ptr);\n\n }\n\n }\n\n pub fn build_in_bounds_gep<'a, I: IntoIterator<Item=&'a ValueRef>>(&mut self, obj: &ValueRef, indices: I) -> ValueRef {\n", "file_path": "lang/src/compiler/builder.rs", "rank": 65, "score": 30201.417048376927 }, { "content": " pub fn build_const_i64(&self, val: i64) -> ValueRef {\n\n ValueRef {\n\n ty: TypeId::i64,\n\n ptr: unsafe {\n\n LLVMConstInt(self.get_type(&TypeId::i64), val as u64, 1)\n\n },\n\n }\n\n }\n\n pub fn build_const_entity<A: Borrow<Atom>>(&mut self, val: A) -> ValueRef {\n\n self.global_ptr(TypeId::Entity, val)\n\n }\n\n pub fn build_const_atom<A: Borrow<Atom>>(&mut self, val: A) -> ValueRef {\n\n self.global_ptr(TypeId::Atom, val)\n\n }\n\n pub fn build_const_string<S: Borrow<String>>(&mut self, val: S) -> ValueRef {\n\n self.global_ptr(TypeId::String, val)\n\n }\n\n\n\n pub fn is_constant(&self, val: &ValueRef) -> bool {\n\n unsafe {\n", "file_path": "lang/src/compiler/builder.rs", "rank": 66, "score": 30197.58314388049 }, { "content": " pub fn build_cond_br(&mut self, cond: &ValueRef, cons: Block, alt: Block) {\n\n assert_eq!(cond.ty, TypeId::bool);\n\n unsafe {\n\n LLVMBuildCondBr(self.builder, cond.ptr, cons, alt);\n\n }\n\n }\n\n\n\n pub fn build_phi(&mut self, ty: TypeId) -> ValueRef {\n\n let llvm_ty = self.get_type(&ty);\n\n let ptr = unsafe {\n\n LLVMBuildPhi(self.builder, llvm_ty, EMPTY_STRING.as_ptr())\n\n };\n\n ValueRef { ty, ptr }\n\n }\n\n pub fn add_incoming(&self, phi: &ValueRef, mut val: ValueRef, mut block: Block) {\n\n unsafe {\n\n LLVMAddIncoming(\n\n phi.ptr,\n\n &mut val.ptr,\n\n &mut block,\n", "file_path": "lang/src/compiler/builder.rs", "rank": 67, "score": 30196.683103531996 }, { "content": " .collect()\n\n }\n\n\n\n pub fn build_const_f64(&self, val: f64) -> ValueRef {\n\n ValueRef {\n\n ty: TypeId::f64,\n\n ptr: unsafe {\n\n LLVMConstReal(self.get_type(&TypeId::f64), val)\n\n },\n\n }\n\n }\n\n pub fn build_const_i32(&self, val: i32) -> ValueRef {\n\n let ty = unsafe { LLVMInt32TypeInContext(self.context) };\n\n ValueRef {\n\n ty: TypeId::Other(ty),\n\n ptr: unsafe {\n\n LLVMConstInt(ty, val as u64, 1)\n\n },\n\n }\n\n }\n", "file_path": "lang/src/compiler/builder.rs", "rank": 68, "score": 30195.687467744978 }, { "content": " let mut indices: Vec<_> = {\n\n indices.into_iter()\n\n .map(|i| i.ptr)\n\n .collect()\n\n 
};\n\n\n\n let ptr = unsafe {\n\n LLVMBuildInBoundsGEP(\n\n self.builder,\n\n obj.ptr,\n\n indices.as_mut_ptr(),\n\n indices.len() as u32,\n\n EMPTY_STRING.as_ptr()\n\n )\n\n };\n\n\n\n ValueRef { ty: obj.ty.clone(), ptr }\n\n }\n\n pub fn build_insert_value(&mut self, container: &ValueRef, elem: &ValueRef, index: u32) -> ValueRef {\n\n let ptr = unsafe {\n", "file_path": "lang/src/compiler/builder.rs", "rank": 69, "score": 30195.621971787285 }, { "content": " ($name:ident<f64, $pred:ident>( $( $args:ident ),* ) ) => {\n\n pub fn $name(&mut self, $( $args: &ValueRef ),* ) -> ValueRef {\n\n ValueRef {\n\n ty: TypeId::bool,\n\n ptr: unsafe {\n\n LLVMBuildFCmp(self.builder, LLVMRealPredicate::$pred, $( $args.ptr, )* EMPTY_STRING.as_ptr())\n\n },\n\n }\n\n }\n\n };\n\n ($name:ident<i64, $pred:ident>( $( $args:ident ),* ) ) => {\n\n pub fn $name(&mut self, $( $args: &ValueRef ),* ) -> ValueRef {\n\n ValueRef {\n\n ty: TypeId::bool,\n\n ptr: unsafe {\n\n LLVMBuildICmp(self.builder, LLVMIntPredicate::$pred, $( $args.ptr, )* EMPTY_STRING.as_ptr())\n\n },\n\n }\n\n }\n\n };\n", "file_path": "lang/src/compiler/builder.rs", "rank": 70, "score": 30195.32344546645 }, { "content": " LLVMBuildRetVoid(self.builder);\n\n LLVMDisposeBuilder(self.builder);\n\n }\n\n\n\n (\n\n ModuleHolder {\n\n context: self.context,\n\n ptr: self.module,\n\n },\n\n self.entities,\n\n self.globals,\n\n self.externals,\n\n )\n\n }\n\n\n\n fn global_ptr<S, T>(&mut self, ty: TypeId, val: S) -> ValueRef where S: Borrow<T>, T: ToOwned + Deref<Target=str>, Global: From<<T as ToOwned>::Owned> {\n\n let cmp_ty = self.get_type(&ty);\n\n let &mut Builder { ref mut globals, module, .. } = self;\n\n\n\n let val = val.borrow();\n", "file_path": "lang/src/compiler/builder.rs", "rank": 71, "score": 30194.248956085565 }, { "content": " LLVMDisposeBuilder(self.builder);\n\n }\n\n }\n\n}*/\n\n\n\nstatic EMPTY_STRING: [i8; 1] = [0];\n\n\n\nmacro_rules! builder_forward {\n\n ($name:ident ( $( $args:ident ),* ) -> $ret:tt = $op:ident ) => {\n\n pub fn $name(&mut self, $( $args: &ValueRef ),* ) -> ValueRef {\n\n ValueRef {\n\n ty: TypeId::$ret,\n\n ptr: unsafe {\n\n $op(self.builder, $( $args.ptr, )* EMPTY_STRING.as_ptr())\n\n },\n\n }\n\n }\n\n };\n\n}\n\nmacro_rules! 
builder_cmp {\n", "file_path": "lang/src/compiler/builder.rs", "rank": 72, "score": 30193.825943169435 }, { "content": " let llvm_ty = self.get_type(&ty);\n\n let c_name = CString::new(name).unwrap();\n\n let ptr = unsafe {\n\n LLVMAddFunction(self.module, c_name.as_ptr(), llvm_ty)\n\n };\n\n\n\n ValueRef { ty, ptr }\n\n }\n\n pub fn add_function_attribute(&self, func: &ValueRef, attr: u32) {\n\n unsafe {\n\n let attr = LLVMCreateEnumAttribute(self.context, attr, 0);\n\n LLVMAddAttributeAtIndex(func.ptr, LLVMAttributeFunctionIndex, attr);\n\n }\n\n }\n\n\n\n pub fn get_external(&mut self, name: Atom) -> Function {\n\n self.externals.get_function(self, name)\n\n }\n\n pub fn get_runtime_context(&self) -> ValueRef {\n\n ValueRef {\n", "file_path": "lang/src/compiler/builder.rs", "rank": 73, "score": 30193.696399166758 }, { "content": " let glob = Global::from(val.to_owned());\n\n let &mut ptr = globals.entry(glob).or_insert_with(|| {\n\n let name = CString::new(global_name(&ty, val)).unwrap();\n\n unsafe {\n\n LLVMAddGlobalInAddressSpace(\n\n module,\n\n LLVMGetElementType(cmp_ty),\n\n name.as_ptr(),\n\n 0\n\n )\n\n }\n\n });\n\n\n\n ValueRef { ty, ptr }\n\n }\n\n}\n\n\n\n/*impl Drop for Builder {\n\n fn drop(&mut self) {\n\n unsafe {\n", "file_path": "lang/src/compiler/builder.rs", "rank": 74, "score": 30193.692511202888 }, { "content": " ty: TypeId::Context,\n\n ptr: unsafe {\n\n LLVMGetParam(self.func, 0)\n\n },\n\n }\n\n }\n\n pub fn build_call(&mut self, func: Function, args: Vec<ValueRef>) -> ValueRef {\n\n let mut args: Vec<_> = {\n\n args.into_iter()\n\n .map(|val| val.ptr)\n\n .collect()\n\n };\n\n\n\n let ptr = unsafe {\n\n LLVMBuildCall(\n\n self.builder,\n\n func.ptr,\n\n args.as_mut_ptr(),\n\n args.len() as u32,\n\n EMPTY_STRING.as_ptr()\n", "file_path": "lang/src/compiler/builder.rs", "rank": 75, "score": 30193.267162748285 }, { "content": " LLVMIsConstant(val.ptr) == 1\n\n }\n\n }\n\n pub fn get_const_f64(&self, val: &ValueRef) -> Option<f64> {\n\n if val.ty == TypeId::f64 && self.is_constant(val) {\n\n let mut precision_lost = 0;\n\n let res = unsafe {\n\n LLVMConstRealGetDouble(val.ptr, &mut precision_lost)\n\n };\n\n\n\n if precision_lost == 0 {\n\n Some(res)\n\n } else {\n\n None\n\n }\n\n } else {\n\n None\n\n }\n\n }\n\n\n", "file_path": "lang/src/compiler/builder.rs", "rank": 76, "score": 30193.13322999163 }, { "content": " }\n\n }\n\n\n\n pub fn get_function_type<'a, A: IntoIterator<Item=&'a TypeId>>(&self, ret: TypeId, args: A) -> TypeId {\n\n let mut args: Vec<_> = {\n\n args.into_iter()\n\n .map(|ty| self.get_type(ty))\n\n .collect()\n\n };\n\n\n\n TypeId::Other(unsafe {\n\n LLVMFunctionType(\n\n self.get_type(&ret),\n\n args.as_mut_ptr(),\n\n args.len() as u32,\n\n 0\n\n )\n\n })\n\n }\n\n pub fn add_function(&self, ty: TypeId, name: &str) -> ValueRef {\n", "file_path": "lang/src/compiler/builder.rs", "rank": 77, "score": 30192.11169362922 }, { "content": " 1,\n\n );\n\n }\n\n }\n\n\n\n pub fn build_alloca(&mut self, val: &TypeId) -> ValueRef {\n\n let current_block = unsafe {\n\n LLVMGetInsertBlock(self.builder)\n\n };\n\n\n\n unsafe {\n\n let alloca_block = LLVMGetEntryBasicBlock(self.func);\n\n let term = LLVMGetBasicBlockTerminator(alloca_block);\n\n LLVMPositionBuilderBefore(self.builder, term);\n\n }\n\n\n\n let ty = self.get_type(val);\n\n let ptr = unsafe {\n\n LLVMBuildAlloca(self.builder, ty, EMPTY_STRING.as_ptr())\n\n };\n", "file_path": "lang/src/compiler/builder.rs", "rank": 78, "score": 30191.279714993827 }, { "content": "}\n\n\n\nimpl Builder {\n\n 
pub fn get_type(&self, ty: &TypeId) -> Type {\n\n match *ty {\n\n TypeId::Context | TypeId::Entity |\n\n TypeId::Atom | TypeId::String |\n\n TypeId::Vec { .. } => unsafe {\n\n LLVMPointerType(LLVMInt8TypeInContext(self.context), 0)\n\n },\n\n TypeId::Array { ref len, ref ty } => unsafe {\n\n LLVMPointerType(\n\n LLVMArrayType(\n\n self.get_type(ty),\n\n *len as u32,\n\n ),\n\n 0,\n\n )\n\n },\n\n TypeId::Object { ref items } => {\n", "file_path": "lang/src/compiler/builder.rs", "rank": 79, "score": 30191.12548132451 }, { "content": " )\n\n };\n\n\n\n ValueRef { ty: func.ret, ptr }\n\n }\n\n\n\n pub fn add_entity(&mut self, name: Atom, ent: Entity) {\n\n self.entities.insert(name, ent);\n\n }\n\n pub fn add_auto_entity(&mut self) {\n\n self.entities\n\n .entry(hct_atom!(\"\"))\n\n .or_insert(Entity {\n\n classname: hct_atom!(\"logic_auto\"),\n\n .. Default::default()\n\n });\n\n }\n\n pub fn get_entities(&self) -> Vec<Atom> {\n\n self.entities.keys()\n\n .cloned()\n", "file_path": "lang/src/compiler/builder.rs", "rank": 80, "score": 30191.002732714656 }, { "content": "use std::collections::HashMap;\n\nuse std::borrow::Borrow;\n\nuse std::ops::Deref;\n\nuse std::ffi::CString;\n\n\n\nuse llvm_sys::*;\n\nuse llvm_sys::core::*;\n\nuse llvm_sys::prelude::*;\n\n\n\nuse atom::Atom;\n\nuse compiler::types::*;\n\nuse runtime::stl::Externals;\n\nuse runtime::types::Function;\n\nuse vmf::ir::Entity;\n\n\n\npub type Type = LLVMTypeRef;\n\npub type Value = LLVMValueRef;\n\npub type Block = LLVMBasicBlockRef;\n\n\n\n#[derive(Debug)]\n", "file_path": "lang/src/compiler/builder.rs", "rank": 81, "score": 30189.334443559677 }, { "content": "pub struct Builder {\n\n context: LLVMContextRef,\n\n module: LLVMModuleRef,\n\n func: Value,\n\n builder: LLVMBuilderRef,\n\n entities: HashMap<Atom, Entity>,\n\n globals: HashMap<Global, Value>,\n\n externals: Externals,\n\n}\n\n\n\npub struct ModuleHolder {\n\n context: LLVMContextRef,\n\n pub ptr: LLVMModuleRef,\n\n}\n\n\n\nimpl Drop for ModuleHolder {\n\n fn drop(&mut self) {\n\n unsafe {\n\n LLVMContextDispose(self.context);\n\n }\n", "file_path": "lang/src/compiler/builder.rs", "rank": 82, "score": 30188.355799568464 }, { "content": " let mut items: Vec<_> = {\n\n items.iter()\n\n .map(|&(_, ref ty)| self.get_type(ty))\n\n .collect()\n\n };\n\n\n\n unsafe {\n\n LLVMPointerType(\n\n LLVMStructTypeInContext(\n\n self.context,\n\n items.as_mut_ptr(),\n\n items.len() as _,\n\n 0,\n\n ),\n\n 0,\n\n )\n\n }\n\n },\n\n TypeId::f64 => unsafe { LLVMDoubleTypeInContext(self.context) },\n\n TypeId::bool => unsafe { LLVMInt1TypeInContext(self.context) },\n", "file_path": "lang/src/compiler/builder.rs", "rank": 83, "score": 30187.697757972757 }, { "content": " TypeId::i64 => unsafe { LLVMInt64TypeInContext(self.context) },\n\n TypeId::Void => unsafe { LLVMVoidTypeInContext(self.context) },\n\n TypeId::Other(ty) => ty,\n\n }\n\n }\n\n pub fn get_element_type(&self, ty: &TypeId) -> TypeId {\n\n TypeId::Other(unsafe {\n\n LLVMGetElementType(\n\n self.get_type(ty)\n\n )\n\n })\n\n }\n\n pub fn build_undef(&self, ty: &TypeId) -> ValueRef {\n\n ValueRef {\n\n ty: ty.clone(),\n\n ptr: unsafe {\n\n LLVMGetUndef(\n\n self.get_type(ty)\n\n )\n\n },\n", "file_path": "lang/src/compiler/builder.rs", "rank": 84, "score": 30187.558388779762 }, { "content": " }\n\n}\n\n\n\npub type BuilderResult = (\n\n ModuleHolder,\n\n HashMap<Atom, Entity>,\n\n HashMap<Global, Value>,\n\n Externals,\n\n);\n\n\n\nimpl Builder {\n\n pub fn new(name: &str, entities: HashMap<Atom, Entity>) -> Builder {\n\n let 
context = unsafe {\n\n LLVMContextCreate()\n\n };\n\n\n\n let module = {\n\n let mod_name = CString::new(name).unwrap();\n\n unsafe {\n\n LLVMModuleCreateWithNameInContext(mod_name.as_ptr(), context)\n", "file_path": "lang/src/compiler/builder.rs", "rank": 85, "score": 30186.22813348217 }, { "content": " }\n\n };\n\n\n\n let builder = unsafe {\n\n LLVMCreateBuilderInContext(context)\n\n };\n\n\n\n let main = {\n\n let i8_ty = unsafe { LLVMInt8TypeInContext(context) };\n\n let mut params = vec![unsafe {\n\n LLVMPointerType(\n\n i8_ty,\n\n 0\n\n )\n\n }];\n\n let main_type = unsafe {\n\n LLVMFunctionType(\n\n LLVMVoidTypeInContext(context),\n\n params.as_mut_ptr(),\n\n params.len() as u32,\n", "file_path": "lang/src/compiler/builder.rs", "rank": 86, "score": 30184.678143648685 }, { "content": " pub fn append_basic_block(&mut self) -> Block {\n\n unsafe {\n\n LLVMAppendBasicBlockInContext(self.context, self.func, EMPTY_STRING.as_ptr())\n\n }\n\n }\n\n pub fn get_insert_block(&self) -> Block {\n\n unsafe {\n\n LLVMGetInsertBlock(self.builder)\n\n }\n\n }\n\n pub fn position_at_end(&mut self, block: Block) {\n\n unsafe {\n\n LLVMPositionBuilderAtEnd(self.builder, block);\n\n }\n\n }\n\n pub fn build_br(&mut self, block: Block) {\n\n unsafe {\n\n LLVMBuildBr(self.builder, block);\n\n }\n\n }\n", "file_path": "lang/src/compiler/builder.rs", "rank": 87, "score": 30182.028908649012 }, { "content": " LLVMBuildInsertValue(\n\n self.builder,\n\n container.ptr,\n\n elem.ptr,\n\n index,\n\n EMPTY_STRING.as_ptr()\n\n )\n\n };\n\n\n\n ValueRef { ty: container.ty.clone(), ptr }\n\n }\n\n\n\n builder_forward!{ build_fadd(rhs, lhs) -> f64 = LLVMBuildFAdd }\n\n builder_forward!{ build_fsub(rhs, lhs) -> f64 = LLVMBuildFSub }\n\n builder_forward!{ build_fmul(rhs, lhs) -> f64 = LLVMBuildFMul }\n\n builder_forward!{ build_fdiv(rhs, lhs) -> f64 = LLVMBuildFDiv }\n\n builder_forward!{ build_frem(rhs, lhs) -> f64 = LLVMBuildFRem }\n\n\n\n builder_forward!{ build_nswadd(rhs, lhs) -> i64 = LLVMBuildNSWAdd }\n\n builder_forward!{ build_shl(rhs, lhs) -> i64 = LLVMBuildShl }\n", "file_path": "lang/src/compiler/builder.rs", "rank": 88, "score": 30181.84384011771 }, { "content": " 0\n\n )\n\n };\n\n\n\n let func_name = CString::new(\"main\").unwrap();\n\n unsafe {\n\n LLVMAddFunction(module, func_name.as_ptr(), main_type)\n\n }\n\n };\n\n\n\n let entry_name = CString::new(\"entry\").unwrap();\n\n let entry_block = unsafe {\n\n LLVMAppendBasicBlockInContext(context, main, entry_name.as_ptr())\n\n };\n\n\n\n let start_name = CString::new(\"start\").unwrap();\n\n let start_block = unsafe {\n\n LLVMAppendBasicBlockInContext(context, main, start_name.as_ptr())\n\n };\n\n\n", "file_path": "lang/src/compiler/builder.rs", "rank": 89, "score": 30179.057939861737 }, { "content": " unsafe {\n\n LLVMPositionBuilderAtEnd(builder, entry_block);\n\n LLVMBuildBr(builder, start_block);\n\n LLVMPositionBuilderAtEnd(builder, start_block)\n\n }\n\n\n\n Builder {\n\n context,\n\n module,\n\n builder,\n\n func: main,\n\n\n\n entities,\n\n globals: Default::default(),\n\n externals: Externals::new(),\n\n }\n\n }\n\n\n\n pub fn finalize(self) -> BuilderResult {\n\n unsafe {\n", "file_path": "lang/src/compiler/builder.rs", "rank": 90, "score": 30176.65839214192 }, { "content": " builder_forward!{ build_lshr(rhs, lhs) -> i64 = LLVMBuildLShr }\n\n\n\n builder_cmp!{ build_float_lt<f64, LLVMRealOLT>(rhs, lhs) }\n\n builder_cmp!{ build_float_le<f64, LLVMRealOLE>(rhs, lhs) }\n\n builder_cmp!{ build_float_gt<f64, LLVMRealOGT>(rhs, lhs) }\n\n 
builder_cmp!{ build_float_ge<f64, LLVMRealOGE>(rhs, lhs) }\n\n builder_cmp!{ build_float_eq<f64, LLVMRealOEQ>(rhs, lhs) }\n\n builder_cmp!{ build_float_ne<f64, LLVMRealONE>(rhs, lhs) }\n\n\n\n builder_forward!{ build_and(rhs, lhs) -> bool = LLVMBuildAnd }\n\n builder_forward!{ build_or(rhs, lhs) -> bool = LLVMBuildOr }\n\n builder_forward!{ build_xor(rhs, lhs) -> bool = LLVMBuildXor }\n\n builder_forward!{ build_not(val) -> bool = LLVMBuildNot }\n\n\n\n builder_cmp!{ build_int_lt<i64, LLVMIntULT>(rhs, lhs) }\n\n builder_cmp!{ build_int_eq<i64, LLVMIntEQ>(rhs, lhs) }\n\n builder_cmp!{ build_int_ne<i64, LLVMIntNE>(rhs, lhs) }\n\n}", "file_path": "lang/src/compiler/builder.rs", "rank": 91, "score": 30175.00731192932 }, { "content": "enum ColorType {\n\n Number,\n\n String,\n\n Function,\n\n Name,\n\n Keyword,\n\n}\n\n\n", "file_path": "lang/src/logging.rs", "rank": 92, "score": 29276.70935099769 }, { "content": " let gep = builder.build_in_bounds_gep(\n\n &obj,\n\n vec![\n\n &zero,\n\n &idx,\n\n ],\n\n );\n\n\n\n let res = builder.build_load(&gep);\n\n ValueRef { ty: ty.clone(), ptr: res.ptr }\n\n },\n\n\n\n &TypeId::Entity => {\n\n let prop = builder.build_const_atom(prop);\n\n call_stl(\n\n builder,\n\n hct_atom!(\"get_property\"),\n\n vec![ &obj, &prop ],\n\n )\n\n },\n", "file_path": "lang/src/compiler/expression.rs", "rank": 93, "score": 32.39902446477873 }, { "content": " let ty = TypeId::Object { items };\n\n\n\n let obj_elem = builder.get_element_type(&ty);\n\n let init = builder.build_undef(&obj_elem);\n\n\n\n let ptr = {\n\n values.into_iter()\n\n .enumerate()\n\n .fold(init, |prev, (i, val)| {\n\n builder.build_insert_value(\n\n &prev, &val, i as _,\n\n )\n\n })\n\n };\n\n\n\n let mut alloc = builder.build_alloca(&obj_elem);\n\n alloc.ty = ty;\n\n\n\n builder.build_store(&ptr, &alloc);\n\n\n", "file_path": "lang/src/compiler/expression.rs", "rank": 94, "score": 27.389431451936538 }, { "content": " let value = expression(value, &scope, builder);\n\n let mut prop = unwind_path(prop);\n\n\n\n let head = prop.pop_front().unwrap();\n\n let head = {\n\n scope.binding_mut(&head)\n\n .expect(&format!(\"binding \\\"{}\\\" not found\", head))\n\n };\n\n\n\n match head.ty {\n\n _ if prop.is_empty() => {\n\n assert_eq!(head.ty, value.ty);\n\n builder.build_store(&value, &head);\n\n },\n\n\n\n TypeId::Entity => {\n\n let value = if value.ty == TypeId::String {\n\n value\n\n } else if value.ty == TypeId::f64 {\n\n call_stl(\n", "file_path": "lang/src/compiler/statements.rs", "rank": 95, "score": 26.802949529930025 }, { "content": " val.into_iter()\n\n .fold(None, |prev, item| {\n\n let item = match *item {\n\n StringPart::String(ref s) => builder.build_const_string(s),\n\n StringPart::Expression(ref e) => {\n\n let exp = expression(e.clone(), scope, builder);\n\n match exp.ty {\n\n TypeId::String => exp,\n\n TypeId::f64 => call_stl(\n\n builder,\n\n hct_atom!(\"to_string\"),\n\n vec![ &exp ],\n\n ),\n\n _ => unimplemented!(),\n\n }\n\n },\n\n };\n\n\n\n Some(if let Some(prev) = prev {\n\n call_stl(\n", "file_path": "lang/src/compiler/expression.rs", "rank": 96, "score": 23.774048098956943 }, { "content": " rhs: Box<Expression>,\n\n },\n\n\n\n Literal(Literal),\n\n}\n\n\n\n/// Defines a path to an entity or method\n\n#[derive(Clone, Debug, Eq, PartialEq, Hash)]\n\npub enum Path {\n\n Deref(Box<Path>, Atom),\n\n Instance(Box<Path>),\n\n Binding(Atom),\n\n}\n\n\n\nimpl Display for Path {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n\n match *self {\n\n Path::Deref(ref obj, ref prop) => 
{\n\n if let Path::Instance(_) = **obj {\n\n write!(fmt, \"{}{}\", obj, prop)\n", "file_path": "lang/src/hct/ast.rs", "rank": 97, "score": 23.68510252323799 }, { "content": " builder, &scope,\n\n start_val.clone(),\n\n |builder| {\n\n builder.build_int_lt(\n\n &start_val,\n\n &end_val,\n\n )\n\n },\n\n |builder| {\n\n builder.build_phi(\n\n TypeId::i64,\n\n )\n\n },\n\n |builder, scope, it| {\n\n let value = {\n\n let zero = builder.build_const_i32(0);\n\n let gep = builder.build_in_bounds_gep(\n\n &array,\n\n vec![ &zero, it ],\n\n );\n", "file_path": "lang/src/compiler/statements.rs", "rank": 98, "score": 23.630580638461126 }, { "content": "\n\n builder.build_store(&ptr, &alloc);\n\n\n\n alloc\n\n },\n\n Expression::Map(val) => {\n\n let mut data: Vec<_> = {\n\n val.into_iter()\n\n .map(|(k, v)| {\n\n let val = expression(v, scope, builder);\n\n ((k.clone(), val.ty.clone()), val)\n\n })\n\n .collect()\n\n };\n\n\n\n data.sort_by(|&((ref a, _), _), &((ref b, _), _)| a.cmp(b));\n\n let (items, values): (Vec<_>, Vec<_>) = {\n\n data.into_iter().unzip()\n\n };\n\n\n", "file_path": "lang/src/compiler/expression.rs", "rank": 99, "score": 22.563168260857786 } ]
Rust
src/day21.rs
codedstructure/aoc2021
27e151e4c8cbcda78b29fe734df6733818783461
use std::collections::HashMap;

#[derive(Default, Debug, Clone)]
struct DetDie {
    state: i32,
    roll_count: i32,
}

impl Iterator for DetDie {
    type Item = i32;
    fn next(&mut self) -> Option<i32> {
        self.roll_count += 1;
        let value = self.state + 1;
        self.state = (self.state + 1) % 100;
        Some(value)
    }
}

struct Player {
    position: i32,
    score: i32,
}

impl Player {
    fn new(start: i32) -> Self {
        Self {
            position: (start - 1) % 10,
            score: 0,
        }
    }
    fn advance(&mut self, amount: i32) {
        self.position = (self.position + amount) % 10;
        self.score += self.position + 1;
    }
}

pub fn step1() {
    let mut dd: DetDie = Default::default();
    let dd = dd.by_ref();
    let mut p1 = Player::new(8);
    let mut p2 = Player::new(6);
    let losing_score = loop {
        p1.advance(dd.take(3).sum());
        if p1.score >= 1000 {
            break p2.score;
        }
        p2.advance(dd.take(3).sum());
        if p2.score >= 1000 {
            break p1.score;
        }
    };
    println!("Final result: {}", dd.roll_count * losing_score);
}

fn run_game(
    remain: i32,
    pos: i32,
    ways: i128,
    throw_count: i32,
    throw_way_map: &mut HashMap<i32, i128>,
) -> i128 {
    let roll_dist: HashMap<i32, i128> =
        HashMap::from_iter([(3, 1), (4, 3), (5, 6), (6, 7), (7, 6), (8, 3), (9, 1)]);
    if remain > 0 {
        let mut new_ways = 0;
        for roll_sum in 3..=9 {
            let possibilities = roll_dist.get(&roll_sum).unwrap();
            let new_pos = ((pos - 1) + roll_sum) % 10 + 1;
            new_ways += run_game(
                remain - new_pos,
                new_pos,
                ways * possibilities,
                throw_count + 1,
                throw_way_map,
            );
        }
        return new_ways;
    }
    *throw_way_map.entry(throw_count).or_insert(0) += ways;
    ways
}

pub fn step2() {
    let mut p1_throw_ways = HashMap::new();
    let p1_complete = run_game(21, 8, 1, 0, &mut p1_throw_ways);
    println!("p1: {}", p1_complete);
    println!("{:?}", p1_throw_ways);
    let mut p2_throw_ways = HashMap::new();
    let p2_complete = run_game(21, 6, 1, 0, &mut p2_throw_ways);
    println!("p2: {}", p2_complete);
    println!("{:?}", p2_throw_ways);
    let mut p1_win_count = 0;
    let mut p2_win_count = 0;
    let mut total_universes_p1 = 1;
    let mut total_universes_p2 = 1;
    for round in 1..=10 {
        total_universes_p1 *= 27;
        if let Some(p1_wins_this_throw) = p1_throw_ways.get(&round) {
            total_universes_p1 -= p1_wins_this_throw;
            p1_win_count += p1_wins_this_throw * total_universes_p2;
        }
        total_universes_p2 *= 27;
        if let Some(p2_wins_this_throw) = p2_throw_ways.get(&round) {
            total_universes_p2 -= p2_wins_this_throw;
            p2_win_count += p2_wins_this_throw * total_universes_p1;
        }
        println!(
            "round {:2}: universes p1: {:8}, p2: {:8}",
            round, total_universes_p1, total_universes_p2
        );
    }
    println!("p1 win universes: {}", p1_win_count);
    println!("p2 win universes: {}", p2_win_count);
}
use std::collections::HashMap; #[derive(Default, Debug, Clone)] struct DetDie { state: i32, roll_count: i32, } impl Iterator for DetDie { type Item = i32; fn next(&mut self) -> Option<i32> { self.roll_count += 1; let value = self.state + 1; self.state = (self.state + 1) % 100; Some(value) } } struct Player { position: i32, score: i32, } impl Player { fn new(start: i32) -> Self { Self { position: (start - 1) % 10, score: 0, } } fn advance(&mut self, amount: i32) { self.position = (self.position + amount) % 10; self.score += self.position + 1; } } pub fn step1() { let mut dd: DetDie = Default::default(); let dd = dd.by_ref(); let mut p1 = Player::new(8); let mut p2 = Player::new(6); let losing_score = loop { p1.advance(dd.take(3).sum()); if p1.score >= 1000 { break p2.score; } p2.advance(dd.take(3).sum()); if p2.score >= 1000 { break p1.score; } }; println!("Final result: {}", dd.roll_count * losing_score); } fn run_game( remain: i32, pos: i32, ways: i128, throw_count: i32, throw_way_map: &mut HashMap<i32
throw_count + 1, throw_way_map, ); } return new_ways; } *throw_way_map.entry(throw_count).or_insert(0) += ways; ways } pub fn step2() { let mut p1_throw_ways = HashMap::new(); let p1_complete = run_game(21, 8, 1, 0, &mut p1_throw_ways); println!("p1: {}", p1_complete); println!("{:?}", p1_throw_ways); let mut p2_throw_ways = HashMap::new(); let p2_complete = run_game(21, 6, 1, 0, &mut p2_throw_ways); println!("p2: {}", p2_complete); println!("{:?}", p2_throw_ways); let mut p1_win_count = 0; let mut p2_win_count = 0; let mut total_universes_p1 = 1; let mut total_universes_p2 = 1; for round in 1..=10 { total_universes_p1 *= 27; if let Some(p1_wins_this_throw) = p1_throw_ways.get(&round) { total_universes_p1 -= p1_wins_this_throw; p1_win_count += p1_wins_this_throw * total_universes_p2; } total_universes_p2 *= 27; if let Some(p2_wins_this_throw) = p2_throw_ways.get(&round) { total_universes_p2 -= p2_wins_this_throw; p2_win_count += p2_wins_this_throw * total_universes_p1; } println!( "round {:2}: universes p1: {:8}, p2: {:8}", round, total_universes_p1, total_universes_p2 ); } println!("p1 win universes: {}", p1_win_count); println!("p2 win universes: {}", p2_win_count); }
, i128>, ) -> i128 { let roll_dist: HashMap<i32, i128> = HashMap::from_iter([(3, 1), (4, 3), (5, 6), (6, 7), (7, 6), (8, 3), (9, 1)]); if remain > 0 { let mut new_ways = 0; for roll_sum in 3..=9 { let possibilities = roll_dist.get(&roll_sum).unwrap(); let new_pos = ((pos - 1) + roll_sum) % 10 + 1; new_ways += run_game( remain - new_pos, new_pos, ways * possibilities,
function_block-random_span
[ { "content": "pub fn step1() {\n\n let hm = HeightMap::new(\"inputs/day09.txt\");\n\n\n\n println!(\"{}\", hm.risk_level());\n\n}\n\n\n", "file_path": "src/day09.rs", "rank": 0, "score": 153179.67823020153 }, { "content": "pub fn step1() {\n\n let mut game = Game::new(\"inputs/day04.txt\");\n\n\n\n println!(\"Winning score: {}\", game.play());\n\n}\n\n\n", "file_path": "src/day04.rs", "rank": 1, "score": 153179.67823020153 }, { "content": "pub fn step1() {\n\n let cg = CaveGraph::new(\"inputs/day12.txt\");\n\n\n\n cg.count_paths();\n\n}\n\n\n", "file_path": "src/day12.rs", "rank": 2, "score": 153179.67823020153 }, { "content": "pub fn step1() {\n\n let mut gamma = 0;\n\n let mut epsilon = 0;\n\n let mut one_count = [0; 12];\n\n let mut zero_count = [0; 12];\n\n for reading in read_list(\"inputs/day03.txt\") {\n\n for idx in 0..12 {\n\n match reading.as_str().chars().nth(idx) {\n\n Some('1') => one_count[idx] += 1,\n\n Some('0') => zero_count[idx] += 1,\n\n _ => ()\n\n }\n\n }\n\n }\n\n println!(\"one_count: {:?}\", one_count);\n\n println!(\"zero_count: {:?}\", zero_count);\n\n for idx in 0..12 {\n\n let bit = 11 - idx;\n\n if one_count[idx] > zero_count[idx] {\n\n gamma |= 1 << bit;\n\n } else {\n\n epsilon |= 1 << bit;\n\n }\n\n }\n\n println!(\"gamma: {:?}\", gamma);\n\n println!(\"epsilon: {:?}\", epsilon);\n\n println!(\"epsilon * gamma = {}\", gamma * epsilon);\n\n}\n\n\n", "file_path": "src/day03.rs", "rank": 4, "score": 153179.67823020153 }, { "content": "pub fn step1() {\n\n let mut polymer = Polymer::new(\"inputs/day14.txt\");\n\n\n\n for _ in 0..10 {\n\n polymer.polymerize();\n\n }\n\n println!(\n\n \"{}\",\n\n polymer.most_common_count() - polymer.least_common_count()\n\n );\n\n}\n\n\n", "file_path": "src/day14.rs", "rank": 5, "score": 153179.67823020153 }, { "content": "pub fn step1() {\n\n let mut sim = LanternSim::new(\"inputs/day06.txt\");\n\n\n\n for _ in 0..80 {\n\n sim.step();\n\n }\n\n\n\n println!(\"Fish: {}\", sim.total_fish());\n\n}\n\n\n", "file_path": "src/day06.rs", "rank": 6, "score": 153179.67823020153 }, { "content": "pub fn step1() {\n\n let mut result: Option<Sfn> = None;\n\n\n\n for line in read_list(\"inputs/day18.txt\") {\n\n let next_sfn = Sfn::from_str(&line);\n\n if let Some(sfn) = result {\n\n result = Some(sfn.add(next_sfn).reduce());\n\n } else {\n\n result = Some(next_sfn);\n\n }\n\n }\n\n\n\n // 3359\n\n println!(\"Magnitude: {}\", result.unwrap().magnitude());\n\n}\n\n\n", "file_path": "src/day18.rs", "rank": 7, "score": 153179.67823020153 }, { "content": "pub fn step1() {\n\n let grid = Grid::new(\"inputs/day05.txt\", true);\n\n\n\n println!(\"Orthogonal danger points: {}\", grid.count_danger_points());\n\n}\n\n\n", "file_path": "src/day05.rs", "rank": 8, "score": 153179.67823020153 }, { "content": "pub fn step1() {\n\n let mut om = OctoMap::new(\"inputs/day11.txt\");\n\n\n\n for _ in 0..100 {\n\n om.step();\n\n }\n\n\n\n println!(\"{}\", om.flash_count);\n\n}\n\n\n", "file_path": "src/day11.rs", "rank": 9, "score": 153179.67823020153 }, { "content": "pub fn step1() {\n\n let nav = NavSystem::new(\"inputs/day10.txt\");\n\n\n\n println!(\"{}\", nav.syntax_error_score());\n\n}\n\n\n", "file_path": "src/day10.rs", "rank": 10, "score": 153179.67823020153 }, { "content": "pub fn step1() {\n\n let mut reactor = Reactor::new(\"inputs/day22.txt\");\n\n\n\n reactor.evaluate(false);\n\n // 561032\n\n println!(\n\n \"total_volume (init only) {}\",\n\n reactor.regions.total_volume()\n\n );\n\n}\n\n\n", "file_path": "src/day22.rs", "rank": 11, "score": 
153179.67823020153 }, { "content": "pub fn step1() {\n\n let sm = ScannerMap::new(\"inputs/day19.txt\");\n\n\n\n let (fixed, _) = sm.build_map();\n\n\n\n let mut beacons_fixed = HashSet::new();\n\n for smf in fixed.values() {\n\n for bf in &smf.beacons {\n\n beacons_fixed.insert(bf);\n\n }\n\n }\n\n println!(\"Beacon count: {}\", beacons_fixed.len());\n\n}\n\n\n", "file_path": "src/day19.rs", "rank": 12, "score": 153179.67823020153 }, { "content": "pub fn step1() {\n\n let mut im = Image::new(\"inputs/day20.txt\");\n\n\n\n im = im.enhance();\n\n im = im.enhance();\n\n println!(\"Lit pixels {}\", im.count_lit());\n\n}\n\n\n", "file_path": "src/day20.rs", "rank": 13, "score": 153179.67823020153 }, { "content": "pub fn step1() {\n\n let mut pd = PaperDots::new(\"inputs/day13.txt\");\n\n\n\n let x = pd.instructions.get(0).unwrap();\n\n // Rust question - what's the 'deep-copy' equivalent for a Tuple?\n\n // or should I have pulled the tuple out to a struct deriving Clone?\n\n let x: (String, i32) = (x.0.clone(), x.1);\n\n pd.fold_step(&x);\n\n\n\n println!(\"{}\", pd.dots.len());\n\n}\n\n\n", "file_path": "src/day13.rs", "rank": 14, "score": 153179.67823020153 }, { "content": "pub fn step1() {\n\n let rm = RiskMaze::new(\"inputs/day15.txt\");\n\n // 602\n\n println!(\"{}\", rm.bellman_ford());\n\n}\n\n\n", "file_path": "src/day15.rs", "rank": 15, "score": 153179.67823020153 }, { "content": "pub fn step1() {\n\n let mut count = 0;\n\n for entry in read_list(\"inputs/day08.txt\") {\n\n let mut entry_parts = entry.split(\" | \");\n\n let _controls = entry_parts.next().unwrap();\n\n let outputs = entry_parts.next().unwrap();\n\n for out in outputs.split(' ') {\n\n match out.len() {\n\n 2 | 3 | 4 | 7 => count += 1,\n\n _ => (),\n\n }\n\n }\n\n }\n\n println!(\"Count: {}\", count);\n\n}\n\n\n", "file_path": "src/day08.rs", "rank": 16, "score": 153179.67823020153 }, { "content": "pub fn step1() {\n\n let mut xpos = 0;\n\n let mut depth = 0;\n\n for instr in read_list(\"inputs/day02.txt\") {\n\n let mut instr_iter = instr.split_whitespace();\n\n let movement = instr_iter.next().unwrap();\n\n let amount: i32 = instr_iter.next().unwrap().parse().unwrap();\n\n match movement {\n\n \"up\" => depth -= amount,\n\n \"down\" => depth += amount,\n\n \"forward\" => xpos += amount,\n\n _ => ()\n\n }\n\n }\n\n println!(\"xpos * depth = {}\", xpos * depth);\n\n}\n\n\n", "file_path": "src/day02.rs", "rank": 17, "score": 153179.67823020153 }, { "content": "pub fn step1() {\n\n let mut probe = Probe::new(\"inputs/day17.txt\");\n\n\n\n // 5151\n\n println!(\"Max height: {}\", probe.search());\n\n}\n\n\n", "file_path": "src/day17.rs", "rank": 18, "score": 153179.67823020153 }, { "content": "pub fn step1() {\n\n let mut pr = PacketReader::new(read_hex_chars(\"inputs/day16.txt\"));\n\n\n\n let _ = pr.read_packet();\n\n // 967\n\n println!(\"Total version: {}\", pr.total_ver);\n\n}\n\n\n", "file_path": "src/day16.rs", "rank": 19, "score": 153179.67823020153 }, { "content": "pub fn step1() {\n\n let subs = CrabSumSwarm::new(\"inputs/day07.txt\");\n\n\n\n println!(\"Min Cost: {}\", subs.min_cost_naive(false));\n\n}\n\n\n", "file_path": "src/day07.rs", "rank": 20, "score": 153179.67823020153 }, { "content": "pub fn step1() {\n\n let mut count = 0;\n\n let mut current = None;\n\n for value in read_int_list(\"inputs/day01.txt\").unwrap() {\n\n if let Some(old_val) = current {\n\n if old_val < value {\n\n count += 1;\n\n }\n\n }\n\n current = Some(value);\n\n }\n\n println!(\"Increment count: {}\", count);\n\n}\n\n\n", 
"file_path": "src/day01.rs", "rank": 21, "score": 153179.67823020153 }, { "content": "pub fn read_int_list(filename: &str) -> Result<Vec<i32>, Error> {\n\n let f = File::open(filename)?;\n\n Ok(BufReader::new(f)\n\n .lines()\n\n .map(|l| l.expect(\"Err\"))\n\n .map(|l| l.parse::<i32>().unwrap())\n\n .collect())\n\n}\n\n\n", "file_path": "src/day01.rs", "rank": 22, "score": 138692.8294122426 }, { "content": "#[derive(Debug, Hash, PartialEq, Eq, Clone)]\n\nstruct Delta(i32, i32, i32);\n\n\n\nimpl From<Vec<i32>> for Delta {\n\n fn from(item: Vec<i32>) -> Self {\n\n assert!(item.len() == 3);\n\n Delta(item[0], item[1], item[2])\n\n }\n\n}\n\n\n\nimpl Delta {\n\n fn translate(&self, other: &Self) -> Self {\n\n Self(self.0 + other.0, self.1 + other.1, self.2 + other.2)\n\n }\n\n\n\n fn delta(&self, other: &Self) -> Self {\n\n Self(other.0 - self.0, other.1 - self.1, other.2 - self.2)\n\n }\n\n\n\n fn rotate(&self, dir: i32) -> Self {\n\n match dir {\n", "file_path": "src/day19.rs", "rank": 23, "score": 120661.67689436127 }, { "content": "pub fn read_csv_ints(filename: &str) -> Vec<i32> {\n\n let f = File::open(filename).expect(\"Could not read file\");\n\n let mut line = String::new();\n\n BufReader::new(f).read_line(&mut line).unwrap();\n\n // parse() breaks on line ending, so need to trim that...\n\n line.retain(|c| !c.is_whitespace());\n\n line.split(',').map(|x| x.parse::<i32>().unwrap()).collect()\n\n}\n\n\n", "file_path": "src/day07.rs", "rank": 25, "score": 113493.17719398694 }, { "content": "pub fn step2() {\n\n let hm = HeightMap::new(\"inputs/day09.txt\");\n\n\n\n println!(\"{}\", hm.biggest_basin_mult());\n\n}\n", "file_path": "src/day09.rs", "rank": 26, "score": 99588.71722442278 }, { "content": "pub fn step2() {\n\n let mut count = 0;\n\n let mut previous = 0;\n\n let mut current;\n\n let mut window: Vec<i32> = vec![];\n\n for value in read_int_list(\"inputs/day01.txt\").unwrap() {\n\n if window.len() < 3 {\n\n window.push(value);\n\n continue;\n\n } else {\n\n window.remove(0); // fine for a 3-element list\n\n window.push(value);\n\n }\n\n assert!(window.len() == 3);\n\n current = window.iter().sum();\n\n if current > previous {\n\n count += 1;\n\n }\n\n previous = current;\n\n }\n\n count -= 1; // because we shouldn't count the first 0->anything transition\n\n println!(\"Windowed Increment count: {}\", count);\n\n}\n", "file_path": "src/day01.rs", "rank": 27, "score": 99588.71722442278 }, { "content": "pub fn step2() {\n\n let mut om = OctoMap::new(\"inputs/day11.txt\");\n\n let mut counter = 1; // pesky off-by-one errors...\n\n loop {\n\n if om.step() {\n\n break;\n\n }\n\n counter += 1;\n\n }\n\n\n\n println!(\"{}\", counter);\n\n}\n", "file_path": "src/day11.rs", "rank": 28, "score": 99588.71722442278 }, { "content": "pub fn step2() {\n\n let subs = CrabSumSwarm::new(\"inputs/day07.txt\");\n\n\n\n println!(\"Min Cost - new calc: {}\", subs.min_cost_naive(true));\n\n}\n", "file_path": "src/day07.rs", "rank": 29, "score": 99588.71722442278 }, { "content": "pub fn step2() {\n\n let mut polymer = EfficientPolymer::new(\"inputs/day14.txt\");\n\n\n\n for _ in 0..40 {\n\n polymer.polymerize();\n\n }\n\n println!(\n\n \"{}\",\n\n polymer.most_common_count() - polymer.least_common_count()\n\n );\n\n}\n", "file_path": "src/day14.rs", "rank": 30, "score": 99588.71722442278 }, { "content": "pub fn step2() {\n\n let mut im = Image::new(\"inputs/day20.txt\");\n\n\n\n for _ in 0..50 {\n\n im = im.enhance();\n\n }\n\n println!(\"Lit pixels {}\", im.count_lit());\n\n}\n", "file_path": 
"src/day20.rs", "rank": 31, "score": 99588.71722442278 }, { "content": "pub fn step2() {\n\n let grid = Grid::new(\"inputs/day05.txt\", false);\n\n\n\n println!(\"All danger points: {}\", grid.count_danger_points());\n\n}\n", "file_path": "src/day05.rs", "rank": 33, "score": 99588.71722442278 }, { "content": "pub fn step2() {\n\n let mut probe = Probe::new(\"inputs/day17.txt\");\n\n\n\n // 968\n\n println!(\"Count: {}\", probe.count_good());\n\n}\n", "file_path": "src/day17.rs", "rank": 34, "score": 99588.71722442278 }, { "content": "pub fn step2() {\n\n let mut max_mag = 0;\n\n for line1 in read_list(\"inputs/day18.txt\") {\n\n for line2 in read_list(\"inputs/day18.txt\") {\n\n if line1 == line2 {\n\n continue;\n\n }\n\n let mag = Sfn::from_str(&line1)\n\n .add(Sfn::from_str(&line2))\n\n .reduce()\n\n .magnitude();\n\n if mag > max_mag {\n\n max_mag = mag;\n\n }\n\n }\n\n }\n\n // 4616\n\n println!(\"Max Magnitude: {}\", max_mag);\n\n}\n\n\n", "file_path": "src/day18.rs", "rank": 35, "score": 99588.71722442278 }, { "content": "pub fn step2() {\n\n let mut xpos = 0;\n\n let mut depth = 0;\n\n let mut aim = 0;\n\n for instr in read_list(\"inputs/day02.txt\") {\n\n let mut instr_iter = instr.split_whitespace();\n\n let movement = instr_iter.next().unwrap();\n\n let amount: i32 = instr_iter.next().unwrap().parse().unwrap();\n\n match movement {\n\n \"up\" => aim -= amount,\n\n \"down\" => aim += amount,\n\n \"forward\" => {\n\n xpos += amount;\n\n depth += aim * amount;\n\n }\n\n _ => ()\n\n }\n\n }\n\n println!(\"xpos * depth = {}\", xpos * depth);\n\n}\n", "file_path": "src/day02.rs", "rank": 36, "score": 99588.71722442278 }, { "content": "pub fn step2() {\n\n let mut rm = RiskMaze::new(\"inputs/day15.txt\");\n\n rm.expand();\n\n // 2935\n\n println!(\"{}\", rm.bellman_ford());\n\n}\n", "file_path": "src/day15.rs", "rank": 37, "score": 99588.71722442278 }, { "content": "pub fn step2() {\n\n let mut game = Game::new(\"inputs/day04.txt\");\n\n\n\n println!(\"Losing score: {}\", game.play_to_lose());\n\n}\n", "file_path": "src/day04.rs", "rank": 38, "score": 99588.71722442278 }, { "content": "pub fn step2() {\n\n let mut sim = LanternSim::new(\"inputs/day06.txt\");\n\n\n\n for _ in 0..256 {\n\n sim.step();\n\n }\n\n\n\n println!(\"Fish: {}\", sim.total_fish());\n\n}\n", "file_path": "src/day06.rs", "rank": 39, "score": 99588.71722442278 }, { "content": "pub fn step2() {\n\n let mut pd = PaperDots::new(\"inputs/day13.txt\");\n\n\n\n pd.fold_all();\n\n pd.draw();\n\n}\n", "file_path": "src/day13.rs", "rank": 40, "score": 99588.71722442278 }, { "content": "pub fn step2() {\n\n let mut total = 0;\n\n\n\n assert!(\n\n 5353 == decode_entry(\n\n \"acedgfb cdfbe gcdfa fbcad dab cefabd cdfgeb eafb cagedb ab | cdfeb fcadb cdfeb cdbaf\"\n\n )\n\n );\n\n\n\n for entry in read_list(\"inputs/day08.txt\") {\n\n total += decode_entry(&entry);\n\n }\n\n println!(\"Total: {}\", total);\n\n}\n", "file_path": "src/day08.rs", "rank": 41, "score": 99588.71722442278 }, { "content": "pub fn step2() {\n\n let mut reactor = Reactor::new(\"inputs/day22.txt\");\n\n\n\n reactor.evaluate(true);\n\n // 1322825263376414\n\n println!(\"total_volume {}\", reactor.regions.total_volume());\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_from_str() {\n\n let i = Instruction::from_str(\"on x=1..10,y=11..20,z=-21..30\");\n\n assert_eq!(i.r.x, (1, 10));\n\n assert_eq!(i.r.y, (11, 20));\n\n assert_eq!(i.r.z, (-21, 30));\n\n assert_eq!(i.on, true);\n\n }\n", "file_path": "src/day22.rs", "rank": 
42, "score": 99588.71722442278 }, { "content": "pub fn step2() {\n\n let mut o2_rating = 0;\n\n let mut co2_rating = 0;\n\n\n\n // Part 2\n\n let mut o2_readings: Vec<Vec<_>> = read_list(\"inputs/day03.txt\").iter()\n\n .map(|l| l.as_str().chars().collect())\n\n .collect();\n\n\n\n //let mut co2_readings = o2_readings.clone();\n\n let mut co2_readings: Vec<Vec<_>> = read_list(\"inputs/day03.txt\").iter()\n\n .map(|l| l.as_str().chars().collect())\n\n .collect();\n\n\n\n let mut test_pos = 0;\n\n while o2_readings.len() > 1 && test_pos < 12 {\n\n // annoyance: why aren't vectors their own iterators?\n\n // e.g. iter/collect would be nice if inferred...\n\n //o2_readings = o2_readings.iter().filter(|r| criteria(r[test_pos]));\n\n //\n", "file_path": "src/day03.rs", "rank": 43, "score": 99588.71722442278 }, { "content": "pub fn step2() {\n\n // yes it is silly rebuilding this again for part2 since it's slow,\n\n // but I'm going for consistency of the 'framework'... :)\n\n let sm = ScannerMap::new(\"inputs/day19.txt\");\n\n\n\n let (_, offsets) = sm.build_map();\n\n\n\n let mut max_distance = 0;\n\n for s1 in &offsets {\n\n for s2 in &offsets {\n\n if s1 == s2 {\n\n continue;\n\n }\n\n let distance = s1.mdist(s2);\n\n if distance > max_distance {\n\n max_distance = distance;\n\n }\n\n }\n\n }\n\n println!(\"Max distance: {}\", max_distance);\n\n}\n", "file_path": "src/day19.rs", "rank": 44, "score": 99588.71722442278 }, { "content": "pub fn step2() {\n\n let cg = CaveGraph::new(\"inputs/day12.txt\");\n\n\n\n cg.count_paths_alt();\n\n}\n", "file_path": "src/day12.rs", "rank": 45, "score": 99588.71722442278 }, { "content": "pub fn step2() {\n\n let nav = NavSystem::new(\"inputs/day10.txt\");\n\n\n\n println!(\"{}\", nav.autocomplete_score());\n\n}\n", "file_path": "src/day10.rs", "rank": 46, "score": 99588.71722442278 }, { "content": "pub fn step2() {\n\n let mut pr = PacketReader::new(read_hex_chars(\"inputs/day16.txt\"));\n\n\n\n // 12883091136209\n\n println!(\"Final result: {}\", pr.read_packet());\n\n}\n", "file_path": "src/day16.rs", "rank": 47, "score": 99588.71722442278 }, { "content": "struct BitIter {\n\n input: Box<dyn Iterator<Item = char>>,\n\n current_char: Option<u32>,\n\n nibble_offset: i32,\n\n}\n\n\n\nimpl BitIter {\n\n fn new(input: Vec<char>) -> Self {\n\n Self {\n\n input: Box::new(input.into_iter()),\n\n current_char: None,\n\n nibble_offset: 0,\n\n }\n\n }\n\n}\n\n\n\nimpl Iterator for BitIter {\n\n type Item = i32;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n", "file_path": "src/day16.rs", "rank": 48, "score": 84351.98739851985 }, { "content": "pub fn read_list(filename: &str) -> Vec<String> {\n\n let f = File::open(filename).expect(\"Could not read file\");\n\n BufReader::new(f).lines().map(|l| l.expect(\"Err\")).collect()\n\n}\n\n\n", "file_path": "src/day04.rs", "rank": 49, "score": 74938.88600004507 }, { "content": "pub fn read_list(filename: &str) -> Vec<String> {\n\n let f = File::open(filename).expect(\"Could not read file\");\n\n BufReader::new(f).lines().map(|l| l.expect(\"Err\")).collect()\n\n}\n\n\n", "file_path": "src/day18.rs", "rank": 50, "score": 74938.88600004507 }, { "content": "pub fn read_list(filename: &str) -> Vec<String> {\n\n let f = File::open(filename).expect(\"Could not read file\");\n\n BufReader::new(f).lines().map(|l| l.expect(\"Err\")).collect()\n\n}\n\n\n", "file_path": "src/day10.rs", "rank": 51, "score": 74938.88600004507 }, { "content": "pub fn read_list(filename: &str) -> Vec<String> {\n\n let f = 
File::open(filename).expect(\"Could not read file\");\n\n BufReader::new(f)\n\n .lines()\n\n .map(|l| l.expect(\"Err\"))\n\n .collect()\n\n}\n\n\n", "file_path": "src/day02.rs", "rank": 52, "score": 74938.88600004507 }, { "content": "pub fn read_list(filename: &str) -> Vec<String> {\n\n let f = File::open(filename).expect(\"Could not read file\");\n\n BufReader::new(f).lines().map(|l| l.expect(\"Err\")).collect()\n\n}\n\n\n", "file_path": "src/day14.rs", "rank": 53, "score": 74938.88600004507 }, { "content": "pub fn read_list(filename: &str) -> Vec<String> {\n\n let f = File::open(filename).expect(\"Could not read file\");\n\n BufReader::new(f).lines().map(|l| l.expect(\"Err\")).collect()\n\n}\n\n\n", "file_path": "src/day12.rs", "rank": 54, "score": 74938.88600004507 }, { "content": "pub fn read_list(filename: &str) -> Vec<String> {\n\n let f = File::open(filename).expect(\"Could not read file\");\n\n BufReader::new(f).lines().map(|l| l.expect(\"Err\")).collect()\n\n}\n\n\n", "file_path": "src/day22.rs", "rank": 55, "score": 74938.88600004507 }, { "content": "pub fn read_list(filename: &str) -> Vec<String> {\n\n let f = File::open(filename).expect(\"Could not read file\");\n\n BufReader::new(f).lines().map(|l| l.expect(\"Err\")).collect()\n\n}\n\n\n", "file_path": "src/day20.rs", "rank": 56, "score": 74938.88600004507 }, { "content": "pub fn read_list(filename: &str) -> Vec<String> {\n\n let f = File::open(filename).expect(\"Could not read file\");\n\n BufReader::new(f).lines().map(|l| l.expect(\"Err\")).collect()\n\n}\n\n\n", "file_path": "src/day17.rs", "rank": 57, "score": 74938.88600004507 }, { "content": "pub fn read_list(filename: &str) -> Vec<String> {\n\n let f = File::open(filename).expect(\"Could not read file\");\n\n BufReader::new(f).lines().map(|l| l.expect(\"Err\")).collect()\n\n}\n\n\n", "file_path": "src/day11.rs", "rank": 58, "score": 74938.88600004507 }, { "content": "pub fn read_list(filename: &str) -> Vec<String> {\n\n let f = File::open(filename).expect(\"Could not read file\");\n\n BufReader::new(f).lines().map(|l| l.expect(\"Err\")).collect()\n\n}\n\n\n", "file_path": "src/day08.rs", "rank": 59, "score": 74938.88600004507 }, { "content": "pub fn read_list(filename: &str) -> Vec<String> {\n\n let f = File::open(filename).expect(\"Could not read file\");\n\n BufReader::new(f).lines().map(|l| l.expect(\"Err\")).collect()\n\n}\n\n\n", "file_path": "src/day19.rs", "rank": 60, "score": 74938.88600004507 }, { "content": "pub fn read_list(filename: &str) -> Vec<String> {\n\n let f = File::open(filename).expect(\"Could not read file\");\n\n BufReader::new(f)\n\n .lines()\n\n .map(|l| l.expect(\"Err\"))\n\n .collect()\n\n}\n\n\n", "file_path": "src/day03.rs", "rank": 61, "score": 74938.88600004507 }, { "content": "pub fn read_list(filename: &str) -> Vec<String> {\n\n let f = File::open(filename).expect(\"Could not read file\");\n\n BufReader::new(f).lines().map(|l| l.expect(\"Err\")).collect()\n\n}\n\n\n", "file_path": "src/day13.rs", "rank": 62, "score": 74938.88600004507 }, { "content": "pub fn read_list(filename: &str) -> Vec<String> {\n\n let f = File::open(filename).expect(\"Could not read file\");\n\n BufReader::new(f).lines().map(|l| l.expect(\"Err\")).collect()\n\n}\n\n\n", "file_path": "src/day05.rs", "rank": 63, "score": 74938.88600004507 }, { "content": "pub fn read_list(filename: &str) -> Vec<String> {\n\n let f = File::open(filename).expect(\"Could not read file\");\n\n BufReader::new(f).lines().map(|l| l.expect(\"Err\")).collect()\n\n}\n\n\n", "file_path": 
"src/day15.rs", "rank": 64, "score": 74938.88600004507 }, { "content": "pub fn read_list(filename: &str) -> Vec<String> {\n\n let f = File::open(filename).expect(\"Could not read file\");\n\n BufReader::new(f).lines().map(|l| l.expect(\"Err\")).collect()\n\n}\n\n\n", "file_path": "src/day09.rs", "rank": 65, "score": 74938.88600004507 }, { "content": "pub fn read_hex_chars(filename: &str) -> Vec<char> {\n\n let f = File::open(filename).expect(\"Could not read file\");\n\n let mut line = String::new();\n\n BufReader::new(f).read_line(&mut line).unwrap();\n\n line.chars().collect()\n\n}\n\n\n", "file_path": "src/day16.rs", "rank": 66, "score": 73224.49535561775 }, { "content": "pub fn read_csv_ints(filename: &str) -> Vec<usize> {\n\n let f = File::open(filename).expect(\"Could not read file\");\n\n let mut line = String::new();\n\n BufReader::new(f).read_line(&mut line).unwrap();\n\n // parse() breaks on line ending, so need to trim that...\n\n line.retain(|c| !c.is_whitespace());\n\n line.split(',').map(|x| {x.parse::<usize>().unwrap()}).collect()\n\n}\n\n\n", "file_path": "src/day06.rs", "rank": 67, "score": 73224.49535561775 }, { "content": "fn digitize(value: &str) -> u8 {\n\n let mut result = 0;\n\n for ch in value.chars() {\n\n match ch {\n\n 'a' => result |= 1,\n\n 'b' => result |= 2,\n\n 'c' => result |= 4,\n\n 'd' => result |= 8,\n\n 'e' => result |= 16,\n\n 'f' => result |= 32,\n\n 'g' => result |= 64,\n\n _ => unimplemented!(\"invalid char\"),\n\n }\n\n }\n\n result\n\n}\n\n\n", "file_path": "src/day08.rs", "rank": 68, "score": 68704.98235411724 }, { "content": "#[derive(Debug, Clone)]\n\nstruct Image {\n\n pixels: Vec<Vec<u8>>,\n\n algorithm: Vec<u8>,\n\n background: u8,\n\n line_width: usize,\n\n}\n\n\n\nimpl Image {\n\n fn new(filename: &str) -> Self {\n\n let mut pixels = vec![];\n\n let mut algorithm = vec![];\n\n let background = 0u8;\n\n let mut line_width = 0;\n\n\n\n for line in read_list(filename) {\n\n if line.is_empty() {\n\n continue;\n\n }\n\n if algorithm.is_empty() {\n\n algorithm = line\n", "file_path": "src/day20.rs", "rank": 69, "score": 57535.25288501772 }, { "content": "#[derive(Debug, Copy, Clone)]\n\nstruct Line {\n\n start: Point,\n\n end: Point,\n\n\n\n dx: i32,\n\n dy: i32,\n\n}\n\n\n\nimpl Line {\n\n fn from_line(l: &str) -> Self {\n\n let mut line_iter = l.split(\" -> \");\n\n let mut first = line_iter\n\n .next()\n\n .unwrap()\n\n .split(',')\n\n .map(|x| x.parse::<i32>().unwrap());\n\n let mut second = line_iter\n\n .next()\n\n .unwrap()\n\n .split(',')\n", "file_path": "src/day05.rs", "rank": 70, "score": 57535.042081826905 }, { "content": "#[derive(Debug, Copy, Clone)]\n\nstruct Instruction {\n\n r: Region,\n\n on: bool,\n\n}\n\n\n\nimpl Instruction {\n\n fn from_str(s: &str) -> Self {\n\n let mut words = s.split_whitespace();\n\n let on = match words.next() {\n\n Some(\"on\") => true,\n\n Some(\"off\") => false,\n\n _ => panic!(\"Invalid region state\"),\n\n };\n\n let r = Region::from_str(words.next().unwrap());\n\n Instruction { r, on }\n\n }\n\n}\n\n\n", "file_path": "src/day22.rs", "rank": 71, "score": 57535.042081826905 }, { "content": "#[derive(Debug, Default, PartialEq, Clone)]\n\nstruct Scanner {\n\n beacons: HashSet<Delta>,\n\n\n\n ident: i32,\n\n}\n\n\n\nimpl Scanner {\n\n fn new(lines: Vec<String>, ident: i32) -> Self {\n\n let mut beacons = HashSet::new();\n\n for beacon in lines {\n\n let d: Delta = beacon\n\n .split(',')\n\n .map(|x| x.parse::<i32>().unwrap())\n\n .collect::<Vec<i32>>()\n\n .into();\n\n\n\n beacons.insert(d);\n\n }\n\n 
Self { beacons, ident }\n\n }\n", "file_path": "src/day19.rs", "rank": 72, "score": 57534.656631365026 }, { "content": "#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]\n\nstruct Region {\n\n x: (i64, i64),\n\n y: (i64, i64),\n\n z: (i64, i64),\n\n}\n\n\n\nimpl Region {\n\n fn from_str(s: &str) -> Self {\n\n let mut extents = s.split(',');\n\n let x = extents.next().unwrap().strip_prefix(\"x=\").unwrap();\n\n let y = extents.next().unwrap().strip_prefix(\"y=\").unwrap();\n\n let z = extents.next().unwrap().strip_prefix(\"z=\").unwrap();\n\n\n\n let pairify = |v: Vec<i64>| {\n\n let mut vi = v.iter();\n\n (*vi.next().unwrap(), *vi.next().unwrap())\n\n };\n\n Region {\n\n x: pairify(x.split(\"..\").map(|v| v.parse::<i64>().unwrap()).collect()),\n\n y: pairify(y.split(\"..\").map(|v| v.parse::<i64>().unwrap()).collect()),\n", "file_path": "src/day22.rs", "rank": 73, "score": 57534.31287110738 }, { "content": "#[derive(Debug, PartialEq, Clone, Copy, Hash, Eq)]\n\nstruct Point {\n\n pub x: i32,\n\n pub y: i32,\n\n}\n\n\n\nimpl Point {\n\n fn new(x: i32, y: i32) -> Self {\n\n Self { x, y }\n\n }\n\n}\n\n\n", "file_path": "src/day05.rs", "rank": 74, "score": 57534.31287110738 }, { "content": "#[derive(Debug)]\n\nstruct Reactor {\n\n instructions: Vec<Instruction>,\n\n\n\n regions: RegionSet,\n\n}\n\n\n\nimpl Reactor {\n\n fn new(filename: &str) -> Self {\n\n let mut instructions = vec![];\n\n for line in read_list(filename) {\n\n let instr = Instruction::from_str(&line);\n\n instructions.push(instr);\n\n }\n\n\n\n let regions = RegionSet::new();\n\n\n\n Self {\n\n instructions,\n\n regions,\n\n }\n", "file_path": "src/day22.rs", "rank": 75, "score": 57531.88132514665 }, { "content": "#[derive(Debug)]\n\nstruct Polymer {\n\n template: String,\n\n rules: HashMap<String, String>,\n\n}\n\n\n\nimpl Polymer {\n\n fn new(filename: &str) -> Self {\n\n let mut lines = read_list(filename).into_iter();\n\n\n\n let template = lines.next().unwrap();\n\n lines.next(); // skip blank line\n\n\n\n let mut rules = HashMap::new();\n\n for line in lines {\n\n let mut rule = line.split(\" -> \");\n\n let before = rule.next().unwrap();\n\n let after = rule.next().unwrap();\n\n\n\n // Rust: seems there's a choice between noisy 'to_string()'\n\n // everywhere or noisy lifetimes everywhere?\n", "file_path": "src/day14.rs", "rank": 76, "score": 57531.88132514665 }, { "content": "#[derive(Debug)]\n\nstruct Grid {\n\n lines: Vec<Line>,\n\n}\n\n\n\nimpl Grid {\n\n fn new(filename: &str, orthogonal: bool) -> Self {\n\n let mut lines = vec![];\n\n for line in read_list(filename) {\n\n let candidate = Line::from_line(&line);\n\n if !orthogonal || (candidate.is_horizontal() || candidate.is_vertical()) {\n\n lines.push(candidate);\n\n }\n\n }\n\n\n\n Self { lines }\n\n }\n\n\n\n fn count_danger_points(&self) -> i32 {\n\n let mut count = 0;\n\n let mut map = HashMap::new();\n", "file_path": "src/day05.rs", "rank": 77, "score": 57531.88132514665 }, { "content": "#[derive(Debug, Default)]\n\nstruct Probe {\n\n xpos: i32,\n\n ypos: i32,\n\n\n\n xvel: i32,\n\n yvel: i32,\n\n\n\n targetx: (i32, i32),\n\n targety: (i32, i32),\n\n max_height: i32,\n\n}\n\n\n\nimpl Probe {\n\n fn new(filename: &str) -> Self {\n\n let desc = &read_list(filename)[0];\n\n\n\n let mut parts = desc.split(' ');\n\n parts.next(); // 'target'\n\n parts.next(); // 'area:'\n\n let tx = parts.next().unwrap(); // xx..xx,\n", "file_path": "src/day17.rs", "rank": 78, "score": 57531.76935010291 }, { "content": "struct Game {\n\n boards: Vec<BingoBoard>,\n\n sequence: 
Vec<i32>,\n\n}\n\n\n\nimpl Game {\n\n fn new(filename: &str) -> Self {\n\n let mut boards = vec![];\n\n let mut sequence = vec![];\n\n let mut board: Vec<i32> = vec![];\n\n for (idx, line) in read_list(filename).iter().enumerate() {\n\n if idx == 0 {\n\n sequence = line.split(',').map(|x| x.parse::<i32>().unwrap()).collect();\n\n } else {\n\n if line.len() == 0 {\n\n continue;\n\n }\n\n board.extend::<Vec<i32>>(\n\n line.split_whitespace()\n\n .map(|x| x.parse::<i32>().unwrap())\n", "file_path": "src/day04.rs", "rank": 79, "score": 57528.300099777334 }, { "content": "#[derive(Debug)]\n\nstruct HeightMap {\n\n height: Vec<Vec<u8>>,\n\n\n\n line_width: usize,\n\n}\n\n\n\nimpl HeightMap {\n\n fn new(filename: &str) -> Self {\n\n let mut height = vec![];\n\n let mut line_width = 0;\n\n for line in read_list(filename) {\n\n line_width = line.len(); // don't care about repeated setting\n\n height.push(\n\n line.chars()\n\n .map(|x| x.to_string().parse::<u8>().unwrap())\n\n .collect(),\n\n );\n\n }\n\n Self { height, line_width }\n\n }\n", "file_path": "src/day09.rs", "rank": 81, "score": 55946.816276851554 }, { "content": "#[derive(Debug)]\n\nstruct RiskMaze {\n\n risk: Vec<Vec<i32>>,\n\n\n\n line_width: usize,\n\n}\n\n\n\nimpl RiskMaze {\n\n fn new(filename: &str) -> Self {\n\n let mut risk = vec![];\n\n let mut line_width = 0;\n\n for line in read_list(filename) {\n\n line_width = line.len(); // don't care about repeated setting\n\n risk.push(\n\n line.chars()\n\n .map(|x| x.to_string().parse::<i32>().unwrap())\n\n .collect(),\n\n );\n\n }\n\n Self { risk, line_width }\n\n }\n", "file_path": "src/day15.rs", "rank": 82, "score": 55946.816276851554 }, { "content": "#[derive(Debug)]\n\nstruct BingoBoard {\n\n numbers: Vec<i32>,\n\n // marked is a bitmap - bit 0 is the first (top-left) number\n\n marked: u32,\n\n}\n\n\n\nimpl BingoBoard {\n\n fn new(numbers: Vec<i32>) -> Self {\n\n Self { numbers, marked: 0 }\n\n }\n\n\n\n fn complete(&self) -> bool {\n\n let bingo_mask = vec![\n\n 0b00000_00000_00000_00000_11111,\n\n 0b00000_00000_00000_11111_00000,\n\n 0b00000_00000_11111_00000_00000,\n\n 0b00000_11111_00000_00000_00000,\n\n 0b11111_00000_00000_00000_00000,\n\n 0b00001_00001_00001_00001_00001,\n\n 0b00010_00010_00010_00010_00010,\n", "file_path": "src/day04.rs", "rank": 83, "score": 55946.816276851554 }, { "content": "#[derive(Debug)]\n\nstruct OctoMap {\n\n energy: [[u8; 10]; 10],\n\n\n\n flash_count: i32,\n\n}\n\n\n\nimpl OctoMap {\n\n fn new(filename: &str) -> Self {\n\n let mut energy: [[u8; 10]; 10] = Default::default();\n\n for (idx, line) in read_list(filename).iter().enumerate() {\n\n energy[idx] = line\n\n .chars()\n\n .map(|x| x.to_string().parse::<u8>().unwrap())\n\n .collect::<Vec<u8>>()\n\n .try_into()\n\n .unwrap();\n\n }\n\n Self {\n\n energy,\n\n flash_count: 0,\n", "file_path": "src/day11.rs", "rank": 84, "score": 55946.816276851554 }, { "content": "#[derive(Debug)]\n\nstruct ScannerMap {\n\n scanners: Vec<Scanner>,\n\n}\n\n\n\nimpl ScannerMap {\n\n fn new(filename: &str) -> Self {\n\n let mut scanners: Vec<Scanner> = vec![];\n\n let mut delta_lines: Vec<String> = vec![];\n\n let mut s_id = 0;\n\n for line in read_list(filename) {\n\n if line.starts_with(\"---\") {\n\n delta_lines = vec![];\n\n } else if line.is_empty() {\n\n scanners.push(Scanner::new(delta_lines.clone(), s_id));\n\n s_id += 1;\n\n } else {\n\n delta_lines.push(line);\n\n }\n\n }\n\n // don't forget the last set of data\n", "file_path": "src/day19.rs", "rank": 85, "score": 55946.816276851554 }, { "content": 
"#[derive(Debug)]\n\nstruct CaveGraph {\n\n // Using String here rather than &str to avoid caring about lifetimes.\n\n adj_map: HashMap<String, HashSet<String>>,\n\n}\n\n\n\nimpl CaveGraph {\n\n fn new(filename: &str) -> Self {\n\n let mut adj_map = HashMap::new();\n\n for line in read_list(filename) {\n\n // There must be a better way of 'splitting to a pair' (without\n\n // resorting to regex...)\n\n let mut line_parts = line.split('-');\n\n let from = line_parts.next().unwrap();\n\n let to = line_parts.next().unwrap();\n\n\n\n // since this is not a directed graph, add both directions\n\n let from_set = adj_map.entry(from.to_string()).or_insert_with(HashSet::new);\n\n from_set.insert(to.to_string());\n\n\n\n let to_set = adj_map.entry(to.to_string()).or_insert_with(HashSet::new);\n", "file_path": "src/day12.rs", "rank": 86, "score": 55946.816276851554 }, { "content": "#[derive(Debug)]\n\nstruct RegionSet {\n\n regions: HashSet<Region>,\n\n}\n\n\n\nimpl RegionSet {\n\n fn new() -> Self {\n\n Self {\n\n regions: HashSet::new(),\n\n }\n\n }\n\n\n\n fn total_volume(&self) -> i64 {\n\n self.check_disjoint();\n\n self.regions.iter().map(|r| r.volume()).sum()\n\n }\n\n\n\n fn check_disjoint(&self) {\n\n for r in &self.regions {\n\n for s in &self.regions {\n\n if s == r {\n", "file_path": "src/day22.rs", "rank": 87, "score": 55946.816276851554 }, { "content": "#[derive(Debug)]\n\nstruct EfficientPolymer {\n\n pair_count: HashMap<String, i64>,\n\n element_count: HashMap<char, i64>,\n\n rules: HashMap<String, String>,\n\n}\n\n\n\nimpl EfficientPolymer {\n\n fn new(filename: &str) -> Self {\n\n let mut lines = read_list(filename).into_iter();\n\n\n\n let mut pair_count = HashMap::new();\n\n let mut element_count = HashMap::new();\n\n\n\n let template = lines.next().unwrap();\n\n let mut ch_iter = template.chars();\n\n let mut last_ch = ch_iter.next().unwrap();\n\n\n\n // I missed this to begin with, resulting in an off-by-one error :(\n\n *element_count.entry(last_ch).or_insert(0) += 1;\n\n\n", "file_path": "src/day14.rs", "rank": 88, "score": 55946.816276851554 }, { "content": "#[derive(Debug)]\n\nstruct NavSystem {\n\n lines: Vec<String>,\n\n}\n\n\n\nimpl NavSystem {\n\n fn new(filename: &str) -> Self {\n\n Self {\n\n lines: read_list(filename),\n\n }\n\n }\n\n\n\n // given line, return remaining stack and optional illegal character\n\n fn preprocess(&self, line: &str) -> (Vec<char>, Option<char>) {\n\n let mut stack = vec![];\n\n let mut illegal = None;\n\n for ch in line.chars() {\n\n match ch {\n\n '(' | '[' | '{' | '<' => stack.push(ch),\n\n // These following patterns are painful...\n\n ')' => {\n", "file_path": "src/day10.rs", "rank": 89, "score": 55946.816276851554 }, { "content": "#[derive(Debug)]\n\nstruct LanternSim {\n\n remaining: [usize; 9],\n\n}\n\n\n\nimpl LanternSim {\n\n fn new(filename: &str) -> Self {\n\n let fish = read_csv_ints(filename);\n\n let mut remaining = [0; 9];\n\n for f in fish {\n\n remaining[f] += 1;\n\n }\n\n Self {\n\n remaining\n\n }\n\n }\n\n\n\n fn step(&mut self) {\n\n let mut next_remaining = [0; 9];\n\n for idx in 0..9 {\n\n if idx == 0 {\n", "file_path": "src/day06.rs", "rank": 90, "score": 55946.816276851554 }, { "content": "#[derive(Debug)]\n\nstruct PaperDots {\n\n dots: HashSet<(i32, i32)>,\n\n\n\n instructions: Vec<(String, i32)>,\n\n}\n\n\n\nimpl PaperDots {\n\n fn new(filename: &str) -> Self {\n\n let mut dots = HashSet::new();\n\n let mut instructions = Vec::new();\n\n // Rust TIL: I've been using iter() too much when I should be using\n\n // 
into_iter()...\n\n let mut lines = read_list(filename).into_iter();\n\n // Rust TIL: iteration takes ownership of iterator, so can't just\n\n // re-use after break. Using `by_ref()` solves that.\n\n // https://stackoverflow.com/a/57172670\n\n for line in lines.by_ref() {\n\n if line.is_empty() {\n\n break;\n\n }\n", "file_path": "src/day13.rs", "rank": 91, "score": 55946.816276851554 }, { "content": "struct PacketReader {\n\n bits: BitIter,\n\n total_ver: i128,\n\n bit_pos: i32,\n\n}\n\n\n\nimpl PacketReader {\n\n fn new(input: Vec<char>) -> Self {\n\n Self {\n\n bits: BitIter::new(input),\n\n total_ver: 0,\n\n bit_pos: 0,\n\n }\n\n }\n\n\n\n fn read_packet(&mut self) -> i128 {\n\n let pver = self.bits_value(3);\n\n self.total_ver += pver as i128;\n\n let ptype = self.bits_value(3);\n\n if ptype == 4 {\n", "file_path": "src/day16.rs", "rank": 92, "score": 55943.23505148223 }, { "content": "struct LineSpan {\n\n line: Line,\n\n cursor: Option<Point>,\n\n}\n\n\n\nimpl LineSpan {\n\n fn new(line: Line) -> Self {\n\n Self {\n\n line,\n\n cursor: Some(line.start),\n\n }\n\n }\n\n}\n\n\n\nimpl Iterator for LineSpan {\n\n type Item = Point;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n let item = self.cursor;\n\n if self.cursor == Some(self.line.end) {\n\n self.cursor = None;\n\n } else if let Some(mut cursor) = item {\n\n cursor.x += self.line.dx.signum();\n\n cursor.y += self.line.dy.signum();\n\n self.cursor = Some(cursor);\n\n }\n\n item\n\n }\n\n}\n\n\n", "file_path": "src/day05.rs", "rank": 93, "score": 55943.23505148223 }, { "content": "#[derive(Debug)]\n\nstruct CrabSumSwarm {\n\n crabsubs: Vec<i32>,\n\n}\n\n\n\nimpl CrabSumSwarm {\n\n fn new(filename: &str) -> Self {\n\n let crabsubs = read_csv_ints(filename);\n\n Self { crabsubs }\n\n }\n\n\n\n fn cost(&self, pos: i32) -> i32 {\n\n let mut total = 0;\n\n for cs in &self.crabsubs {\n\n total += (cs - pos).abs();\n\n }\n\n total\n\n }\n\n\n\n fn cost_2(&self, pos: i32) -> i32 {\n\n let mut total = 0;\n", "file_path": "src/day07.rs", "rank": 94, "score": 54524.28668430167 }, { "content": "fn main() {\n\n day22::step1();\n\n day22::step2();\n\n}\n", "file_path": "src/main.rs", "rank": 95, "score": 53632.999195697426 }, { "content": "fn count_v<T: PartialEq>(i: &Vec<Vec<T>>, bit_pos: usize, value: T) -> usize {\n\n let mut c = 0;\n\n for x in i.iter() {\n\n if x[bit_pos] == value {\n\n c += 1;\n\n }\n\n }\n\n c\n\n //i.iter().filter(|&x| *x == value).count()\n\n}\n\n\n", "file_path": "src/day03.rs", "rank": 97, "score": 45898.692079827466 }, { "content": "fn decode_entry(entry: &str) -> usize {\n\n let mut entry_parts = entry.split(\" | \");\n\n let mut map = HashMap::new();\n\n let controls = entry_parts.next().unwrap().split(' ').map(|s| digitize(s));\n\n let outputs = entry_parts.next().unwrap().split(' ').map(|s| digitize(s));\n\n\n\n let mut one_pattern = 0;\n\n let mut four_pattern = 0;\n\n for sample in controls.clone() {\n\n match sample.count_ones() {\n\n 2 => {\n\n one_pattern = sample;\n\n map.insert(sample, 1);\n\n } // 2 segments => '1'\n\n 3 => {\n\n map.insert(sample, 7);\n\n } // 3 segments => '7'\n\n 4 => {\n\n four_pattern = sample;\n\n map.insert(sample, 4);\n", "file_path": "src/day08.rs", "rank": 98, "score": 41973.31642002293 }, { "content": " self.read_literal()\n\n } else {\n\n self.read_operator(ptype)\n\n }\n\n }\n\n\n\n fn read_literal(&mut self) -> i128 {\n\n let mut literal_value: i128 = 0;\n\n // read 5-bit blocks and accumulate in literal_value\n\n loop {\n\n let x: i128 = 
self.bits_value(5).into();\n\n literal_value = (literal_value << 4) | (x & 15);\n\n if x & 16 != 16 {\n\n break literal_value;\n\n }\n\n }\n\n }\n\n\n\n fn read_operator(&mut self, ptype: i32) -> i128 {\n\n let len_type = self.bits_value(1);\n", "file_path": "src/day16.rs", "rank": 99, "score": 16.497980998819457 } ]
Rust
src/types.rs
pitkley/i3nator
c79dc059d7f174ac0ef4823769149dec537ed063
use crate::{configfiles::ConfigFile, layouts::Layout as ManagedLayout, shlex}; use serde::{ de::{self, Deserializer}, Deserialize, }; #[cfg(unix)] use std::os::unix::ffi::OsStrExt; use std::{ borrow::Cow, ffi::{OsStr, OsString}, fmt, marker::PhantomData, path::{Path, PathBuf}, time::Duration, }; #[derive(Deserialize, Debug, Clone, PartialEq, Eq)] #[serde(deny_unknown_fields)] pub struct Config { pub general: General, pub applications: Vec<Application>, } #[derive(Deserialize, Debug, Clone, PartialEq, Eq)] #[serde(deny_unknown_fields)] pub struct General { #[serde(default, deserialize_with = "deserialize_opt_pathbuf_with_tilde")] pub working_directory: Option<PathBuf>, pub workspace: Option<String>, #[serde(deserialize_with = "deserialize_layout")] pub layout: Layout, } #[derive(Deserialize, Debug, Clone, PartialEq, Eq)] #[serde(rename_all = "lowercase")] pub enum Layout { Contents(String), Managed(String), Path(PathBuf), } #[derive(Deserialize, Debug, Clone, PartialEq, Eq)] #[serde(deny_unknown_fields)] pub struct Application { #[serde(deserialize_with = "deserialize_application_command")] pub command: ApplicationCommand, #[serde(default, deserialize_with = "deserialize_opt_pathbuf_with_tilde")] pub working_directory: Option<PathBuf>, #[serde(default, deserialize_with = "deserialize_opt_exec")] pub exec: Option<Exec>, } #[derive(Deserialize, Debug, Default, Clone, PartialEq, Eq)] pub struct ApplicationCommand { pub program: String, #[serde(default)] pub args: Vec<String>, } #[derive(Deserialize, Debug, Clone, PartialEq, Eq)] pub struct Exec { pub commands: Vec<String>, #[serde(default = "default_exec_type")] pub exec_type: ExecType, #[serde(default = "default_timeout", deserialize_with = "deserialize_duration")] pub timeout: Duration, } fn default_exec_type() -> ExecType { ExecType::Text } fn default_timeout() -> Duration { Duration::from_secs(5) } #[derive(Deserialize, Debug, Clone, PartialEq, Eq)] #[serde(rename_all = "snake_case")] pub enum ExecType { Text, TextNoReturn, Keys, } struct Phantom<T>(PhantomData<T>); fn deserialize_application_command<'de, D>(deserializer: D) -> Result<ApplicationCommand, D::Error> where D: Deserializer<'de>, { impl<'de> de::Visitor<'de> for Phantom<ApplicationCommand> { type Value = ApplicationCommand; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("string, sequence of strings or map") } fn visit_str<E>(self, value: &str) -> Result<Self::Value, E> where E: de::Error, { match shlex::split(value) { Some(mut v) => { if v.is_empty() { Err(de::Error::custom("command can not be empty")) } else { Ok(ApplicationCommand { program: v.remove(0).to_owned(), args: v.into_iter().map(str::to_owned).collect::<Vec<_>>(), }) } } None => Err(de::Error::custom("command can not be empty")), } } fn visit_seq<S>(self, visitor: S) -> Result<Self::Value, S::Error> where S: de::SeqAccess<'de>, { let mut v: Vec<String> = de::Deserialize::deserialize(de::value::SeqAccessDeserializer::new(visitor))?; if v.is_empty() { Err(de::Error::custom("command can not be empty")) } else { Ok(ApplicationCommand { program: v.remove(0), args: v, }) } } fn visit_map<M>(self, visitor: M) -> Result<Self::Value, M::Error> where M: de::MapAccess<'de>, { de::Deserialize::deserialize(de::value::MapAccessDeserializer::new(visitor)) } } deserializer.deserialize_any(Phantom::<ApplicationCommand>(PhantomData)) } fn deserialize_duration<'de, D>(deserializer: D) -> Result<Duration, D::Error> where D: Deserializer<'de>, { impl<'de> de::Visitor<'de> for Phantom<Duration> { 
type Value = Duration; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("integer or map") } fn visit_i64<E>(self, value: i64) -> Result<Self::Value, E> where E: de::Error, { Ok(Duration::from_secs(value as u64)) } fn visit_map<M>(self, visitor: M) -> Result<Self::Value, M::Error> where M: de::MapAccess<'de>, { de::Deserialize::deserialize(de::value::MapAccessDeserializer::new(visitor)) } } deserializer.deserialize_any(Phantom::<Duration>(PhantomData)) } fn deserialize_exec<'de, D>(deserializer: D) -> Result<Exec, D::Error> where D: Deserializer<'de>, { impl<'de> de::Visitor<'de> for Phantom<Exec> { type Value = Exec; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("string, sequence of strings or map") } fn visit_str<E>(self, value: &str) -> Result<Self::Value, E> where E: de::Error, { Ok(Exec { commands: vec![value.to_owned()], exec_type: default_exec_type(), timeout: default_timeout(), }) } fn visit_seq<S>(self, visitor: S) -> Result<Self::Value, S::Error> where S: de::SeqAccess<'de>, { let v: Vec<String> = de::Deserialize::deserialize(de::value::SeqAccessDeserializer::new(visitor))?; if v.is_empty() { Err(de::Error::custom("commands can not be empty")) } else { Ok(Exec { commands: v, exec_type: default_exec_type(), timeout: default_timeout(), }) } } fn visit_map<M>(self, visitor: M) -> Result<Self::Value, M::Error> where M: de::MapAccess<'de>, { de::Deserialize::deserialize(de::value::MapAccessDeserializer::new(visitor)) } } deserializer.deserialize_any(Phantom::<Exec>(PhantomData)) } fn deserialize_opt_exec<'de, D>(deserializer: D) -> Result<Option<Exec>, D::Error> where D: Deserializer<'de>, { deserialize_exec(deserializer).map(Some) } fn deserialize_layout<'de, D>(deserializer: D) -> Result<Layout, D::Error> where D: Deserializer<'de>, { impl<'de> de::Visitor<'de> for Phantom<Layout> { type Value = Layout; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("string") } fn visit_str<E>(self, value: &str) -> Result<Self::Value, E> where E: de::Error, { if value.find('{').is_some() { Ok(Layout::Contents(value.into())) } else if ManagedLayout::open(value).is_ok() { Ok(Layout::Managed(value.to_owned())) } else { Ok(Layout::Path(tilde(value).into_owned())) } } } deserializer.deserialize_any(Phantom::<Layout>(PhantomData)) } fn deserialize_pathbuf_with_tilde<'de, D>(deserializer: D) -> Result<PathBuf, D::Error> where D: Deserializer<'de>, { let pathbuf: PathBuf = de::Deserialize::deserialize(deserializer)?; Ok(tilde(&pathbuf).into_owned()) } fn deserialize_opt_pathbuf_with_tilde<'de, D>(deserializer: D) -> Result<Option<PathBuf>, D::Error> where D: Deserializer<'de>, { deserialize_pathbuf_with_tilde(deserializer).map(Some) } #[doc(hidden)] fn tilde_with_context<SI: ?Sized, P, HD>(input: &SI, home_dir: HD) -> Cow<Path> where SI: AsRef<Path>, P: AsRef<Path>, HD: FnOnce() -> Option<P>, { let input_str = input.as_ref(); let bytes = input_str.as_os_str().as_bytes(); if bytes[0] == b'~' { let input_after_tilde = &bytes[1..]; if input_after_tilde.is_empty() || input_after_tilde[0] == b'/' { if let Some(hd) = home_dir() { let mut s = OsString::new(); s.push(hd.as_ref().to_path_buf()); s.push(OsStr::from_bytes(input_after_tilde)); PathBuf::from(s).into() } else { input_str.into() } } else { input_str.into() } } else { input_str.into() } } fn tilde<SI: ?Sized>(input: &SI) -> Cow<Path> where SI: AsRef<Path>, { tilde_with_context(input, dirs_next::home_dir) }
use crate::{configfiles::ConfigFile, layouts::Layout as ManagedLayout, shlex}; use serde::{ de::{self, Deserializer}, Deserialize, }; #[cfg(unix)] use std::os::unix::ffi::OsStrExt; use std::{ borrow::Cow, ffi::{OsStr, OsString}, fmt, marker::PhantomData, path::{Path, PathBuf}, time::Duration, }; #[derive(Deserialize, Debug, Clone, PartialEq, Eq)] #[serde(deny_unknown_fields)] pub struct Config { pub general: General, pub applications: Vec<Application>, } #[derive(Deserialize, Debug, Clone, PartialEq, Eq)] #[serde(deny_unknown_fields)] pub struct General { #[serde(default, deserialize_with = "deserialize_opt_pathbuf_with_tilde")] pub working_directory: Option<PathBuf>, pub workspace: Option<String>, #[serde(deserialize_with = "deserialize_layout")] pub layout: Layout, } #[derive(Deserialize, Debug, Clone, PartialEq, Eq)] #[serde(rename_all = "lowercase")] pub enum Layout { Contents(String), Managed(String), Path(PathBuf), } #[derive(Deserialize, Debug, Clone, PartialEq, Eq)] #[serde(deny_unknown_fields)] pub struct Application { #[serde(deserialize_with = "deserialize_application_command")] pub command: ApplicationCommand, #[serde(default, deserialize_with = "deserialize_opt_pathbuf_with_tilde")] pub working_directory: Option<PathBuf>, #[serde(default, deserialize_with = "deserialize_opt_exec")] pub exec: Option<Exec>, } #[derive(Deserialize, Debug, Default, Clone, PartialEq, Eq)] pub struct ApplicationCommand { pub program: String, #[serde(default)] pub args: Vec<String>, } #[derive(Deserialize, Debug, Clone, PartialEq, Eq)] pub struct Exec { pub commands: Vec<String>, #[serde(default = "default_exec_type")] pub exec_type: ExecType, #[serde(default = "default_timeout", deserialize_with = "deserialize_duration")] pub timeout: Duration, } fn default_exec_type() -> ExecType { ExecType::Text } fn default_timeout() -> Duration { Duration::from_secs(5) } #[derive(Deserialize, Debug, Clone, PartialEq, Eq)] #[serde(rename_all = "snake_case")] pub enum ExecType { Text, TextNoReturn, Keys, } struct Phantom<T>(PhantomData<T>); fn deserialize_application_command<'de, D>(deserializer: D) -> Result<ApplicationCommand, D::Error> where D: Deserializer<'de>, { impl<'de> de::Visitor<'de> for Phantom<ApplicationCommand> { type Value = ApplicationCommand; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("string, sequence of strings or map") } fn visit_str<E>(self, value: &str) -> Result<Self::Value, E> where E: de::Error, { match shlex::split(value) { Some(mut v) => { if v.is_empty() { Err(de::Error::custom("command can not be empty")) } else { Ok(ApplicationCommand { program: v.remove(0).to_owned(), args: v.into_iter().map(str::to_owned).collect::<Vec<_>>(), }) } } None => Err(de::Error::custom("command can not be empty")), } } fn visit_seq<S>(self, visitor: S) -> Result<Self::Value, S::Error> where S: de::SeqAccess<'de>, { let mut v: Vec<String> = de::Deserialize::deserialize(de::value::SeqAccessDeserializer::new(visitor))?; if v.is_empty() { Err(de::Error::custom("command can not be empty")) } else { Ok(ApplicationCommand { program: v.remove(0), args: v, }) } } fn visit_map<M>(self, visitor: M) -> Result<Self::Value, M::Error> where M: de::MapAccess<'de>, { de::Deserialize::deserialize(de::value::MapAccessDeserializer::new(visitor)) } } deserializer.deserialize_any(Phantom::<ApplicationCommand>(PhantomData)) }
fn deserialize_exec<'de, D>(deserializer: D) -> Result<Exec, D::Error> where D: Deserializer<'de>, { impl<'de> de::Visitor<'de> for Phantom<Exec> { type Value = Exec; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("string, sequence of strings or map") } fn visit_str<E>(self, value: &str) -> Result<Self::Value, E> where E: de::Error, { Ok(Exec { commands: vec![value.to_owned()], exec_type: default_exec_type(), timeout: default_timeout(), }) } fn visit_seq<S>(self, visitor: S) -> Result<Self::Value, S::Error> where S: de::SeqAccess<'de>, { let v: Vec<String> = de::Deserialize::deserialize(de::value::SeqAccessDeserializer::new(visitor))?; if v.is_empty() { Err(de::Error::custom("commands can not be empty")) } else { Ok(Exec { commands: v, exec_type: default_exec_type(), timeout: default_timeout(), }) } } fn visit_map<M>(self, visitor: M) -> Result<Self::Value, M::Error> where M: de::MapAccess<'de>, { de::Deserialize::deserialize(de::value::MapAccessDeserializer::new(visitor)) } } deserializer.deserialize_any(Phantom::<Exec>(PhantomData)) } fn deserialize_opt_exec<'de, D>(deserializer: D) -> Result<Option<Exec>, D::Error> where D: Deserializer<'de>, { deserialize_exec(deserializer).map(Some) } fn deserialize_layout<'de, D>(deserializer: D) -> Result<Layout, D::Error> where D: Deserializer<'de>, { impl<'de> de::Visitor<'de> for Phantom<Layout> { type Value = Layout; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("string") } fn visit_str<E>(self, value: &str) -> Result<Self::Value, E> where E: de::Error, { if value.find('{').is_some() { Ok(Layout::Contents(value.into())) } else if ManagedLayout::open(value).is_ok() { Ok(Layout::Managed(value.to_owned())) } else { Ok(Layout::Path(tilde(value).into_owned())) } } } deserializer.deserialize_any(Phantom::<Layout>(PhantomData)) } fn deserialize_pathbuf_with_tilde<'de, D>(deserializer: D) -> Result<PathBuf, D::Error> where D: Deserializer<'de>, { let pathbuf: PathBuf = de::Deserialize::deserialize(deserializer)?; Ok(tilde(&pathbuf).into_owned()) } fn deserialize_opt_pathbuf_with_tilde<'de, D>(deserializer: D) -> Result<Option<PathBuf>, D::Error> where D: Deserializer<'de>, { deserialize_pathbuf_with_tilde(deserializer).map(Some) } #[doc(hidden)] fn tilde_with_context<SI: ?Sized, P, HD>(input: &SI, home_dir: HD) -> Cow<Path> where SI: AsRef<Path>, P: AsRef<Path>, HD: FnOnce() -> Option<P>, { let input_str = input.as_ref(); let bytes = input_str.as_os_str().as_bytes(); if bytes[0] == b'~' { let input_after_tilde = &bytes[1..]; if input_after_tilde.is_empty() || input_after_tilde[0] == b'/' { if let Some(hd) = home_dir() { let mut s = OsString::new(); s.push(hd.as_ref().to_path_buf()); s.push(OsStr::from_bytes(input_after_tilde)); PathBuf::from(s).into() } else { input_str.into() } } else { input_str.into() } } else { input_str.into() } } fn tilde<SI: ?Sized>(input: &SI) -> Cow<Path> where SI: AsRef<Path>, { tilde_with_context(input, dirs_next::home_dir) }
fn deserialize_duration<'de, D>(deserializer: D) -> Result<Duration, D::Error> where D: Deserializer<'de>, { impl<'de> de::Visitor<'de> for Phantom<Duration> { type Value = Duration; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.write_str("integer or map") } fn visit_i64<E>(self, value: i64) -> Result<Self::Value, E> where E: de::Error, { Ok(Duration::from_secs(value as u64)) } fn visit_map<M>(self, visitor: M) -> Result<Self::Value, M::Error> where M: de::MapAccess<'de>, { de::Deserialize::deserialize(de::value::MapAccessDeserializer::new(visitor)) } } deserializer.deserialize_any(Phantom::<Duration>(PhantomData)) }
function_block-full_function
[ { "content": "fn exec_text(base_parameters: &[&str], text: &str, timeout: Duration) -> Result<()> {\n\n let args = &[base_parameters, &[\"type\", \"--window\", \"%1\", text]].concat();\n\n let mut child = Command::new(\"xdotool\")\n\n .args(args)\n\n .stdin(Stdio::null())\n\n .stdout(Stdio::null())\n\n .stderr(Stdio::null())\n\n .spawn()?;\n\n\n\n // Return of `wait_timeout` is `None` if the process didn't exit.\n\n if child.wait_timeout(timeout)?.is_none() {\n\n // Kill the xdotool process, return error\n\n child.kill()?;\n\n child.wait()?;\n\n Err(ErrorKind::TextOrKeyInputFailed.into())\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/projects.rs", "rank": 0, "score": 187132.69678933747 }, { "content": "#[test]\n\n#[should_panic(expected = \"command can not be empty\")]\n\nfn application_command_empty_str() {\n\n toml::from_str::<Application>(r#\"command = \"\"\"#).unwrap();\n\n}\n\n\n", "file_path": "tests/types.rs", "rank": 2, "score": 151829.97971815788 }, { "content": "#[test]\n\nfn application_command_map_no_args() {\n\n let expected = Application {\n\n command: ApplicationCommand {\n\n program: \"mycommand\".to_owned(),\n\n args: vec![],\n\n },\n\n working_directory: None,\n\n exec: None,\n\n };\n\n\n\n equivalent! {\n\n r#\"command = { program = \"mycommand\" }\"#,\n\n expected;\n\n Application\n\n }\n\n}\n\n\n", "file_path": "tests/types.rs", "rank": 3, "score": 151688.02446085602 }, { "content": "#[test]\n\nfn application_command_str_no_args() {\n\n let expected = Application {\n\n command: ApplicationCommand {\n\n program: \"mycommand\".to_owned(),\n\n args: vec![],\n\n },\n\n working_directory: None,\n\n exec: None,\n\n };\n\n\n\n equivalent! {\n\n r#\"command = \"mycommand\"\"#,\n\n expected;\n\n Application\n\n }\n\n}\n\n\n", "file_path": "tests/types.rs", "rank": 4, "score": 151502.8955863033 }, { "content": "#[test]\n\nfn exec_commands_type_and_timeout() {\n\n let expected = Exec {\n\n commands: vec![\"command one\".to_owned(), \"command two\".to_owned()],\n\n exec_type: ExecType::TextNoReturn,\n\n timeout: Duration::from_secs(10),\n\n };\n\n\n\n equivalent! {\n\n r#\"\n\n commands = [\"command one\", \"command two\"]\n\n exec_type = \"text_no_return\"\n\n timeout = 10\n\n \"#,\n\n expected;\n\n Exec\n\n }\n\n}\n\n\n", "file_path": "tests/types.rs", "rank": 6, "score": 136552.10891353074 }, { "content": "pub fn split<'a>(in_str: &'a str) -> Option<Vec<&'a str>> {\n\n let shl = Shlex::new(in_str);\n\n let res: Vec<&'a str> = shl.collect();\n\n\n\n if res.is_empty() {\n\n None\n\n } else {\n\n Some(res)\n\n }\n\n}\n", "file_path": "src/shlex.rs", "rank": 7, "score": 136033.35597930825 }, { "content": "#[test]\n\nfn application_command_map() {\n\n let expected = Application {\n\n command: ApplicationCommand {\n\n program: \"mycommand\".to_owned(),\n\n args: vec![\"--with\".to_owned(), \"multiple args\".to_owned()],\n\n },\n\n working_directory: None,\n\n exec: None,\n\n };\n\n\n\n equivalent! {\n\n r#\"command = { program = \"mycommand\", args = [\"--with\", \"multiple args\"] }\"#,\n\n expected;\n\n Application\n\n }\n\n}\n\n\n", "file_path": "tests/types.rs", "rank": 8, "score": 134352.8038792602 }, { "content": "#[test]\n\nfn application_command_str() {\n\n let expected = Application {\n\n command: ApplicationCommand {\n\n program: \"mycommand\".to_owned(),\n\n args: vec![\"--with\".to_owned(), \"multiple args\".to_owned()],\n\n },\n\n working_directory: None,\n\n exec: None,\n\n };\n\n\n\n equivalent! 
{\n\n r#\"command = \"mycommand --with 'multiple args'\"\"#,\n\n expected;\n\n Application\n\n }\n\n}\n\n\n", "file_path": "tests/types.rs", "rank": 9, "score": 134157.81671512406 }, { "content": "#[test]\n\n#[should_panic(expected = \"command can not be empty\")]\n\nfn application_command_empty_seq() {\n\n toml::from_str::<Application>(r#\"command = []\"#).unwrap();\n\n}\n\n\n", "file_path": "tests/types.rs", "rank": 10, "score": 128248.0106644827 }, { "content": "#[test]\n\nfn application_command_seq_no_args() {\n\n let expected = Application {\n\n command: ApplicationCommand {\n\n program: \"mycommand\".to_owned(),\n\n args: vec![],\n\n },\n\n working_directory: None,\n\n exec: None,\n\n };\n\n\n\n equivalent! {\n\n r#\"command = [\"mycommand\"]\"#,\n\n expected;\n\n Application\n\n }\n\n}\n\n\n", "file_path": "tests/types.rs", "rank": 11, "score": 127920.92653262813 }, { "content": "fn command_layout(matches: &ArgMatches<'static>) -> Result<()> {\n\n match matches.subcommand() {\n\n (\"copy\", Some(sub_matches)) => command_copy::<Layout>(sub_matches),\n\n (\"delete\", Some(sub_matches)) => command_delete::<Layout>(sub_matches),\n\n (\"edit\", Some(sub_matches)) => command_edit::<Layout>(sub_matches),\n\n (\"info\", Some(sub_matches)) => command_info::<Layout>(sub_matches),\n\n (\"list\", Some(sub_matches)) => command_list::<Layout>(sub_matches),\n\n (\"new\", Some(sub_matches)) => layout_new(sub_matches),\n\n (\"rename\", Some(sub_matches)) => command_rename::<Layout>(sub_matches),\n\n (\"\", None) =>\n\n // No subcommand given. The clap `AppSettings` should be set to output the help by\n\n // default, so this is unreachable.\n\n {\n\n unreachable!()\n\n }\n\n _ =>\n\n // If all subcommands are defined above, this should be unreachable.\n\n {\n\n unreachable!()\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 14, "score": 118603.34846413639 }, { "content": "/// Get a list of all layout names.\n\n///\n\n/// This will check the current users XDG base directories for `i3nator` layout configurations,\n\n/// and return a list of their names for use with e.g. [`Layout::open`][fn-Layout-open].\n\n///\n\n/// [fn-Layout-open]: struct.Layout.html#method.open\n\npub fn list() -> Vec<OsString> {\n\n configfiles::list(&*LAYOUTS_PREFIX)\n\n}\n", "file_path": "src/layouts.rs", "rank": 15, "score": 118603.25356821113 }, { "content": "#[test]\n\nfn exec_commands_and_type() {\n\n let expected = Exec {\n\n commands: vec![\"command one\".to_owned(), \"command two\".to_owned()],\n\n exec_type: ExecType::TextNoReturn,\n\n timeout: Duration::from_secs(5),\n\n };\n\n\n\n equivalent! {\n\n r#\"\n\n commands = [\"command one\", \"command two\"]\n\n exec_type = \"text_no_return\"\n\n \"#,\n\n expected;\n\n Exec\n\n }\n\n}\n\n\n", "file_path": "tests/types.rs", "rank": 16, "score": 117538.15665159904 }, { "content": "#[test]\n\nfn duration_map() {\n\n equivalent! 
{\n\n r#\"commands = []\n\n timeout = { secs = 10, nanos = 42 }\"#,\n\n Exec {\n\n commands: vec![],\n\n exec_type: ExecType::Text,\n\n timeout: Duration::new(10, 42),\n\n };\n\n Exec\n\n }\n\n}\n\n\n", "file_path": "tests/types.rs", "rank": 18, "score": 115310.9333346904 }, { "content": "#[test]\n\n#[should_panic(expected = \"invalid type: string\")]\n\nfn duration_str() {\n\n toml::from_str::<Exec>(\n\n r#\"\n\n commands = []\n\n timeout = \"10\"\n\n \"#,\n\n )\n\n .unwrap();\n\n}\n\n\n", "file_path": "tests/types.rs", "rank": 19, "score": 115111.43323364717 }, { "content": "#[test]\n\nfn exec_str() {\n\n let expected = Application {\n\n command: ApplicationCommand {\n\n program: \"-\".to_owned(),\n\n args: vec![],\n\n },\n\n working_directory: None,\n\n exec: Some(Exec {\n\n commands: vec![\"command one\".to_owned()],\n\n exec_type: ExecType::Text,\n\n timeout: Duration::from_secs(5),\n\n }),\n\n };\n\n\n\n equivalent! {\n\n r#\"\n\n command = \"-\"\n\n exec = \"command one\"\n\n \"#,\n\n expected;\n\n Application\n\n }\n\n}\n\n\n", "file_path": "tests/types.rs", "rank": 20, "score": 114957.16903567675 }, { "content": "fn exec_keys<S: AsRef<OsStr>>(\n\n base_parameters: &[&str],\n\n keys: &[S],\n\n timeout: Duration,\n\n) -> Result<()> {\n\n let args = &[base_parameters, &[\"key\", \"--window\", \"%1\"]].concat();\n\n let mut child = Command::new(\"xdotool\")\n\n .args(args)\n\n .args(keys)\n\n .stdin(Stdio::null())\n\n .stdout(Stdio::null())\n\n .stderr(Stdio::null())\n\n .spawn()?;\n\n\n\n // Return of `wait_timeout` is `None` if the process didn't exit.\n\n if child.wait_timeout(timeout)?.is_none() {\n\n // Kill the xdotool process, return error\n\n child.kill()?;\n\n child.wait()?;\n\n Err(ErrorKind::TextOrKeyInputFailed.into())\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/projects.rs", "rank": 21, "score": 114805.41583642643 }, { "content": "#[test]\n\nfn exec_commands_only() {\n\n let expected = Exec {\n\n commands: vec![\"command one\".to_owned(), \"command two\".to_owned()],\n\n exec_type: ExecType::Text,\n\n timeout: Duration::from_secs(5),\n\n };\n\n\n\n equivalent! {\n\n r#\"commands = [\"command one\", \"command two\"]\"#,\n\n expected;\n\n Exec\n\n }\n\n}\n\n\n", "file_path": "tests/types.rs", "rank": 22, "score": 114651.2646340615 }, { "content": "struct Shlex<'a> {\n\n in_str: &'a str,\n\n in_bytes: Bytes<'a>,\n\n offset: usize,\n\n}\n\n\n\nimpl<'a> Shlex<'a> {\n\n pub fn new(in_str: &'a str) -> Shlex<'a> {\n\n Shlex {\n\n in_str,\n\n in_bytes: in_str.bytes(),\n\n offset: 0,\n\n }\n\n }\n\n\n\n fn next_word(&mut self) -> Result<Option<&'a str>> {\n\n let start_offset = self.offset;\n\n let mut ch = self.next_byte();\n\n\n\n if ch.is_none() {\n", "file_path": "src/shlex.rs", "rank": 23, "score": 112935.25612791939 }, { "content": "#[test]\n\nfn application_command_seq() {\n\n let expected = Application {\n\n command: ApplicationCommand {\n\n program: \"mycommand\".to_owned(),\n\n args: vec![\"--with\".to_owned(), \"multiple args\".to_owned()],\n\n },\n\n working_directory: None,\n\n exec: None,\n\n };\n\n\n\n equivalent! 
{\n\n r#\"command = [\"mycommand\", \"--with\", \"multiple args\"]\"#,\n\n expected;\n\n Application\n\n }\n\n}\n\n\n", "file_path": "tests/types.rs", "rank": 25, "score": 109320.08526112078 }, { "content": "fn exec_commands(child: &Child, exec: &Exec) -> Result<()> {\n\n let timeout = exec.timeout;\n\n let pid = child.id().to_string();\n\n let base_parameters = &[\n\n \"search\",\n\n \"--sync\",\n\n \"--onlyvisible\",\n\n \"--any\",\n\n \"--pid\",\n\n &pid,\n\n \"ignorepattern\",\n\n \"windowfocus\",\n\n \"--sync\",\n\n \"%1\",\n\n ];\n\n\n\n let commands = &exec.commands;\n\n match exec.exec_type {\n\n ExecType::Text => {\n\n for command in commands {\n", "file_path": "src/projects.rs", "rank": 26, "score": 107225.96222933839 }, { "content": "fn command_delete<C: ConfigFile>(matches: &ArgMatches<'static>) -> Result<()> {\n\n // `NAME`s should not be empty, clap ensures this.\n\n let configfiles = matches.values_of_os(\"NAME\").unwrap();\n\n\n\n for configfile_name in configfiles {\n\n C::open(configfile_name)?.delete()?;\n\n println!(\"Deleted configfile '{}'\", configfile_name.to_string_lossy());\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 27, "score": 105551.12264369977 }, { "content": "fn command_edit<C: ConfigFile>(matches: &ArgMatches<'static>) -> Result<()> {\n\n // `NAME` should not be empty, clap ensures this.\n\n let configfile_name = matches.value_of_os(\"NAME\").unwrap();\n\n let configfile = C::open(configfile_name)?;\n\n\n\n open_editor(&configfile)?;\n\n\n\n // Verify configfile contents\n\n if !matches.is_present(\"no-verify\") {\n\n verify_configfile(&configfile)?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 28, "score": 105551.12264369977 }, { "content": "fn command_copy<C: ConfigFile>(matches: &ArgMatches<'static>) -> Result<()> {\n\n // `EXISTING` and `NEW` should not be empty, clap ensures this.\n\n let existing_configfile_name = matches.value_of_os(\"EXISTING\").unwrap();\n\n let new_configfile_name = matches.value_of_os(\"NEW\").unwrap();\n\n\n\n let existing_configfile = C::open(existing_configfile_name)?;\n\n let new_configfile = existing_configfile.copy(new_configfile_name)?;\n\n\n\n println!(\n\n \"Copied existing configfile '{}' to new configfile '{}'\",\n\n existing_configfile.name(),\n\n new_configfile.name()\n\n );\n\n\n\n // Open config file for editing\n\n if !matches.is_present(\"no-edit\") {\n\n open_editor(&new_configfile)?;\n\n if !matches.is_present(\"no-verify\") {\n\n verify_configfile(&new_configfile)?;\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 29, "score": 105551.12264369977 }, { "content": "fn command_rename<C: ConfigFile>(matches: &ArgMatches<'static>) -> Result<()> {\n\n // `CURRENT` and `NEW` should not be empty, clap ensures this.\n\n let current_configfile_name = matches.value_of_os(\"CURRENT\").unwrap();\n\n let new_configfile_name = matches.value_of_os(\"NEW\").unwrap();\n\n\n\n let current_configfile = C::open(current_configfile_name)?;\n\n println!(\n\n \"Renaming configfile from '{}' to '{}'\",\n\n current_configfile_name.to_string_lossy(),\n\n new_configfile_name.to_string_lossy()\n\n );\n\n let new_configfile = current_configfile.rename(new_configfile_name)?;\n\n\n\n // Open editor for new configfile if desired\n\n if matches.is_present(\"edit\") {\n\n open_editor(&new_configfile)?;\n\n if !matches.is_present(\"no-verify\") {\n\n verify_configfile(&new_configfile)?;\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 30, 
"score": 105551.12264369977 }, { "content": "fn command_info<C: ConfigFile>(matches: &ArgMatches<'static>) -> Result<()> {\n\n // `NAME` should not be empty, clap ensures this.\n\n let configfile_name = matches.value_of_os(\"NAME\").unwrap();\n\n let configfile = C::open(configfile_name)?;\n\n\n\n println!(\"Name: {}\", configfile.name());\n\n println!(\n\n \"Configuration path: {}\",\n\n configfile.path().to_string_lossy()\n\n );\n\n println!(\n\n \"Configuration valid: {}\",\n\n if configfile.verify().is_ok() {\n\n \"yes\"\n\n } else {\n\n \"NO\"\n\n }\n\n );\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 31, "score": 105551.12264369977 }, { "content": "fn command_list<C: ConfigFile>(matches: &ArgMatches<'static>) -> Result<()> {\n\n let configfiles = C::list();\n\n let quiet = matches.is_present(\"quiet\");\n\n\n\n if configfiles.is_empty() {\n\n Err(ErrorKind::NoConfigExist.into())\n\n } else {\n\n if !quiet {\n\n println!(\"i3nator {}:\", C::prefix().to_string_lossy());\n\n }\n\n for configfile in configfiles {\n\n if quiet {\n\n println!(\"{}\", configfile.to_string_lossy());\n\n } else {\n\n println!(\" {}\", configfile.to_string_lossy());\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 32, "score": 105551.12264369977 }, { "content": "/// Get a list of all configfile names for a given prefix.\n\n///\n\n/// This will check the current users XDG base directories for configuration files, and return a\n\n/// list of their names for use with e.g. [`ConfigFile::open`][fn-ConfigFile-open].\n\n///\n\n/// [fn-ConfigFile-open]: struct.Layout.html#method.open\n\npub fn list<S: AsRef<OsStr> + ?Sized>(prefix: &S) -> Vec<OsString> {\n\n let mut files = XDG_DIRS.list_config_files_once(prefix.as_ref().to_string_lossy().into_owned());\n\n files.sort();\n\n files\n\n .iter()\n\n .map(|file| file.file_stem())\n\n .filter(Option::is_some)\n\n .map(Option::unwrap)\n\n .map(OsStr::to_os_string)\n\n .collect::<Vec<_>>()\n\n}\n", "file_path": "src/configfiles.rs", "rank": 33, "score": 100655.75460976621 }, { "content": "fn layout_new(matches: &ArgMatches<'static>) -> Result<()> {\n\n // `NAME` should not be empty, clap ensures this.\n\n let layout_name = matches.value_of_os(\"NAME\").unwrap();\n\n\n\n let layout = if !matches.is_present(\"template\") {\n\n Layout::create(layout_name)?\n\n } else {\n\n let template = matches.value_of_os(\"template\").unwrap();\n\n\n\n // Open appropriate reader\n\n let stdin_;\n\n let reader: Box<dyn Read> = if template == \"-\" {\n\n stdin_ = stdin();\n\n Box::new(stdin_.lock())\n\n } else {\n\n Box::new(File::open(template)?)\n\n };\n\n let mut reader = BufReader::new(reader);\n\n\n\n // Load bytes from reader\n", "file_path": "src/main.rs", "rank": 34, "score": 98365.88815665174 }, { "content": "/// Get a list of all project names.\n\n///\n\n/// This will check the current users XDG base directories for `i3nator` project configurations,\n\n/// and return a list of their names for use with e.g. [`Project::open`][fn-Project-open].\n\n///\n\n/// [fn-Project-open]: struct.Project.html#method.open\n\npub fn list() -> Vec<OsString> {\n\n configfiles::list(&*PROJECTS_PREFIX)\n\n}\n\n\n", "file_path": "src/projects.rs", "rank": 35, "score": 96177.86860443254 }, { "content": "#[test]\n\nfn config() {\n\n with_projects_dir(|projects_dir| {\n\n let template = r#\"[general]\n\n layout = \"{ ... 
}\"\n\n\n\n [[applications]]\n\n command = \"mycommand\"\"#;\n\n let mut project =\n\n Project::create_from_template(\"project-template\", template.as_bytes()).unwrap();\n\n\n\n assert_eq!(project.name, \"project-template\");\n\n assert_eq!(project.path, projects_dir.join(\"project-template.toml\"));\n\n assert!(project.path.exists());\n\n assert!(project.verify().is_ok());\n\n\n\n let expected = Config {\n\n general: General {\n\n working_directory: None,\n\n workspace: None,\n\n layout: Layout::Contents(\"{ ... }\".to_owned()),\n", "file_path": "tests/projects.rs", "rank": 38, "score": 93434.76895198529 }, { "content": "pub fn cli_layout() -> App<'static, 'static> {\n\n SubCommand::with_name(\"layout\")\n\n .about(\"Manage layouts which can used in projects\")\n\n .setting(AppSettings::SubcommandRequiredElseHelp)\n\n .subcommand(\n\n SubCommand::with_name(\"copy\")\n\n .about(\"Copy an existing layout to a new layout\")\n\n .arg(\n\n Arg::with_name(\"EXISTING\")\n\n .help(\"Name of the existing layout\")\n\n .required(true),\n\n )\n\n .arg(\n\n Arg::with_name(\"NEW\")\n\n .help(\"Name of the new, destination layout\")\n\n .required(true),\n\n )\n\n .arg(\n\n Arg::with_name(\"no-edit\")\n\n .help(\"Don't open the new layout for editing after copying\")\n", "file_path": "src/cli.rs", "rank": 39, "score": 90786.25485792384 }, { "content": "#[test]\n\nfn duration_secs() {\n\n equivalent! {\n\n r#\"commands = []\n\n timeout = 10\"#,\n\n Exec {\n\n commands: vec![],\n\n exec_type: ExecType::Text,\n\n timeout: Duration::from_secs(10),\n\n };\n\n Exec\n\n }\n\n}\n\n\n", "file_path": "tests/types.rs", "rank": 41, "score": 88870.22153137908 }, { "content": "#[test]\n\nfn exec_seq() {\n\n let expected = Application {\n\n command: ApplicationCommand {\n\n program: \"-\".to_owned(),\n\n args: vec![],\n\n },\n\n working_directory: None,\n\n exec: Some(Exec {\n\n commands: vec![\"command one\".to_owned(), \"command two\".to_owned()],\n\n exec_type: ExecType::Text,\n\n timeout: Duration::from_secs(5),\n\n }),\n\n };\n\n\n\n equivalent! 
{\n\n r#\"\n\n command = \"-\"\n\n exec = [\"command one\", \"command two\"]\n\n \"#,\n\n expected;\n\n Application\n\n }\n\n}\n", "file_path": "tests/types.rs", "rank": 42, "score": 88722.41166707601 }, { "content": "#[test]\n\nfn full_config() {\n\n let expected = Config {\n\n general: General {\n\n working_directory: Some(\"/path/to/my/working/directory\".to_owned().into()),\n\n workspace: Some(\"0\".to_owned()),\n\n layout: Layout::Path(\"/path/to/my/layout.json\".into()),\n\n },\n\n applications: vec![Application {\n\n command: ApplicationCommand {\n\n program: \"mycommand\".to_owned(),\n\n args: vec![\"--with\".to_owned(), \"multiple args\".to_owned()],\n\n },\n\n working_directory: Some(\"/path/to/a/different/working/directory\".to_owned().into()),\n\n exec: Some(Exec {\n\n commands: vec![\"command one\".to_owned(), \"command two\".to_owned()],\n\n exec_type: ExecType::TextNoReturn,\n\n timeout: Duration::from_secs(5),\n\n }),\n\n }],\n\n };\n", "file_path": "tests/types.rs", "rank": 43, "score": 88662.27525194477 }, { "content": "fn config_path<S: AsRef<OsStr> + ?Sized>(prefix: &S, name: &S) -> PathBuf {\n\n let mut path = OsString::new();\n\n path.push(prefix);\n\n path.push(\"/\");\n\n path.push(name);\n\n path.push(\".toml\");\n\n\n\n path.into()\n\n}\n\n\n", "file_path": "src/configfiles.rs", "rank": 44, "score": 82101.22112141765 }, { "content": "fn project_verify(matches: &ArgMatches<'static>) -> Result<()> {\n\n // `NAME`s can be empty, if so, use the entire configfile list\n\n let configfiles: Vec<OsString> = matches\n\n .values_of_os(\"NAME\")\n\n .map(|v| v.map(OsStr::to_os_string).collect::<Vec<_>>())\n\n .unwrap_or_else(Project::list);\n\n\n\n for configfile_name in configfiles {\n\n if let Err(e) = Project::open(&configfile_name)?.verify() {\n\n println!(\n\n \"Configuration INVALID: '{}'\",\n\n configfile_name.to_string_lossy()\n\n );\n\n println!(\"Error:\");\n\n println!(\" {}\", e);\n\n println!();\n\n } else {\n\n println!(\n\n \"Configuration VALID: '{}'\",\n\n configfile_name.to_string_lossy()\n\n );\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 45, "score": 77912.95300226765 }, { "content": "fn project_local(matches: &ArgMatches<'static>) -> Result<()> {\n\n // `FILE` should not be empty, clap ensures this.\n\n let project_path = matches.value_of_os(\"file\").unwrap();\n\n let mut project = Project::from_path(project_path)?;\n\n let mut i3 = I3Connection::connect()?;\n\n\n\n println!(\"Starting project '{}'\", project.name);\n\n project.start(\n\n &mut i3,\n\n matches.value_of_os(\"working-directory\"),\n\n matches.value_of(\"workspace\"),\n\n )?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 46, "score": 77912.95300226765 }, { "content": "fn project_start(matches: &ArgMatches<'static>) -> Result<()> {\n\n // `NAME` should not be empty, clap ensures this.\n\n let project_name = matches.value_of_os(\"NAME\").unwrap();\n\n let mut project = Project::open(project_name)?;\n\n let mut i3 = I3Connection::connect()?;\n\n\n\n println!(\"Starting project '{}'\", project.name);\n\n project.start(\n\n &mut i3,\n\n matches.value_of_os(\"working-directory\"),\n\n matches.value_of(\"workspace\"),\n\n )?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 47, "score": 77912.95300226765 }, { "content": "fn project_new(matches: &ArgMatches<'static>) -> Result<()> {\n\n // `NAME` should not be empty, clap ensures this.\n\n let project_name = matches.value_of_os(\"NAME\").unwrap();\n\n let project = 
Project::create_from_template(project_name, PROJECT_TEMPLATE)?;\n\n println!(\"Created project '{}'\", project.name);\n\n\n\n // Open config file for editing\n\n if !matches.is_present(\"no-edit\") {\n\n open_editor(&project)?;\n\n if !matches.is_present(\"no-verify\") {\n\n verify_configfile(&project)?;\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 48, "score": 77912.95300226765 }, { "content": "pub fn cli() -> App<'static, 'static> {\n\n let working_directory = Arg::with_name(\"working-directory\")\n\n .help(\"Directory used as context for starting the applications\")\n\n .long_help(\n\n \"Directory used as context for starting the applications. This overrides any specified \\\n\n working-directory in the projects configuration.\",\n\n )\n\n .short(\"d\")\n\n .long(\"working-directory\")\n\n .takes_value(true)\n\n .value_name(\"PATH\")\n\n .required(false);\n\n let workspace = Arg::with_name(\"workspace\")\n\n .help(\"Workspace to apply the layout to\")\n\n .long_help(\n\n \"Workspace to apply the layout to. This overrides the specified workspace in the \\\n\n projects configuration.\",\n\n )\n\n .short(\"w\")\n\n .long(\"workspace\")\n", "file_path": "src/cli.rs", "rank": 49, "score": 71371.3090097834 }, { "content": "/// Helping type to consolidate common functionality between projects and layouts.\n\npub trait ConfigFile: Sized {\n\n /// Create a copy of the current configfile, that is a copy of the configuration file on disk,\n\n /// with a name of `new_name`.\n\n ///\n\n /// This will keep the same prefix.\n\n ///\n\n /// # Parameters\n\n ///\n\n /// - `new_name`: A `OsStr` that is the name of the destination configfile.\n\n ///\n\n /// # Returns\n\n ///\n\n /// A `Result` which is:\n\n ///\n\n /// - `Ok`: an instance of `ConfigFile` for the new configfile.\n\n /// - `Err`: an error, e.g. 
if a configfile with `new_name` already exists or copying the file\n\n /// failed.\n\n fn copy<S: AsRef<OsStr> + ?Sized>(&self, new_name: &S) -> Result<Self>;\n\n\n\n /// Create a configfile given a `name`.\n", "file_path": "src/configfiles.rs", "rank": 50, "score": 69048.2292594042 }, { "content": "#[test]\n\nfn empty_list() {\n\n with_projects_dir(|_| {\n\n assert!(projects::list().is_empty());\n\n })\n\n}\n\n\n", "file_path": "tests/projects.rs", "rank": 52, "score": 63921.268789208996 }, { "content": "#[test]\n\nfn config_invalid() {\n\n with_projects_dir(|projects_dir| {\n\n let template = r#\"invalid template\"#;\n\n let mut project =\n\n Project::create_from_template(\"project-template\", template.as_bytes()).unwrap();\n\n\n\n assert_eq!(project.name, \"project-template\");\n\n assert_eq!(project.path, projects_dir.join(\"project-template.toml\"));\n\n assert!(project.path.exists());\n\n assert!(project.verify().is_err());\n\n assert!(project.config().is_err());\n\n })\n\n}\n\n\n", "file_path": "tests/projects.rs", "rank": 53, "score": 63625.346500389045 }, { "content": "fn get_editor() -> Result<OsString> {\n\n env::var_os(\"VISUAL\")\n\n .or_else(|| env::var_os(\"EDITOR\"))\n\n .and_then(|s| if !s.is_empty() { Some(s) } else { None })\n\n .ok_or_else(|| ErrorKind::EditorNotFound.into())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 54, "score": 54340.80319651863 }, { "content": "fn verify_configfile<C: ConfigFile>(configfile: &C) -> Result<()> {\n\n while let Err(e) = configfile.verify() {\n\n println!();\n\n println!(\"VERIFICATION FAILED!\");\n\n println!(\"Error:\");\n\n println!(\" {}\", e);\n\n println!();\n\n\n\n let mut ch: Option<char>;\n\n while {\n\n println!(\"What do you want to do?\");\n\n println!(\"(R)eopen editor, (A)ccept anyway\");\n\n\n\n ch = GETCH\n\n .getch()\n\n .ok()\n\n .map(|byte| byte.to_ascii_lowercase())\n\n .map(|byte| byte as char);\n\n\n\n if ch.is_none() {\n", "file_path": "src/main.rs", "rank": 56, "score": 46437.588380167246 }, { "content": "fn open_editor<C: ConfigFile>(configfile: &C) -> Result<ExitStatus> {\n\n println!(\"Opening your editor to edit '{}'\", configfile.name());\n\n Command::new(get_editor()?)\n\n .arg(configfile.path().as_os_str())\n\n .status()\n\n .map_err(|e| e.into())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 57, "score": 42964.8880582514 }, { "content": "fn main() {\n\n let outdir = match env::var_os(\"OUT_DIR\") {\n\n None => return,\n\n Some(outdir) => outdir,\n\n };\n\n\n\n let mut app = cli();\n\n app.gen_completions(crate_name!(), Shell::Bash, outdir.clone());\n\n app.gen_completions(crate_name!(), Shell::Zsh, outdir.clone());\n\n app.gen_completions(crate_name!(), Shell::Fish, outdir);\n\n}\n", "file_path": "build.rs", "rank": 58, "score": 40522.78367058969 }, { "content": "#[test]\n\nfn copy() {\n\n with_projects_dir(|projects_dir| {\n\n let project = Project::create(\"project-existing\").unwrap();\n\n assert_eq!(project.name, \"project-existing\");\n\n assert_eq!(project.path, projects_dir.join(\"project-existing.toml\"));\n\n assert!(project.verify().is_err());\n\n\n\n // Create project file\n\n File::create(&project.path).expect(\"couldn't create project file\");\n\n\n\n let project_new = project.copy(\"project-new\").unwrap();\n\n assert_eq!(project_new.name, \"project-new\");\n\n assert_eq!(project_new.path, projects_dir.join(\"project-new.toml\"));\n\n assert!(project.verify().is_err());\n\n })\n\n}\n\n\n", "file_path": "tests/projects.rs", "rank": 59, "score": 38911.014577514456 }, { "content": 
"#[test]\n\nfn from_path() {\n\n let tempfile = NamedTempFile::new().expect(\"couldn't create temporary file\");\n\n let project = Project::from_path(tempfile.path()).unwrap();\n\n assert_eq!(project.name, \"local\");\n\n assert_eq!(project.path, tempfile.path());\n\n assert!(project.verify().is_err());\n\n}\n\n\n", "file_path": "tests/projects.rs", "rank": 60, "score": 38911.014577514456 }, { "content": "#[test]\n\nfn create() {\n\n with_projects_dir(|projects_dir| {\n\n let project = Project::create(\"project-one\").unwrap();\n\n assert_eq!(project.name, \"project-one\");\n\n assert_eq!(project.path, projects_dir.join(\"project-one.toml\"));\n\n assert!(project.verify().is_err());\n\n\n\n // File does not get created by default, list should still be empty\n\n assert!(projects::list().is_empty());\n\n })\n\n}\n\n\n", "file_path": "tests/projects.rs", "rank": 61, "score": 38911.014577514456 }, { "content": "#[test]\n\nfn rename() {\n\n with_projects_dir(|projects_dir| {\n\n let project = Project::create(\"project-rename-old\").unwrap();\n\n assert_eq!(project.name, \"project-rename-old\");\n\n assert_eq!(project.path, projects_dir.join(\"project-rename-old.toml\"));\n\n assert!(project.verify().is_err());\n\n\n\n // Create project file\n\n File::create(&project.path).expect(\"couldn't create project file\");\n\n\n\n let project_new = project.rename(\"project-rename-new\").unwrap();\n\n assert_eq!(project_new.name, \"project-rename-new\");\n\n assert_eq!(\n\n project_new.path,\n\n projects_dir.join(\"project-rename-new.toml\")\n\n );\n\n assert!(project_new.verify().is_err());\n\n\n\n assert!(!project.path.exists());\n\n assert!(project_new.path.exists());\n\n })\n\n}\n", "file_path": "tests/projects.rs", "rank": 62, "score": 38911.014577514456 }, { "content": "#[test]\n\nfn open() {\n\n with_projects_dir(|projects_dir| {\n\n let project = Project::create(\"project-open\").unwrap();\n\n assert_eq!(project.name, \"project-open\");\n\n assert_eq!(project.path, projects_dir.join(\"project-open.toml\"));\n\n assert!(project.verify().is_err());\n\n\n\n // Create project file\n\n File::create(&project.path).expect(\"couldn't create project file\");\n\n\n\n // Open project\n\n let project_open = Project::open(\"project-open\").unwrap();\n\n assert_eq!(project_open, project);\n\n })\n\n}\n\n\n", "file_path": "tests/projects.rs", "rank": 63, "score": 38911.014577514456 }, { "content": "#[test]\n\nfn delete() {\n\n with_projects_dir(|projects_dir| {\n\n let project = Project::create(\"project-delete\").unwrap();\n\n assert_eq!(project.name, \"project-delete\");\n\n assert_eq!(project.path, projects_dir.join(\"project-delete.toml\"));\n\n assert!(project.verify().is_err());\n\n\n\n // Create project file\n\n File::create(&project.path).expect(\"couldn't create project file\");\n\n\n\n assert!(project.delete().is_ok());\n\n assert!(!project.path.exists())\n\n })\n\n}\n\n\n", "file_path": "tests/projects.rs", "rank": 64, "score": 38911.014577514456 }, { "content": "#[test]\n\n#[should_panic(expected = \"ConfigExists\")]\n\nfn create_exists() {\n\n with_projects_dir(|projects_dir| {\n\n let project = Project::create(\"project-one\").unwrap();\n\n assert_eq!(project.name, \"project-one\");\n\n assert_eq!(project.path, projects_dir.join(\"project-one.toml\"));\n\n assert!(project.verify().is_err());\n\n\n\n // Create project file\n\n File::create(&project.path).expect(\"couldn't create project file\");\n\n\n\n // File created, list should contain it\n\n assert_eq!(projects::list(), 
vec![OsString::from(\"project-one\")]);\n\n\n\n // Create project with same name, this should fail\n\n Project::create(\"project-one\").unwrap();\n\n })\n\n}\n\n\n", "file_path": "tests/projects.rs", "rank": 65, "score": 37483.67518102619 }, { "content": "#[test]\n\n#[should_panic(expected = \"PathDoesntExist\")]\n\nfn from_path_not_exists() {\n\n Project::from_path(\"/this/path/does/not/exist\").unwrap();\n\n}\n\n\n", "file_path": "tests/projects.rs", "rank": 66, "score": 37480.55698589767 }, { "content": "#[test]\n\nfn create_from_template() {\n\n with_projects_dir(|projects_dir| {\n\n let template = \"this is my template\";\n\n let project =\n\n Project::create_from_template(\"project-template\", template.as_bytes()).unwrap();\n\n\n\n assert_eq!(project.name, \"project-template\");\n\n assert_eq!(project.path, projects_dir.join(\"project-template.toml\"));\n\n assert!(project.path.exists());\n\n assert!(project.verify().is_err());\n\n\n\n let mut file = File::open(project.path).unwrap();\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents).unwrap();\n\n\n\n assert_eq!(contents, template);\n\n })\n\n}\n\n\n", "file_path": "tests/projects.rs", "rank": 67, "score": 37480.55698589767 }, { "content": "fn run() -> Result<()> {\n\n let matches = cli::cli().get_matches();\n\n\n\n match matches.subcommand() {\n\n (\"copy\", Some(sub_matches)) => command_copy::<Project>(sub_matches),\n\n (\"delete\", Some(sub_matches)) => command_delete::<Project>(sub_matches),\n\n (\"edit\", Some(sub_matches)) => command_edit::<Project>(sub_matches),\n\n (\"info\", Some(sub_matches)) => command_info::<Project>(sub_matches),\n\n (\"layout\", Some(sub_matches)) => command_layout(sub_matches),\n\n (\"list\", Some(sub_matches)) => command_list::<Project>(sub_matches),\n\n (\"local\", Some(sub_matches)) => project_local(sub_matches),\n\n (\"new\", Some(sub_matches)) => project_new(sub_matches),\n\n (\"rename\", Some(sub_matches)) => command_rename::<Project>(sub_matches),\n\n (\"start\", Some(sub_matches)) => project_start(sub_matches),\n\n (\"verify\", Some(sub_matches)) => project_verify(sub_matches),\n\n (\"\", None) =>\n\n // No subcommand given. 
The clap `AppSettings` should be set to output the help by\n\n // default, so this is unreachable.\n\n {\n\n unreachable!()\n", "file_path": "src/main.rs", "rank": 69, "score": 36239.65335779472 }, { "content": "#[test]\n\n#[should_panic(expected = \"UnknownConfig\")]\n\nfn open_unknown_project() {\n\n with_projects_dir(|_| {\n\n Project::open(\"unknown-project\").unwrap();\n\n })\n\n}\n\n\n", "file_path": "tests/projects.rs", "rank": 70, "score": 36205.564058419484 }, { "content": "#[test]\n\n#[should_panic(expected = \"No such file or directory\")]\n\nfn delete_without_file() {\n\n with_projects_dir(|projects_dir| {\n\n let project = Project::create(\"project-delete\").unwrap();\n\n assert_eq!(project.name, \"project-delete\");\n\n assert_eq!(project.path, projects_dir.join(\"project-delete.toml\"));\n\n assert!(project.verify().is_err());\n\n\n\n project.delete().unwrap();\n\n })\n\n}\n\n\n", "file_path": "tests/projects.rs", "rank": 71, "score": 36202.44586329096 }, { "content": "#[test]\n\n#[should_panic(expected = \"No such file or directory\")]\n\nfn copy_without_file() {\n\n with_projects_dir(|projects_dir| {\n\n let project = Project::create(\"project-existing\").unwrap();\n\n assert_eq!(project.name, \"project-existing\");\n\n assert_eq!(project.path, projects_dir.join(\"project-existing.toml\"));\n\n assert!(project.verify().is_err());\n\n\n\n project.copy(\"project-new\").unwrap();\n\n })\n\n}\n\n\n", "file_path": "tests/projects.rs", "rank": 72, "score": 36202.44586329096 }, { "content": "fn with_projects_dir<F: FnOnce(&Path) -> ()>(body: F)\n\nwhere\n\n F: UnwindSafe,\n\n{\n\n // Create the temporary directories if they do not exist\n\n if !PROJECTS_DIR.exists() {\n\n fs::create_dir_all(&*PROJECTS_DIR).expect(\"couldn't create temporary directories\");\n\n }\n\n\n\n // Set up temporary XDG config directory\n\n env::set_var(\"XDG_CONFIG_HOME\", TMP_DIR.path());\n\n\n\n // Run body\n\n let panic_result = panic::catch_unwind(|| body(PROJECTS_DIR.as_ref()));\n\n\n\n // Remove the temporary directories\n\n fs::remove_dir_all(&*TMP_DIR).expect(\"couldn't delete temporary directories\");\n\n\n\n if let Err(err) = panic_result {\n\n panic::resume_unwind(err);\n\n }\n\n}\n\n\n", "file_path": "tests/projects.rs", "rank": 73, "score": 35748.82175101237 }, { "content": " if let '\\'' = ch as char {\n\n return Ok(());\n\n }\n\n } else {\n\n return Err(\"\".into());\n\n }\n\n }\n\n }\n\n\n\n fn next_byte(&mut self) -> Option<u8> {\n\n self.offset += 1;\n\n self.in_bytes.next()\n\n }\n\n}\n\n\n\nimpl<'a> Iterator for Shlex<'a> {\n\n type Item = &'a str;\n\n\n\n fn next(&mut self) -> Option<&'a str> {\n\n match self.next_word().ok() {\n\n None | Some(None) => None,\n\n Some(o) => o,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/shlex.rs", "rank": 74, "score": 29707.058876856125 }, { "content": " str::from_utf8(&self.in_str.as_bytes()[start_offset..self.offset - 1])\n\n .map(|s| Some(s.trim_matches(|c| c == '\\'' || c == '\"')))\n\n .map_err(|e| e.into())\n\n }\n\n\n\n fn parse_double(&mut self) -> Result<()> {\n\n loop {\n\n if let Some(ch) = self.next_byte() {\n\n if let '\"' = ch as char {\n\n return Ok(());\n\n }\n\n } else {\n\n return Err(\"\".into());\n\n }\n\n }\n\n }\n\n\n\n fn parse_single(&mut self) -> Result<()> {\n\n loop {\n\n if let Some(ch) = self.next_byte() {\n", "file_path": "src/shlex.rs", "rank": 75, "score": 29703.863372530283 }, { "content": " return Ok(None);\n\n }\n\n\n\n loop {\n\n if ch.is_some() {\n\n let result = match ch.unwrap() as char {\n\n '\"' => 
self.parse_double(),\n\n '\\'' => self.parse_single(),\n\n ' ' | '\\t' | '\\n' => break,\n\n _ => Ok(()),\n\n };\n\n if result.is_err() {\n\n return result.map(|_| None);\n\n }\n\n ch = self.next_byte();\n\n } else {\n\n break;\n\n }\n\n }\n\n\n", "file_path": "src/shlex.rs", "rank": 76, "score": 29700.975895526346 }, { "content": "// Copyright Pit Kleyersburg <[email protected]>\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your\n\n// option. This file may not be copied, modified or distributed\n\n// except according to those terms.\n\n\n\nuse crate::errors::*;\n\nuse std::str::{self, Bytes};\n\n\n\n// Implementation based in parts on:\n\n// https://github.com/comex/rust-shlex/blob/95ef6961a2500d89bc065b2873ca3e77850539e3/src/lib.rs\n\n//\n\n// which is dual-licensed under MIT and Apache-2.0:\n\n// https://github.com/comex/rust-shlex/blob/95ef6961a2500d89bc065b2873ca3e77850539e3/Cargo.toml#L5\n\n\n", "file_path": "src/shlex.rs", "rank": 77, "score": 29699.896738793515 }, { "content": "\n\nlazy_static! {\n\n static ref LAYOUTS_PREFIX: OsString = OsString::from(\"layouts\");\n\n}\n\n\n\n/// A structure representing a managed i3-layout.\n\n#[derive(Debug, Clone, PartialEq, Eq)]\n\npub struct Layout {\n\n configfile: ConfigFileImpl,\n\n\n\n /// The name of the layout.\n\n ///\n\n /// As represented by the stem of the filename on disk.\n\n pub name: String,\n\n\n\n /// The path to the layout configuration.\n\n pub path: PathBuf,\n\n}\n\n\n\nimpl Deref for Layout {\n", "file_path": "src/layouts.rs", "rank": 78, "score": 29543.944786418906 }, { "content": "// Copyright Pit Kleyersburg <[email protected]>\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your\n\n// option. This file may not be copied, modified or distributed\n\n// except according to those terms.\n\n\n\n//! Module for layout handling.\n\n\n\nuse crate::{\n\n configfiles::{self, ConfigFile, ConfigFileImpl},\n\n errors::*,\n\n};\n\nuse lazy_static::lazy_static;\n\nuse std::{\n\n ffi::{OsStr, OsString},\n\n ops::Deref,\n\n path::{Path, PathBuf},\n\n};\n", "file_path": "src/layouts.rs", "rank": 79, "score": 29536.76376292129 }, { "content": " fn verify(&self) -> Result<()> {\n\n Ok(())\n\n }\n\n\n\n fn list() -> Vec<OsString> {\n\n configfiles::list(&*LAYOUTS_PREFIX)\n\n }\n\n\n\n fn name(&self) -> String {\n\n self.name.to_owned()\n\n }\n\n\n\n fn path(&self) -> PathBuf {\n\n self.path.to_owned()\n\n }\n\n\n\n fn prefix() -> &'static OsStr {\n\n &*LAYOUTS_PREFIX\n\n }\n\n}\n\n\n\n/// Get a list of all layout names.\n\n///\n\n/// This will check the current users XDG base directories for `i3nator` layout configurations,\n\n/// and return a list of their names for use with e.g. 
[`Layout::open`][fn-Layout-open].\n\n///\n\n/// [fn-Layout-open]: struct.Layout.html#method.open\n", "file_path": "src/layouts.rs", "rank": 80, "score": 29535.69221696675 }, { "content": "impl ConfigFile for Layout {\n\n fn create<S: AsRef<OsStr> + ?Sized>(name: &S) -> Result<Self> {\n\n let configfile = ConfigFileImpl::create(LAYOUTS_PREFIX.as_os_str(), name.as_ref())?;\n\n Ok(Layout::from_configfile(configfile))\n\n }\n\n\n\n fn create_from_template<S: AsRef<OsStr> + ?Sized>(name: &S, template: &[u8]) -> Result<Self> {\n\n let configfile = ConfigFileImpl::create_from_template(\n\n LAYOUTS_PREFIX.as_os_str(),\n\n name.as_ref(),\n\n template,\n\n )?;\n\n Ok(Layout::from_configfile(configfile))\n\n }\n\n\n\n fn from_path<P: AsRef<Path> + ?Sized>(path: &P) -> Result<Self> {\n\n let configfile = ConfigFileImpl::from_path(path)?;\n\n Ok(Layout::from_configfile(configfile))\n\n }\n\n\n", "file_path": "src/layouts.rs", "rank": 81, "score": 29535.092220751314 }, { "content": " type Target = ConfigFileImpl;\n\n\n\n fn deref(&self) -> &ConfigFileImpl {\n\n &self.configfile\n\n }\n\n}\n\n\n\nimpl Layout {\n\n fn from_configfile(configfile: ConfigFileImpl) -> Self {\n\n let name = configfile.name.to_owned();\n\n let path = configfile.path.clone();\n\n\n\n Layout {\n\n configfile,\n\n name,\n\n path,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/layouts.rs", "rank": 82, "score": 29534.7826704212 }, { "content": " fn open<S: AsRef<OsStr> + ?Sized>(name: &S) -> Result<Self> {\n\n let configfile = ConfigFileImpl::open(LAYOUTS_PREFIX.as_os_str(), name.as_ref())?;\n\n Ok(Layout::from_configfile(configfile))\n\n }\n\n\n\n fn copy<S: AsRef<OsStr> + ?Sized>(&self, new_name: &S) -> Result<Self> {\n\n let configfile = self.configfile.copy(new_name)?;\n\n Ok(Layout::from_configfile(configfile))\n\n }\n\n\n\n fn delete(&self) -> Result<()> {\n\n self.configfile.delete()?;\n\n Ok(())\n\n }\n\n\n\n fn rename<S: AsRef<OsStr> + ?Sized>(&self, new_name: &S) -> Result<Self> {\n\n let configfile = self.configfile.rename(new_name)?;\n\n Ok(Layout::from_configfile(configfile))\n\n }\n\n\n", "file_path": "src/layouts.rs", "rank": 83, "score": 29533.480046056593 }, { "content": "\n\n equivalent! {\n\n r#\"\n\n [general]\n\n working_directory = \"/path/to/my/working/directory\"\n\n workspace = \"0\"\n\n layout = \"/path/to/my/layout.json\"\n\n\n\n [[applications]]\n\n command = \"mycommand --with 'multiple args'\"\n\n working_directory = \"/path/to/a/different/working/directory\"\n\n exec = { commands = [\"command one\", \"command two\"], exec_type = \"text_no_return\" }\n\n \"#,\n\n expected;\n\n Config\n\n }\n\n}\n\n\n", "file_path": "tests/types.rs", "rank": 94, "score": 28231.55012632362 }, { "content": "// Copyright Pit Kleyersburg <[email protected]>\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your\n\n// option. This file may not be copied, modified or distributed\n\n// except according to those terms.\n\n\n\nuse i3nator::types::*;\n\nuse std::time::Duration;\n\n\n\nmacro_rules! equivalent {\n\n ( $fragment:expr, $expected:expr; $ty:ty ) => {\n\n let actual: $ty = toml::from_str($fragment).unwrap();\n\n assert_eq!(actual, $expected);\n\n };\n\n}\n\n\n\n#[test]\n", "file_path": "tests/types.rs", "rank": 99, "score": 28218.59350235821 } ]
Rust
snapshot/src/logic.rs
AllSafeCybercurity/RClient
88aa5fe784621041b05038ae62139398a34b74bc
use sodiumoxide::crypto::{
    hash, pwhash,
    secretstream::{self, Header, Key, Pull, Push, Stream, Tag},
};
use std::{
    fs::File,
    io::{Read, Write},
};

const CHUNK_SIZE: usize = 4096;
const SIGN: [u8; 5] = [0x50, 0x41, 0x52, 0x54, 0x49];
const VERSION: [u8; 2] = [0x1, 0x0];

fn generate_salt() -> crate::Result<pwhash::Salt> {
    let salt = pwhash::gen_salt();
    let hash = hash::sha256::hash(&salt.0);
    let salt = pwhash::Salt::from_slice(hash.as_ref()).expect("Unable to rewrap salt");

    Ok(salt)
}

fn derive_key_from_password(password: &[u8], salt: &pwhash::Salt) -> crate::Result<Key> {
    let mut key = [0; secretstream::KEYBYTES];

    match pwhash::derive_key(
        &mut key,
        password,
        &salt,
        pwhash::OPSLIMIT_INTERACTIVE,
        pwhash::MEMLIMIT_INTERACTIVE,
    ) {
        Ok(_) => Ok(Key(key)),
        Err(_) => Err(crate::Error::SnapshotError("Could not derive key from password".into())),
    }
}

fn create_stream(&Key(ref key): &Key) -> crate::Result<(Stream<Push>, Header)> {
    let stream_key = secretstream::Key(key.to_owned());

    Stream::init_push(&stream_key).map_err(|_| crate::Error::SnapshotError("Unable to create stream".into()))
}

fn pull_stream(header: &[u8], &Key(ref key): &Key) -> crate::Result<Stream<Pull>> {
    let stream_key = secretstream::Key(key.to_owned());
    let header = Header::from_slice(header).expect("Invalid Header size");

    Stream::init_pull(&header, &stream_key).map_err(|_| crate::Error::SnapshotError("Unable to open stream".into()))
}

pub fn encrypt_snapshot(input: Vec<u8>, out: &mut File, password: &[u8]) -> crate::Result<()> {
    let mut slice = input.as_slice();
    let mut buf = [0; CHUNK_SIZE];
    let mut input_len = slice.len();

    out.write_all(&SIGN)?;
    out.write_all(&VERSION)?;

    let salt = generate_salt()?;
    out.write_all(&salt.0)?;

    let key = derive_key_from_password(password, &salt)?;
    let (mut stream, header) = create_stream(&key)?;
    out.write_all(&header.0)?;

    loop {
        match slice.read(&mut buf) {
            Ok(amount_read) if amount_read > 0 => {
                input_len -= amount_read as usize;
                let tag = match input_len {
                    0 => Tag::Final,
                    _ => Tag::Message,
                };
                out.write_all(
                    &stream
                        .push(&buf[..amount_read], None, tag)
                        .map_err(|_| crate::Error::SnapshotError("Failed to encrypt".into()))?,
                )?
            }
            Err(e) => return Err(crate::Error::from(e)),
            _ => break,
        }
    }

    Ok(())
}

pub fn decrypt_snapshot(input: &mut File, output: &mut Vec<u8>, password: &[u8]) -> crate::Result<()> {
    check_file_len(input)?;
    let salt = get_salt(input, true)?;
    decrypt_file(input, output, password, salt)?;

    Ok(())
}

pub fn update_snapshot(input: &mut File, output: &mut File, password: &[u8]) -> crate::Result<()> {
    let mut buffer: Vec<u8> = Vec::new();
    check_file_len(input)?;
    let salt = get_salt(input, false)?;

    decrypt_file(input, &mut buffer, password, salt)?;
    encrypt_snapshot(buffer, output, password)?;

    Ok(())
}

fn decrypt_file(input: &mut File, output: &mut Vec<u8>, password: &[u8], salt: pwhash::Salt) -> crate::Result<()> {
    let mut header = [0u8; secretstream::HEADERBYTES];
    input.read_exact(&mut header)?;

    let key = derive_key_from_password(&password, &salt)?;
    let mut buf = [0u8; CHUNK_SIZE + secretstream::ABYTES];
    let mut stream = pull_stream(&header, &key)?;

    while stream.is_not_finalized() {
        match input.read(&mut buf) {
            Ok(bytes_read) if bytes_read > 0 => {
                let (decrypt, _tag) = stream.pull(&buf[..bytes_read], None).map_err(|_| {
                    crate::Error::SnapshotError("Stream pull failed, could not decrypt snapshot".into())
                })?;
                output.extend(&decrypt);
            }
            Err(_) => return Err(crate::Error::SnapshotError("Incorrect Password".into())),
            _ => return Err(crate::Error::SnapshotError("Decryption failed... ".into())),
        }
    }

    Ok(())
}

fn check_file_len(input: &mut File) -> crate::Result<()> {
    if input.metadata()?.len() <= (pwhash::SALTBYTES + secretstream::HEADERBYTES + SIGN.len()) as u64 {
        return Err(crate::Error::SnapshotError("Snapshot is not valid or encrypted".into()));
    }

    Ok(())
}

fn get_salt(input: &mut File, chk_version: bool) -> crate::Result<pwhash::Salt> {
    let mut sign = [0u8; 5];
    let mut version = [0u8; 2];
    let mut salt = [0u8; pwhash::SALTBYTES];

    input.read_exact(&mut sign)?;
    input.read_exact(&mut version)?;

    if chk_version {
        check_version(&version)?;
    }

    if sign == SIGN {
        input.read_exact(&mut salt)?;
    } else {
        salt[..7].copy_from_slice(&sign);
        input.read_exact(&mut salt[7..])?;
    }

    let salt = pwhash::Salt(salt);

    Ok(salt)
}

fn check_version(version: &[u8]) -> crate::Result<()> {
    if version != VERSION {
        Err(crate::Error::SnapshotError("Snapshot version is incorrect".into()))
    } else {
        Ok(())
    }
}

#[cfg(test)]
mod test {
    use super::*;
    use sodiumoxide::crypto::secretstream::Tag;
    use std::fs::OpenOptions;

    #[test]
    fn test_key_derivation() {
        let salt = generate_salt().unwrap();
        let key_one = derive_key_from_password(b"some long password", &salt).unwrap();
        let key_two = derive_key_from_password(b"some long password", &salt).unwrap();

        assert_eq!(key_one, key_two);
    }

    #[test]
    fn test_stream() {
        let salt = generate_salt().unwrap();
        let key = derive_key_from_password(b"a password", &salt).unwrap();
        let data = b"data";

        let (mut push_stream, header) = create_stream(&key).unwrap();
        let mut pull_stream = pull_stream(&header.0, &key).unwrap();

        let cipher = push_stream.push(data, None, Tag::Final).unwrap();
        let (plain, _) = pull_stream.pull(&cipher, None).unwrap();

        assert_eq!(data, &plain.as_slice());
    }

    #[test]
    fn test_snapshot() {
        let password = b"some_password";
        let data = vec![
            69, 59, 116, 81, 23, 91, 2, 212, 10, 248, 108, 227, 167, 142, 2, 205, 202, 100, 216, 225, 53, 223, 223,
            14, 153, 239, 46, 106, 120, 103, 85, 144, 69, 59, 116, 81, 23, 91, 2, 212, 10, 248, 108, 227, 167, 142, 2,
            205, 202, 100, 216, 225, 53, 223, 223, 14, 153, 239, 46, 106, 120, 103, 85, 144, 69, 59, 116, 81, 23, 91,
            2, 212, 10, 248, 108, 227, 167, 142, 2, 205, 202, 100, 216, 225, 53, 223, 223, 14, 153, 239, 46, 106, 120,
            103, 85, 144,
        ];
        let expected = data.clone();

        let mut encrypt = OpenOptions::new()
            .write(true)
            .create(true)
            .open("test/snapshot.snapshot")
            .unwrap();
        let mut decrypt = OpenOptions::new().read(true).open("test/snapshot.snapshot").unwrap();
        let mut output: Vec<u8> = Vec::new();

        encrypt_snapshot(data, &mut encrypt, password).unwrap();
        decrypt_snapshot(&mut decrypt, &mut output, password).unwrap();

        assert_eq!(expected, output);
    }
}
use sodiumoxide::crypto::{ hash, pwhash, secretstream::{self, Header, Key, Pull, Push, Stream, Tag}, }; use std::{ fs::File, io::{Read, Write}, }; const CHUNK_SIZE: usize = 4096; const SIGN: [u8; 5] = [0x50, 0x41, 0x52, 0x54, 0x49]; const VERSION: [u8; 2] = [0x1, 0x0]; fn generate_salt() -> crate::Result<pwhash::Salt> { let salt = pwhash::gen_salt(); let hash = hash::sha256::hash(&salt.0); let salt = pwhash::Salt::from_slice(hash.as_ref()).expect("Unable to rewrap salt"); Ok(salt) } fn derive_key_from_password(password: &[u8], salt: &pwhash::Salt) -> crate::Result<Key> { let mut key = [0; secretstream::KEYBYTES]; match pwhash::derive_key( &mut key, password, &salt, pwhash::OPSLIMIT_INTERACTIVE, pwhash::MEMLIMIT_INTERACTIVE, ) { Ok(_) => Ok(Key(key)), Err(_) => Err(crate::Error::SnapshotError("Could not derive key from password".into())), } } fn create_stream(&Key(ref key): &Key) -> crate::Result<(Stream<Push>, Header)> { let stream_key = secretstream::Key(key.to_owned()); Stream::init_push(&stream_key).map_err(|_| crate::Error::SnapshotError("Unable to create stream".into())) } fn pull_stream(header: &[u8], &Key(ref key): &Key) -> crate::Result<Stream<Pull>> { let stream_key = secretstream::Key(key.to_owned()); let header = Header::from_slice(header).expect("Invalid Header size"); Stream::init_pull(&header, &stream_key).map_err(|_| crate::Error::SnapshotError("Unable to open stream".into())) } pub fn encrypt_snapshot(input: Vec<u8>, out: &mut File, password: &[u8]) -> crate::Result<()> { let mut slice = input.as_slice(); let mut buf = [0; CHUNK_SIZE]; let mut input_len = slice.len(); out.write_all(&SIGN)?; out.write_all(&VERSION)?; let salt = generate_salt()?; out.write_all(&salt.0)?; let key = derive_key_from_password(password, &salt)?; let (mut stream, header) = create_stream(&key)?; out.write_all(&header.0)?; loop { match slice.read(&mut buf) { Ok(amount_read) if amount_read > 0 => { input_len -= amount_read as usize; let tag = match input_len { 0 => Tag::Final, _ => Tag::Message, }; out.write_all( &stream .push(&buf[..amount_read], None, tag) .map_err(|_| crate::Error::SnapshotError("Failed to encrypt".into()))?, )? } Err(e) => return Err(crate::Error::from(e)), _ => break, } } Ok(()) } pub fn decrypt_snapshot(input: &mut File, output: &mut Vec<u8>, password: &[u8]) -> crate::Result<()> { check_file_len(input)?; let salt = get_salt(input, true)?; decrypt_file(input, output, password, salt)?; Ok(()) } pub fn update_snapshot(input: &mut File, output: &mut File, password: &[u8]) -> crate::Result<()> { let mut buffer: Vec<u8> = Vec::new(); check_file_len(input)?; let salt = get_salt(input, false)?; decrypt_file(input, &mut buffer, password, salt)?; encrypt_snapshot(buffer, output, password)?; Ok(()) } fn decrypt_file(input: &mut File, output: &mut Vec<u8>, password: &[u8], salt: pwhash::Salt) -> crate::Result<()> { let mut header = [0u8; secretstream::HEADERBYTES]; input.read_exact(&mut header)?; let key = derive_key_from_password(&p
=> return Err(crate::Error::SnapshotError("Incorrect Password".into())), _ => return Err(crate::Error::SnapshotError("Decryption failed... ".into())), } } Ok(()) } fn check_file_len(input: &mut File) -> crate::Result<()> { if input.metadata()?.len() <= (pwhash::SALTBYTES + secretstream::HEADERBYTES + SIGN.len()) as u64 { return Err(crate::Error::SnapshotError("Snapshot is not valid or encrypted".into())); } Ok(()) } fn get_salt(input: &mut File, chk_version: bool) -> crate::Result<pwhash::Salt> { let mut sign = [0u8; 5]; let mut version = [0u8; 2]; let mut salt = [0u8; pwhash::SALTBYTES]; input.read_exact(&mut sign)?; input.read_exact(&mut version)?; if chk_version { check_version(&version)?; } if sign == SIGN { input.read_exact(&mut salt)?; } else { salt[..7].copy_from_slice(&sign); input.read_exact(&mut salt[7..])?; } let salt = pwhash::Salt(salt); Ok(salt) } fn check_version(version: &[u8]) -> crate::Result<()> { if version != VERSION { Err(crate::Error::SnapshotError("Snapshot version is incorrect".into())) } else { Ok(()) } } #[cfg(test)] mod test { use super::*; use sodiumoxide::crypto::secretstream::Tag; use std::fs::OpenOptions; #[test] fn test_key_derivation() { let salt = generate_salt().unwrap(); let key_one = derive_key_from_password(b"some long password", &salt).unwrap(); let key_two = derive_key_from_password(b"some long password", &salt).unwrap(); assert_eq!(key_one, key_two); } #[test] fn test_stream() { let salt = generate_salt().unwrap(); let key = derive_key_from_password(b"a password", &salt).unwrap(); let data = b"data"; let (mut push_stream, header) = create_stream(&key).unwrap(); let mut pull_stream = pull_stream(&header.0, &key).unwrap(); let cipher = push_stream.push(data, None, Tag::Final).unwrap(); let (plain, _) = pull_stream.pull(&cipher, None).unwrap(); assert_eq!(data, &plain.as_slice()); } #[test] fn test_snapshot() { let password = b"some_password"; let data = vec![ 69, 59, 116, 81, 23, 91, 2, 212, 10, 248, 108, 227, 167, 142, 2, 205, 202, 100, 216, 225, 53, 223, 223, 14, 153, 239, 46, 106, 120, 103, 85, 144, 69, 59, 116, 81, 23, 91, 2, 212, 10, 248, 108, 227, 167, 142, 2, 205, 202, 100, 216, 225, 53, 223, 223, 14, 153, 239, 46, 106, 120, 103, 85, 144, 69, 59, 116, 81, 23, 91, 2, 212, 10, 248, 108, 227, 167, 142, 2, 205, 202, 100, 216, 225, 53, 223, 223, 14, 153, 239, 46, 106, 120, 103, 85, 144, ]; let expected = data.clone(); let mut encrypt = OpenOptions::new() .write(true) .create(true) .open("test/snapshot.snapshot") .unwrap(); let mut decrypt = OpenOptions::new().read(true).open("test/snapshot.snapshot").unwrap(); let mut output: Vec<u8> = Vec::new(); encrypt_snapshot(data, &mut encrypt, password).unwrap(); decrypt_snapshot(&mut decrypt, &mut output, password).unwrap(); assert_eq!(expected, output); } }
assword, &salt)?; let mut buf = [0u8; CHUNK_SIZE + secretstream::ABYTES]; let mut stream = pull_stream(&header, &key)?; while stream.is_not_finalized() { match input.read(&mut buf) { Ok(bytes_read) if bytes_read > 0 => { let (decrypt, _tag) = stream.pull(&buf[..bytes_read], None).map_err(|_| { crate::Error::SnapshotError("Stream pull failed, could not decrypt snapshot".into()) })?; output.extend(&decrypt); } Err(_)
function_block-random_span
[ { "content": "/// HChaCha20 implementation\n\npub fn h_chacha20_hash(key: &[u8], nonce: &[u8], buf: &mut [u8]) {\n\n // initialize state\n\n let mut state = vec![0u32; 16];\n\n (0..4).for_each(|i| state[i] = BASIS[i]);\n\n (4..12).for_each(|i| state[i] = read32_little_endian!(&key[(i - 4) * 4..]));\n\n (12..16).for_each(|i| state[i] = read32_little_endian!(&nonce[(i - 12) * 4..]));\n\n\n\n // run the rounds\n\n chacha20_rounds(&mut state);\n\n\n\n // write to the output\n\n let (buf_a, buf_b) = buf.split_at_mut(16);\n\n (0..4).for_each(|i| write32_little_endian!(state[i] => &mut buf_a[i* 4..]));\n\n (12..16).for_each(|i| write32_little_endian!(state[i] => &mut buf_b[(i - 12) * 4..]));\n\n}\n\n\n", "file_path": "crypto/src/internal/chacha.rs", "rank": 2, "score": 282932.82165243797 }, { "content": "/// encrypts data in place\n\npub fn chachapoly_seal(data: &mut [u8], tag: &mut [u8], ad: &[u8], key: &[u8], nonce: &[u8]) {\n\n // encrypt data\n\n ChaCha20Ietf::xor(key, nonce, 1, data);\n\n\n\n // create footer\n\n let mut foot = Vec::with_capacity(16);\n\n foot.extend_from_slice(&(ad.len() as u64).to_le_bytes());\n\n foot.extend_from_slice(&(data.len() as u64).to_le_bytes());\n\n\n\n // compute poly key and auth tag\n\n let mut pkey = vec![0; 32];\n\n ChaCha20Ietf::xor(key, nonce, 0, &mut pkey);\n\n Poly1305::chachapoly_auth(tag, ad, data, &foot, &pkey);\n\n}\n\n\n", "file_path": "crypto/src/chachapoly_ietf.rs", "rank": 7, "score": 249374.6707229907 }, { "content": "/// calculates the nth ChaCha20 block into a buffer\n\npub fn chacha20_block(key: &[u8], nonce: &[u8], n: u64, buf: &mut [u8]) {\n\n // create buffer\n\n let mut state = vec![0u32; 32];\n\n let (init, mixed) = state.split_at_mut(16);\n\n\n\n // initialize buffer\n\n (0..4).for_each(|i| init[i] = BASIS[i]);\n\n (4..12).for_each(|i| init[i] = read32_little_endian!(&key[(i - 4) * 4..]));\n\n split64_little_endian!(n => &mut init[12..]);\n\n (14..16).for_each(|i| init[i] = read32_little_endian!(&nonce[(i - 14) * 4..]));\n\n\n\n // mix the buffer\n\n mixed.copy_from_slice(init);\n\n chacha20_rounds(mixed);\n\n\n\n // write the mixed state into the buffer\n\n (0..16).for_each(|i| mixed[i] = add!(mixed[i], init[i]));\n\n (0..16).for_each(|i| write32_little_endian!(mixed[i] => &mut buf[i * 4..]));\n\n}\n", "file_path": "crypto/src/internal/chacha.rs", "rank": 8, "score": 245281.6393141871 }, { "content": "/// calculates the nth ChaCha20-IETF block into a buffer\n\npub fn chacha20_ietf_block(key: &[u8], nonce: &[u8], n: u32, buf: &mut [u8]) {\n\n // create buffer\n\n let mut state = vec![0u32; 32];\n\n let (init, mixed) = state.split_at_mut(16);\n\n\n\n // initialize buffer\n\n (0..4).for_each(|i| init[i] = BASIS[i]);\n\n (4..12).for_each(|i| init[i] = read32_little_endian!(&key[(i - 4) * 4..]));\n\n init[12] = n;\n\n (13..16).for_each(|i| init[i] = read32_little_endian!(&nonce[(i - 13) * 4..]));\n\n\n\n // mix the buffer\n\n mixed.copy_from_slice(init);\n\n chacha20_rounds(mixed);\n\n\n\n // write the mixed state into the buffer\n\n (0..16).for_each(|i| mixed[i] = add!(mixed[i], init[i]));\n\n (0..16).for_each(|i| write32_little_endian!(mixed[i] => &mut buf[i * 4..]));\n\n}\n\n\n", "file_path": "crypto/src/internal/chacha.rs", "rank": 9, "score": 241302.5669984584 }, { "content": "/// encrypts data in-place and authenticates it\n\nfn xchachapoly_seal(data: &mut [u8], tag: &mut [u8], ad: &[u8], key: &[u8], nonce: &[u8]) {\n\n // xor and encrypt the data.\n\n XChaCha20::xor(key, nonce, 1, data);\n\n\n\n // build a footer\n\n let mut foot 
= Vec::with_capacity(16);\n\n foot.extend_from_slice(&(ad.len() as u64).to_le_bytes());\n\n foot.extend_from_slice(&(data.len() as u64).to_le_bytes());\n\n\n\n // compute Poly1305 key and auth tag\n\n let mut pkey = vec![0; 32];\n\n XChaCha20::xor(key, nonce, 0, &mut pkey);\n\n Poly1305::chachapoly_auth(tag, ad, data, &foot, &pkey);\n\n}\n\n\n", "file_path": "crypto/src/xchachapoly.rs", "rank": 11, "score": 226865.62636113891 }, { "content": "/// finishes authentication\n\npub fn poly1305_finish(tag: &mut [u8], a: &mut [u32], s: &[u32]) {\n\n // modular reduction\n\n let mut c;\n\n c = shift_right!(a[1], 26);\n\n a[1] = and!(a[1], 0x3ffffff);\n\n a[2] = add!(a[2], c);\n\n c = shift_right!(a[2], 26);\n\n a[2] = and!(a[2], 0x3ffffff);\n\n a[3] = add!(a[3], c);\n\n c = shift_right!(a[3], 26);\n\n a[3] = and!(a[3], 0x3ffffff);\n\n a[4] = add!(a[4], c);\n\n c = shift_right!(a[4], 26);\n\n a[4] = and!(a[4], 0x3ffffff);\n\n a[0] = add!(a[0], mult!(c, 5));\n\n c = shift_right!(a[0], 26);\n\n a[0] = and!(a[0], 0x3ffffff);\n\n a[1] = add!(a[1], c);\n\n\n\n // reduce if values is in the range (2^130-5, 2^130]\n", "file_path": "crypto/src/internal/poly.rs", "rank": 13, "score": 208975.36274699494 }, { "content": "/// Loads a key into r and s and computes the key multipliers\n\npub fn poly1305_init(r: &mut [u32], s: &mut [u32], mu: &mut [u32], key: &[u8]) {\n\n // load key\n\n r[0] = and!(shift_right!(read32_little_endian!(&key[0..]), 0), 0x03FFFFFF);\n\n r[1] = and!(shift_right!(read32_little_endian!(&key[3..]), 2), 0x03FFFF03);\n\n r[2] = and!(shift_right!(read32_little_endian!(&key[6..]), 4), 0x03FFC0FF);\n\n r[3] = and!(shift_right!(read32_little_endian!(&key[9..]), 6), 0x03F03FFF);\n\n r[4] = and!(shift_right!(read32_little_endian!(&key[12..]), 8), 0x000FFFFF);\n\n\n\n s[0] = read32_little_endian!(&key[16..]);\n\n s[1] = read32_little_endian!(&key[20..]);\n\n s[2] = read32_little_endian!(&key[24..]);\n\n s[3] = read32_little_endian!(&key[28..]);\n\n\n\n // compute multipliers\n\n mu[0] = 0;\n\n mu[1] = mult!(r[1], 5);\n\n mu[2] = mult!(r[2], 5);\n\n mu[3] = mult!(r[3], 5);\n\n mu[4] = mult!(r[4], 5);\n\n}\n\n\n", "file_path": "crypto/src/internal/poly.rs", "rank": 14, "score": 198587.09745531628 }, { "content": "// deseralize a hashmap\n\npub fn deserialize_buffer(bytes: &[u8]) -> HashMap<Vec<u8>, Vec<u8>> {\n\n let mut map = HashMap::new();\n\n\n\n let mut left = &bytes[..];\n\n while !left.is_empty() {\n\n let k = read_buffer(&mut left);\n\n let v = read_buffer(&mut left);\n\n map.insert(k, v);\n\n }\n\n\n\n map\n\n}\n\n\n", "file_path": "snapshot/src/serialize.rs", "rank": 15, "score": 189459.56767366867 }, { "content": "/// read the buffer.\n\nfn read_buffer(input: &mut &[u8]) -> Vec<u8> {\n\n let (len, rest) = input.split_at(std::mem::size_of::<usize>());\n\n let len = usize::from_le_bytes(len.try_into().unwrap());\n\n let (v, rest) = rest.split_at(len);\n\n *input = rest;\n\n v.to_vec()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_serialize_deserialize() {\n\n let mut map = HashMap::new();\n\n map.insert(vec![32, 1, 53], vec![39, 43, 5]);\n\n map.insert(vec![52, 13, 53, 53], vec![31, 1]);\n\n map.insert(vec![142], vec![1, 0, 125, 82, 13, 54, 69]);\n\n\n\n let buf = serialize_map(&map);\n\n let recovered = deserialize_buffer(&buf);\n\n\n\n println!(\"{:?}, {:?}\", buf, recovered);\n\n\n\n assert_eq!(map, recovered);\n\n }\n\n}\n", "file_path": "snapshot/src/serialize.rs", "rank": 16, "score": 179025.15144528524 }, { "content": "/// serialize a 
hashmap\n\npub fn serialize_map(map: &HashMap<Vec<u8>, Vec<u8>>) -> Vec<u8> {\n\n map.iter().fold(Vec::new(), |mut acc, (k, v)| {\n\n acc.extend(&k.len().to_le_bytes());\n\n acc.extend(k.as_slice());\n\n acc.extend(&v.len().to_le_bytes());\n\n acc.extend(v.as_slice());\n\n acc\n\n })\n\n}\n\n\n", "file_path": "snapshot/src/serialize.rs", "rank": 18, "score": 160173.30580607208 }, { "content": "/// a mutable view over raw data.\n\npub trait AsViewMut<T: Sized>: AsMut<[u8]> {\n\n /// creates a mutable view over `self`.\n\n fn view_mut(&mut self) -> &mut T {\n\n // get bytes\n\n let bytes = self.as_mut();\n\n // validate bytes\n\n assert!(mem::size_of::<T>() <= bytes.len(), \"Can't create view over this memory\");\n\n // get mute pointer\n\n let bytes = bytes.as_mut_ptr();\n\n // validate alignment\n\n assert_eq!(\n\n bytes.align_offset(mem::align_of::<T>()),\n\n 0,\n\n \"View's offset is incorrect\"\n\n );\n\n\n\n // cast mutable pointer\n\n unsafe { bytes.cast::<T>().as_mut() }.unwrap()\n\n }\n\n}\n", "file_path": "vault/src/types.rs", "rank": 19, "score": 154074.40681797737 }, { "content": "/// updates the value a with any data using the key and the multipliers\n\n/// pads any incomplete block with 0 bytes.\n\npub fn poly1305_update(a: &mut [u32], r: &[u32], mu: &[u32], mut data: &[u8], is_last: bool) {\n\n let mut buf = vec![0; 16];\n\n let mut w = vec![0; 5];\n\n\n\n // process data\n\n while !data.is_empty() {\n\n // put data into buffer and append 0x01 byte as padding as needed\n\n let buf_len = min(data.len(), buf.len());\n\n if buf_len < 16 {\n\n buf.copy_from_slice(&[0; 16]);\n\n if is_last {\n\n buf[buf_len] = 0x01\n\n }\n\n }\n\n buf[..buf_len].copy_from_slice(&data[..buf_len]);\n\n\n\n // decode next block into an accumulator. Apply high bit if needed.\n\n a[0] = add!(\n\n a[0],\n\n and!(shift_right!(read32_little_endian!(&buf[0..]), 0), 0x03FFFFFF)\n", "file_path": "crypto/src/internal/poly.rs", "rank": 21, "score": 148215.43091180632 }, { "content": "/// get the home directory of the user's device\n\npub fn home_dir() -> crate::Result<PathBuf> {\n\n let home = match std::env::var(\"STRONGHOLD\") {\n\n Ok(h) => h.into(),\n\n Err(_) => dirs::home_dir().unwrap(),\n\n };\n\n let home_dir = home.join(format!(\".{}\", \"engine\"));\n\n\n\n verify_or_create(&home_dir)?;\n\n\n\n Ok(home_dir)\n\n}\n\n\n", "file_path": "snapshot/src/files.rs", "rank": 22, "score": 143484.8679845298 }, { "content": "/// get the snapshot dir of the user's device\n\npub fn snapshot_dir() -> crate::Result<PathBuf> {\n\n let home_dir = home_dir()?;\n\n let snapshot_dir = home_dir.join(\"snapshots\");\n\n\n\n verify_or_create(&snapshot_dir)?;\n\n\n\n Ok(snapshot_dir)\n\n}\n\n\n", "file_path": "snapshot/src/files.rs", "rank": 23, "score": 143484.8679845298 }, { "content": "pub trait Sign: SecretKeyGen + PublicKeyGen {\n\n /// returns info about the signer\n\n fn info(&self) -> SignInfo;\n\n /// signs data in the buffer using the secret key and returns the signature length.\n\n fn sign(&self, buf: &mut [u8], data: &[u8], secret_key: &[u8]) -> Result<usize, Box<dyn Error + 'static>>;\n\n /// verify the signature for the data with the public key. 
Returns an error if the signature was invalid.\n\n fn verify(&self, data: &[u8], sig: &[u8], public_key: &[u8]) -> Result<(), Box<dyn Error + 'static>>;\n\n}\n", "file_path": "primitives/src/signing.rs", "rank": 25, "score": 141279.2092102962 }, { "content": "/// open data and decrypt it in place.\n\npub fn chachapoly_open(\n\n data: &mut [u8],\n\n tag: &[u8],\n\n ad: &[u8],\n\n key: &[u8],\n\n nonce: &[u8],\n\n) -> Result<(), Box<dyn Error + 'static>> {\n\n // build footer\n\n let mut foot = Vec::with_capacity(16);\n\n foot.extend_from_slice(&(ad.len() as u64).to_le_bytes());\n\n foot.extend_from_slice(&(data.len() as u64).to_le_bytes());\n\n\n\n // compute poly key and auth tag\n\n let (mut pkey, mut vfy_tag) = (vec![0; 32], vec![0; 16]);\n\n ChaCha20Ietf::xor(key, nonce, 0, &mut pkey);\n\n Poly1305::chachapoly_auth(&mut vfy_tag, ad, data, &foot, &pkey);\n\n\n\n // validate tags\n\n if eq_const_time!(&tag, &vfy_tag) {\n\n ChaCha20Ietf::xor(key, nonce, 1, data);\n", "file_path": "crypto/src/chachapoly_ietf.rs", "rank": 26, "score": 136088.0568712189 }, { "content": "pub fn serialize_to_snapshot(snapshot: &PathBuf, pass: &str, mut client: Client<Provider>) {\n\n let mut file = OpenOptions::new()\n\n .write(true)\n\n .create(true)\n\n .open(snapshot)\n\n .expect(\"Unable to access snapshot. Make sure that it exists.\");\n\n\n\n file.set_len(0).expect(\"unable to clear the contents of the file file\");\n\n\n\n let snap: Snapshot<Provider> = Snapshot::new(&mut client);\n\n\n\n let data: Vec<u8> = bincode::serialize(&snap).expect(\"Couldn't serialize the client data\");\n\n encrypt_snapshot(data, &mut file, pass.as_bytes()).expect(\"Couldn't write to the snapshot\");\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::data::Blob;\n\n use crate::line_error;\n", "file_path": "client/src/snap.rs", "rank": 27, "score": 131826.5067192298 }, { "content": "/// A Hash interface\n\npub trait Hash {\n\n /// Get the information block that describes the hash\n\n fn info(&self) -> HashInfo;\n\n /// hashes data and returns the hash length. `buf` contains the outgoing hashed data. \n\n fn hash(&self, buf: &mut [u8], data: &[u8]) -> Result<usize, Box<dyn Error + 'static>>;\n\n}\n\n\n", "file_path": "primitives/src/hash.rs", "rank": 28, "score": 125219.52659895073 }, { "content": "/// a view over raw data.\n\npub trait AsView<T: Sized>: AsRef<[u8]> {\n\n /// creates a view over `self`.\n\n fn view(&self) -> &T {\n\n // get the bytes\n\n let bytes = self.as_ref();\n\n // validate the bytes\n\n assert!(mem::size_of::<T>() <= bytes.len(), \"Can't create view over this memory\");\n\n // get the pointer\n\n let bytes = bytes.as_ptr();\n\n // validate alignment\n\n assert_eq!(\n\n bytes.align_offset(mem::align_of::<T>()),\n\n 0,\n\n \"View's offset is incorrect\"\n\n );\n\n // cast the pointer\n\n unsafe { bytes.cast::<T>().as_ref() }.unwrap()\n\n }\n\n}\n\n\n", "file_path": "vault/src/types.rs", "rank": 29, "score": 119347.30441057889 }, { "content": "/// a variable length hash\n\npub trait VarLenHash: Hash {\n\n /// hashes the data and returns the hash length. 
`buf` contains the outgoing hashed data.\n\n fn var_len_hash(&self, buf: &mut [u8], data: &[u8]) -> Result<usize, Box<dyn Error + 'static>>;\n\n}\n", "file_path": "primitives/src/hash.rs", "rank": 30, "score": 119080.69376363067 }, { "content": "pub fn deserialize_from_snapshot(snapshot: &PathBuf, pass: &str) -> Client<Provider> {\n\n let mut buffer = Vec::new();\n\n\n\n let mut file = OpenOptions::new()\n\n .read(true)\n\n .open(snapshot)\n\n .expect(\"Unable to access the snapshot. Make sure it exists.\");\n\n\n\n decrypt_snapshot(&mut file, &mut buffer, pass.as_bytes());\n\n\n\n let snapshot: Snapshot<Provider> = bincode::deserialize(&buffer[..]).expect(\"Unable to deserialize data\");\n\n\n\n Client::<Provider>::new_from_snapshot(snapshot)\n\n}\n\n\n", "file_path": "client/src/snap.rs", "rank": 31, "score": 101928.78113258607 }, { "content": "/// verify that the folder exists or create it.\n\nfn verify_or_create(dir: &Path) -> crate::Result<()> {\n\n if dir.is_dir() {\n\n return Ok(());\n\n }\n\n Ok(fs::create_dir_all(dir)?)\n\n}\n", "file_path": "snapshot/src/files.rs", "rank": 32, "score": 101452.24329398863 }, { "content": "/// trait for encryptable data\n\npub trait Encrypt<T: From<Vec<u8>>>: AsRef<[u8]> {\n\n /// encrypts a raw data and creates a type T from the ciphertext\n\n fn encrypt<B: BoxProvider>(&self, key: &Key<B>, ad: &[u8]) -> crate::Result<T> {\n\n let sealed = B::box_seal(key, ad, self.as_ref())?;\n\n Ok(T::from(sealed))\n\n }\n\n}\n\n\n", "file_path": "vault/src/crypto_box.rs", "rank": 33, "score": 101059.3763333167 }, { "content": "/// Does ChaCha20 Rounds over the state\n\nfn chacha20_rounds(state: &mut [u32]) {\n\n for _ in 0..10 {\n\n // macro for a quater round\n\n macro_rules! quarter_round {\n\n ($a:expr, $b:expr, $c:expr, $d:expr) => {{\n\n state[$a] = add!(state[$a], state[$b]);\n\n state[$d] = xor!(state[$d], state[$a]);\n\n state[$d] = or!(shift_left!(state[$d], 16), shift_right!(state[$d], 16));\n\n state[$c] = add!(state[$c], state[$d]);\n\n state[$b] = xor!(state[$b], state[$c]);\n\n state[$b] = or!(shift_left!(state[$b], 12), shift_right!(state[$b], 20));\n\n state[$a] = add!(state[$a], state[$b]);\n\n state[$d] = xor!(state[$d], state[$a]);\n\n state[$d] = or!(shift_left!(state[$d], 8), shift_right!(state[$d], 24));\n\n state[$c] = add!(state[$c], state[$d]);\n\n state[$b] = xor!(state[$b], state[$c]);\n\n state[$b] = or!(shift_left!(state[$b], 7), shift_right!(state[$b], 25));\n\n }};\n\n }\n\n\n", "file_path": "crypto/src/internal/chacha.rs", "rank": 34, "score": 95561.81029535588 }, { "content": "/// decrypts data in-place after validation\n\nfn xchachapoly_open(\n\n data: &mut [u8],\n\n tag: &[u8],\n\n ad: &[u8],\n\n key: &[u8],\n\n nonce: &[u8],\n\n) -> Result<(), Box<dyn Error + 'static>> {\n\n // build footer\n\n let mut foot = Vec::with_capacity(16);\n\n foot.extend_from_slice(&(ad.len() as u64).to_le_bytes());\n\n foot.extend_from_slice(&(data.len() as u64).to_le_bytes());\n\n\n\n // get poly1305 key and auth tag\n\n let (mut pkey, mut verify_tag) = (vec![0; 32], vec![0; 16]);\n\n XChaCha20::xor(key, nonce, 0, &mut pkey);\n\n Poly1305::chachapoly_auth(&mut verify_tag, ad, data, &foot, &pkey);\n\n\n\n // validate the tags.\n\n if !eq_const_time!(&tag, &verify_tag) {\n\n return Err(crate::Error::InvalidData.into());\n", "file_path": "crypto/src/xchachapoly.rs", "rank": 35, "score": 90047.31588292321 }, { "content": "/// A key derivation function interface\n\npub trait KeyDervFunc {\n\n /// returns the information block about the key derivation 
function\n\n fn info(&self) -> KeyDervFuncInfo;\n\n /// derive bytes from the base key with salt and info. Outputs to the buffer `buf`.\n\n fn derive(&self, buf: &mut [u8], base_key: &[u8], salt: &[u8], info: &[u8])\n\n -> Result<(), Box<dyn Error + 'static>>;\n\n}\n", "file_path": "primitives/src/key_derv_func.rs", "rank": 36, "score": 90023.93270624749 }, { "content": "/// Trait for decryptable data\n\npub trait Decrypt<E, T: TryFrom<Vec<u8>, Error = E>>: AsRef<[u8]> {\n\n /// decrypts raw data and creates a new type T from the plaintext\n\n fn decrypt<B: BoxProvider>(&self, key: &Key<B>, ad: &[u8]) -> crate::Result<T> {\n\n let opened = B::box_open(key, ad, self.as_ref())?;\n\n Ok(T::try_from(opened).map_err(|_| crate::Error::DatabaseError(String::from(\"Invalid Entry\")))?)\n\n }\n\n}\n", "file_path": "vault/src/crypto_box.rs", "rank": 37, "score": 89624.0947389598 }, { "content": "/// slice extension for constrainted values\n\npub trait SliceExt {\n\n fn constrain_value(&self) -> usize;\n\n}\n\n\n\nimpl USizeExt for usize {\n\n fn constrain_value(&self) -> usize {\n\n *self\n\n }\n\n}\n\n\n\nimpl<T: AsRef<[u8]>> SliceExt for T {\n\n fn constrain_value(&self) -> usize {\n\n self.as_ref().len()\n\n }\n\n}\n\n\n\n/// verify size of buffer\n\n#[macro_export]\n\nmacro_rules! verify_keygen {\n\n ($size:expr => $buf:expr) => {{\n", "file_path": "crypto/src/verify.rs", "rank": 38, "score": 86662.0899332951 }, { "content": "// create a record with a revoke transaction. Data isn't actually deleted until it is garbage collected.\n\nfn revoke_command(matches: &ArgMatches) {\n\n if let Some(matches) = matches.subcommand_matches(\"revoke\") {\n\n if let Some(ref pass) = matches.value_of(\"password\") {\n\n if let Some(ref id) = matches.value_of(\"id\") {\n\n let snapshot = get_snapshot_path();\n\n let client: Client<Provider> = deserialize_from_snapshot(&snapshot, pass);\n\n\n\n let id = Vec::from_base64(id.as_bytes()).expect(\"couldn't convert the id to from base64\");\n\n let id = Id::load(&id).expect(\"Couldn't build a new Id\");\n\n\n\n client.revoke_record_by_id(id);\n\n\n\n let snapshot = get_snapshot_path();\n\n serialize_to_snapshot(&snapshot, pass, client);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/commandline/src/main.rs", "rank": 39, "score": 86498.08687807963 }, { "content": "// Purge a record from the chain: revoke and then garbage collect.\n\nfn purge_command(matches: &ArgMatches) {\n\n if let Some(matches) = matches.subcommand_matches(\"purge\") {\n\n if let Some(ref pass) = matches.value_of(\"password\") {\n\n if let Some(ref id) = matches.value_of(\"id\") {\n\n let snapshot = get_snapshot_path();\n\n let client: Client<Provider> = deserialize_from_snapshot(&snapshot, pass);\n\n\n\n let id = Vec::from_base64(id.as_bytes()).expect(\"couldn't convert the id to from base64\");\n\n let id = Id::load(&id).expect(\"Couldn't build a new Id\");\n\n\n\n client.revoke_record_by_id(id);\n\n client.perform_gc();\n\n\n\n assert!(client.db.take(|db| db.all().find(|i| i == &id).is_none()));\n\n\n\n let snapshot = get_snapshot_path();\n\n serialize_to_snapshot(&snapshot, pass, client);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/commandline/src/main.rs", "rank": 40, "score": 86494.09639640304 }, { "content": "// handle the encryption command.\n\nfn encrypt_command(matches: &ArgMatches) {\n\n let snapshot = get_snapshot_path();\n\n\n\n if let Some(matches) = matches.subcommand_matches(\"encrypt\") {\n\n if let Some(ref pass) = matches.value_of(\"password\") {\n\n if let 
Some(plain) = matches.value_of(\"plain\") {\n\n let client = if snapshot.exists() {\n\n deserialize_from_snapshot(&get_snapshot_path(), pass)\n\n } else {\n\n let key = Key::<Provider>::random().expect(\"Unable to generate a new key\");\n\n let id = Id::random::<Provider>().expect(\"Unable to generate a new id\");\n\n Client::create_chain(key, id)\n\n };\n\n let id = client.create_record(plain.as_bytes().to_vec());\n\n serialize_to_snapshot(&get_snapshot_path(), pass, client);\n\n println!(\"{:?}\", id);\n\n };\n\n };\n\n }\n\n}\n\n\n", "file_path": "examples/commandline/src/main.rs", "rank": 41, "score": 86494.09639640304 }, { "content": "// handle the snapshot command.\n\nfn snapshot_command(matches: &ArgMatches) {\n\n if let Some(matches) = matches.subcommand_matches(\"snapshot\") {\n\n if let Some(ref pass) = matches.value_of(\"password\") {\n\n if let Some(ref path) = matches.value_of(\"path\") {\n\n let path = Path::new(path);\n\n\n\n let client: Client<Provider> = deserialize_from_snapshot(&path.to_path_buf(), pass);\n\n\n\n let new_path = path.parent().unwrap().join(\"recomputed.snapshot\");\n\n serialize_to_snapshot(&new_path, pass, client);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/commandline/src/main.rs", "rank": 42, "score": 86494.09639640304 }, { "content": "// handle the read command.\n\nfn read_command(matches: &ArgMatches) {\n\n if let Some(matches) = matches.subcommand_matches(\"read\") {\n\n if let Some(ref pass) = matches.value_of(\"password\") {\n\n if let Some(ref id) = matches.value_of(\"id\") {\n\n let snapshot = get_snapshot_path();\n\n let client: Client<Provider> = deserialize_from_snapshot(&snapshot, pass);\n\n\n\n let id = Vec::from_base64(id.as_bytes()).expect(\"couldn't convert the id to from base64\");\n\n let id = Id::load(&id).expect(\"Couldn't build a new Id\");\n\n\n\n client.read_record_by_id(id);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/commandline/src/main.rs", "rank": 43, "score": 86494.09639640304 }, { "content": "// handle the list command.\n\nfn list_command(matches: &ArgMatches) {\n\n if let Some(matches) = matches.subcommand_matches(\"list\") {\n\n if let Some(ref pass) = matches.value_of(\"password\") {\n\n let snapshot = get_snapshot_path();\n\n let client: Client<Provider> = deserialize_from_snapshot(&snapshot, pass);\n\n\n\n if matches.is_present(\"all\") {\n\n client.list_all_ids();\n\n } else {\n\n client.list_ids();\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/commandline/src/main.rs", "rank": 44, "score": 86494.09639640304 }, { "content": "// Loop until there is a Result.\n\npub fn send_until_success(req: CRequest) -> CResult {\n\n loop {\n\n match send(req.clone()) {\n\n Some(result) => {\n\n break result;\n\n }\n\n None => thread::sleep(Duration::from_millis(50)),\n\n }\n\n }\n\n}\n", "file_path": "examples/commandline/src/connection.rs", "rank": 45, "score": 85275.36683554793 }, { "content": "// send a request until there is a response - emulates network\n\npub fn send_until_success(req: TransactionRequest) -> TransactionResult {\n\n loop {\n\n match send(req.clone()) {\n\n Some(result) => break result,\n\n None => thread::sleep(Duration::from_millis(Env::retry_delay())),\n\n }\n\n }\n\n}\n", "file_path": "vault/fuzz/src/connection.rs", "rank": 46, "score": 85270.8104738264 }, { "content": "// Take ownership of an existing chain. 
Requires that the new chain owner knows the old key to unlock the data.\n\nfn take_ownership_command(matches: &ArgMatches) {\n\n if let Some(matches) = matches.subcommand_matches(\"take_ownership\") {\n\n if let Some(ref pass) = matches.value_of(\"password\") {\n\n let new_id = Id::random::<Provider>().expect(\"Unable to generate a new id\");\n\n\n\n let snapshot = get_snapshot_path();\n\n let client: Client<Provider> = deserialize_from_snapshot(&snapshot, pass);\n\n let new_client: Client<Provider> = Client::create_chain(client.db.key, new_id);\n\n\n\n new_client.take_ownership(client.id);\n\n\n\n println!(\"Old owner id: {:?}\\nNew owner id: {:?}\", client.id, new_client.id);\n\n\n\n let snapshot = get_snapshot_path();\n\n serialize_to_snapshot(&snapshot, pass, new_client);\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/commandline/src/main.rs", "rank": 47, "score": 84601.82878764407 }, { "content": "/// extension on usize for constrained values.\n\npub trait USizeExt {\n\n fn constrain_value(&self) -> usize;\n\n}\n\n\n", "file_path": "crypto/src/verify.rs", "rank": 48, "score": 84159.5014413984 }, { "content": "/// A secret key generation algorithm\n\npub trait SecretKeyGen {\n\n /// generate a new private key in the buffer. `buf` is the output buffer.\n\n fn new_secret_key(&self, buf: &mut [u8], rng: &mut dyn SecureRng) -> Result<usize, Box<dyn Error + 'static>>;\n\n}\n\n\n", "file_path": "primitives/src/rng.rs", "rank": 49, "score": 84035.87886135673 }, { "content": "/// A public key generation algorithm\n\npub trait PublicKeyGen {\n\n /// generate a new public key in the buffer. `buf` is the output buffer.\n\n fn get_pub_key(&self, buf: &mut [u8], secret_key: &[u8]) -> Result<usize, Box<dyn Error + 'static>>;\n\n}\n", "file_path": "primitives/src/rng.rs", "rank": 50, "score": 84035.87886135673 }, { "content": "// garbage collect the chain. 
Remove any revoked data from the chain.\n\nfn garbage_collect_vault_command(matches: &ArgMatches) {\n\n if let Some(matches) = matches.subcommand_matches(\"garbage_collect\") {\n\n if let Some(ref pass) = matches.value_of(\"password\") {\n\n let snapshot = get_snapshot_path();\n\n let client: Client<Provider> = deserialize_from_snapshot(&snapshot, pass);\n\n\n\n client.perform_gc();\n\n client.list_ids();\n\n\n\n let snapshot = get_snapshot_path();\n\n serialize_to_snapshot(&snapshot, pass, client);\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/commandline/src/main.rs", "rank": 51, "score": 82811.02653622431 }, { "content": "// resolve the requests into responses.\n\npub fn send(req: CRequest) -> Option<CResult> {\n\n let result = match req {\n\n // if the request is a list, get the keys from the map and put them into a ListResult.\n\n CRequest::List => {\n\n let entries = State::storage_map()\n\n .read()\n\n .expect(line_error!())\n\n .keys()\n\n .cloned()\n\n .collect();\n\n\n\n CResult::List(ListResult::new(entries))\n\n }\n\n // on write, write data to the map and send back a Write result.\n\n CRequest::Write(write) => {\n\n State::storage_map()\n\n .write()\n\n .expect(line_error!())\n\n .insert(write.id().to_vec(), write.data().to_vec());\n\n\n", "file_path": "examples/commandline/src/connection.rs", "rank": 52, "score": 82780.63629950256 }, { "content": "/// a trait to make types base64 decodable\n\npub trait Base64Decodable: Sized {\n\n fn from_base64(base: impl AsRef<[u8]>) -> crate::Result<Self>;\n\n}\n\n\n\nimpl<T: AsRef<[u8]>> Base64Encodable for T {\n\n fn base64(&self) -> String {\n\n Base64::encode_data(self.as_ref())\n\n }\n\n}\n\n\n\nimpl Base64Decodable for Vec<u8> {\n\n fn from_base64(base: impl AsRef<[u8]>) -> crate::Result<Self> {\n\n Base64::decode_data(base.as_ref())\n\n }\n\n}\n", "file_path": "vault/src/base64.rs", "rank": 53, "score": 82038.6073006196 }, { "content": "fn get_snapshot_path() -> PathBuf {\n\n let path = snapshot_dir().expect(\"Unable to get the snapshot path\");\n\n\n\n path.join(\"backup.snapshot\")\n\n}\n\n\n", "file_path": "client/src/snap.rs", "rank": 54, "score": 80196.20533109008 }, { "content": "/// A provider interface between the vault and a crypto box. See libsodium's [secretbox](https://libsodium.gitbook.io/doc/secret-key_cryptography/secretbox) for an example.\n\npub trait BoxProvider: Sized {\n\n /// function for the key length of the crypto box\n\n fn box_key_len() -> usize;\n\n /// gets the crypto box's overhead\n\n fn box_overhead() -> usize;\n\n\n\n /// seals some data into the crypto box using the `key` and the `ad`\n\n fn box_seal(key: &Key<Self>, ad: &[u8], data: &[u8]) -> crate::Result<Vec<u8>>;\n\n\n\n /// opens a crypto box to get data using the `key` and the `ad`.\n\n fn box_open(key: &Key<Self>, ad: &[u8], data: &[u8]) -> crate::Result<Vec<u8>>;\n\n\n\n /// fills a buffer `buf` with secure random bytes.\n\n fn random_buf(buf: &mut [u8]) -> crate::Result<()>;\n\n\n\n /// creates a vector with secure random bytes based off of an inputted length `len`.\n\n fn random_vec(len: usize) -> crate::Result<Vec<u8>> {\n\n let mut buf = vec![0; len];\n\n Self::random_buf(&mut buf)?;\n\n Ok(buf)\n", "file_path": "vault/src/crypto_box.rs", "rank": 55, "score": 79771.56594141302 }, { "content": "/// A one shot stateless cipher. 
Implements the `SecretKeyGen` trait.\n\npub trait Cipher: SecretKeyGen {\n\n /// returns cipher info block\n\n fn info(&self) -> CipherInfo;\n\n /// predicts the max encrypted cipher length given a `plaintext_len` (in bytes)\n\n fn predict_encrypted_max(&self, plain_len: usize) -> usize;\n\n /// encrypts the plaintext in-place and returns the cipher's length. `buf` contains the incoming plaintext buffer.\n\n fn encrypt(\n\n &self,\n\n buf: &mut [u8],\n\n plain_len: usize,\n\n key: &[u8],\n\n nonce: &[u8],\n\n ) -> Result<usize, Box<dyn Error + 'static>>;\n\n\n\n /// encrypts the plaintext and returns the plaintext's length. `buf` contains the incoming plaintext buffer.\n\n fn encrypt_to(\n\n &self,\n\n buf: &mut [u8],\n\n plain: &[u8],\n\n key: &[u8],\n", "file_path": "primitives/src/cipher.rs", "rank": 56, "score": 79653.23628135698 }, { "content": "/// a Message authentication interface (MAC) that is stateless and can be a one shot.\n\npub trait MessageAuthCode: SecretKeyGen {\n\n /// get the info about the MAC\n\n fn info(&self) -> MessageAuthCodeInfo;\n\n /// authenticate the `data` using the `key` through the `buf` buffer. Returns the MAC length in a `Result`\n\n fn auth(&self, buf: &mut [u8], data: &[u8], key: &[u8]) -> Result<usize, Box<dyn Error + 'static>>;\n\n}\n\n\n", "file_path": "primitives/src/auth.rs", "rank": 57, "score": 75591.17620596816 }, { "content": "fn main() {\n\n // determine which secure random number generator should be used.\n\n #[allow(unused_assignments)]\n\n let mut secure_random = None;\n\n\n\n #[cfg(any(target_os = \"macos\", target_os = \"ios\"))]\n\n {\n\n secure_random = macos_secrandom()\n\n }\n\n #[cfg(any(target_os = \"freebsd\", target_os = \"openbsd\", target_os = \"netbsd\"))]\n\n {\n\n secure_random = Some(\"USE_ARC4RANDOM\")\n\n }\n\n #[cfg(target_os = \"windows\")]\n\n {\n\n println!(\"cargo:rustc-link-lib=bcrypt\");\n\n secure_random = Some(\"USE_CRYPTGENRANDOM\")\n\n }\n\n #[cfg(target_os = \"linux\")]\n\n {\n", "file_path": "random/build.rs", "rank": 58, "score": 59814.57076490857 }, { "content": "fn main() {\n\n println!(\"Hello, world!\");\n\n}\n", "file_path": "client/src/main.rs", "rank": 59, "score": 58259.01063942617 }, { "content": "fn main() {\n\n let yaml = load_yaml!(\"cli.yml\");\n\n let matches = App::from(yaml).get_matches();\n\n\n\n encrypt_command(&matches);\n\n snapshot_command(&matches);\n\n read_command(&matches);\n\n list_command(&matches);\n\n revoke_command(&matches);\n\n garbage_collect_vault_command(&matches);\n\n take_ownership_command(&matches);\n\n purge_command(&matches);\n\n}\n", "file_path": "examples/commandline/src/main.rs", "rank": 60, "score": 56834.60246397934 }, { "content": "#[test]\n\nfn testset_full() {\n\n testset(\"full\");\n\n}\n\n\n", "file_path": "vault/tests/preload.rs", "rank": 61, "score": 56834.60246397934 }, { "content": "fn main() {\n\n // get the threads from the THREADS enviroment var.\n\n let threads_str = env::var(\"NUM_THREADS\").unwrap_or(num_cpus::get().to_string());\n\n let threads = usize::from_str(&threads_str).expect(\"Invalid value of THREADS\");\n\n\n\n // load the enviroment limit from the VECTOR_LIMIT env var.\n\n let limit_str = env::var(\"VECTOR_LIMIT\").unwrap_or(264.to_string());\n\n let limit = usize::from_str(&limit_str).expect(\"Invalid value of TEST_VECTOR_LIMIT\");\n\n\n\n // fuzz the threads.\n\n for _ in 0..threads {\n\n let mut rng = SecureRng::new();\n\n thread::spawn(move || loop {\n\n ChaChaPolyVector::random(limit, &mut rng).test();\n\n 
XChaChaPolyVector::random(limit, &mut rng).test()\n\n });\n\n }\n\n\n\n // Show the progress of fuzzing.\n\n println!(\n\n \"Spraying Fuzz [Num Of Threads = {}, Vector Limit = {} bytes]...\",\n\n threads, limit\n\n );\n\n loop {\n\n thread::sleep(Duration::from_secs(5));\n\n println!(\"Performed {} tests...\", COUNTER.load(Relaxed));\n\n }\n\n}\n", "file_path": "crypto/fuzz/src/main.rs", "rank": 62, "score": 56834.60246397934 }, { "content": "#[test]\n\nfn test_error() {\n\n for vec in ErrorTestVector::load() {\n\n vec.test_decryption();\n\n }\n\n}\n\n\n\n// API test vector\n\n#[derive(Default, Clone, Debug)]\n\npub struct ApiTestVector {\n\n id: String,\n\n key_len: usize,\n\n nonce_len: usize,\n\n ad_len: usize,\n\n enc_input_len: usize,\n\n enc_buf_len: usize,\n\n dec_input_len: usize,\n\n dec_buf_len: usize,\n\n error: String,\n\n}\n\nimpl ApiTestVector {\n", "file_path": "crypto/tests/xchachapoly.rs", "rank": 63, "score": 56834.60246397934 }, { "content": "#[test]\n\nfn base64_fail() {\n\n let vectors = [\n\n b\"Rg\".as_ref(),\n\n b\"Rk8\".as_ref(),\n\n b\"Rk9PQk+S\".as_ref(),\n\n b\"Zm9vY/Fy\".as_ref(),\n\n ];\n\n for vec in vectors.iter() {\n\n assert!(Vec::from_base64(vec).is_err());\n\n }\n\n}\n", "file_path": "vault/tests/base64.rs", "rank": 64, "score": 56834.60246397934 }, { "content": "#[test]\n\nfn test_crypto() {\n\n for vec in TestVector::load() {\n\n vec.test_encryption().test_decryption();\n\n }\n\n}\n\n\n\n// Mac error Vector\n", "file_path": "crypto/tests/xchachapoly.rs", "rank": 65, "score": 56834.60246397934 }, { "content": "#[test]\n\nfn test_crypto() {\n\n for vec in TestVector::load() {\n\n vec.test_keystream_encryption().test_keystream_decryption();\n\n }\n\n}\n\n\n\n// API test vector\n\n#[derive(Default, Clone, Debug)]\n\npub struct ApiTestVector {\n\n id: String,\n\n key_len: usize,\n\n nonce_len: usize,\n\n enc_input_len: usize,\n\n enc_buf_len: usize,\n\n dec_input_len: usize,\n\n dec_buf_len: usize,\n\n error: String,\n\n}\n\nimpl ApiTestVector {\n\n // load json vectors\n", "file_path": "crypto/tests/xchacha.rs", "rank": 66, "score": 56834.60246397934 }, { "content": "#[test]\n\nfn test_api() {\n\n for vec in ApiTestVector::load() {\n\n vec.test_encryption().test_decryption();\n\n }\n\n}\n", "file_path": "crypto/tests/xchachapoly.rs", "rank": 67, "score": 56834.60246397934 }, { "content": "#[test]\n\nfn test_api() {\n\n for vec in ApiTestVector::load() {\n\n vec.test_mac();\n\n }\n\n}\n", "file_path": "crypto/tests/poly.rs", "rank": 68, "score": 56834.60246397934 }, { "content": "#[test]\n\nfn test_base64() {\n\n let vectors = [\n\n [[0x14, 0xfb, 0x9c, 0x03, 0xd9, 0x7e].as_ref(), b\"FPucA9l-\".as_ref()],\n\n\t\t[b\"\".as_ref(), b\"\".as_ref()],\n\n\t\t[b\"f\".as_ref(), b\"Zg==\".as_ref()], [b\"F\".as_ref(), b\"Rg==\".as_ref()],\n\n\t\t[b\"fo\".as_ref(), b\"Zm8=\".as_ref()], [b\"FO\".as_ref(), b\"Rk8=\".as_ref()],\n\n\t\t[b\"foo\".as_ref(), b\"Zm9v\".as_ref()], [b\"FOO\".as_ref(), b\"Rk9P\".as_ref()],\n\n\t\t[b\"foob\".as_ref(), b\"Zm9vYg==\".as_ref()], [b\"FOOB\".as_ref(), b\"Rk9PQg==\".as_ref()],\n\n\t\t[b\"fooba\".as_ref(), b\"Zm9vYmE=\".as_ref()], [b\"FOOBA\".as_ref(), b\"Rk9PQkE=\".as_ref()],\n\n\t\t[b\"foobar\".as_ref(), b\"Zm9vYmFy\".as_ref()], [b\"FOOBAR\".as_ref(), b\"Rk9PQkFS\".as_ref()],\n\n\t\t[\n\n\t\t\t[0xCA, 0xDD, 0x73, 0xBD, 0x92, 0x1E, 0xB8, 0x3F, 0xF2, 0x80, 0x96, 0x63, 0x17, 0x13, 0xB6, 0xC8, 0x54, 0x22, 0xA5, 0xE5, 0x40, 0xA7, 0x32, 0x5A, 0x6E, 0x41, 0x3F, 0xD5, 0x0B, 0x23, 0xDC, 0xE3, 0x22, 0xB3, 0xB7, 0x59, 0x68, 0xD1, 0xDE, 0x44, 0x31, 
0xA3, 0xDF, 0x24, 0x1B, 0x08, 0x8E, 0x17, 0x44, 0xD2, 0xEA, 0x6E, 0x21, 0x72, 0xFB, 0x00, 0x2F, 0x94, 0xC9, 0x59, 0x77, 0x98, 0x78, 0xDD, 0xCB, 0x1F, 0xB9, 0x91, 0x32, 0xD6, 0x38, 0x16, 0x7E, 0xB5, 0xC6, 0x45, 0x9E, 0x50, 0xB8, 0x41, 0x4E, 0xD1, 0x9D, 0xE8, 0x9B, 0xAB, 0x87, 0x9E, 0x43, 0x23, 0xA4, 0x0A, 0x7A, 0x57, 0xEE, 0x35, 0x21, 0xA0, 0xCC, 0xA6, 0xC4, 0xEB, 0x61, 0xC6, 0x31, 0x4B, 0x27, 0x9D, 0xBC, 0x9A, 0x1F, 0x20, 0x15, 0xC8, 0xE1, 0x78, 0xD4, 0xE7, 0x89, 0x3C, 0x17, 0x96, 0x5B, 0x11, 0xFD, 0xA4, 0x41, 0x20, 0x4D, 0x26, 0x27, 0xD5, 0xDD, 0x54, 0x3A, 0x9E, 0x12, 0x17, 0x01, 0x3F, 0xC3, 0x6C, 0x69, 0xB9, 0xDC, 0xEF, 0x89, 0x48, 0xD1, 0x05, 0x4F, 0x56, 0x32, 0x83, 0x05, 0x05, 0x0F, 0x84, 0x62, 0xED, 0x30, 0x6B, 0x5C, 0x77, 0x8B, 0x8A, 0x93, 0xD0, 0x7D, 0xF9, 0x16, 0x96, 0x37, 0x15, 0x13, 0xC2, 0x7D, 0x51, 0x19, 0x0D, 0x7F, 0x55, 0x07, 0x85, 0x7E, 0x9D, 0x09, 0xD0, 0xBF, 0x49, 0x74, 0x7E, 0xA8, 0x01, 0xE4, 0x49, 0x7C, 0x4F, 0x39, 0x9A, 0xF9, 0xF8, 0xC0, 0xCA, 0xB4, 0xB8, 0x3B, 0x91, 0x58, 0xA6, 0x79, 0x90, 0xE3, 0x92, 0xD8, 0x4B, 0x68, 0x57, 0x54, 0xC8, 0x66, 0xA7, 0xD6, 0x3F, 0x4F, 0x0F, 0x0E, 0x42, 0xD3, 0x93, 0x2E, 0x94, 0x31, 0x1E, 0x23, 0xE0, 0x7F, 0x49, 0xBD, 0x46, 0x46, 0x54, 0xE2, 0x7C, 0x8D, 0xE2, 0x54, 0x0C, 0x03, 0x78, 0x2C, 0xBA, 0x5E, 0x73, 0x35, 0x4F, 0x0A, 0x11, 0x21, 0x36, 0x74, 0x0B, 0xD8, 0x81, 0x1F, 0x56, 0x12, 0x0A, 0x80, 0xD4, 0x7D, 0x37, 0xC7, 0x69, 0xE1, 0x6D, 0x64, 0x1C, 0xD9, 0xF5, 0xA3, 0x5C, 0x35, 0x6C, 0x7A, 0xC6, 0x63, 0x3F, 0xDD, 0x8B, 0x46, 0x76, 0xC7, 0x57, 0x9D, 0xE7, 0x26, 0x92, 0xFE, 0x88, 0xB3, 0xB0, 0x77, 0xA9, 0xF5, 0x40, 0xE8, 0x2C, 0x9C, 0xFD, 0x51, 0xDF, 0x5D, 0xE0, 0xC8, 0x3F, 0x18, 0x27, 0xBB, 0xA5, 0x4E, 0xD2, 0xBD, 0xC1, 0xB5, 0xD8, 0x92, 0xE0, 0x7F, 0xB2, 0x3C, 0xE1, 0x41, 0x01, 0x71, 0xEE, 0xEC, 0x9B, 0x38, 0x28, 0x41, 0x10, 0xDA, 0x50, 0xDC, 0x4B, 0x4C, 0xAF, 0x00, 0xFF, 0x3A, 0x01, 0x75, 0xA6, 0x1C, 0xFD, 0x76, 0xA7, 0x0E, 0x85, 0xF4, 0x4B, 0x2D, 0x1B, 0x07, 0xEC, 0x9D, 0xE6, 0x4D, 0x46, 0x22, 0x52, 0xCB, 0xD5, 0xA6, 0x4F, 0x6E, 0x5F, 0xBA, 0x81, 0xA8, 0x9F, 0x64, 0x42, 0xB7, 0x09, 0xCA, 0x0F, 0x73, 0x71, 0x46, 0x4C, 0x63, 0xED, 0x60, 0xD3, 0xAA, 0x1F, 0xAC, 0xAC, 0x88, 0x30, 0xD3, 0x81, 0x3F, 0xD9, 0x9A, 0xFC, 0xA8, 0x09, 0x9B, 0x91, 0x91, 0x81, 0x53, 0xED, 0x11, 0x0D, 0xC0, 0xE4, 0x80, 0xF1, 0x8C, 0x34, 0x07, 0xC5, 0xF1, 0x7A, 0x39, 0x75, 0x68, 0xF7, 0x70, 0xD9, 0x93, 0x92, 0x4C, 0x3E, 0xF8, 0xDE, 0x91, 0x30, 0x67, 0xF0, 0xEB, 0xCF, 0x8C, 0xEC, 0xA8, 0x56, 0x98, 0xB5, 0x05, 0xE7, 0x09, 0x38, 0x77, 0xAE, 0x55, 0x46, 0x1C, 0x6B, 0x89, 0xED, 0xE8, 0x49, 0x77, 0xD5, 0x6D, 0x29, 0xB3, 0x57, 0xED, 0x12, 0x56, 0x73, 0x4E, 0x92, 0xF4, 0x64, 0x0E, 0x44, 0x48, 0x45, 0x8C, 0x2A, 0x14, 0x71, 0xBB, 0xE4, 0x8E, 0x54, 0xFC, 0xE5, 0xD6, 0xA9, 0xD2, 0xE0, 0xC3, 0x58, 0x52, 0xDD, 0xF9, 0x20, 0x80, 0x48, 0x0F, 0xE4, 0x43, 0x62, 0x9F, 
0xF1].as_ref(),\n\n\t\t\tb\"yt1zvZIeuD_ygJZjFxO2yFQipeVApzJabkE_1Qsj3OMis7dZaNHeRDGj3yQbCI4XRNLqbiFy-wAvlMlZd5h43csfuZEy1jgWfrXGRZ5QuEFO0Z3om6uHnkMjpAp6V-41IaDMpsTrYcYxSyedvJofIBXI4XjU54k8F5ZbEf2kQSBNJifV3VQ6nhIXAT_DbGm53O-JSNEFT1YygwUFD4Ri7TBrXHeLipPQffkWljcVE8J9URkNf1UHhX6dCdC_SXR-qAHkSXxPOZr5-MDKtLg7kVimeZDjkthLaFdUyGan1j9PDw5C05MulDEeI-B_Sb1GRlTifI3iVAwDeCy6XnM1TwoRITZ0C9iBH1YSCoDUfTfHaeFtZBzZ9aNcNWx6xmM_3YtGdsdXnecmkv6Is7B3qfVA6Cyc_VHfXeDIPxgnu6VO0r3BtdiS4H-yPOFBAXHu7Js4KEEQ2lDcS0yvAP86AXWmHP12pw6F9EstGwfsneZNRiJSy9WmT25fuoGon2RCtwnKD3NxRkxj7WDTqh-srIgw04E_2Zr8qAmbkZGBU-0RDcDkgPGMNAfF8Xo5dWj3cNmTkkw--N6RMGfw68-M7KhWmLUF5wk4d65VRhxrie3oSXfVbSmzV-0SVnNOkvRkDkRIRYwqFHG75I5U_OXWqdLgw1hS3fkggEgP5ENin_E=\".as_ref()\n\n\t\t]\n\n ];\n\n\n\n for vec in vectors.iter() {\n\n assert_eq!(vec[0].base64().as_bytes(), vec[1]);\n\n assert_eq!(Vec::from_base64(vec[1]).unwrap(), vec[0]);\n\n }\n\n}\n\n\n", "file_path": "vault/tests/base64.rs", "rank": 69, "score": 56834.60246397934 }, { "content": "#[test]\n\nfn test_api() {\n\n for vec in ApiTestVector::load() {\n\n vec.test_encryption().test_decryption();\n\n }\n\n}\n", "file_path": "crypto/tests/xchacha.rs", "rank": 70, "score": 56834.60246397934 }, { "content": "#[test]\n\nfn test_crypto() {\n\n for vec in TestVector::load() {\n\n vec.test_mac();\n\n }\n\n}\n\n\n\n// API test vector\n\n#[derive(Default, Clone, Debug)]\n\npub struct ApiTestVector {\n\n id: String,\n\n key_len: usize,\n\n data_len: usize,\n\n buf_len: usize,\n\n error: String,\n\n}\n\n\n\nimpl ApiTestVector {\n\n // load json\n\n pub fn load() -> Vec<Self> {\n\n let json = json::parse(VECTORS).unwrap();\n", "file_path": "crypto/tests/poly.rs", "rank": 71, "score": 56834.60246397934 }, { "content": "#[test]\n\nfn testset_partial() {\n\n testset(\"partial\")\n\n}\n", "file_path": "vault/tests/preload.rs", "rank": 72, "score": 56834.60246397934 }, { "content": "fn main() {\n\n // prepare key and ids\n\n let key = Key::<Provider>::random().expect(\"failed to generate random key\");\n\n let ids: Vec<Id> = (0..Env::client_count())\n\n .map(|_| Id::random::<Provider>().expect(\"Failed to generate random ID\"))\n\n .collect();\n\n\n\n // print info.\n\n eprintln! 
{\n\n \"Spraying fuzz [{}: {}, {}: {}, {}: {}, {}: {}]...\",\n\n \"Number of Clients\", Env::client_count(),\n\n \"Error rate\", Env::error_rate(),\n\n \"Verification rate\", Env::verify_number(),\n\n \"Retry delay\", Env::retry_delay(),\n\n };\n\n\n\n // start fuzzing\n\n ids.iter().for_each(|id| Client::<Provider>::create_chain(&key, *id));\n\n\n\n loop {\n", "file_path": "vault/fuzz/src/main.rs", "rank": 73, "score": 56834.60246397934 }, { "content": "/// A PBKDF\n\npub trait Pbkdf {\n\n /// returns the info of the PBKDF\n\n fn info(&self) -> PbkdfInfo;\n\n /// fills the buffer with bytes derived from the password parameterized by the CPU cost.\n\n fn derive(\n\n &self,\n\n buf: &mut [u8],\n\n password: &[u8],\n\n salt: &[u8],\n\n cpu_cost: u64,\n\n ) -> Result<(), Box<dyn Error + 'static>>;\n\n}\n\n\n", "file_path": "primitives/src/pbkdf.rs", "rank": 74, "score": 56094.61214698265 }, { "content": "#[test]\n\nfn test_crypto() {\n\n for vec in TestVector::load() {\n\n vec.test_encryption().test_decryption();\n\n }\n\n}\n\n\n\n// API test vector\n\n#[derive(Default, Clone, Debug)]\n\npub struct ApiTestVector {\n\n id: String,\n\n key_len: usize,\n\n nonce_len: usize,\n\n enc_input_len: usize,\n\n enc_buf_len: usize,\n\n dec_input_len: usize,\n\n dec_buf_len: usize,\n\n error: String,\n\n}\n\nimpl ApiTestVector {\n\n // load json vectors\n", "file_path": "crypto/tests/chacha_ietf.rs", "rank": 75, "score": 55525.43070193347 }, { "content": "#[test]\n\nfn test_error() {\n\n for vec in ErrorVector::load() {\n\n vec.test_decryption();\n\n }\n\n}\n\n\n\n// api vector struct\n\n#[derive(Default, Clone, Debug)]\n\npub struct ApiTestVector {\n\n id: String,\n\n key_len: usize,\n\n nonce_len: usize,\n\n ad_len: usize,\n\n enc_input_len: usize,\n\n enc_buf_len: usize,\n\n dec_input_len: usize,\n\n dec_buf_len: usize,\n\n error: String,\n\n}\n\nimpl ApiTestVector {\n", "file_path": "crypto/tests/chachapoly_ietf.rs", "rank": 76, "score": 55525.43070193347 }, { "content": "#[test]\n\nfn test_api() {\n\n for vec in ApiTestVector::load() {\n\n vec.test_encryption().test_decryption();\n\n }\n\n}\n", "file_path": "crypto/tests/chacha_ietf.rs", "rank": 77, "score": 55525.43070193347 }, { "content": "#[test]\n\nfn test_api() {\n\n for vec in ApiTestVector::load() {\n\n vec.test_encryption().test_decryption();\n\n }\n\n}\n", "file_path": "crypto/tests/chachapoly_ietf.rs", "rank": 78, "score": 55525.43070193347 }, { "content": "#[test]\n\nfn test_crypto() {\n\n for vec in TestVector::load() {\n\n vec.test_encryption().test_decryption();\n\n }\n\n}\n\n\n\n// MAC error vector\n", "file_path": "crypto/tests/chachapoly_ietf.rs", "rank": 79, "score": 55525.43070193347 }, { "content": "/// a Random Number Generator\n\npub trait SecureRng {\n\n /// fills the buffer with secure random data. 
`buf` is the output buffer.\n\n fn random(&mut self, buf: &mut [u8]) -> Result<(), Box<dyn Error + 'static>>;\n\n}\n\n\n", "file_path": "primitives/src/rng.rs", "rank": 80, "score": 54775.56726901492 }, { "content": "/// a trait to make types base64 encodable\n\npub trait Base64Encodable {\n\n fn base64(&self) -> String;\n\n}\n\n\n", "file_path": "vault/src/base64.rs", "rank": 81, "score": 54775.56726901492 }, { "content": "// extension for JsonValue\n\npub trait JsonValueExt {\n\n // decode string\n\n fn check_string(&self) -> String;\n\n // hex-decode string into byte vector\n\n fn check_bytes(&self) -> Vec<u8>;\n\n // check if null\n\n fn check_array_iter(&self) -> Members;\n\n // get usize if not null\n\n fn option_usize(&self, def: usize) -> usize;\n\n // get string if not null\n\n fn option_string(&self, def: impl ToString) -> String;\n\n}\n\n\n\nimpl JsonValueExt for JsonValue {\n\n fn check_string(&self) -> String {\n\n self.as_str().unwrap().to_string()\n\n }\n\n\n\n fn check_bytes(&self) -> Vec<u8> {\n\n let encode = self.as_str().unwrap();\n", "file_path": "crypto/tests/common.rs", "rank": 82, "score": 53559.086017904934 }, { "content": "/// a typed transaction\n\npub trait TypedTransaction {\n\n fn type_id() -> Val;\n\n}\n\n\n\n/// a revocation transaction\n\n#[repr(packed)]\n\n#[derive(Debug)]\n\npub struct RevocationTransaction {\n\n /// transaction type\n\n #[allow(unused)]\n\n pub type_id: Val,\n\n /// owner id\n\n #[allow(unused)]\n\n pub owner: Id,\n\n /// counter\n\n #[allow(unused)]\n\n pub ctr: Val,\n\n /// unique id for transaction\n\n pub id: Id,\n\n}\n", "file_path": "vault/src/types/transactions.rs", "rank": 83, "score": 53559.086017904934 }, { "content": "#[test]\n\nfn test_actor_system() {\n\n let key = Key::<Provider>::random().expect(\"Couldn't create key\");\n\n let sys = ActorSystem::new().unwrap();\n\n\n\n let client = sys\n\n .actor_of_args::<Client, _>(\"client\", Id::random::<Provider>().unwrap())\n\n .unwrap();\n\n\n\n sys.actor_of::<Blob<Provider>>(\"blob\").unwrap();\n\n\n\n client.tell(CMsg::AddVault(key.clone()), None);\n\n client.tell(CMsg::CreateRecord((key.clone(), b\"Some data\".to_vec())), None);\n\n\n\n std::thread::sleep(std::time::Duration::from_millis(500));\n\n client.tell(CMsg::ListRecords(key), None);\n\n\n\n std::thread::sleep(std::time::Duration::from_millis(500));\n\n}\n", "file_path": "client/src/actor_test_client.rs", "rank": 84, "score": 53201.046432094416 }, { "content": "/// an AEAD Extension for the Cipher\n\npub trait AeadCipher: Cipher {\n\n /// Seals the Plaintext bytes in place with AEAD and returns the Cipher length. `buf` contains the incoming\n\n /// plaintext buffer\n\n fn seal(\n\n &self,\n\n buf: &mut [u8],\n\n plain_len: usize,\n\n ad: &[u8],\n\n key: &[u8],\n\n nonce: &[u8],\n\n ) -> Result<usize, Box<dyn Error + 'static>>;\n\n\n\n /// Seals the plaintext and returns the Cipher's length using AEAD. 
`buf` contains the incoming plaintext\n\n /// buffer\n\n fn seal_with(\n\n &self,\n\n buf: &mut [u8],\n\n plain: &[u8],\n\n ad: &[u8],\n\n key: &[u8],\n", "file_path": "primitives/src/cipher.rs", "rank": 85, "score": 51442.50367149479 }, { "content": "pub trait SerializeSecret: Serialize {}\n\n\n\npub struct Secret<S>\n\nwhere\n\n S: Zeroize,\n\n{\n\n value: S,\n\n}\n\n\n\nimpl<S> Secret<S>\n\nwhere\n\n S: Zeroize,\n\n{\n\n pub fn new(value: S) -> Self {\n\n Self { value }\n\n }\n\n}\n\n\n\nimpl<S> ReadSecret<S> for Secret<S>\n\nwhere\n", "file_path": "client/src/secret.rs", "rank": 86, "score": 51442.50367149479 }, { "content": "pub trait ReadSecret<S>\n\nwhere\n\n S: Zeroize,\n\n{\n\n fn read_secret(&self) -> &S;\n\n}\n\n\n", "file_path": "client/src/secret.rs", "rank": 87, "score": 51442.50367149479 }, { "content": "/// A memory hardened PBKDF\n\npub trait StatelessPbkdf: Pbkdf {\n\n /// fills the buffer with bytes derived from the password parameterized by the CPU cost.\n\n fn derive_stateless(\n\n &self,\n\n buf: &mut [u8],\n\n password: &[u8],\n\n salt: &[u8],\n\n cpu_cost: u64,\n\n memory_cost: u64,\n\n parallelism: u64,\n\n ) -> Result<(), Box<dyn Error + 'static>>;\n\n}\n", "file_path": "primitives/src/pbkdf.rs", "rank": 88, "score": 51442.50367149479 }, { "content": "fn testset(set: &str) {\n\n let vault = TestVault::from_json(DATA, set);\n\n let view = vault::DBView::load(vault.key().clone(), vault.list()).unwrap();\n\n let records: Vec<_> = view.records().collect();\n\n\n\n let reader = view.reader();\n\n let existing: HashMap<_, _> = records\n\n .into_iter()\n\n .map(|(id, hint)| (reader.prepare_read(id).unwrap(), hint))\n\n .map(|(req, hint)| (vault.read(req).unwrap(), hint))\n\n .map(|(res, hint)| (hint, reader.read(res).unwrap()))\n\n .collect();\n\n\n\n let plain = PlainVault::from_json(DATA, set);\n\n\n\n assert_eq!(existing, plain.records);\n\n}\n\n\n", "file_path": "vault/tests/preload.rs", "rank": 89, "score": 51114.89436867937 }, { "content": "/// A deterministic Random Number Generator Extension\n\npub trait DeterministicRng: SecureRng {\n\n /// reseeds the random number generator with a seed.\n\n fn reseed(&mut self, seed: &[u8]) -> Result<(), Box<dyn Error + 'static>>;\n\n}\n\n\n", "file_path": "primitives/src/rng.rs", "rank": 90, "score": 50317.07131398195 }, { "content": "// result extension\n\npub trait ResultExt<T, E> {\n\n // unwraps error and panics\n\n fn error_or(self, msg: impl ToString) -> E;\n\n}\n\n\n\nimpl<T, E> ResultExt<T, E> for Result<T, E> {\n\n fn error_or(self, msg: impl ToString) -> E {\n\n match self {\n\n Err(e) => e,\n\n _ => panic!(msg.to_string()),\n\n }\n\n }\n\n}\n", "file_path": "crypto/tests/common.rs", "rank": 91, "score": 48550.44951008535 }, { "content": "pub trait CloneSecret: Clone + Zeroize {}\n\n\n", "file_path": "client/src/secret.rs", "rank": 92, "score": 48550.44951008535 }, { "content": "#[cfg(any(target_os = \"macos\", target_os = \"ios\"))]\n\nfn macos_secrandom() -> Option<&'static str> {\n\n println!(\"cargo:rustc-link-lib=framework=Security\");\n\n Some(\"USE_SECRANDOM\")\n\n}\n\n\n\n// checks if the current version of glibc supports the getrandom function\n", "file_path": "random/build.rs", "rank": 93, "score": 48249.18645987897 }, { "content": "#[cfg(target_os = \"linux\")]\n\nfn linux_check_getrandom() -> Option<&'static str> {\n\n use std::{ffi::CStr, os::raw::c_char, str::FromStr};\n\n extern \"C\" {\n\n fn gnu_get_libc_version() -> *const c_char;\n\n }\n\n\n\n let v: Vec<u8> = unsafe { 
CStr::from_ptr(gnu_get_libc_version()) }\n\n .to_str()\n\n .unwrap()\n\n .split('.')\n\n .map(|s| u8::from_str(s).unwrap())\n\n .collect();\n\n\n\n match (v[0], v[1]) {\n\n (2..=255, 25..=255) => Some(\"USE_GETRANDOM\"),\n\n _ => Some(\"USE_DEV_RANDOM\"),\n\n }\n\n}\n\n\n", "file_path": "random/build.rs", "rank": 94, "score": 47132.17801903001 }, { "content": "/// an extension for a Variable length Message Authentication Code (MAC).\n\npub trait VarLenMessageAuthCode: MessageAuthCode {\n\n /// Authenticates the `data` using a `key` through the `buf` buffer. Returns the MAC's length in a `Result`.\n\n fn varlen_auth(&self, buf: &mut [u8], data: &[u8], key: &[u8]) -> Result<usize, Box<dyn Error + 'static>>;\n\n}\n", "file_path": "primitives/src/auth.rs", "rank": 95, "score": 46548.00959997333 }, { "content": "// send a message\n\nfn send(req: TransactionRequest) -> Option<TransactionResult> {\n\n // should request fail or not\n\n if CRng::bool(Env::error_rate()) {\n\n None?\n\n }\n\n\n\n let res = match req {\n\n TransactionRequest::List => {\n\n let records = Env::storage()\n\n .read()\n\n .expect(line_error!())\n\n .keys()\n\n .cloned()\n\n .collect();\n\n\n\n TransactionResult::List(ListResult::new(records))\n\n }\n\n TransactionRequest::Write(write) => {\n\n Env::storage()\n\n .write()\n", "file_path": "vault/fuzz/src/connection.rs", "rank": 96, "score": 42827.06625039052 }, { "content": "pub trait Bucket<P: BoxProvider + Send + Sync + Clone + 'static> {\n\n fn create_record(&mut self, uid: Id, key: Key<P>, payload: Vec<u8>) -> Option<Id>;\n\n fn add_vault(&mut self, key: &Key<P>, uid: Id);\n\n fn read_record(&mut self, uid: Id, key: Key<P>);\n\n fn garbage_collect(&mut self, uid: Id, key: Key<P>);\n\n fn revoke_record(&mut self, uid: Id, tx_id: Id, key: Key<P>);\n\n fn list_all_valid_by_key(&mut self, key: Key<P>);\n\n fn offload_data(self) -> (Vec<Key<P>>, HashMap<Vec<u8>, Vec<u8>>);\n\n}\n\n\n\nimpl<P: BoxProvider + Clone + Send + Sync + 'static> Blob<P> {\n\n pub fn new() -> Self {\n\n let cache = Cache::new();\n\n let vaults = DashMap::new();\n\n\n\n Self { cache, vaults }\n\n }\n\n\n\n pub fn new_from_snapshot(snapshot: Snapshot<P>) -> Self {\n\n let cache = Cache::new();\n", "file_path": "client/src/data.rs", "rank": 97, "score": 39909.99084030378 }, { "content": "// Copyright 2020 IOTA Stiftung\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\"); you may not use this file except in compliance with\n\n// the License. 
You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on\n\n// an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and limitations under the License.\n\n\n\nuse crate::rng::{PublicKeyGen, SecretKeyGen};\n\nuse std::{error::Error, ops::Range};\n\n\n\n/// Signature information block\n\n#[derive(Debug, Eq, PartialEq, Clone)]\n\npub struct SignInfo {\n\n /// the signature ID\n\n pub id: &'static str,\n\n /// Range of supported signature lengths\n\n pub sig_lens: Range<usize>,\n\n /// Range of supported secret/private key lengths\n\n pub secret_key_lens: Range<usize>,\n\n /// Range of supported public key lengths.\n\n pub public_key_lens: Range<usize>,\n\n}\n\n\n", "file_path": "primitives/src/signing.rs", "rank": 98, "score": 36294.33798987492 }, { "content": "// Copyright 2020 IOTA Stiftung\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\"); you may not use this file except in compliance with\n\n// the License. You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on\n\n// an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and limitations under the License.\n\n\n\nuse std::{error::Error, ops::Range};\n\n\n\n/// An information block describing a Hash.\n\n#[derive(Debug, Eq, PartialEq, Clone)]\n\npub struct HashInfo {\n\n /// A id of hash\n\n pub id: &'static str,\n\n /// The hash's length\n\n pub hash_len: usize,\n\n /// A range for supported hash lengths\n\n pub hash_lens: Range<usize>,\n\n}\n\n\n\n/// A Hash interface\n", "file_path": "primitives/src/hash.rs", "rank": 99, "score": 36239.88557758902 } ]
Rust
aoc2020/src/main.rs
kylewillmon/advent-of-code-rs
f6d37627eabe4b39f87329159b11aedf59362b73
use std::io::{self, Read};
use std::fs;

use clap::{App, Arg};

use aoclib::{self, AOC, Day};

pub(crate) mod parse;
pub(crate) mod error;

mod day1;
mod day2;
mod day3;
mod day4;
mod day6;
mod day7;
mod day8;
mod day9;
mod day10;
mod day11;
mod day12;
mod day13;
mod day14;
mod day15;
mod day16;
mod day17;
mod day18;
mod day19;
mod day20;
mod day21;
mod day22;
mod day23;
mod day24;
mod day25;

fn main() {
    let m = App::new("Advent of Code 2020 solvers")
        .author("Kyle Willmon <[email protected]>")
        .arg(Arg::from_usage("<INPUT> 'Sets the input file to use'"))
        .arg(Arg::with_name("day")
            .short("d")
            .long("day")
            .takes_value(true)
            .help("day to solve"))
        .get_matches();

    let day = match m.value_of("day") {
        None => None,
        Some(val) => match val.parse::<u8>() {
            Ok(val) => Some(val),
            Err(err) => {
                println!("Invalid day {:?}: {}", val, err);
                return;
            }
        }
    };

    let aoc = AOC::new()
        .day(Day::new(1)
            .part(1, day1::part1)
            .part(2, day1::part2))
        .day(Day::new(2)
            .part(1, day2::part1)
            .part(2, day2::part2))
        .day(Day::new(3)
            .part(1, day3::part1)
            .part(2, day3::part2))
        .day(Day::new(4)
            .part(1, day4::part1)
            .part(2, day4::part2))
        .day(Day::new(6)
            .part(1, day6::part1)
            .part(2, day6::part2))
        .day(Day::new(7)
            .part(1, day7::part1)
            .part(2, day7::part2))
        .day(Day::new(8)
            .part(1, day8::part1)
            .part(2, day8::part2))
        .day(Day::new(9)
            .part(1, day9::part1)
            .part(2, day9::part2))
        .day(Day::new(10)
            .part(1, day10::part1)
            .part(2, day10::part2))
        .day(Day::new(11)
            .part(1, day11::part1)
            .part(2, day11::part2))
        .day(Day::new(12)
            .part(1, day12::part1)
            .part(2, day12::part2))
        .day(Day::new(13)
            .part(1, day13::part1)
            .part(2, day13::part2))
        .day(Day::new(14)
            .part(1, day14::part1)
            .part(2, day14::part2))
        .day(Day::new(15)
            .part(1, day15::part1)
            .part(2, day15::part2))
        .day(Day::new(16)
            .part(1, day16::part1)
            .part(2, day16::part2))
        .day(Day::new(17)
            .part(1, day17::part1)
            .part(2, day17::part2))
        .day(Day::new(18)
            .part(1, day18::part1)
            .part(2, day18::part2))
        .day(Day::new(19)
            .part(1, day19::part1)
            .part(2, day19::part2))
        .day(Day::new(20)
            .part(1, day20::part1)
            .part(2, day20::part2))
        .day(Day::new(21)
            .part(1, day21::part1)
            .part(2, day21::part2))
        .day(Day::new(22)
            .part(1, day22::part1)
            .part(2, day22::part2))
        .day(Day::new(23)
            .part(1, day23::part1)
            .part(2, day23::part2))
        .day(Day::new(24)
            .part(1, day24::part1)
            .part(2, day24::part2))
        .day(Day::new(25)
            .part(1, day25::part1)
            .part(2, day25::part2));

    match get_input(m.value_of("INPUT").unwrap()) {
        Ok(input) => print!("{}", aoc.run(day, input)),
        Err(err) => println!("Error: {}", err),
    };
}

fn get_input<P: AsRef<str>>(filename: P) -> io::Result<String> {
    if filename.as_ref() == "-" {
        let mut data = String::new();
        return io::stdin().read_to_string(&mut data).map(move |_| data);
    }
    fs::read_to_string(filename.as_ref())
}
use std::io::{self, Read}; use std::fs; use clap::{App, Arg}; use aoclib::{self, AOC, Day}; pub(crate) mod parse; pub(crate) mod error; mod day1; mod day2; mod day3; mod day4; mod day6; mod day7; mod day8; mod day9; mod day10; mod day11; mod day12; mod day13; mod day14; mod day15; mod day16; mod day17; mod day18; mod day19; mod day20; mod day21; mod day22; mod day23; mod day24; mod day25; fn main() { let m = App::new("Advent of Code 2020 solvers") .author("Kyle Willmon <[email protected]>") .arg(Arg::from_usage("<INPUT> 'Sets the input file to use'")) .arg(Arg::with_name("day") .short("d") .long("day") .takes_value(true) .help("day to solve")) .get_matches(); let day = match m.value_of("day") { None => None, Some(val) => match val.parse::<u8>() { Ok(val) => Some(val), Err(err) => { println!("Invalid day {:?}: {}", val, err); return; } } }; let aoc = AOC::new() .day(Day::new(1) .part(1, day1::part1) .part(2, day1::part2)) .day(Day::new(2) .part(1, day2::part1) .part(2, day2::part2)) .day(Day::new(3) .part(1, day3::part1) .part(2, day3::part2)) .day(Day::new(4) .part(1, day4::part1) .part(2, day4::part2)) .day(Day::new(6) .part(1, day6::part1) .part(2, day6::part2)) .day(Day::new(7) .part(1, day7::part1) .part(2, day7::part2)) .day(Day::n
et_input<P: AsRef<str>>(filename: P) -> io::Result<String> { if filename.as_ref() == "-" { let mut data = String::new(); return io::stdin().read_to_string(&mut data).map(move |_| data); } fs::read_to_string(filename.as_ref()) }
ew(8) .part(1, day8::part1) .part(2, day8::part2)) .day(Day::new(9) .part(1, day9::part1) .part(2, day9::part2)) .day(Day::new(10) .part(1, day10::part1) .part(2, day10::part2)) .day(Day::new(11) .part(1, day11::part1) .part(2, day11::part2)) .day(Day::new(12) .part(1, day12::part1) .part(2, day12::part2)) .day(Day::new(13) .part(1, day13::part1) .part(2, day13::part2)) .day(Day::new(14) .part(1, day14::part1) .part(2, day14::part2)) .day(Day::new(15) .part(1, day15::part1) .part(2, day15::part2)) .day(Day::new(16) .part(1, day16::part1) .part(2, day16::part2)) .day(Day::new(17) .part(1, day17::part1) .part(2, day17::part2)) .day(Day::new(18) .part(1, day18::part1) .part(2, day18::part2)) .day(Day::new(19) .part(1, day19::part1) .part(2, day19::part2)) .day(Day::new(20) .part(1, day20::part1) .part(2, day20::part2)) .day(Day::new(21) .part(1, day21::part1) .part(2, day21::part2)) .day(Day::new(22) .part(1, day22::part1) .part(2, day22::part2)) .day(Day::new(23) .part(1, day23::part1) .part(2, day23::part2)) .day(Day::new(24) .part(1, day24::part1) .part(2, day24::part2)) .day(Day::new(25) .part(1, day25::part1) .part(2, day25::part2)); match get_input(m.value_of("INPUT").unwrap()) { Ok(input) => print!("{}", aoc.run(day, input)), Err(err) => println!("Error: {}", err), }; } fn g
random
[ { "content": "pub fn part2(input: String) -> Result<usize, AocError> {\n\n let mut total = 0;\n\n for entry in input.split(\"\\n\\n\") {\n\n let cf = entry.parse::<CustomsForm>()?;\n\n total += cf.everyone_yes_count();\n\n }\n\n Ok(total)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n const EXAMPLE: &str = \"abc\n\n\n\na\n\nb\n\nc\n\n\n\nab\n", "file_path": "aoc2020/src/day6.rs", "rank": 0, "score": 262418.25017625897 }, { "content": "pub fn part1(input: String) -> Result<usize, AocError> {\n\n let mut total = 0;\n\n for entry in input.split(\"\\n\\n\") {\n\n let cf = entry.parse::<CustomsForm>()?;\n\n total += cf.anyone_yes_count();\n\n }\n\n Ok(total)\n\n}\n\n\n", "file_path": "aoc2020/src/day6.rs", "rank": 1, "score": 262418.250176259 }, { "content": "pub fn part1(input: String) -> Result<i32, AocError> {\n\n if let RunResult::InfiniteLoop(acc) = input.parse::<Program>()?.run() {\n\n return Ok(acc);\n\n }\n\n Err(AocError::Unknown)\n\n}\n\n\n", "file_path": "aoc2020/src/day8.rs", "rank": 2, "score": 262379.7160532317 }, { "content": "pub fn part2(input: String) -> Result<i32, AocError> {\n\n let mut prog = input.parse::<Program>()?;\n\n\n\n for i in 0..prog.0.len() {\n\n let orig = prog.0[i];\n\n let fixed = match orig {\n\n Instruction::Nop(v) => Instruction::Jmp(v),\n\n Instruction::Jmp(v) => Instruction::Nop(v),\n\n _ => continue,\n\n };\n\n prog.0[i] = fixed;\n\n if let RunResult::Terminated(acc) = prog.run() {\n\n return Ok(acc);\n\n }\n\n prog.0[i] = orig;\n\n }\n\n Err(AocError::Unknown)\n\n}\n\n\n", "file_path": "aoc2020/src/day8.rs", "rank": 3, "score": 262379.7160532317 }, { "content": "pub fn part2(input: String) -> Result<u32, AocError> {\n\n let bags = parse_input(input)?;\n\n Ok(bags.calc_cost(bags.find_bag(\"shiny gold\".to_string()).unwrap()))\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n const EXAMPLE: &str = \"light red bags contain 1 bright white bag, 2 muted yellow bags.\n\ndark orange bags contain 3 bright white bags, 4 muted yellow bags.\n\nbright white bags contain 1 shiny gold bag.\n\nmuted yellow bags contain 2 shiny gold bags, 9 faded blue bags.\n\nshiny gold bags contain 1 dark olive bag, 2 vibrant plum bags.\n\ndark olive bags contain 3 faded blue bags, 4 dotted black bags.\n\nvibrant plum bags contain 5 faded blue bags, 6 dotted black bags.\n\nfaded blue bags contain no other bags.\n\ndotted black bags contain no other bags.\";\n\n\n\n #[test]\n", "file_path": "aoc2020/src/day7.rs", "rank": 4, "score": 262302.18007212866 }, { "content": "pub fn part1(input: String) -> Result<usize, AocError> {\n\n let bags = parse_input(input)?;\n\n Ok(bags\n\n .can_contain(bags.find_bag(\"shiny gold\".to_string()).unwrap())\n\n .len())\n\n}\n\n\n", "file_path": "aoc2020/src/day7.rs", "rank": 5, "score": 262302.18007212866 }, { "content": "fn parse_input(input: String) -> Result<BagGraph, AocError> {\n\n let mut bags = BagGraph::new();\n\n let mut rules = Vec::new();\n\n let mut index: HashMap<String, BagIdx> = HashMap::new();\n\n\n\n for line in input.lines() {\n\n let mut split = line.splitn(2, \" bags contain \");\n\n let bagname = split.next().unwrap();\n\n let rule = split\n\n .next()\n\n .ok_or_else(|| AocError::ParseError(\"invalid line\".to_string()))?;\n\n\n\n let idx = bags.add_bag(bagname.to_string());\n\n index.insert(bagname.to_string(), idx);\n\n rules.push((idx, rule));\n\n }\n\n\n\n for (idx, rule) in rules {\n\n let contains = rule\n\n .trim_end_matches('.')\n", "file_path": "aoc2020/src/day7.rs", "rank": 6, "score": 
210283.88154894454 }, { "content": "pub fn part1(input: String) -> Result<u32> {\n\n let (time, buses) = strtools::split_once(input.as_str(), \"\\n\");\n\n let time = time.parse::<u32>()?;\n\n let buses: Vec<u32> = buses.trim()\n\n .split(',')\n\n .filter(|s| *s != \"x\")\n\n .map(|s| s.parse::<u32>())\n\n .collect::<Result<_, _>>()?;\n\n\n\n let (id, wait) = buses\n\n .into_iter()\n\n .map(|id| (id, wait_time(time, id)))\n\n .min_by_key(|&(_, wait)| wait)\n\n .unwrap();\n\n\n\n Ok(id * wait)\n\n}\n\n\n", "file_path": "aoc2020/src/day13.rs", "rank": 7, "score": 206635.8727957887 }, { "content": "pub fn part2(input: String) -> Result<usize> {\n\n let mut init: Vec<usize> = input.trim()\n\n .split(',')\n\n .map(|n| n.parse::<usize>())\n\n .collect::<Result<_, _>>()?;\n\n\n\n let mut position_map = HashMap::new();\n\n let mut last = init.pop().unwrap();\n\n\n\n for (idx, num) in init.iter().enumerate() {\n\n position_map.insert(*num, idx);\n\n }\n\n\n\n for position in init.len()..29_999_999 {\n\n let last_pos = position_map.get(&last);\n\n\n\n let next = match last_pos {\n\n Some(last_pos) => position - last_pos,\n\n None => 0,\n\n };\n", "file_path": "aoc2020/src/day15.rs", "rank": 8, "score": 206615.7145807272 }, { "content": "pub fn part1(input: String) -> Result<usize> {\n\n let mut nums: Vec<usize> = input.trim()\n\n .split(',')\n\n .map(|n| n.parse::<usize>())\n\n .collect::<Result<_, _>>()?;\n\n\n\n while nums.len() < 2020 {\n\n let last = *nums.last().unwrap();\n\n let pos = nums.iter().rev()\n\n .skip(1)\n\n .position(|&n| n == last);\n\n\n\n let next = match pos {\n\n Some(val) => val + 1,\n\n None => 0,\n\n };\n\n nums.push(next);\n\n }\n\n\n\n Ok(*nums.last().unwrap())\n\n}\n\n\n", "file_path": "aoc2020/src/day15.rs", "rank": 9, "score": 206615.7145807272 }, { "content": "pub fn part1(input: String) -> Result<usize> {\n\n let recipes: Vec<Recipe<'_>> = input.lines()\n\n .map(Recipe::from_line)\n\n .collect();\n\n\n\n let all_allergens: HashSet<&str> = recipes.iter()\n\n .flat_map(|r| r.allergens().iter().cloned())\n\n .collect();\n\n\n\n let mut ingredients: HashMap<&str, HashSet<&str>> = recipes.iter()\n\n .flat_map(|r| r.ingredients().iter().cloned())\n\n .map(|i| (i, all_allergens.clone()))\n\n .collect();\n\n\n\n for r in recipes.iter() {\n\n for a in r.allergens().iter().cloned() {\n\n for (i, possibles) in ingredients.iter_mut() {\n\n if !r.ingredients().contains(i) {\n\n possibles.remove(a);\n\n }\n", "file_path": "aoc2020/src/day21.rs", "rank": 10, "score": 206575.51033530477 }, { "content": "pub fn part2(input: String) -> Result<String> {\n\n let recipes: Vec<Recipe<'_>> = input.lines()\n\n .map(Recipe::from_line)\n\n .collect();\n\n\n\n let all_allergens: HashSet<&str> = recipes.iter()\n\n .flat_map(|r| r.allergens().iter().cloned())\n\n .collect();\n\n\n\n let mut ingredients: HashMap<&str, HashSet<&str>> = recipes.iter()\n\n .flat_map(|r| r.ingredients().iter().cloned())\n\n .map(|i| (i, all_allergens.clone()))\n\n .collect();\n\n\n\n for r in recipes.iter() {\n\n for a in r.allergens().iter().cloned() {\n\n for (i, possibles) in ingredients.iter_mut() {\n\n if !r.ingredients().contains(i) {\n\n possibles.remove(a);\n\n }\n", "file_path": "aoc2020/src/day21.rs", "rank": 11, "score": 206575.51033530477 }, { "content": "pub fn part2(input: String) -> Result<usize> {\n\n let adapters = {\n\n let mut a = to_nums(input);\n\n a.sort_unstable();\n\n a.reverse();\n\n\n\n a.push(0);\n\n a\n\n };\n\n\n\n // Number of ways to connect to joltages: n, n+1, n+2, and n+3\n\n let mut 
ways: [usize; 4] = [1, 0, 0, 0];\n\n\n\n // Start with device joltage\n\n let mut joltage = adapters[0] + 3;\n\n\n\n for adapter in adapters {\n\n let difference: usize = usize::try_from(joltage - adapter)?;\n\n\n\n ways.rotate_right(difference);\n", "file_path": "aoc2020/src/day10.rs", "rank": 12, "score": 206575.51033530477 }, { "content": "pub fn part1(input: String) -> Result<usize> {\n\n let adapters = {\n\n let mut a = to_nums(input);\n\n a.sort_unstable();\n\n a\n\n };\n\n\n\n let mut joltage = 0;\n\n let mut ones = 0;\n\n let mut threes = 0;\n\n for adapter in adapters {\n\n let difference = adapter - joltage;\n\n if difference == 1 {\n\n ones += 1;\n\n } else if difference == 3 {\n\n threes += 1;\n\n }\n\n joltage = adapter;\n\n }\n\n\n\n // The final difference to device joltage is always three\n\n threes += 1;\n\n\n\n Ok(ones * threes)\n\n}\n\n\n", "file_path": "aoc2020/src/day10.rs", "rank": 13, "score": 206575.51033530477 }, { "content": "pub fn part2(input: String) -> Result<usize> {\n\n let (rules, messages) = strtools::split_once(input.as_str(), \"\\n\\n\");\n\n let rule42 = build_regex(rules, \"42\");\n\n let rule42 = Regex::new(format!(\"^{}\", rule42).as_str()).unwrap();\n\n\n\n let rule31 = build_regex(rules, \"31\");\n\n let rule31 = Regex::new(format!(\"^{}\", rule31).as_str()).unwrap();\n\n\n\n Ok(messages.trim().lines()\n\n .filter(|msg| {\n\n let mut msg = *msg;\n\n let mut count = 0i64;\n\n let mut match31 = false;\n\n while let Some(_match) = rule42.find(msg) {\n\n msg = &msg[_match.end()..];\n\n count += 1;\n\n }\n\n while let Some(_match) = rule31.find(msg) {\n\n match31 = true;\n\n msg = &msg[_match.end()..];\n\n count -= 1;\n\n }\n\n msg.is_empty() && match31 && count > 0\n\n })\n\n .count()\n\n )\n\n}\n\n\n", "file_path": "aoc2020/src/day19.rs", "rank": 14, "score": 206555.3895129 }, { "content": "pub fn part1(input: String) -> Result<usize> {\n\n let (rules, messages) = strtools::split_once(input.as_str(), \"\\n\\n\");\n\n let rule = build_regex(rules, \"0\");\n\n let rule = Regex::new(format!(\"^{}$\", rule).as_str()).unwrap();\n\n\n\n Ok(messages.trim().lines()\n\n .filter(|&msg| rule.is_match(msg))\n\n .count()\n\n )\n\n}\n\n\n", "file_path": "aoc2020/src/day19.rs", "rank": 15, "score": 206555.3895129 }, { "content": "pub fn part2(input: String) -> Result<i32> {\n\n let mut ferry = Ferry::new();\n\n\n\n for instruction in input.lines() {\n\n let (action, value) = instruction.split_at(1);\n\n let action = action.chars().next().unwrap();\n\n let value = value.parse::<i32>()?;\n\n\n\n ferry = match action {\n\n 'L' => ferry.turn_waypoint(value),\n\n 'R' => ferry.turn_waypoint(-value),\n\n 'F' => ferry.go_to_waypoint(value),\n\n 'N' => ferry.move_waypoint(NORTH * value),\n\n 'E' => ferry.move_waypoint(EAST * value),\n\n 'S' => ferry.move_waypoint(SOUTH * value),\n\n 'W' => ferry.move_waypoint(WEST * value),\n\n _ => return Err(anyhow!(\"unknown action '{}'\", action)),\n\n };\n\n }\n\n\n\n let pos = ferry.position;\n\n Ok(pos.x.abs() + pos.y.abs())\n\n}\n\n\n\nconst NORTH: Coord = Coord::new(0, 1);\n\nconst EAST: Coord = Coord::new(1, 0);\n\nconst SOUTH: Coord = Coord::new(0, -1);\n\nconst WEST: Coord = Coord::new(-1, 0);\n\n\n", "file_path": "aoc2020/src/day12.rs", "rank": 16, "score": 206515.02897703752 }, { "content": "pub fn part2(input: String) -> Result<u64> {\n\n let mut computer = Computer::new();\n\n for line in input.trim().lines() {\n\n let line = line.trim();\n\n\n\n let (target, value) = strtools::split_once(line, \"=\");\n\n let target = 
target.trim();\n\n let value = value.trim();\n\n\n\n if target == \"mask\" {\n\n computer = computer.set_mask(value)?;\n\n } else {\n\n let memtarget = target\n\n .strip_prefix(\"mem[\")\n\n .and_then(|t| t.strip_suffix(']'))\n\n .ok_or_else(|| anyhow!(\"invalid target: {}\", target))\n\n .and_then(|l| Ok(l.parse::<u64>()?))?;\n\n\n\n let value = value.parse::<u64>()?;\n\n computer = computer.write_decoded(memtarget, value);\n\n }\n\n }\n\n Ok(computer.mem_iter().map(|(&_loc, &value)| value).sum())\n\n}\n\n\n", "file_path": "aoc2020/src/day14.rs", "rank": 17, "score": 206515.02897703752 }, { "content": "pub fn part1(input: String) -> Result<u64> {\n\n let mut computer = Computer::new();\n\n for line in input.trim().lines() {\n\n let line = line.trim();\n\n\n\n let (target, value) = strtools::split_once(line, \"=\");\n\n let target = target.trim();\n\n let value = value.trim();\n\n\n\n if target == \"mask\" {\n\n computer = computer.set_mask(value)?;\n\n } else {\n\n let memtarget = target\n\n .strip_prefix(\"mem[\")\n\n .and_then(|t| t.strip_suffix(']'))\n\n .ok_or_else(|| anyhow!(\"invalid target: {}\", target))\n\n .and_then(|l| Ok(l.parse::<u64>()?))?;\n\n\n\n let value = value.parse::<u64>()?;\n\n computer = computer.write_masked(memtarget, value);\n\n }\n\n }\n\n Ok(computer.mem_iter().map(|(&_loc, &value)| value).sum())\n\n}\n\n\n", "file_path": "aoc2020/src/day14.rs", "rank": 18, "score": 206515.02897703752 }, { "content": "pub fn part1(input: String) -> Result<i32> {\n\n let mut ferry = Ferry::new();\n\n\n\n for instruction in input.lines() {\n\n let (action, value) = instruction.split_at(1);\n\n let action = action.chars().next().unwrap();\n\n let value = value.parse::<i32>()?;\n\n\n\n ferry = match action {\n\n 'L' => ferry.turn_ferry(value),\n\n 'R' => ferry.turn_ferry(-value),\n\n 'F' => ferry.forward(value),\n\n 'N' => ferry.move_ferry(NORTH * value),\n\n 'E' => ferry.move_ferry(EAST * value),\n\n 'S' => ferry.move_ferry(SOUTH * value),\n\n 'W' => ferry.move_ferry(WEST * value),\n\n _ => return Err(anyhow!(\"unknown action '{}'\", action)),\n\n };\n\n }\n\n\n\n let pos = ferry.position;\n\n Ok(pos.x.abs() + pos.y.abs())\n\n}\n\n\n", "file_path": "aoc2020/src/day12.rs", "rank": 19, "score": 206515.02897703752 }, { "content": "pub fn part2(input: String) -> Result<usize> {\n\n let game = Combat::from_input(input.as_str())?.recursive();\n\n\n\n let (game, _) = game.play_to_end();\n\n\n\n let w = game.0.winners_cards();\n\n\n\n Ok(\n\n w.into_iter().rev().enumerate()\n\n .map(|(i, c)| c * (i + 1))\n\n .sum()\n\n )\n\n}\n\n\n", "file_path": "aoc2020/src/day22.rs", "rank": 20, "score": 206494.77317107125 }, { "content": "pub fn part2(input: String) -> Result<u64> {\n\n part2_with_preamble_len(25, input)\n\n}\n\n\n", "file_path": "aoc2020/src/day9.rs", "rank": 21, "score": 206494.77317107125 }, { "content": "pub fn part2(input: String) -> Result<usize> {\n\n let (wire1, wire2) = {\n\n let mut split = input.lines();\n\n split.next()\n\n .and_then(|a| {\n\n split.next().map(|b| (a, b))\n\n })\n\n .ok_or(anyhow!(\"input too short\"))?\n\n };\n\n\n\n let points1: HashMap<(i32, i32), usize> = wire_points(wire1).enumerate().map(|(a, b)| (b, a)).collect();\n\n\n\n let mut min = usize::MAX;\n\n\n\n for (i, p) in wire_points(wire2).enumerate() {\n\n if let Some(steps) = points1.get(&p) {\n\n let distance = steps + i + 2;\n\n if distance < min {\n\n min = distance;\n\n }\n\n }\n\n }\n\n Ok(min)\n\n}\n\n\n", "file_path": "aoc2019/src/day3.rs", "rank": 22, "score": 206494.77317107125 }, { 
"content": "pub fn part1(input: String) -> Result<i32> {\n\n let (wire1, wire2) = {\n\n let mut split = input.lines();\n\n split.next()\n\n .and_then(|a| {\n\n split.next().map(|b| (a, b))\n\n })\n\n .ok_or(anyhow!(\"input too short\"))?\n\n };\n\n\n\n let points1: HashSet<(i32, i32)> = wire_points(wire1).collect();\n\n let points2: HashSet<(i32, i32)> = wire_points(wire2).collect();\n\n\n\n points1.intersection(&points2)\n\n .map(|&(x, y)| x.abs() + y.abs())\n\n .min()\n\n .ok_or(anyhow!(\"wires do not intersect\"))\n\n}\n\n\n", "file_path": "aoc2019/src/day3.rs", "rank": 23, "score": 206494.77317107125 }, { "content": "pub fn part1(input: String) -> Result<usize> {\n\n let mut game = Combat::from_input(input.as_str())?;\n\n\n\n while !game.is_over() {\n\n game.play_round();\n\n }\n\n\n\n let w = game.winners_cards();\n\n\n\n Ok(\n\n w.into_iter().rev().enumerate()\n\n .map(|(i, c)| c * (i + 1))\n\n .sum()\n\n )\n\n}\n\n\n", "file_path": "aoc2020/src/day22.rs", "rank": 24, "score": 206494.77317107125 }, { "content": "pub fn part1(input: String) -> Result<u64> {\n\n part1_with_preamble_len(25, input)\n\n}\n\n\n", "file_path": "aoc2020/src/day9.rs", "rank": 25, "score": 206494.77317107125 }, { "content": "pub fn part2(input: String) -> Result<usize> {\n\n let (fields, mine, nearby) = parse_input(input)?;\n\n\n\n let indexes = solve_fields(&fields, &nearby)?;\n\n\n\n let vals = mine\n\n .0\n\n .into_iter()\n\n .enumerate()\n\n .map(|(i, val)| (fields[indexes[i]].name.as_str(), val))\n\n .filter(|&(name, _val)| name.starts_with(\"departure\"))\n\n .map(|(_name, val)| val);\n\n\n\n Ok(vals.product())\n\n}\n\n\n", "file_path": "aoc2020/src/day16.rs", "rank": 26, "score": 206474.46030809963 }, { "content": "pub fn part1(input: String) -> Result<usize> {\n\n let (fields, _, tickets) = parse_input(input)?;\n\n\n\n let mut error_rate = 0;\n\n for ticket in tickets {\n\n error_rate += ticket.error_rate(&fields).unwrap_or(0);\n\n }\n\n Ok(error_rate)\n\n}\n\n\n", "file_path": "aoc2020/src/day16.rs", "rank": 27, "score": 206474.46030809963 }, { "content": "pub fn part1(input: String) -> Result<usize> {\n\n let tiles = TileMap::from_input(input.as_str());\n\n\n\n Ok(tiles.count())\n\n}\n\n\n", "file_path": "aoc2020/src/day24.rs", "rank": 28, "score": 206474.46030809963 }, { "content": "pub fn part2(input: String) -> Result<usize> {\n\n let mut tiles = TileMap::from_input(input.as_str());\n\n\n\n for _ in 0..100 {\n\n tiles = tiles.next_day();\n\n }\n\n\n\n Ok(tiles.count())\n\n}\n\n\n", "file_path": "aoc2020/src/day24.rs", "rank": 29, "score": 206474.46030809963 }, { "content": "pub fn part2(input: String) -> Result<usize> {\n\n Ok(input\n\n .lines()\n\n .map(|l| {\n\n let toks: Vec<Token> = Lexer::new(l).collect();\n\n evaluate_with_precedence(&toks)\n\n })\n\n .sum())\n\n}\n\n\n", "file_path": "aoc2020/src/day18.rs", "rank": 30, "score": 206392.58166549023 }, { "content": "pub fn part1(input: String) -> Result<usize> {\n\n Ok(input.lines().map(evaluate).sum())\n\n}\n\n\n", "file_path": "aoc2020/src/day18.rs", "rank": 31, "score": 206392.58166549023 }, { "content": "pub fn part2(input: String) -> Result<usize> {\n\n let mut grid = input.parse::<Grid<Space>>()?;\n\n let mut other: Grid<Space> = Grid::with_size(grid.rows(), grid.cols());\n\n\n\n let mut cur = &mut grid;\n\n let mut next = &mut other;\n\n\n\n while *cur != *next {\n\n for r in 0..cur.rows() {\n\n for c in 0..cur.cols() {\n\n let mut count = 0;\n\n for (dr, dc) in NEIGHBORS.iter().cloned() {\n\n let mut r = r.wrapping_add(dr as 
usize);\n\n let mut c = c.wrapping_add(dc as usize);\n\n let mut loc = cur.get((r, c));\n\n while let Some(s) = loc {\n\n if *s == Space::Floor {\n\n r = r.wrapping_add(dr as usize);\n\n c = c.wrapping_add(dc as usize);\n\n loc = cur.get((r, c));\n", "file_path": "aoc2020/src/day11.rs", "rank": 32, "score": 206351.24041998357 }, { "content": "pub fn part1(input: String) -> Result<usize> {\n\n let mut grid = input.parse::<Grid<Space>>()?;\n\n\n\n grid = run_simulation(\n\n grid,\n\n |&s| s == Space::TakenSeat,\n\n |count, &s| {\n\n if count == 0 {\n\n match s {\n\n Space::EmptySeat => Space::TakenSeat,\n\n _ => s,\n\n }\n\n } else if count >= 4 {\n\n match s {\n\n Space::TakenSeat => Space::EmptySeat,\n\n _ => s,\n\n }\n\n } else {\n\n s\n\n }\n", "file_path": "aoc2020/src/day11.rs", "rank": 33, "score": 206351.24041998357 }, { "content": "pub fn part2(input: String) -> Result<usize> {\n\n let mut grid = Hypergrid::from_input(input.as_str());\n\n\n\n for _ in 0..6 {\n\n grid = grid.run_cycle();\n\n }\n\n\n\n Ok(grid.count())\n\n}\n\n\n", "file_path": "aoc2020/src/day17.rs", "rank": 34, "score": 206267.70629843653 }, { "content": "pub fn part1(input: String) -> Result<usize> {\n\n let mut grid = Grid::from_input(input.as_str());\n\n\n\n for _ in 0..6 {\n\n grid = grid.run_cycle();\n\n }\n\n\n\n Ok(grid\n\n .visit()\n\n .map(|(_, c)| c)\n\n .filter(|&c| c == Cube::Active)\n\n .count())\n\n}\n\n\n", "file_path": "aoc2020/src/day17.rs", "rank": 35, "score": 206267.70629843653 }, { "content": "pub fn part2(input: String) -> Result<u64> {\n\n let mut tiles = parse_input(input)?;\n\n\n\n let num_tiles = tiles.len();\n\n let bigside = {\n\n let mut bigside = 1;\n\n while bigside * bigside < num_tiles {\n\n bigside += 1;\n\n }\n\n bigside\n\n };\n\n\n\n assert!(bigside * bigside == tiles.len());\n\n\n\n let first_corner: u64 = tiles\n\n .iter()\n\n .filter_map(|(&num, t)| if t.is_corner() { Some(num) } else { None })\n\n .next()\n\n .unwrap();\n\n\n", "file_path": "aoc2020/src/day20.rs", "rank": 36, "score": 206075.365316697 }, { "content": "pub fn part1(input: String) -> Result<u64> {\n\n let tiles = parse_input(input)?;\n\n\n\n let corners: Vec<u64> = tiles\n\n .into_iter()\n\n .filter_map(|(num, t)| if t.is_corner() { Some(num) } else { None })\n\n .collect();\n\n\n\n assert_eq!(4, corners.len());\n\n\n\n Ok(corners.into_iter().product())\n\n}\n\n\n", "file_path": "aoc2020/src/day20.rs", "rank": 37, "score": 206075.365316697 }, { "content": "pub fn part2(input: String) -> Result<BigUint> {\n\n let (_, buses) = strtools::split_once(input.as_str(), \"\\n\");\n\n let buses: Vec<(BigUint, BigUint)> = buses.trim()\n\n .split(',')\n\n .enumerate()\n\n .filter(|&(_, s)| s != \"x\")\n\n .map(|(i, s)| s.parse::<usize>().map(|num| (i.into(), num.into())))\n\n .collect::<Result<_, _>>()?;\n\n\n\n let (mut timestamp, mut modulus) = buses.first().unwrap_or(&(0u8.into(), 0u8.into())).clone();\n\n\n\n for (minute, bus_id) in buses.into_iter().skip(1) {\n\n // This math only works if the Bus IDs are all coprime....\n\n assert_eq!(BigUint::from(1u8), modulus.clone().gcd(&bus_id));\n\n\n\n let minute = &bus_id - (&minute % &bus_id);\n\n while &timestamp % &bus_id != minute {\n\n timestamp += &modulus;\n\n }\n\n modulus *= bus_id;\n\n }\n\n Ok(timestamp)\n\n}\n\n\n", "file_path": "aoc2020/src/day13.rs", "rank": 38, "score": 203876.50888071244 }, { "content": "pub fn part1(input: String) -> Result<BigUint> {\n\n let public_keys: Vec<BigUint> = input\n\n .lines()\n\n .map(|l| l.trim().parse::<BigUint>())\n\n 
.collect::<Result<_, _>>()?;\n\n\n\n let solved_key = find_private_key(&public_keys).ok_or(anyhow!(\"cannot find private key\"))?;\n\n\n\n Ok(public_keys\n\n .into_iter()\n\n .filter_map(|k| {\n\n if k == solved_key.public_key {\n\n None\n\n } else {\n\n Some(solved_key.encrypt(k))\n\n }\n\n })\n\n .next()\n\n .unwrap())\n\n}\n\n\n", "file_path": "aoc2020/src/day25.rs", "rank": 39, "score": 203856.78231461317 }, { "content": "pub fn part2(input: String) -> Result<u32, Infallible>\n\n{\n\n let total = parse_input(input)\n\n .into_iter()\n\n .map(|x| calc_fuel_recurse(x))\n\n .sum();\n\n Ok(total)\n\n}\n\n\n", "file_path": "aoc2019/src/day1.rs", "rank": 40, "score": 201447.46403751243 }, { "content": "pub fn part2(input: String) -> Result<u32, Infallible> {\n\n let nums = parse::to_nums(input);\n\n\n\n for (a, b, c) in nums.into_iter().tuple_combinations() {\n\n if a + b + c == 2020 {\n\n return Ok(a * b * c);\n\n }\n\n }\n\n panic!(\"No triple adds up to 2020!\");\n\n}\n", "file_path": "aoc2020/src/day1.rs", "rank": 41, "score": 201447.46403751243 }, { "content": "pub fn part1(input: String) -> Result<u32, Infallible> {\n\n let nums: hash_set::HashSet<_> = parse::to_nums(input).into_iter().collect();\n\n\n\n for i in nums.iter().cloned() {\n\n let other = 2020 - i;\n\n if nums.contains(&other) {\n\n return Ok(i * other);\n\n }\n\n }\n\n panic!(\"No two items add up to 2020!\");\n\n}\n\n\n", "file_path": "aoc2020/src/day1.rs", "rank": 42, "score": 201447.46403751243 }, { "content": "pub fn part1(input: String) -> Result<u32, Infallible>\n\n{\n\n let total = parse_input(input)\n\n .into_iter()\n\n .map(|x| calc_fuel(x))\n\n .sum();\n\n Ok(total)\n\n}\n\n\n", "file_path": "aoc2019/src/day1.rs", "rank": 43, "score": 201447.46403751243 }, { "content": "pub fn part1(input: String) -> Result<usize, Infallible>\n\n{\n\n Ok(Map::new(input).count_trees(3, 1))\n\n}\n\n\n\n\n", "file_path": "aoc2020/src/day3.rs", "rank": 44, "score": 201407.87128879174 }, { "content": "pub fn part2(input: String) -> Result<usize, Infallible>\n\n{\n\n let slopes = vec!(\n\n (1, 1),\n\n (3, 1),\n\n (5, 1),\n\n (7, 1),\n\n (1, 2),\n\n );\n\n\n\n let m = Map::new(input);\n\n\n\n let res = slopes.into_iter()\n\n .map(|(c, r)| m.count_trees(c, r))\n\n .product();\n\n Ok(res)\n\n}\n", "file_path": "aoc2020/src/day3.rs", "rank": 45, "score": 201407.87128879174 }, { "content": "pub fn part1(input: String) -> Result<u32, Infallible>\n\n{\n\n let mut prog = parse_input(input);\n\n\n\n prog[1] = 12;\n\n prog[2] = 2;\n\n\n\n return Ok(run_intcode(prog).unwrap());\n\n}\n\n\n", "file_path": "aoc2019/src/day2.rs", "rank": 46, "score": 201387.99338626646 }, { "content": "pub fn part1(input: String) -> Result<usize, Infallible>\n\n{\n\n let res = parse_input(input)\n\n .into_iter()\n\n .filter(|(pol, pass)| pol.check(pass))\n\n .count();\n\n Ok(res)\n\n}\n\n\n", "file_path": "aoc2020/src/day2.rs", "rank": 47, "score": 201387.99338626646 }, { "content": "pub fn part2(input: String) -> Result<usize, Infallible>\n\n{\n\n let res = parse_input(input)\n\n .into_iter()\n\n .filter(|(pol, pass)| pol.check2(pass))\n\n .count();\n\n Ok(res)\n\n}\n\n\n", "file_path": "aoc2020/src/day2.rs", "rank": 48, "score": 201387.99338626646 }, { "content": "pub fn part2(input: String) -> Result<usize, Infallible> {\n\n let entries = input.split(\"\\n\\n\");\n\n\n\n let num = entries\n\n .map(|e| e.parse::<Passport>().unwrap())\n\n .filter(|p| p.validate())\n\n .count();\n\n Ok(num)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n const 
EXAMPLE: &str = \"ecl:gry pid:860033327 eyr:2020 hcl:#fffffd\n\nbyr:1937 iyr:2017 cid:147 hgt:183cm\n\n\n\niyr:2013 ecl:amb cid:350 eyr:2023 pid:028048884\n\nhcl:#cfa07d byr:1929\n\n\n", "file_path": "aoc2020/src/day4.rs", "rank": 49, "score": 201387.99338626646 }, { "content": "pub fn part2(input: String) -> Result<u32, Infallible>\n\n{\n\n let prog = parse_input(input);\n\n\n\n for x in 0..prog.len() {\n\n for y in 0..prog.len() {\n\n let mut prog = prog.clone();\n\n prog[1] = x as u32;\n\n prog[2] = y as u32;\n\n\n\n if let Some(i) = run_intcode(prog) {\n\n if i == 19690720 {\n\n return Ok((x * 100 + y) as u32);\n\n }\n\n }\n\n }\n\n }\n\n panic!(\"Solution not found\")\n\n}\n\n\n", "file_path": "aoc2019/src/day2.rs", "rank": 50, "score": 201387.99338626646 }, { "content": "pub fn part1(input: String) -> Result<usize, Infallible> {\n\n let entries = input.split(\"\\n\\n\");\n\n\n\n let num = entries\n\n .map(|e| e.parse::<Passport>().unwrap())\n\n .filter(|p| p.validate_keys())\n\n .count();\n\n Ok(num)\n\n}\n\n\n", "file_path": "aoc2020/src/day4.rs", "rank": 51, "score": 201387.99338626646 }, { "content": "fn part1_answer(input: &str) -> Result<String> {\n\n let cups = play_crab_cups(\n\n part1_cups(input),\n\n 100,\n\n );\n\n\n\n let mut idx = cups.cups[1];\n\n let mut s = String::new();\n\n\n\n while idx != 1 {\n\n let c = ((idx as u8) + b'0').into();\n\n s.push(c);\n\n idx = cups.cups[idx];\n\n }\n\n\n\n Ok(s)\n\n}\n\n\n", "file_path": "aoc2020/src/day23.rs", "rank": 52, "score": 179468.0424329443 }, { "content": "fn part2_answer(input: &str) -> Result<u64> {\n\n let cups = play_crab_cups(\n\n part2_cups(input),\n\n 10_000_000,\n\n );\n\n\n\n let a = cups.cups[1];\n\n let b = cups.cups[a];\n\n\n\n Ok((a as u64) * (b as u64))\n\n}\n\n\n", "file_path": "aoc2020/src/day23.rs", "rank": 53, "score": 179468.0424329443 }, { "content": "pub fn part2(_input: String) -> Result<BigUint> {\n\n Ok(0u8.into())\n\n}\n\n\n", "file_path": "aoc2020/src/day25.rs", "rank": 54, "score": 177590.0913942341 }, { "content": "fn part1_with_preamble_len(preamble_len: usize, input: String) -> Result<u64> {\n\n let nums = input\n\n .lines()\n\n .map(|l| l.parse())\n\n .collect::<Result<Vec<u64>, _>>()?;\n\n ensure!(nums.len() >= preamble_len, \"input too short\");\n\n\n\n let mut xs = XmasState::new(&nums[..preamble_len]);\n\n for num in nums.into_iter().skip(preamble_len) {\n\n if !xs.is_valid(num) {\n\n return Ok(num);\n\n }\n\n xs = xs.push(num);\n\n }\n\n Err(anyhow!(\"input is valid\"))\n\n}\n\n\n", "file_path": "aoc2020/src/day9.rs", "rank": 55, "score": 166105.10133098369 }, { "content": "pub fn part2_with_preamble_len(preamble_len: usize, input: String) -> Result<u64> {\n\n use std::cmp::Ordering;\n\n let target = part1_with_preamble_len(preamble_len, input.clone())?;\n\n let nums = input\n\n .lines()\n\n .map(|l| l.parse())\n\n .collect::<Result<Vec<u64>, _>>()?;\n\n\n\n let mut start = 0;\n\n let mut end = 0;\n\n loop {\n\n ensure!(end <= nums.len(), \"target sum not found\");\n\n let range = &nums[start..end];\n\n let sum: u64 = range.iter().cloned().sum();\n\n match sum.cmp(&target) {\n\n Ordering::Equal => {\n\n if let MinMax(min, max) = range.iter().cloned().minmax() {\n\n return Ok(min + max);\n\n }\n\n panic!(\"range found, but min/max failed\");\n", "file_path": "aoc2020/src/day9.rs", "rank": 56, "score": 162381.50700198516 }, { "content": "fn parse_input(input: String) -> Vec<u32>\n\n{\n\n input\n\n .lines()\n\n .map(|l| l.parse().unwrap())\n\n .collect()\n\n}\n", "file_path": 
"aoc2019/src/day1.rs", "rank": 57, "score": 155215.8499628989 }, { "content": "fn parse_input(input: String) -> Vec<u32>\n\n{\n\n input\n\n .split(',')\n\n .map(|l| l.trim().parse().unwrap())\n\n .collect()\n\n}\n", "file_path": "aoc2019/src/day2.rs", "rank": 58, "score": 155155.07799824834 }, { "content": "pub fn part2(_: String) -> Result<u64> {\n\n part2_answer(\"974618352\")\n\n}\n\n\n", "file_path": "aoc2020/src/day23.rs", "rank": 59, "score": 150489.77289783687 }, { "content": "pub fn part1(_: String) -> Result<String> {\n\n part1_answer(\"974618352\")\n\n}\n\n\n", "file_path": "aoc2020/src/day23.rs", "rank": 60, "score": 150489.77289783687 }, { "content": "fn parse_input(input: String) -> Vec<(PasswordPolicy, String)>\n\n{\n\n input\n\n .lines()\n\n .map(|l| {\n\n let parts : Vec<&str> = l.splitn(2, \": \").collect();\n\n let pol = parts[0].parse::<PasswordPolicy>().unwrap();\n\n let pass = parts[1];\n\n (pol, pass.to_string())\n\n })\n\n .collect()\n\n}\n", "file_path": "aoc2020/src/day2.rs", "rank": 61, "score": 149815.39230904388 }, { "content": "fn parse_input(input: String) -> Result<HashMap<u64, PartialTile>> {\n\n let mut tiles: HashMap<u64, PartialTile> = input\n\n .split(\"\\n\\n\")\n\n .map(PartialTile::from_input)\n\n .collect::<Result<_, _>>()?;\n\n\n\n let keys: Vec<u64> = tiles.keys().copied().collect();\n\n\n\n for k in keys {\n\n let matches = tiles[&k].calc_matches(tiles.iter().filter(|&(&num, _)| num != k));\n\n tiles.get_mut(&k).unwrap().side_matches = matches;\n\n }\n\n\n\n Ok(tiles)\n\n}\n\n\n", "file_path": "aoc2020/src/day20.rs", "rank": 62, "score": 144475.82671955467 }, { "content": "fn build_regex(input: &str, root: &str) -> String {\n\n let mut map = HashMap::new();\n\n\n\n for line in input.lines() {\n\n let (name, rule) = strtools::split_once(line, \":\");\n\n let name = name.trim();\n\n let rule = rule.trim();\n\n map.insert(name, rule);\n\n }\n\n\n\n _build_regex_str(&map, root)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n const EXAMPLE: &str = r#\"0: 4 1 5\n\n1: 2 3 | 3 2\n\n2: 4 4 | 5 5\n", "file_path": "aoc2020/src/day19.rs", "rank": 63, "score": 143243.4587923319 }, { "content": "fn solve_single_field(options: &[HashSet<usize>]) -> Option<(usize, usize)> {\n\n for (col, options) in options.iter().enumerate() {\n\n if let Ok(field) = options.iter().exactly_one() {\n\n return Some((col, *field));\n\n }\n\n }\n\n\n\n for field in 0..options.len() {\n\n let cols = options\n\n .iter()\n\n .enumerate()\n\n .filter(|&(_col, options)| options.contains(&field))\n\n .map(|(col, _options)| col);\n\n\n\n if let Ok(col) = cols.exactly_one() {\n\n return Some((col, field));\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "aoc2020/src/day16.rs", "rank": 64, "score": 142441.9850563508 }, { "content": "fn parse_input(input: String) -> Result<(Vec<Field>, Ticket, Vec<Ticket>)> {\n\n let (fields, tickets) = strtools::split_once(input.trim(), \"\\n\\n\");\n\n\n\n let fields: Vec<Field> = fields\n\n .lines()\n\n .map(Field::from_line)\n\n .collect::<Result<_, _>>()?;\n\n\n\n let (mine, nearby) = strtools::split_once(tickets, \"\\n\\n\");\n\n let mine = Ticket::from_line(\n\n mine.lines()\n\n .skip(1)\n\n .exactly_one()\n\n .map_err(|_| anyhow!(\"too many lines for my ticket\"))?,\n\n )?;\n\n let nearby: Vec<Ticket> = nearby\n\n .lines()\n\n .skip(1)\n\n .map(|n| Ticket::from_line(n.trim()))\n\n .collect::<Result<_, _>>()?;\n\n\n\n Ok((fields, mine, nearby))\n\n}\n\n\n", "file_path": "aoc2020/src/day16.rs", "rank": 65, "score": 142143.24968459833 }, { 
"content": "fn main() {\n\n let m = App::new(\"Advent of Code 2019 solvers\")\n\n .author(\"Kyle Willmon <[email protected]>\")\n\n .arg(Arg::from_usage(\"<INPUT> 'Sets the input file to use'\"))\n\n .arg(\n\n Arg::with_name(\"day\")\n\n .short(\"d\")\n\n .long(\"day\")\n\n .takes_value(true)\n\n .help(\"day to solve\"),\n\n )\n\n .get_matches();\n\n\n\n let day = match m.value_of(\"day\") {\n\n None => None,\n\n Some(val) => match val.parse::<u8>() {\n\n Ok(val) => Some(val),\n\n Err(err) => {\n\n println!(\"Invalid day {:?}: {}\", val, err);\n\n return;\n", "file_path": "aoc2019/src/main.rs", "rank": 67, "score": 142075.55222132386 }, { "content": "fn is_num_between<T: AsRef<str>>(val: T, min: u32, max: u32) -> bool {\n\n val.as_ref()\n\n .parse::<u32>()\n\n .map(|num| min <= num && num <= max)\n\n .unwrap_or(false)\n\n}\n\n\n\nimpl Passport {\n\n fn validate_keys(&self) -> bool {\n\n let required_keys: HashSet<String> = [\"byr\", \"iyr\", \"eyr\", \"hgt\", \"hcl\", \"ecl\", \"pid\"]\n\n .iter()\n\n .cloned()\n\n .map(|s| s.to_string())\n\n .collect();\n\n\n\n let keys: HashSet<String> = self.0.keys().cloned().collect();\n\n\n\n keys.is_superset(&required_keys)\n\n }\n\n\n", "file_path": "aoc2020/src/day4.rs", "rank": 68, "score": 133530.0932074826 }, { "content": "fn part1_cups(s: &str) -> CupList {\n\n let mut cups = CupList::with_size(11);\n\n\n\n cups.extend(\n\n s.bytes().map(|c| c - b'0').map(|c| c.into())\n\n );\n\n\n\n cups\n\n}\n\n\n", "file_path": "aoc2020/src/day23.rs", "rank": 69, "score": 123446.19964371974 }, { "content": "fn part2_cups(s: &str) -> CupList {\n\n let mut cups = CupList::with_size(1_000_001);\n\n\n\n cups.extend(\n\n s.bytes().map(|c| c - b'0').map(|c| c.into())\n\n );\n\n\n\n let next = s.bytes().count()+1;\n\n cups.extend(next..=1_000_000);\n\n\n\n cups\n\n}\n\n\n", "file_path": "aoc2020/src/day23.rs", "rank": 70, "score": 123446.19964371974 }, { "content": "fn solve_fields(fields: &[Field], tickets: &[Ticket]) -> Result<Vec<usize>> {\n\n let tickets: Vec<Ticket> = tickets\n\n .iter()\n\n .cloned()\n\n .filter(|t| t.error_rate(fields).is_none())\n\n .collect();\n\n\n\n let mut options: Vec<HashSet<usize>> = vec![(0..fields.len()).collect(); fields.len()];\n\n\n\n for t in tickets {\n\n for (num, options) in t.0.into_iter().zip_eq(options.iter_mut()) {\n\n for (idx, field) in fields.iter().enumerate() {\n\n if options.contains(&idx) && !field.is_valid(num) {\n\n options.remove(&idx);\n\n }\n\n }\n\n }\n\n }\n\n\n\n let mut result: Vec<usize> = vec![usize::MAX; fields.len()];\n", "file_path": "aoc2020/src/day16.rs", "rank": 71, "score": 113032.5933829723 }, { "content": "pub fn to_nums(input: String) -> Vec<u32>\n\n{\n\n input\n\n .lines()\n\n .map(|l| l.parse().unwrap())\n\n .collect()\n\n}\n", "file_path": "aoc2020/src/parse.rs", "rank": 72, "score": 112143.01515594027 }, { "content": "fn get_input<P: AsRef<str>>(filename: P) -> io::Result<String> {\n\n if filename.as_ref() == \"-\" {\n\n let mut data = String::new();\n\n return io::stdin().read_to_string(&mut data).map(move |_| data);\n\n }\n\n fs::read_to_string(filename.as_ref())\n\n}\n", "file_path": "aoc2019/src/main.rs", "rank": 73, "score": 109280.20224244476 }, { "content": "fn evaluate(expr: &str) -> usize {\n\n let lex = Lexer::new(expr);\n\n let mut stack = ExprStack::new();\n\n for tok in lex {\n\n stack.push(tok);\n\n }\n\n stack.value()\n\n}\n\n\n", "file_path": "aoc2020/src/day18.rs", "rank": 75, "score": 91450.86381745654 }, { "content": "fn calc_fuel(weight: u32) -> u32 {\n\n if weight <= 6 
{\n\n return 0\n\n }\n\n weight/3 - 2\n\n}\n\n\n", "file_path": "aoc2019/src/day1.rs", "rank": 76, "score": 90098.08914459708 }, { "content": "fn evaluate_with_precedence(toks: &[Token]) -> usize {\n\n let mut flat_expr = Vec::new();\n\n let mut depth = 0;\n\n let mut start = None;\n\n for (i, t) in toks.iter().cloned().enumerate() {\n\n match t {\n\n Token::ParenOpen => {\n\n if depth == 0 {\n\n start = Some(i + 1);\n\n }\n\n depth += 1;\n\n }\n\n Token::ParenClose => {\n\n assert!(depth != 0);\n\n depth -= 1;\n\n if depth == 0 {\n\n flat_expr.push(Token::Number(evaluate_with_precedence(\n\n &toks[start.unwrap()..i],\n\n )));\n\n start = None;\n", "file_path": "aoc2020/src/day18.rs", "rank": 77, "score": 89952.2472830733 }, { "content": "fn calc_fuel_recurse(weight: u32) -> u32 {\n\n let fuel = calc_fuel(weight);\n\n if fuel == 0 {\n\n return 0\n\n }\n\n fuel + calc_fuel_recurse(fuel)\n\n}\n\n\n", "file_path": "aoc2019/src/day1.rs", "rank": 78, "score": 88661.86563134484 }, { "content": "fn make_move(cups: &mut CupList) {\n\n let cur = cups.head;\n\n\n\n let first_removed = cups.cups[cur];\n\n let second_removed = cups.cups[first_removed];\n\n let last_removed = cups.cups[second_removed];\n\n\n\n let next_head = cups.cups[last_removed];\n\n\n\n let dest = destination_cup(cur, cups.max, first_removed, second_removed, last_removed);\n\n let dest_tail = cups.cups[dest];\n\n\n\n cups.cups[cur] = next_head;\n\n cups.cups[dest] = first_removed;\n\n cups.cups[last_removed] = dest_tail;\n\n\n\n cups.tail = cur;\n\n cups.head = next_head;\n\n}\n\n\n", "file_path": "aoc2020/src/day23.rs", "rank": 79, "score": 88580.72039939949 }, { "content": "fn _minmax(a: i32, b: i32) -> (i32, i32) {\n\n if b < a {\n\n (b, a)\n\n } else {\n\n (a, b)\n\n }\n\n}\n\n\n", "file_path": "aoc2020/src/day17.rs", "rank": 80, "score": 87625.46096236815 }, { "content": "fn _normalize_points(a: &Point, b: &Point) -> (Point, Point) {\n\n let (xmin, xmax) = _minmax(a.x, b.x);\n\n let (ymin, ymax) = _minmax(a.y, b.y);\n\n let (zmin, zmax) = _minmax(a.z, b.z);\n\n (Point::new(xmin, ymin, zmin), Point::new(xmax, ymax, zmax))\n\n}\n\n\n\nimpl Grid {\n\n fn run_cycle(&self) -> Self {\n\n let mut next = {\n\n let (min, max) = self.minmax_points();\n\n let one = Point::new(1, 1, 1);\n\n Grid::from_points(&(&min - &one), &(&max + &one))\n\n };\n\n\n\n let points = next.point_iter();\n\n\n\n for p in points {\n\n let neighbors = self.active_neighbors(&p);\n\n let cube = self.get(&p).unwrap_or(Cube::Inactive);\n", "file_path": "aoc2020/src/day17.rs", "rank": 81, "score": 86195.22230556063 }, { "content": "fn wait_time(time: u32, bus: u32) -> u32 {\n\n let rem = time % bus;\n\n if rem == 0 {\n\n 0\n\n } else {\n\n bus - rem\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n const EXAMPLE: &str = \"939\\n7,13,x,x,59,x,31,19\";\n\n\n\n #[test]\n\n fn part1_example() {\n\n assert_eq!(295, part1(EXAMPLE.to_string()).unwrap());\n\n }\n\n\n\n #[test]\n\n fn part2_example() {\n\n assert_eq!(BigUint::from(1068781u32), part2(EXAMPLE.to_string()).unwrap());\n\n }\n\n}", "file_path": "aoc2020/src/day13.rs", "rank": 82, "score": 85874.26687492701 }, { "content": "fn parse_range(r: &str) -> Result<RangeInclusive<usize>> {\n\n let (start, end) = strtools::split_once(r, \"-\");\n\n let start = start.trim().parse::<usize>()?;\n\n let end = end.trim().parse::<usize>()?;\n\n Ok(start..=end)\n\n}\n\n\n\nimpl Field {\n\n fn from_line(line: &str) -> Result<Self> {\n\n let (name, ranges) = strtools::split_once(line, \": \");\n\n let (low, 
high) = strtools::split_once(ranges, \" or \");\n\n\n\n Ok(Self {\n\n name: name.to_string(),\n\n low: parse_range(low)?,\n\n high: parse_range(high)?,\n\n })\n\n }\n\n\n\n fn is_valid(&self, num: usize) -> bool {\n", "file_path": "aoc2020/src/day16.rs", "rank": 83, "score": 85716.3107216697 }, { "content": "fn _neighbors(tile: (i32, i32)) -> Vec<(i32, i32)> {\n\n vec![\n\n Direction::East,\n\n Direction::SouthEast,\n\n Direction::SouthWest,\n\n Direction::West,\n\n Direction::NorthWest,\n\n Direction::NorthEast,\n\n ].into_iter()\n\n .map(|d| d.into_coords())\n\n .map(|(x, y)| (x+tile.0, y+tile.1))\n\n .collect()\n\n}\n\n\n\nimpl TileMap {\n\n fn from_input(input: &str) -> Self {\n\n let mut tiles: HashMap<(i32, i32), bool> = HashMap::new();\n\n\n\n for line in input.trim().lines() {\n\n let tile = DirList::new(line)\n", "file_path": "aoc2020/src/day24.rs", "rank": 84, "score": 85028.56198074143 }, { "content": "fn run_intcode(mut prog: Vec<u32>) -> Option<u32>\n\n{\n\n for i in (0..prog.len()).step_by(4) {\n\n let opcode = *prog.get(i)?;\n\n\n\n if opcode == 99 {\n\n break;\n\n }\n\n\n\n let left = *prog.get(i+1)? as usize;\n\n let right = *prog.get(i+2)? as usize;\n\n let out = *prog.get(i+3)? as usize;\n\n if out >= prog.len() {\n\n return None\n\n }\n\n match opcode {\n\n 1 => prog[out] = *prog.get(left)? + *prog.get(right)?,\n\n 2 => prog[out] = *prog.get(left)? * *prog.get(right)?,\n\n _ => panic!(\"invalid opcode\"),\n\n }\n\n }\n\n prog.get(0).map(|&x| x)\n\n}\n\n\n", "file_path": "aoc2019/src/day2.rs", "rank": 85, "score": 83712.7325697426 }, { "content": "fn find_private_key(public_keys: &[BigUint]) -> Option<CryptoKey> {\n\n let mut exponent: BigUint = 1u8.into();\n\n let mut public_key: BigUint = BASE.into();\n\n\n\n while exponent < MODULUS.into() {\n\n if public_keys.contains(&public_key) {\n\n return Some(CryptoKey {\n\n public_key,\n\n private_key: exponent,\n\n });\n\n }\n\n\n\n exponent += 1u8;\n\n public_key = (public_key * BASE) % MODULUS;\n\n }\n\n None\n\n}\n\n\n", "file_path": "aoc2020/src/day25.rs", "rank": 86, "score": 82789.15481813876 }, { "content": "fn wire_points<'a>(line: &'a str) -> impl Iterator<Item = (i32, i32)> + 'a {\n\n line\n\n .split(',')\n\n .scan((0, 0), |state, motion| {\n\n let (dir, len) = motion.split_at(1);\n\n let dir = dir.chars().nth(0).unwrap();\n\n let len = len.parse::<i32>().unwrap();\n\n\n\n let start = *state;\n\n\n\n *state = move_point(start, dir, len);\n\n\n\n Some((1..=len).map(move |l| move_point(start, dir, l)))\n\n })\n\n .flatten()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n", "file_path": "aoc2019/src/day3.rs", "rank": 87, "score": 80087.07646688921 }, { "content": "fn has_char_at<T: AsRef<str>>(s: T, c: char, idx: usize) -> bool\n\n{\n\n s\n\n .as_ref()\n\n .chars()\n\n .nth(idx)\n\n .map(|val| val == c)\n\n .unwrap_or(false)\n\n}\n\n\n\nimpl PasswordPolicy {\n\n fn check<T: AsRef<str>>(&self, password: T) -> bool\n\n {\n\n let num = password\n\n .as_ref()\n\n .chars()\n\n .filter(|&c| c == self.letter)\n\n .count() as u16;\n\n\n\n self.min <= num && num <= self.max\n", "file_path": "aoc2020/src/day2.rs", "rank": 88, "score": 80068.01489723225 }, { "content": "fn play_crab_cups(mut cups: CupList, iterations: usize) -> CupList {\n\n for _ in 0..iterations {\n\n make_move(&mut cups);\n\n }\n\n\n\n cups\n\n}\n\n\n", "file_path": "aoc2020/src/day23.rs", "rank": 89, "score": 80058.31619461297 }, { "content": "fn _build_regex_str(map: &HashMap<&str, &str>, rule: &str) -> String {\n\n let rule = 
map.get(rule).expect(\"rule not found!\");\n\n if let Some(c) = rule.strip_prefix('\"').and_then(|r| r.chars().next()) {\n\n return c.to_string();\n\n }\n\n\n\n let regex: Vec<String> = rule.split('|').map(|chain| {\n\n let mut regex = String::from(\"\");\n\n let chain = chain.trim();\n\n for rule in chain.split(' ') {\n\n let rule = rule.trim();\n\n regex.push_str(_build_regex_str(map, rule).as_str());\n\n }\n\n regex\n\n }).collect();\n\n\n\n if regex.len() == 1 {\n\n regex.into_iter().next().unwrap()\n\n } else {\n\n format!(\"({})\", regex.into_iter().join(\"|\"))\n\n }\n\n}\n\n\n", "file_path": "aoc2020/src/day19.rs", "rank": 90, "score": 79501.89919315583 }, { "content": "pub trait Solver {\n\n fn solve(self: Box<Self>, input: String) -> Result<String, anyhow::Error>;\n\n}\n\n\n\nimpl<F, T: fmt::Display, E: Into<anyhow::Error>> Solver for F\n\nwhere\n\n F: Fn(String) -> Result<T, E>,\n\n{\n\n fn solve(self: Box<Self>, input: String) -> Result<String, anyhow::Error> {\n\n match self(input) {\n\n Ok(v) => Ok(v.to_string()),\n\n Err(e) => Err(e.into()),\n\n }\n\n }\n\n}\n", "file_path": "aoclib/src/solver.rs", "rank": 91, "score": 78484.21382434871 }, { "content": "fn destination_cup(cur: usize, max: usize, a: usize, b: usize, c: usize) -> usize {\n\n let dest = {\n\n if cur == 1 {\n\n max\n\n } else {\n\n cur - 1\n\n }\n\n };\n\n\n\n if a == dest || b == dest || c == dest {\n\n destination_cup(dest, max, a, b, c)\n\n } else {\n\n dest\n\n }\n\n}\n\n\n", "file_path": "aoc2020/src/day23.rs", "rank": 92, "score": 78320.16246341466 }, { "content": "fn move_point(start: (i32, i32), dir: char, len: i32) -> (i32, i32) {\n\n match dir {\n\n 'R' => (start.0 + len, start.1),\n\n 'L' => (start.0 - len, start.1),\n\n 'U' => (start.0, start.1 + len),\n\n 'D' => (start.0, start.1 - len),\n\n _ => panic!(\"invalid direction\"),\n\n }\n\n}\n\n\n", "file_path": "aoc2019/src/day3.rs", "rank": 93, "score": 78258.65711412311 }, { "content": "fn _neighbors(pos: (usize, usize), grid_size: (usize, usize)) -> Vec<(usize, usize)> {\n\n let up = if pos.0 == 0 { None } else { Some(pos.0 - 1) };\n\n let down = {\n\n let down = pos.0 + 1;\n\n if down == grid_size.0 {\n\n None\n\n } else {\n\n Some(down)\n\n }\n\n };\n\n let left = if pos.1 == 0 { None } else { Some(pos.1 - 1) };\n\n let right = {\n\n let right = pos.1 + 1;\n\n if right == grid_size.1 {\n\n None\n\n } else {\n\n Some(right)\n\n }\n\n };\n\n\n", "file_path": "aoc2020/src/day11.rs", "rank": 94, "score": 78129.27933181824 }, { "content": "fn run_simulation<F, G>(mut grid: Grid<Space>, filter: F, translate: G) -> Grid<Space>\n\nwhere\n\n F: Fn(&Space) -> bool,\n\n G: Fn(usize, &Space) -> Space,\n\n{\n\n let mut other: Grid<Space> = Grid::with_size(grid.rows(), grid.cols());\n\n\n\n let mut cur = &mut grid;\n\n let mut next = &mut other;\n\n\n\n while *cur != *next {\n\n for r in 0..cur.rows() {\n\n for c in 0..cur.cols() {\n\n let mut count = 0;\n\n for (r, c) in _neighbors((r, c), (cur.rows(), cur.cols())) {\n\n if filter(cur.get((r, c)).unwrap()) {\n\n count += 1;\n\n }\n\n }\n\n *next.get_mut((r, c)).unwrap() = translate(count, cur.get((r, c)).unwrap());\n", "file_path": "aoc2020/src/day11.rs", "rank": 95, "score": 73349.2114406771 }, { "content": "struct Hypergrid(HashSet<Hypercube>);\n\n\n\nimpl Hypergrid {\n\n fn run_cycle(self) -> Self {\n\n let mut neighbor_count: HashMap<Hypercube, usize> = HashMap::new();\n\n\n\n for cube in self.0.iter().cloned() {\n\n for c in cube.neighbors() {\n\n neighbor_count\n\n .entry(c)\n\n .and_modify(|count| 
*count += 1)\n\n .or_insert(1);\n\n }\n\n }\n\n\n\n let mut grid = HashSet::new();\n\n\n\n for (cube, count) in neighbor_count {\n\n if count == 3 || (count == 2 && self.0.contains(&cube)) {\n\n grid.insert(cube);\n", "file_path": "aoc2020/src/day17.rs", "rank": 96, "score": 69177.04239807535 }, { "content": "/// Split a str into exactly two substrings. If the delimeter isn't found in the string, (s, \"\") is returned\n\npub fn split_once<'a, 'b>(s: &'a str, delimeter: &'b str) -> (&'a str, &'a str) {\n\n if let Some(index) = s.find(delimeter) {\n\n let (head, tail) = s.split_at(index);\n\n let (_, tail) = tail.split_at(delimeter.len());\n\n (head, tail)\n\n } else {\n\n (s, \"\")\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_split_once() {\n\n assert_eq!((\"a\", \"b\"), split_once(\"a123b\", \"123\"));\n\n assert_eq!((\"abc\", \"\"), split_once(\"abc\", \"123\"));\n\n assert_eq!((\"a\", \"b-c\"), split_once(\"a-b-c\", \"-\"));\n\n assert_eq!((\"a\", \"-b\"), split_once(\"a--b\", \"-\"));\n\n }\n\n}", "file_path": "aoclib/src/strtools.rs", "rank": 97, "score": 48802.916661639974 }, { "content": "use std::fmt;\n\n\n", "file_path": "aoclib/src/solver.rs", "rank": 98, "score": 38312.51612100907 }, { "content": " {\n\n self.parts.push(Part::new(part, solver));\n\n self\n\n }\n\n\n\n pub fn solve(self, input: String) -> String {\n\n let mut out = String::new();\n\n let input = input.replace(\"\\r\\n\", \"\\n\");\n\n for part in self.parts.into_iter() {\n\n writeln!(out, \"Part: {}\", part.part).unwrap();\n\n match part.solve(input.clone()) {\n\n Ok(solution) => writeln!(out, \"Solution: {}\", solution).unwrap(),\n\n Err(e) => writeln!(out, \"Error: {}\", e).unwrap(),\n\n }\n\n }\n\n out\n\n }\n\n}\n\n\n", "file_path": "aoclib/src/day.rs", "rank": 99, "score": 38300.22303551312 } ]
Rust
verifier/src/verify_merkle.rs
patrickbiel01/Cairo_Verifier
c174b5d5bc906cb64c832534ffac74268bd9b308
use num256::uint256::Uint256 as Uint256;
use crate::uint256_ops;

pub fn get_hash_mask() -> Uint256 {
    return uint256_ops::get_uint256("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF000000000000000000000000");
}

/*
    Verifies a Merkle tree decommitment for n leaves in a Merkle tree with N leaves.
    The input data sits in the queue at queue_idx. Each slot in the queue contains a
    32-byte leaf index and a 32-byte leaf value. The indices need to be in the range
    [N..2*N-1] and strictly incrementing. Decommitments are read from the channel in the ctx.
    The input data is destroyed during verification.

    Queue Structure:
             0                     1
    [Index, Hash/Value]   [Index, Hash/Value]
*/
pub fn verify_merkle(
    channel_idx: usize,
    ctx: &mut Vec<Uint256>,
    queue_idx: usize,
    root: Uint256,
    unique_queries: usize
) -> Uint256 {
    let l_hash_mask = get_hash_mask();

    let max_merkle_verifier_queries: usize = 128;
    assert!(unique_queries <= max_merkle_verifier_queries);

    let hashes_index: usize = queue_idx + 1;
    let slot_size: usize = 2;
    let queue_size: usize = slot_size * unique_queries;

    let mut rd_idx: usize = 0;
    let mut wr_idx: usize = 0;

    let mut index: Uint256 = ctx[queue_idx + rd_idx].clone();
    let mut proof_idx = uint256_ops::to_usize( &ctx[channel_idx] );

    let mut sibling_data: Vec<[u8; 32]> = vec![ [0; 32], [0; 32] ];

    while index > uint256_ops::get_uint256("1") {
        let sibling_index = uint256_ops::to_usize(&index) ^ 1;
        let sibling_offset = sibling_index % 2;

        sibling_data[1 ^ sibling_offset] = uint256_ops::to_fixed_bytes( &ctx[rd_idx + hashes_index] );
        rd_idx = ( rd_idx + slot_size ) % queue_size;

        let mut new_hash_index = proof_idx;
        proof_idx += 1;

        ctx[queue_idx + wr_idx] = index / uint256_ops::get_uint256("2");
        index = ctx[queue_idx + rd_idx].clone();

        if index == Uint256::from_bytes_le( &sibling_index.to_le_bytes() ) {
            new_hash_index = hashes_index + rd_idx;
            proof_idx -= 1;
            rd_idx = (rd_idx + slot_size) % queue_size;
            index = ctx[queue_idx + rd_idx].clone();
        }

        sibling_data[sibling_offset] = uint256_ops::to_fixed_bytes( &ctx[new_hash_index] );

        // Copy both 32-byte sibling hashes into a single 64-byte buffer before hashing.
        let mut combined_data: [u8; 64] = [0; 64];
        for i in 0..32 {
            combined_data[i] = sibling_data[0][i];
            combined_data[i + 32] = sibling_data[1][i];
        }

        let sibling_hash = uint256_ops::keccak_256(&combined_data);
        ctx[hashes_index + wr_idx] = uint256_ops::bitwise_and( &l_hash_mask, &sibling_hash );
        wr_idx = (wr_idx + slot_size) % queue_size;
    }

    let hash = ctx[rd_idx + hashes_index].clone();
    ctx[channel_idx] = Uint256::from_bytes_le( &proof_idx.to_le_bytes() );

    assert!(hash == root);
    return root;
}
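The doc comment above describes the interleaved [index, hash] queue, but no caller appears in this row, so the short sketch below shows how a caller might seed ctx before invoking verify_merkle. It is illustrative only and not taken from the repository: the slot positions (channel_idx, queue_idx, proof_start), the tree size, and all hash values are assumed placeholders, and it leans on the same uint256_ops helpers and module path used in the file above.

use num256::uint256::Uint256 as Uint256;
use crate::uint256_ops;
use crate::verify_merkle::verify_merkle;

// Sketch: decommit two leaves of a 4-leaf tree (leaf indices live in [4..7]).
fn example_two_leaf_decommitment() {
    let mut ctx: Vec<Uint256> = vec![uint256_ops::get_uint256("0"); 64];

    let channel_idx = 0;   // assumed slot holding the proof read pointer
    let queue_idx = 8;     // assumed start of the interleaved [index, hash] queue
    let proof_start = 16;  // assumed offset where the prover's sibling hashes sit

    // verify_merkle reads decommitment hashes starting at ctx[proof_start].
    ctx[channel_idx] = uint256_ops::from_usize(proof_start);

    // Slot 0: leaf index 4 and its claimed hash (placeholder value).
    ctx[queue_idx] = uint256_ops::from_usize(4);
    ctx[queue_idx + 1] = uint256_ops::get_uint256("ABCD");
    // Slot 1: leaf index 6 and its claimed hash (placeholder value); indices must increase.
    ctx[queue_idx + 2] = uint256_ops::from_usize(6);
    ctx[queue_idx + 3] = uint256_ops::get_uint256("1234");

    // A real prover would have written the hashes of nodes 5 and 7 at ctx[16] and ctx[17];
    // the level-1 nodes 2 and 3 then pair with each other inside the queue, so only two
    // decommitment hashes are consumed.
    let expected_root = uint256_ops::get_uint256("DEADBEEF"); // placeholder root

    // verify_merkle folds the queue up to the root and panics on a mismatch, so these
    // placeholder values would not pass the final check; the call shape is what matters here.
    let _root = verify_merkle(channel_idx, &mut ctx, queue_idx, expected_root, 2);
}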
use num256::uint256::Uint256 as Uint256;
use crate::uint256_ops;

pub fn get_hash_mask() -> Uint256 {
    return uint256_ops::get_uint256("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF000000000000000000000000");
}

/*
    Verifies a Merkle tree decommitment for n leaves in a Merkle tree with N leaves.
    The input data sits in the queue at queue_idx. Each slot in the queue contains a
    32-byte leaf index and a 32-byte leaf value. The indices need to be in the range
    [N..2*N-1] and strictly incrementing. Decommitments are read from the channel in the ctx.
    The input data is destroyed during verification.

    Queue Structure:
             0                     1
    [Index, Hash/Value]   [Index, Hash/Value]
*/
pub fn verify_merkle(
    channel_idx: usize,
    ctx: &mut Vec<Uint256>,
    queue_idx: usize,
    root: Uint256,
    unique_queries: usize
) -> Uint256 {
    let l_hash_mask = get_hash_mask();

    let max_merkle_verifier_queries: usize = 128;
    assert!(unique_queries <= max_merkle_verifier_queries);

    let hashes_index: usize = queue_idx + 1;
    let slot_size: usize = 2;
    let queue_size: usize = slot_size * unique_queries;

    let mut rd_idx: usize = 0;
    let mut wr_idx: usize = 0;

    let mut index: Uint256 = ctx[queue_idx + rd_idx].clone();
    let mut proof_idx = uint256_ops::to_usize( &ctx[channel_idx] );

    let mut sibling_data: Vec<[u8; 32]> = vec![ [0; 32], [0; 32] ];

    while index > uint256_ops::get_uint256("1") {
        let sibling_index = uint256_ops::to_usize(&index) ^ 1;
        let sibling_offset = sibling_inde
x % 2;

        sibling_data[1 ^ sibling_offset] = uint256_ops::to_fixed_bytes( &ctx[rd_idx + hashes_index] );
        rd_idx = ( rd_idx + slot_size ) % queue_size;

        let mut new_hash_index = proof_idx;
        proof_idx += 1;

        ctx[queue_idx + wr_idx] = index / uint256_ops::get_uint256("2");
        index = ctx[queue_idx + rd_idx].clone();

        if index == Uint256::from_bytes_le( &sibling_index.to_le_bytes() ) {
            new_hash_index = hashes_index + rd_idx;
            proof_idx -= 1;
            rd_idx = (rd_idx + slot_size) % queue_size;
            index = ctx[queue_idx + rd_idx].clone();
        }

        sibling_data[sibling_offset] = uint256_ops::to_fixed_bytes( &ctx[new_hash_index] );

        // Copy both 32-byte sibling hashes into a single 64-byte buffer before hashing.
        let mut combined_data: [u8; 64] = [0; 64];
        for i in 0..32 {
            combined_data[i] = sibling_data[0][i];
            combined_data[i + 32] = sibling_data[1][i];
        }

        let sibling_hash = uint256_ops::keccak_256(&combined_data);
        ctx[hashes_index + wr_idx] = uint256_ops::bitwise_and( &l_hash_mask, &sibling_hash );
        wr_idx = (wr_idx + slot_size) % queue_size;
    }

    let hash = ctx[rd_idx + hashes_index].clone();
    ctx[channel_idx] = Uint256::from_bytes_le( &proof_idx.to_le_bytes() );

    assert!(hash == root);
    return root;
}
function_block-function_prefixed
[ { "content": "pub fn read_hash(channel_idx: usize, mix: bool, ctx: &mut Vec<Uint256>) -> Uint256 {\n\n\tlet val = read_bytes(channel_idx, mix, ctx);\n\n\treturn val;\n\n}\n\n\n\n\n\n\n\n\n", "file_path": "verifier/src/verifier_channel.rs", "rank": 0, "score": 340848.0792643196 }, { "content": "fn read_bytes(channel_idx: usize, mix: bool, ctx: &mut Vec<Uint256>) -> Uint256 {\n\n\n\n\tlet proof_idx = uint256_ops::to_usize( &ctx[channel_idx] );\n\n\tlet val = ctx[proof_idx].clone(); \n\n\tctx[channel_idx] = uint256_ops::from_usize( proof_idx + 1 );\n\n\tprintln!(\"proof_ptr: {}\", ctx[channel_idx]);\n\n\n\n\tif mix {\n\n\t\t //Prng.mixSeedWithBytes(get_prng_ptr(channelPtr), abi.encodePacked(val));\n\n\t\t let digest_idx = channel_idx + 1;\n\n\t\t let counter_idx = channel_idx + 2;\n\n\n\n\t\t ctx[counter_idx] = val.clone();\n\n\n\n\t\tlet mut combined_data: [u8; 64] = [0; 64];\n\n\t\tlet bytes_1 = uint256_ops::to_fixed_bytes( &ctx[digest_idx] );\n\n\t\tlet bytes_2 = uint256_ops::to_fixed_bytes( &ctx[digest_idx+1] );\n\n\t\tfor i in 0..32 {\n\n\t\t\tcombined_data[i] = bytes_1[i];\n\n\t\t\tcombined_data[i + 32] = bytes_2[i];\n\n\t\t}\n\n\t\t// prng.digest := keccak256(digest||val), nonce was written earlier.\n\n\t\tctx[digest_idx] = uint256_ops::keccak_256(&combined_data);\n\n\t\t// prng.counter := 0.\n\n\t\tctx[counter_idx] = uint256_ops::get_uint256(\"0\");\n\n\t}\n\n\n\n\treturn val;\n\n}\n", "file_path": "verifier/src/verifier_channel.rs", "rank": 1, "score": 340266.66397022526 }, { "content": "pub fn read_field_elements(channel_idx: usize, mix: bool, ctx: &mut Vec<Uint256>) -> Uint256 {\n\n\tlet result = read_bytes(channel_idx, mix, ctx).to_bytes_le();\n\n\treturn prime_field::from_montgomery( Uint256::from_bytes_le( &result ) );\n\n}\n\n\n\n\n", "file_path": "verifier/src/verifier_channel.rs", "rank": 2, "score": 334793.83343096334 }, { "content": "pub fn init_channel(channel_offset: usize, proof_offset: usize, public_input_hash: Uint256, ctx: &mut Vec<Uint256>) {\n\n\tctx[channel_offset] = uint256_ops::from_usize(proof_offset);\n\n\tinit_prng( get_prng_ptr(channel_offset), public_input_hash, ctx );\n\n}\n\n\n", "file_path": "verifier/src/verifier_channel.rs", "rank": 3, "score": 319142.66906716395 }, { "content": "pub fn verify_pow(channel_idx: usize, pow_bits: usize, ctx: &mut Vec<Uint256>, quarter_read_ptr: usize) {\n\n\tif pow_bits == 0 {\n\n\t\treturn;\n\n\t}\n\n\n\n\tlet mut bytes_bank: [u8; 41] = [0; 41];\n\n\n\n\t//Init bytes bank wih pow_val || digest || pow_bits \n\n\tlet pow_val_bytes: [u8; 32] = uint256_ops::to_fixed_bytes( &uint256_ops::get_uint256(\"0123456789abcded000000000000000000000000000000000000000000000000\") ); //0x0123456789abcded\n\n\tlet digest_bytes = uint256_ops::to_fixed_bytes(&ctx[channel_idx + 1]);\n\n\tfor i in 0..8 {\n\n\t\tbytes_bank[i] = pow_val_bytes[i];\n\n\t}\n\n\tfor i in 0..digest_bytes.len() {\n\n\t\tbytes_bank[i+8] = digest_bytes[i];\n\n\t}\n\n\tbytes_bank[40] = pow_bits as u8;\n\n\t//Do a Keccak on 42 bytes of 0-7: POW requirments, 8-46: digest, 41-42: pow_bits\n\n\tlet hash_bytes = uint256_ops::keccak_256(&bytes_bank).to_bytes_be();\n\n\t//Write hash to bytes_bank\n", "file_path": "verifier/src/verifier_channel.rs", "rank": 4, "score": 313589.57515868725 }, { "content": "pub fn send_field_elements(channel_idx: usize, n_elements: usize, target_idx_input: usize, ctx: &mut Vec<Uint256>) {\n\n\tassert!(n_elements < 0x1000000); //Overflow protection failed\n\n\n\n\tlet digest_idx = channel_idx + 1;\n\n\tlet counter_idx = channel_idx + 2;\n\n\tlet 
mut target_idx = target_idx_input;\n\n\n\n\tlet mask = uint256_ops::get_uint256(\"fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff\");\n\n\n\n\tlet end_idx = target_idx + n_elements;\n\n\twhile target_idx < end_idx {\n\n\n\n\t\tlet mut field_element = prime_field::get_k_modulus();\n\n\t\twhile field_element >= prime_field::get_k_modulus() {\n\n\n\n \tlet mut combined_data: [u8; 64] = [0; 64];\n\n\t\t\tlet digest_bytes = uint256_ops::to_fixed_bytes( &ctx[digest_idx] );\n\n\t\t\tlet counter_bytes = uint256_ops::to_fixed_bytes( &ctx[digest_idx+1] );\n\n \tfor i in 0..32 {\n\n \tcombined_data[i] = digest_bytes[i];\n", "file_path": "verifier/src/verifier_channel.rs", "rank": 5, "score": 312858.43513423234 }, { "content": "fn get_random_bytes(prng_idx: usize, ctx: &mut Vec<Uint256>) -> Uint256 {\n\n\tlet ( digest0, counter0) = load_prng(prng_idx, ctx);\n\n\n\n\t // returns 32 bytes (for random field elements or four queries at a time).\n\n\t let (digest, counter, random_bytes) = get_random_bytes_inner(digest0, counter0, ctx);\n\n\n\n\t store_prng(prng_idx, digest, counter, ctx);\n\n\t\n\n\t return random_bytes;\n\n}\n\n\n\n// fn mixSeedWithBytes(prng_idx: usize, data_bytes: &[u8], ctx: &mut Vec<Uint256>) {\n\n// \t// let digest = uint256_ops::make_copy( &ctx[prng_idx] );\n\n// \t// TODO: Implement init_prng(prngPtr, keccak256(abi.encodePacked(digest, dataBytes)));\n\n// }\n\n\n", "file_path": "verifier/src/verifier_channel.rs", "rank": 6, "score": 305483.89379209507 }, { "content": "pub fn air_specific_init(public_input: & Vec<Uint256>, ctx: & mut Vec<Uint256>) -> Uint256 {\n\n assert!(public_input.len() >= pub_input::OFFSET_PUBLIC_MEMORY); //public_input is too short\n\n\n\n // Context for generated code\n\n ctx[map::MM_OFFSET_SIZE] = prime_field::fpow(&uint256_ops::get_uint256(\"2\"), &uint256_ops::get_uint256(\"10\"));\n\n ctx[map::MM_HALF_OFFSET_SIZE] = prime_field::fpow(&uint256_ops::get_uint256(\"2\"), &uint256_ops::get_uint256(\"F\"));\n\n\n\n // Number of steps\n\n let log_n_steps = public_input[pub_input::OFFSET_LOG_N_STEPS].clone();\n\n assert!(log_n_steps < uint256_ops::get_uint256(\"32\")); //Number of steps is too large, steps >= 50\n\n ctx[map::MM_LOG_N_STEPS] = log_n_steps.clone();\n\n let log_trace_length = log_n_steps.clone() + uint256_ops::from_usize(stark_params::LOG_CPU_COMPONENT_HEIGHT);\n\n\n\n // Range check limits.\n\n ctx[map::MM_RC_MIN] = public_input[pub_input::OFFSET_RC_MIN].clone();\n\n ctx[map::MM_RC_MAX] = public_input[pub_input::OFFSET_RC_MAX].clone();\n\n assert!(ctx[map::MM_RC_MIN] <= ctx[map::MM_RC_MAX]); //rc_min must be <= rc_max\n\n assert!(ctx[map::MM_RC_MAX] <= ctx[map::MM_OFFSET_SIZE]); //rc_max out of range\n\n\n\n // Layout\n", "file_path": "verifier/src/verify_proof.rs", "rank": 7, "score": 303028.0175307742 }, { "content": "pub fn read_last_fri_layer(ctx: & mut Vec<Uint256>) {\n\n let fri_last_layer_deg_bound = ctx[map::MM_FRI_LAST_LAYER_DEG_BOUND].clone();\n\n let last_layer_idx: usize = uint256_ops::to_usize(&ctx[map::MM_CHANNEL]);\n\n //let last_layer_idx = ctx[channel_idx];\n\n let mut bad_input = 0;\n\n\n\n let prime_minus_one = prime_field::get_k_modulus() - prime_field::get_one_val();\n\n let channel_idx = 1 + map::MM_CHANNEL;\n\n\n\n // Make sure all the values are valid field elements.\n\n let last_layer_end = last_layer_idx + uint256_ops::to_usize( &fri_last_layer_deg_bound );\n\n let mut coefs_idx = last_layer_idx;\n\n while coefs_idx < last_layer_end {\n\n if ctx[coefs_idx] > prime_minus_one {\n\n bad_input = 1;\n\n 
break;\n\n }\n\n coefs_idx += 1;\n\n }\n\n\n", "file_path": "verifier/src/verify_proof.rs", "rank": 8, "score": 300057.25034766 }, { "content": "pub fn get_fri_steps(ctx: &mut Vec<Uint256>) -> Vec<Uint256> {\n\n\tlet len = uint256_ops::to_usize(\n\n\t\t&ctx[\n\n\t\t\tuint256_ops::to_usize( &ctx[map::MM_FRI_STEPS_PTR] )\n\n\t\t]\n\n\t);\n\n\n\n\tlet mut fri_steps: Vec<Uint256> = vec![];\n\n\tfor i in 0..len {\n\n\t\tfri_steps.push(\n\n\t\t\tctx[\n\n\t\t\t\tuint256_ops::to_usize(&ctx[map::MM_FRI_STEPS_PTR + i + 1])\n\n\t\t\t].clone()\n\n\t\t);\n\n\t}\n\n\treturn fri_steps;\n\n}\n\n\n\n\n\n\n", "file_path": "verifier/src/fri.rs", "rank": 9, "score": 295919.41546120384 }, { "content": "pub fn init_verifier_params(public_input: & Vec<Uint256>, proof_params: & Vec<Uint256>, ctx: & mut Vec<Uint256>) {\n\n assert!(proof_params.len() > PROOF_PARAMS_FRI_STEPS_OFFSET); //Invalid proof_params\n\n assert!(proof_params.len() == PROOF_PARAMS_FRI_STEPS_OFFSET + uint256_ops::to_usize(&proof_params[PROOF_PARAMS_N_FRI_STEPS_OFFSET])); //Invalid proof_params\n\n\n\n let log_blowup_factor = proof_params[PROOF_PARAMS_LOG_BLOWUP_FACTOR_OFFSET].clone();\n\n assert!(log_blowup_factor <= uint256_ops::get_uint256(\"10\")); //log_blowup_factor must be at most 16\n\n assert!(log_blowup_factor >= uint256_ops::get_uint256(\"1\")); //log_blowup_factor must be at least 1\n\n\n\n let pow_bits = proof_params[PROOF_PARAMS_PROOF_OF_WORK_BITS_OFFSET].clone();\n\n assert!(pow_bits <= uint256_ops::get_uint256(\"32\")); //pow_bits must be at most 50\n\n assert!( uint256_ops::to_usize(&pow_bits) >= MIN_PROOF_OF_WORK_BITS ); //MIN_PROOF_OF_WORK_BITS\n\n assert!( uint256_ops::to_usize(&pow_bits) < NUM_SECURITY_BITS ); //Proofs may not be purely based on PoW\n\n\n\n let log_fri_last_layer_deg_bound = proof_params[PROOF_PARAMS_FRI_LAST_LAYER_DEG_BOUND_OFFSET].clone();\n\n assert!(log_fri_last_layer_deg_bound <= uint256_ops::get_uint256(\"A\")); //log_fri_last_layer_deg_bound must be at most 10\n\n\n\n let n_fri_steps = uint256_ops::to_usize(&proof_params[PROOF_PARAMS_N_FRI_STEPS_OFFSET]);\n\n assert!(n_fri_steps <= 10); //Too many fri steps\n\n assert!(n_fri_steps > 1); //Not enough fri steps\n\n\n", "file_path": "verifier/src/verify_proof.rs", "rank": 10, "score": 292587.0937912159 }, { "content": "pub fn oods_check_layout_1(ctx: &mut Vec<Uint256>) -> (usize, usize) {\n\n\n\n\tlet mut batch_inverse_vec: Vec<Uint256> = vec![ uint256_ops::get_uint256(\"0\"); 2 * BATCH_INVERSE_SIZE ];\n\n\n\n\toods_prepare_inverses_layout_1(ctx, &mut batch_inverse_vec);\n\n\n\n\tlet mut fri_queue = 110;\n\n\tlet fri_queue_end = fri_queue + 3* uint256_ops::to_usize(&ctx[10]);\n\n\tlet mut trace_query_response = 1160;\n\n\tlet mut composition_query_responses = 2216;\n\n\n\n\t//Points to first actual value in batch_inverse_vec\n\n\t// The content of batchInverseOut is described in oodsPrepareInverses.\n\n\tlet mut denominators_idx = 1;\n\n\n\n\t while fri_queue < fri_queue_end {\n\n\t\t // res accumulates numbers modulo PRIME. 
Since 31*PRIME < 2**256, we may add up to\n\n\t\t// 31 numbers without fear of overflow, and use addmod modulo PRIME only every\n\n\t\t// 31 iterations, and once more at the very end.\n\n\t\tlet mut res = uint256_ops::get_uint256(\"0\");\n", "file_path": "verifier/src/oods_check.rs", "rank": 11, "score": 289901.83998472494 }, { "content": "fn init_prng(prng_offset: usize, public_input_hash: Uint256, ctx: &mut Vec<Uint256>) {\n\n\tstore_prng(prng_offset, public_input_hash, uint256_ops::get_uint256(\"0\"), ctx );\n\n}\n\n\n", "file_path": "verifier/src/verifier_channel.rs", "rank": 12, "score": 288755.43844378367 }, { "content": "// Verify the last set of coefficents sent from the prover\n\npub fn verify_last_layer(ctx: & mut Vec<Uint256>, n_points: usize) {\n\n\n\n\tlet fri_last_deg_bound = uint256_ops::to_usize( &ctx[map::MM_FRI_LAST_LAYER_DEG_BOUND] );\n\n\tlet group_order_minus_1 = uint256_ops::from_usize(fri_last_deg_bound) * ctx[map::MM_BLOW_UP_FACTOR].clone() - uint256_ops::get_uint256(\"0\");\n\n\tlet coeff_start = uint256_ops::to_usize( &ctx[map::MM_FRI_LAST_LAYER_PTR] );\n\n\n\n\tfor i in 0..n_points {\n\n\t\t\n\n\t\tlet mut point = ctx[map::MM_FRI_QUEUE + 3*i + 2].clone();\n\n\n\n\t\t// Invert point using inverse(point) == fpow(point, ord(point) - 1)\n\n\t\tpoint = prime_field::fpow(&point, &group_order_minus_1); \n\n\n\n\t\tassert!( horner_eval(coeff_start, point, fri_last_deg_bound, ctx) == ctx[map::MM_FRI_QUEUE + 3*i + 1] ); //Bad Last layer value\n\n\n\n\t}\n\n\t\n\n}\n\n\n\n\n", "file_path": "verifier/src/fri.rs", "rank": 13, "score": 285491.52366621763 }, { "content": "pub fn oods_prepare_inverses_layout_1(ctx: &mut Vec<Uint256>, batch_inverse_vec: &mut Vec<Uint256>) {\n\n\tlet trace_generator = ctx[345].clone();\n\n\t\n\n\t// The array expmodsAndPoints stores subexpressions that are needed\n\n\t// for the denominators computation.\n\n\t// The array is segmented as follows:\n\n\t// expmodsAndPoints[0:19] (.expmods) expmods used during calculations of the points below.\n\n\t// expmodsAndPoints[19:101] (.points) points used during the denominators calculation.\n\n\tlet mut exp_mods_and_points: Vec<Uint256> = vec![ uint256_ops::get_uint256(\"0\"); 101 ];\n\n\n\n\n\n\t/* Prepare expmods for computations of trace generator powers */\n\n\texp_mods_and_points[0] = prime_field::fmul(\n\n\t\ttrace_generator.clone(), trace_generator.clone() \n\n\t);\n\n\n\n\texp_mods_and_points[1] = prime_field::fmul(\n\n\t\texp_mods_and_points[0].clone(), trace_generator.clone() \n\n\t);\n\n\n", "file_path": "verifier/src/oods_check.rs", "rank": 14, "score": 283407.0923727697 }, { "content": "pub fn fri_verify_layers(ctx: & mut Vec<Uint256>) {\n\n\n\n assert!(map::MAX_SUPPORTED_MAX_FRI_STEP == FRI_MAX_FRI_STEP); //Incosistent MAX_FRI_STEP between MemoryMap.sol and FriLayer.sol\n\n\n\n\tlet fri_ctx = map::MM_FRI_CTX;\n\n\tinit_fri_groups(fri_ctx, ctx);\n\n\n\n\tlet channel_idx = map::MM_CHANNEL;\n\n\tlet merkle_queue_idx = map::MM_MERKLE_QUEUE;\n\n\tlet fri_queue = map::MM_FRI_QUEUE;\n\n\n\n\tlet mut fri_step = 1;\n\n\tlet mut n_live_queries = uint256_ops::to_usize(&ctx[map::MM_N_UNIQUE_QUERIES]);\n\n\n\n\t// Add 0 at the end of the queries array to avoid empty array check in readNextElment.\n\n ctx[map::MM_FRI_QUERIES_DELIMITER] = uint256_ops::get_uint256(\"0\");\n\n\n\n\t// Rather than converting all the values from Montgomery to standard form,\n\n\t// we can just pretend that the values are in standard form but all\n\n\t// the committed polynomials are multiplied by MontgomeryR.\n", "file_path": 
"verifier/src/fri.rs", "rank": 15, "score": 283261.2504935258 }, { "content": "pub fn oods_check(ctx: &mut Vec<Uint256>) {\n\n\toods_check_layout_1(ctx);\n\n}\n\n\n\n/* ------------------------------------------\n\n\t\tLAYOUT 1 OODS\n\n ------------------------------------------- */\n\n\n\n// For each query point we want to invert (2 + N_ROWS_IN_MASK) items:\n\n// The query point itself (x).\n\n// The denominator for the constraint polynomial (x-z^constraintDegree)\n\n// [(x-(g^rowNumber)z) for rowNumber in mask].\n\n static BATCH_INVERSE_CHUNK: usize = 2 + stark_params::N_ROWS_IN_MASK;\n\n static BATCH_INVERSE_SIZE: usize = BATCH_INVERSE_CHUNK + mem_map::MAX_N_QUERIES;\n\n\n\n\n\n/*\n\n\tBuilds and sums boundary constraints that check that the prover provided the proper evaluations\n\n\tout of domain evaluations for the trace and composition columns.\n\n\tThe inputs to this function are:\n", "file_path": "verifier/src/oods_check.rs", "rank": 16, "score": 282075.29421550914 }, { "content": "fn get_random_bytes_inner(digest: Uint256, counter: Uint256, ctx: &mut Vec<Uint256>) -> (Uint256, Uint256, Uint256) {\n\n\tlet prime_mask = uint256_ops::get_uint256(\"fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff\");\n\n\t\n\n\t// Do: Keccak( digest || counter)\n\n let mut combined_data: [u8; 64] = [0; 64];\n\n\tlet digest_bytes = uint256_ops::to_fixed_bytes(&digest);\n\n\tlet counter_bytes = uint256_ops::to_fixed_bytes(&counter);\n\n\tfor i in 0..32 {\n\n\t\tcombined_data[i] = digest_bytes[i];\n\n\t\tcombined_data[i + 32] = counter_bytes[i];\n\n\t}\n\n\tlet hash = uint256_ops::keccak_256(&combined_data);\n\n\n\n\treturn ( digest, counter + uint256_ops::get_uint256(\"1\"), uint256_ops::bitwise_and( &hash, &prime_mask ) );\n\n}\n\n\n", "file_path": "verifier/src/verifier_channel.rs", "rank": 17, "score": 272929.3053319012 }, { "content": "pub fn compute_first_fri_layer(ctx: & mut Vec<Uint256>) {\n\n //Prepare evaluation point\n\n adjust_query_indicies_and_prepare_eval_points(ctx);\n\n\n\n //Read and decommit trace\n\n read_query_responses_and_decommit(\n\n ctx, \n\n stark_params::N_COLUMNS_IN_MASK, \n\n stark_params::N_COLUMNS_IN_TRACE0, \n\n map::MM_TRACE_QUERY_RESPONSES, \n\n ctx[map::MM_TRACE_COMMITMENT].clone()\n\n );\n\n\n\n if stark_params::N_COLUMNS_IN_TRACE1 > 0 { //true - hash (simulated) interaction\n\n //Read and decommit second trace\n\n read_query_responses_and_decommit(\n\n ctx, \n\n stark_params::N_COLUMNS_IN_MASK, \n\n stark_params::N_COLUMNS_IN_TRACE1, \n\n map::MM_TRACE_QUERY_RESPONSES + stark_params::N_COLUMNS_IN_TRACE0, \n", "file_path": "verifier/src/verify_proof.rs", "rank": 18, "score": 272276.0596365419 }, { "content": "fn store_prng(state_idx: usize, digest: Uint256, counter: Uint256, ctx: &mut Vec<Uint256>) {\n\n\tctx[state_idx] = digest.clone();\n\n\tctx[state_idx + 1] = counter.clone();\n\n}\n\n\n", "file_path": "verifier/src/verifier_channel.rs", "rank": 19, "score": 267142.5593583499 }, { "content": "fn init_fri_groups(fri_ctx: usize, ctx: & mut Vec<Uint256>) {\n\n\tlet fri_group_idx = fri_ctx + FRI_CTX_TO_FRI_GROUP_OFFSET;\n\n\tlet fri_half_inv_group_idx = fri_ctx + FRI_CTX_TO_FRI_HALF_INV_GROUP_OFFSET;\n\n\n\n\t// FRI_GROUP_GEN is the coset generator.\n\n\t// Raising it to the (MAX_COSET_SIZE - 1) power gives us the inverse\n\n\tlet gen_fri_group = uint256_ops::get_uint256(FRI_GROUP_GEN);\n\n\n\n\tlet gen_fri_group_inv = prime_field::fpow( &gen_fri_group, &uint256_ops::from_usize(MAX_COSET_SIZE - 1) );\n\n\n\n\tctx[fri_half_inv_group_idx] = 
uint256_ops::get_uint256(\"1\");\n\n\tctx[fri_group_idx] = uint256_ops::get_uint256(\"1\");\n\n\tctx[fri_group_idx + 1] = prime_field::get_k_modulus() - uint256_ops::get_uint256(\"1\"); //PRIME - 1s\n\n\n\n\tlet mut last_val = uint256_ops::get_uint256(\"1\");\n\n\tlet mut last_val_inv = uint256_ops::get_uint256(\"1\"); \n\n\n\n\t// To compute [1, -1 (== g^n/2), g^n/4, -g^n/4, ...]\n\n\t// we compute half the elements and derive the rest using negation.\n\n\tfor i in 1..MAX_COSET_SIZE/2 {\n", "file_path": "verifier/src/fri.rs", "rank": 20, "score": 259304.6170709284 }, { "content": "fn load_prng(state_idx: usize, ctx: & Vec<Uint256>) -> (Uint256, Uint256) {\n\n\treturn ( uint256_ops::make_copy( &ctx[state_idx] ), uint256_ops::make_copy( &ctx[state_idx + 1] ) );\n\n}\n\n\n\n/* Auxiliary function for get_random_bytes */\n", "file_path": "verifier/src/verifier_channel.rs", "rank": 21, "score": 254304.3108842886 }, { "content": "fn get_prng_digest(prng_idx: usize, ctx: & Vec<Uint256>) -> Uint256 {\n\n\treturn uint256_ops::make_copy( &ctx[prng_idx] );\n\n}\n\n\n", "file_path": "verifier/src/verifier_channel.rs", "rank": 22, "score": 247487.44849291584 }, { "content": "pub fn oods_consistency_check(ctx: & mut Vec<Uint256>, registry: & HashMap<Uint256, bool>) {\n\n //Checks that information on memory pages (used from data by prover) is consistent with z, alpha values \n\n //TODO: DO we even need it? Uncomment when fixed//memory_fact_registry::verify_memory_page_facts(ctx, registry);\n\n\n\n let oods_point = ctx[map::MM_OODS_POINT].clone();\n\n\n\n //TODO: Test when remix stops tweaking\n\n println!(\"oods_point: {}\", oods_point);\n\n\n\n // The number of copies in the pedersen hash periodic columns is\n\n // nSteps / PEDERSEN_BUILTIN_RATIO / PEDERSEN_BUILTIN_REPETITIONS\n\n let n_penderson_hash_copies = uint256_ops::safe_div(\n\n &prime_field::fpow(&uint256_ops::get_uint256(\"2\"), &ctx[map::MM_LOG_N_STEPS]), \n\n &uint256_ops::from_usize(stark_params::PEDERSEN_BUILTIN_RATIO * stark_params::PEDERSEN_BUILTIN_REPETITIONS)\n\n );\n\n let z_point_pow_penderson = prime_field::fpow( &oods_point, &n_penderson_hash_copies );\n\n\n\n ctx[map::MM_PERIODIC_COLUMN__PEDERSEN__POINTS__X] = penderson_hash_x::compute(z_point_pow_penderson.clone());\n\n ctx[map::MM_PERIODIC_COLUMN__PEDERSEN__POINTS__Y] = penderson_hash_y::compute(z_point_pow_penderson.clone());\n\n\n", "file_path": "verifier/src/verify_proof.rs", "rank": 23, "score": 247447.3711206837 }, { "content": "//Calculates: Polynomial Constraints\n\n//RETURNS: compositionFromTraceValue - Uint256\n\npub fn get_composition_from_trace_val(ctx: &Vec<Uint256>) -> Uint256{\n\n\n\n\n\n\t// ------ DEBUGGING ---------\n\n\t// println!(\"\\n\\nctx:\\n\");\n\n\t// for i in map::MM_PERIODIC_COLUMN__PEDERSEN__POINTS__X..map::MM_CONSTRAINT_POLY_ARGS_END {\n\n\t// \tprintln!(\"\\\"{}\\\",\", ctx[i]);\n\n\t// }\n\n\t// println!(\"\\n\\n\");\n\n\t// ------ DEBUGGING ---------\n\n\t\n\n\n\n\tlet mut res = uint256_ops::get_uint256(\"0\");\n\n\n\n\tlet point = ctx[map::MM_OODS_POINT].clone();\n\n\n\n\t//Calculate and store commonly used exponents\n\n\tlet mut exp_mods: Vec<Uint256> = vec![uint256_ops::get_uint256(\"0\"); 21];\n\n\texp_mods[0] = prime_field::fpow(\n\n\t\t&point, &(ctx[map::MM_TRACE_LENGTH].clone()) );\n", "file_path": "verifier/src/polynomial_contrainsts.rs", "rank": 24, "score": 245697.9493834843 }, { "content": "pub fn compute_public_memory_quotient(ctx: &Vec<Uint256>) -> Uint256 {\n\n\tlet n_values = ctx[map::MM_N_PUBLIC_MEM_ENTRIES].clone();\n\n\tlet z = 
ctx[map::MM_MEMORY__MULTI_COLUMN_PERM__PERM__INTERACTION_ELM].clone();\n\n\tlet alpha = ctx[map::MM_MEMORY__MULTI_COLUMN_PERM__HASH_INTERACTION_ELM0].clone();\n\n\t// The size that is allocated to the public memory.\n\n\tlet pub_mem_size = uint256_ops::safe_div( &ctx[map::MM_TRACE_LENGTH], &uint256_ops::from_usize(PUBLIC_MEMORY_STEP) );\n\n\n\n\tassert!( n_values < uint256_ops::get_uint256(\"1000000\") ); //Overflow protection failed\n\n\tassert!( n_values < pub_mem_size ); //Number of values of public memory is too large\n\n\n\n\tlet n_pub_mem_pages = uint256_ops::to_usize(&ctx[map::MM_N_PUBLIC_MEM_PAGES]);\n\n\tlet cumulative_prod_ptr = \n\n\t\tuint256_ops::to_usize(&ctx[map::MM_PUBLIC_INPUT_PTR]) + public_input_offsets::get_offset_page_prod(0, n_pub_mem_pages);\n\n\tlet mut denominator = compute_pubic_memory_prod( cumulative_prod_ptr, n_pub_mem_pages, ctx );\n\n\n\n\t// Compute address + alpha * value for the first address-value pair for padding\n\n\tlet public_input_ptr = uint256_ops::to_usize( &ctx[map::MM_PUBLIC_INPUT_PTR] );\n\n\n\n\tlet padding_addr = ctx[public_input_ptr + public_input_offsets::OFFSET_PUBLIC_MEMORY_PADDING_ADDR].clone();\n\n\tlet padding_val = ctx[public_input_ptr + public_input_offsets::OFFSET_PUBLIC_MEMORY_PADDING_ADDR + 1].clone();\n", "file_path": "verifier/src/memory_fact_registry.rs", "rank": 25, "score": 241499.13009599078 }, { "content": "//Calculates hash asscoiated with public input\n\npub fn get_public_input_hash(public_input: &Vec<Uint256>) -> Uint256{\n\n\t// The initial seed consists of the first part of public_input. Specifically, it does not\n\n\t// include the page products (which are only known later in the process, as they depend on\n\n\t// the values of z and alpha)\n\n\tlet n_pages = uint256_ops::to_usize( &public_input[OFFSET_N_PUBLIC_MEMORY_PAGES] );\n\n\tlet pub_input_size_for_hash = get_offset_page_prod(0, n_pages);\n\n\n\n\tlet mut combined_data: Vec<u8> = vec![0; 32*pub_input_size_for_hash];\n\n\tfor i in 0..pub_input_size_for_hash { //TODO: is it starting from 1 or 0?\n\n\t\tlet bytes = uint256_ops::to_fixed_bytes( &public_input[i] );\n\n\t\tfor j in 0..bytes.len() {\n\n\t\t\tcombined_data[32*i + j] = bytes[j];\n\n\t\t}\n\n\t}\n\n\t\n\n\treturn uint256_ops::keccak_256( &combined_data );\n\n\n\n}\n\n\n\n\n", "file_path": "verifier/src/public_input_offsets.rs", "rank": 26, "score": 237910.2844107916 }, { "content": "fn adjust_query_indicies_and_prepare_eval_points(ctx: & mut Vec<Uint256>) {\n\n let n_unique_queries = uint256_ops::to_usize(&ctx[map::MM_N_UNIQUE_QUERIES]);\n\n let mut fri_queue = map::MM_FRI_QUEUE;\n\n let fri_queue_end = fri_queue + 3*n_unique_queries;\n\n let eval_domain_size = uint256_ops::to_usize(&ctx[map::MM_EVAL_DOMAIN_SIZE]);\n\n let log_eval_domain_size = uint256_ops::to_usize(&ctx[map::MM_LOG_EVAL_DOMAIN_SIZE]);\n\n let eval_domain_generator = ctx[map::MM_EVAL_DOMAIN_GENERATOR].clone();\n\n\n\n while fri_queue < fri_queue_end {\n\n let mut query_idx = uint256_ops::to_usize(&ctx[fri_queue]);\n\n\n\n // // Adjust queryIdx, see comment in function description.\n\n let adjusted_query_idx = query_idx + eval_domain_size;\n\n\n\n // Compute the evaluation point corresponding to the current queryIdx. 
\n\n ctx[map::MM_OODS_EVAL_POINTS] = prime_field::fpow(\n\n &eval_domain_generator, & uint256_ops::from_usize( bit_reverse(query_idx, log_eval_domain_size) )\n\n );\n\n\n\n fri_queue += 3;\n\n }\n\n}\n", "file_path": "verifier/src/verify_proof.rs", "rank": 27, "score": 231361.19063960598 }, { "content": "//Performs a keckkack256 hash on the input bytes and return a Uint256\n\npub fn keccak_256(input_data: &[u8]) -> Uint256 {\n\n let mut hasher = Keccak256::new();\n\n hasher.update(input_data);\n\n let result = hasher.finalize();\n\n let result_bytes = result.as_slice();\n\n\n\n return Uint256::from_bytes_be( &result_bytes );\n\n}\n\n\n", "file_path": "verifier/src/uint256_ops.rs", "rank": 28, "score": 221001.2444937661 }, { "content": "pub fn verify_memory_page_facts(ctx: & Vec<Uint256>, registry: & HashMap<Uint256, bool>) {\n\n let n_pub_mem_pages = uint256_ops::to_usize(&ctx[map::MM_N_PUBLIC_MEM_PAGES]);\n\n\n\n for page in 0..n_pub_mem_pages {\n\n // Fetch page values from the public input (hash, product and size)\n\n let mem_hash_ptr = uint256_ops::to_usize(&ctx[map::MM_PUBLIC_INPUT_PTR]) + public_input_offsets::get_offset_page_hash(page);\n\n let prod_ptr = uint256_ops::to_usize(&ctx[map::MM_PUBLIC_INPUT_PTR]) + public_input_offsets::get_offset_page_prod(page, n_pub_mem_pages);\n\n let page_size_ptr = uint256_ops::to_usize(&ctx[map::MM_PUBLIC_INPUT_PTR]) + public_input_offsets::get_offset_page_size(page);\n\n\n\n let page_size = ctx[page_size_ptr].clone();\n\n let prod = ctx[prod_ptr].clone();\n\n let mem_hash = ctx[mem_hash_ptr].clone();\n\n\n\n let mut page_addr = 0;\n\n if page > 0 {\n\n page_addr = uint256_ops::to_usize(&ctx[ \n\n uint256_ops::to_usize(&ctx[map::MM_PUBLIC_INPUT_PTR]) + public_input_offsets::get_offset_page_addr(page) \n\n ]);\n\n }\n\n let page_type = if page == 0 { REGULAR_PAGE } else { CONTINUOUS_PAGE };\n", "file_path": "verifier/src/memory_fact_registry.rs", "rank": 30, "score": 210586.59025514452 }, { "content": "pub fn to_usize(val: & Uint256) -> usize {\n\n let mut bytes: [u8; 8] = [0; 8];\n\n let val_bytes = val.to_bytes_le();\n\n for i in 0..7 {\n\n let mut byte = 0;\n\n if i < val_bytes.len() { byte = val_bytes[i]; }\n\n bytes[i] = byte;\n\n }\n\n return usize::from_le_bytes( bytes );\n\n}\n\n\n", "file_path": "verifier/src/uint256_ops.rs", "rank": 31, "score": 205856.9173201166 }, { "content": "pub fn from_usize(val: usize) -> Uint256 {\n\n let val_bytes = val.to_le_bytes();\n\n return Uint256::from_bytes_le( &val_bytes );\n\n}\n\n\n", "file_path": "verifier/src/uint256_ops.rs", "rank": 32, "score": 205856.91732011654 }, { "content": "pub fn get_bootload_program() -> Vec<Uint256> {\n\n\tlet prog_bigint = get_bootload_program_bigint();\n\n\n\n\tlet mut prog: Vec<Uint256> = vec![];\n\n\n\n\tfor i in 0..prog_bigint.len() {\n\n\t\tprog.push( Uint256::from_bytes_le( &prog_bigint[i].to_bytes_le() ) );\n\n\t}\n\n\n\n\treturn prog;\n\n}\n\n\n\n\n\n\n", "file_path": "verifier/src/cairo_bootloader.rs", "rank": 33, "score": 205267.82968039723 }, { "content": "pub fn validate_params(fri_steps: & mut Vec<Uint256>, log_trace_length: Uint256, log_fri_last_layer_deg_bound: Uint256) {\n\n assert!(fri_steps[0] == uint256_ops::get_uint256(\"0\")); //Only eta0 == 0 is currently supported\n\n\n\n let mut expected_log_deg_bound = log_fri_last_layer_deg_bound;\n\n let n_fri_steps = fri_steps.len();\n\n for i in 1..n_fri_steps {\n\n let fri_step = fri_steps[i].clone();\n\n assert!(fri_step > uint256_ops::get_uint256(\"0\")); // Only the first fri step can be 0\n\n 
assert!(fri_step <= uint256_ops::get_uint256(\"4\")); //Max supported fri step is 4.\n\n expected_log_deg_bound += fri_step;\n\n }\n\n\n\n // FRI starts with a polynomial of degree 'traceLength'.\n\n // After applying all the FRI steps we expect to get a polynomial of degree less\n\n // than friLastLayerDegBound.\n\n assert!(expected_log_deg_bound == log_trace_length); //Fri params do not match trace length\n\n}\n\n\n\n\n\n\n\n\n\n/* -------------------------\n\n\tFRI Protocol Code\n\n\t\tVerifies that the compositional polynomial sent by the prover \n\n\t\tis valid in poly-log(degree) time\n\n --------------------------- */\n\n\n\n\n", "file_path": "verifier/src/fri.rs", "rank": 34, "score": 204844.50726488075 }, { "content": "//Returns 32 bytes corresponding to big endian of val\n\n// Used in Keccak to emulate behaviour of EVM memory\n\npub fn to_fixed_bytes(val: & Uint256) -> [u8; 32] {\n\n let mut fixed_bytes: [u8; 32] = [0; 32];\n\n let val_bytes = val.to_bytes_be();\n\n for i in 0..val_bytes.len() {\n\n fixed_bytes[32 - val_bytes.len() + i] = val_bytes[i];\n\n }\n\n return fixed_bytes;\n\n}\n\n\n", "file_path": "verifier/src/uint256_ops.rs", "rank": 35, "score": 203897.86557540658 }, { "content": "pub fn read_query_responses_and_decommit(\n\n ctx: & mut Vec<Uint256>, \n\n n_total_columns: usize, \n\n n_columns: usize, \n\n proof_data_idx: usize, \n\n merkle_root: Uint256\n\n) {\n\n assert!( n_columns <= stark_params::N_COLUMNS_IN_MASK + stark_params::CONSTRAINTS_DEGREE_BOUND ); //Too many columns\n\n\n\n let n_unique_queries = ctx[map::MM_N_UNIQUE_QUERIES].clone();\n\n let channel_idx = map::MM_CHANNEL;\n\n let mut fri_queue = map::MM_FRI_QUEUE;\n\n let fri_queue_end = fri_queue + uint256_ops::to_usize(&n_unique_queries) * 3;\n\n let mut merkle_ptr = map::MM_MERKLE_QUEUE;\n\n let l_hash_mask = get_hash_mask();\n\n let proof_data_skip_bytes = n_total_columns - n_columns;\n\n let mut proof_data_i = proof_data_idx; \n\n let mut proof_idx = uint256_ops::to_usize(&ctx[channel_idx]);\n\n\n\n while fri_queue < fri_queue_end {\n", "file_path": "verifier/src/verify_proof.rs", "rank": 36, "score": 179900.89768171142 }, { "content": "pub fn from_montgomery_bytes(bs: &[u8]) -> Uint256 {\n\n let val = Uint256::from_bytes_le(bs);\n\n return from_montgomery(val);\n\n}\n\n\n\n\n\n/* --------------\n\n Unit Testing\n\n------------------ */\n\n#[cfg(test)]\n\nmod tests {\n\n // Note this useful idiom: importing names from outer (for mod tests) scope.\n\n use super::*;\n\n use crate::uint256_ops::get_uint256;\n\n\n\n // #[test]\n\n // fn test_fsub_underflow() {\n\n // let val = fsub( get_uint256(\"0\"), get_uint256(\"1\") );\n\n // assert_eq!(val, get_k_modulus()-get_uint256(\"1\") );\n\n\n", "file_path": "verifier/src/prime_field.rs", "rank": 37, "score": 179160.93252724418 }, { "content": "pub fn bitwise_not(val: Uint256) -> Uint256 {\n\n let val_bytes = val.to_bytes_le();\n\n\n\n let mut result_bytes: [u8; 32] = [0; 32]; \n\n for i in 0..32 {\n\n let mut val = 0;\n\n if i < val_bytes.len() { val = val_bytes[i]; }\n\n result_bytes[i] = !val;\n\n }\n\n\n\n return Uint256::from_bytes_le(&result_bytes);\n\n}\n\n\n\n\n", "file_path": "verifier/src/uint256_ops.rs", "rank": 38, "score": 177482.55743371893 }, { "content": "//Returns copy of Uint256 from reference\n\npub fn make_copy(val: & Uint256) -> Uint256 {\n\n let val_bytes = val.to_bytes_le();\n\n return Uint256::from_bytes_le( &val_bytes );\n\n}\n\n\n", "file_path": "verifier/src/uint256_ops.rs", "rank": 39, "score": 174419.540793107 }, { "content": 
"// Using BigUint allows overflow of Uint256\n\npub fn fadd(a: Uint256, b: Uint256) -> Uint256 {\n\n let add = BigUint::from_bytes_le( &a.to_bytes_le() ) + BigUint::from_bytes_le( &b.to_bytes_le() );\n\n let val_bytes = add.modpow( &BigUint::new(vec![1]) , &BigUint::from_bytes_le( &get_k_modulus().to_bytes_le() ) ).to_bytes_le(); // (a + b) % K_MOD\n\n return Uint256::from_bytes_le(&val_bytes);\n\n}\n\n\n", "file_path": "verifier/src/prime_field.rs", "rank": 40, "score": 174042.95589809053 }, { "content": " ---------------- */\n\npub fn fmul(a: Uint256, b: Uint256) -> Uint256 {\n\n //Convert to BigUint and perform multiplication to avoid overflow\n\n let prod = BigUint::from_bytes_le( &a.to_bytes_le() ) * BigUint::from_bytes_le( &b.to_bytes_le() );\n\n let val_bytes = prod.modpow( &BigUint::new(vec![1]), &BigUint::from_bytes_le( &get_k_modulus().to_bytes_le() ) ).to_bytes_le(); // (a * b) % K_MOD\n\n return Uint256::from_bytes_le(&val_bytes);\n\n}\n\n\n", "file_path": "verifier/src/prime_field.rs", "rank": 41, "score": 174034.0845339513 }, { "content": "pub fn fsub(a: Uint256, b: Uint256) -> Uint256 {\n\n let res = fadd(\n\n a.clone(), get_k_modulus() - b.clone()\n\n );\n\n return res;\n\n}\n\n\n", "file_path": "verifier/src/prime_field.rs", "rank": 42, "score": 174034.0845339513 }, { "content": "pub fn get_public_input_length(n_pages: usize) -> usize {\n\n\treturn OFFSET_PUBLIC_MEMORY + 4 * n_pages - 1;\n\n}", "file_path": "verifier/src/public_input_offsets.rs", "rank": 43, "score": 172354.62193903065 }, { "content": "// Performs bitwise and between 2 vals\n\npub fn bitwise_and(val1: & Uint256, val2: & Uint256) -> Uint256 {\n\n let val1_bytes = val1.to_bytes_le();\n\n let val2_bytes = val2.to_bytes_le();\n\n\n\n let mut result_bytes: [u8; 32] = [0; 32]; \n\n for i in 0..32 {\n\n let mut val1 = 0;\n\n let mut val2 = 0;\n\n if i < val1_bytes.len() { val1 = val1_bytes[i]; }\n\n if i < val2_bytes.len() { val2 = val2_bytes[i]; }\n\n result_bytes[i] = val1 & val2;\n\n }\n\n\n\n return Uint256::from_bytes_le(&result_bytes);\n\n}\n\n\n", "file_path": "verifier/src/uint256_ops.rs", "rank": 44, "score": 171780.61478267296 }, { "content": "pub fn inverse(val: & Uint256) -> Uint256 {\n\n let base = get_k_modulus() - get_uint256(\"2\");\n\n let val_bytes = val.modpow( &base, &get_k_modulus() ).to_bytes_le();\n\n return Uint256::from_bytes_le(&val_bytes);\n\n}\n\n\n", "file_path": "verifier/src/prime_field.rs", "rank": 45, "score": 171739.28128951482 }, { "content": " ------------------------- */\n\npub fn compute(x: Uint256) -> Uint256 {\n\n\tlet mut result = uint256_ops::get_uint256(\"0\");\n\n\n\n\tresult = uint256_ops::get_uint256(\"7e08f9d222cc0764fb5ca69e51ad4cdb7f1b612058568a142bc7a4cdd0e39c4\") + prime_field::fmul(\n\n\t\t\tuint256_ops::get_uint256(\"29f6aa5fc92eab8b8b9871c8449c1f617b808ea9860717f3e5e1678672ec565\") + prime_field::fmul(\n\n\t\t\t\tuint256_ops::get_uint256(\"5115ade709c058be5dc6f406794062642086e431bab03c9a86d53c79aa83db4\") + prime_field::fmul(\n\n\t\t\t\t\tuint256_ops::get_uint256(\"2d6129632b4fc43e4142abf55fe2d1f3e79dfa01c73d8fb56a465dbd07a9682\") + prime_field::fmul(\n\n\t\t\t\t\t\tuint256_ops::get_uint256(\"14f3359ce0d2891d1bc2b6f4d2d6dd71fe22925b8a09f66147db095a9d4983\") + prime_field::fmul(\n\n\t\t\t\t\t\t\tuint256_ops::get_uint256(\"75a127d817aee244517479bab5c4bfc2a0035d43d673badaf64d8adf94353bd\") + prime_field::fmul(\n\n\t\t\t\t\t\t\t\tuint256_ops::get_uint256(\"62b07622f501888a668440d9b856be4b0c3bf12a401fc2bebaeab4a7e1684ad\") + 
prime_field::fmul(\n\n\t\t\t\t\t\t\t\t\tresult.clone(), x.clone()\n\n\t\t\t\t\t\t\t\t), x.clone()\n\n\t\t\t\t\t\t\t), x.clone()\n\n\t\t\t\t\t\t), x.clone()\n\n\t\t\t\t\t), x.clone()\n\n\t\t\t\t), x.clone()\n\n\t\t\t), x.clone()\n\n\t\t);\n\n\n\n\tresult = uint256_ops::get_uint256(\"55e928ba557ed7fe0ecde6d1fbb83d112e6b06a087b4013b9c425fa36eb0415\") + prime_field::fmul(\n", "file_path": "verifier/src/penderson_hash_y_column.rs", "rank": 46, "score": 171739.28128951482 }, { "content": "pub fn from_montgomery(val: Uint256) -> Uint256 {\n\n let prod = fmul( val.clone(), get_k_montgomery_r_inv() );\n\n let val_bytes = prod.modpow( &get_uint256(\"1\") , &get_k_modulus() ).to_bytes_le(); // (val * montgomery_inv_r) % K_MOD\n\n return Uint256::from_bytes_le(&val_bytes);\n\n}\n\n\n", "file_path": "verifier/src/prime_field.rs", "rank": 47, "score": 171739.28128951482 }, { "content": " ------------------------- */\n\npub fn compute(x: Uint256) -> Uint256 {\n\n\tlet mut result = uint256_ops::get_uint256(\"0\");\n\n\n\n\tresult = uint256_ops::get_uint256(\"f524ffcb160c3dfcc72d40b12754e2dc26433a37b8207934f489a203628137\") + prime_field::fmul(\n\n\t\t\tuint256_ops::get_uint256(\"23b940cd5c4f2e13c6df782f88cce6294315a1b406fda6137ed4a330bd80e37\") + prime_field::fmul(\n\n\t\t\t\tuint256_ops::get_uint256(\"62e62fafc55013ee6450e33e81f6ba8524e37558ea7df7c06785f3784a3d9a8\") + prime_field::fmul(\n\n\t\t\t\t\tuint256_ops::get_uint256(\"347dfb13aea22cacbef33972ad3017a5a9bab04c296295d5d372bad5e076a80\") + prime_field::fmul(\n\n\t\t\t\t\t\tuint256_ops::get_uint256(\"6c930134c99ac7200d41939eb29fb4f4e380b3f2a11437dd01d12fd9ebe8909\") + prime_field::fmul(\n\n\t\t\t\t\t\t\tuint256_ops::get_uint256(\"49d16d6e3720b63f7d1e74ed7fd8ea759132735c094c112c0e9dd8cc4653820\") + prime_field::fmul(\n\n\t\t\t\t\t\t\t\tuint256_ops::get_uint256(\"23a2994e807cd40717d68f37e1d765f4354a81b12374c82f481f09f9faff31a\") + prime_field::fmul(\n\n\t\t\t\t\t\t\t\t\tresult.clone(), x.clone()\n\n\t\t\t\t\t\t\t\t), x.clone()\n\n\t\t\t\t\t\t\t), x.clone()\n\n\t\t\t\t\t\t), x.clone()\n\n\t\t\t\t\t), x.clone()\n\n\t\t\t\t), x.clone()\n\n\t\t\t), x.clone()\n\n\t\t);\n\n\n\n\n", "file_path": "verifier/src/ecdsa_points_y_column.rs", "rank": 48, "score": 171739.28128951482 }, { "content": " ------------------------- */\n\npub fn compute(x: Uint256) -> Uint256 {\n\n\tlet mut result = uint256_ops::get_uint256(\"0\");\n\n\n\n\tresult = uint256_ops::get_uint256(\"5d4c38bd21ee4c36da189b6114280570d274811852ed6788ba0570f2414a914\") + prime_field::fmul(\n\n\t\t\tuint256_ops::get_uint256(\"324182d53af0aa949e3b5ef1cda6d56bed021853be8bcef83bf87df8b308b5a\") + prime_field::fmul(\n\n\t\t\t\tuint256_ops::get_uint256(\"4e1b2bc38487c21db3fcea13aaf850884b9aafee1e3a9e045f204f24f4ed900\") + prime_field::fmul(\n\n\t\t\t\t\tuint256_ops::get_uint256(\"5febf85978de1a675512012a9a5d5c89590284d93ae486a94b7bd8df0032421\") + prime_field::fmul(\n\n\t\t\t\t\t\tuint256_ops::get_uint256(\"f685b119593168b5dc2b7887e7f1720165a1bd180b86185590ba3393987935\") + prime_field::fmul(\n\n\t\t\t\t\t\t\tuint256_ops::get_uint256(\"2bc4092c868bab2802fe0ba3cffdb1eed98b88a2a35d8c9b94a75f695bd3323\") + prime_field::fmul(\n\n\t\t\t\t\t\t\t\tuint256_ops::get_uint256(\"22aac295d2c9dd7e94269a4a72b2fb3c3af04a0cb42ed1f66cfd446fc505ee2\") + prime_field::fmul(\n\n\t\t\t\t\t\t\t\t\tresult.clone(), x.clone()\n\n\t\t\t\t\t\t\t\t), x.clone()\n\n\t\t\t\t\t\t\t), x.clone()\n\n\t\t\t\t\t\t), x.clone()\n\n\t\t\t\t\t), x.clone()\n\n\t\t\t\t), x.clone()\n\n\t\t\t), x.clone()\n\n\t\t);\n\n\n\n\tresult = 
uint256_ops::get_uint256(\"44a14e5af0c3454a97df201eb3e4c91b5925d06da6741c055504c10ea8a534d\") + prime_field::fmul(\n", "file_path": "verifier/src/ecdsa_points_x_column.rs", "rank": 49, "score": 171739.28128951482 }, { "content": " ------------------------- */\n\npub fn compute(x: Uint256) -> Uint256 {\n\n\tlet mut result = uint256_ops::get_uint256(\"0\");\n\n\n\n\tresult = uint256_ops::get_uint256(\"549a83d43c90aaf1a28c445c81abc883cb61e4353a84ea0fcb15ccee6d6482f\") + prime_field::fmul(\n\n\t\t\tuint256_ops::get_uint256(\"6f753527f0dec9b713d52f08e4556a3963a2f7e5e282b2e97ffde3e12569b76\") + prime_field::fmul(\n\n\t\t\t\tuint256_ops::get_uint256(\"233eff8cfcc744de79d412f724898d13c0e53b1132046ee45db7a101242a73f\") + prime_field::fmul(\n\n\t\t\t\t\tuint256_ops::get_uint256(\"60105b3cb5aab151ce615173eaecbe94014ff5d72e884addcd4b9d973fed9fd\") + prime_field::fmul(\n\n\t\t\t\t\t\tuint256_ops::get_uint256(\"295046a010dd6757176414b0fd144c1d2517fc463df01a12c0ab58bbbac26ea\") + prime_field::fmul(\n\n\t\t\t\t\t\t\tuint256_ops::get_uint256(\"4cec4cd52fab6da76b4ab7a41ffd844aad8981917d2295273ff6ab2cce622d8\") + prime_field::fmul(\n\n\t\t\t\t\t\t\t\tuint256_ops::get_uint256(\"43869b387c2d0eab20661ebdfaca58b4b23feac014e1e1d9413164312e77da\") + prime_field::fmul(\n\n\t\t\t\t\t\t\t\t\tresult.clone(), x.clone()\n\n\t\t\t\t\t\t\t\t), x.clone()\n\n\t\t\t\t\t\t\t), x.clone()\n\n\t\t\t\t\t\t), x.clone()\n\n\t\t\t\t\t), x.clone()\n\n\t\t\t\t), x.clone()\n\n\t\t\t), x.clone()\n\n\t\t);\n\n\n\n\tresult = uint256_ops::get_uint256(\"4ccee6b6ecd4ea8733198e95935d13474d34cf54d7631fde59720e40378e1eb\") + prime_field::fmul(\n", "file_path": "verifier/src/penderson_hash_x_column.rs", "rank": 50, "score": 171739.28128951482 }, { "content": "// Performs bitwise xor between 2 vals\n\npub fn bitwise_xor(val1: & Uint256, val2: & Uint256) -> Uint256 {\n\n let val1_bytes = val1.to_bytes_le();\n\n let val2_bytes = val2.to_bytes_le();\n\n\n\n let mut result_bytes: [u8; 32] = [0; 32]; \n\n for i in 0..32 {\n\n let mut val1 = 0;\n\n let mut val2 = 0;\n\n if i < val1_bytes.len() { val1 = val1_bytes[i]; }\n\n if i < val2_bytes.len() { val2 = val2_bytes[i]; }\n\n result_bytes[i] = val1 ^ val2;\n\n }\n\n\n\n return Uint256::from_bytes_le(&result_bytes);\n\n}\n\n\n\n\n", "file_path": "verifier/src/uint256_ops.rs", "rank": 51, "score": 169139.79860515654 }, { "content": "pub fn safe_div(numerator: &Uint256, denominator: &Uint256) -> Uint256 {\n\n assert!(*denominator > get_uint256(\"0\")); //The denominator must not be zero\n\n\n\n let num_bigint = BigUint::from_bytes_le( &(*numerator).to_bytes_le() );\n\n let den_bigint = BigUint::from_bytes_le( &(*denominator).to_bytes_le() );\n\n let num_mod_den = Uint256::from_bytes_le( \n\n &num_bigint.modpow( \n\n &BigUint::new(vec![1]), \n\n &den_bigint\n\n ).to_bytes_le() \n\n );\n\n assert!(num_mod_den == get_uint256(\"0\")); //The numerator is not divisible by the denominator\n\n\n\n return numerator.clone() / denominator.clone();\n\n}\n\n\n\n\n\n/* --------------\n\n Unit Testing\n\n------------------ */\n", "file_path": "verifier/src/uint256_ops.rs", "rank": 52, "score": 169139.79860515654 }, { "content": "pub fn mod_prime(val: Uint256) -> Uint256 {\n\n let val_bigint = BigUint::from_bytes_le( &val.to_bytes_le() );\n\n return Uint256::from_bytes_le( \n\n &val_bigint.modpow( \n\n &BigUint::new(vec![1]), \n\n &BigUint::from_bytes_le( &get_k_modulus().to_bytes_le() ) \n\n ).to_bytes_le() \n\n ); // val % K_MOD\n\n}\n\n\n", "file_path": "verifier/src/prime_field.rs", "rank": 53, "score": 
168533.35890044775 }, { "content": "pub fn fpow(val_u: & Uint256, exp_u: & Uint256) -> Uint256 {\n\n let val = BigUint::from_bytes_le( &(*val_u).to_bytes_le() );\n\n let exp = BigUint::from_bytes_le( &(*exp_u).to_bytes_le() );\n\n let pow_bytes = val.modpow( &exp, &BigUint::from_bytes_le( &get_k_modulus().to_bytes_le() ) ).to_bytes_le();\n\n return Uint256::from_bytes_le(&pow_bytes);\n\n}\n\n\n", "file_path": "verifier/src/prime_field.rs", "rank": 54, "score": 168064.5110413372 }, { "content": "pub fn get_k_montgomery_r() -> Uint256 {\n\n return get_uint256(\"7fffffffffffdf0ffffffffffffffffffffffffffffffffffffffffffffffe1\");\n\n}\n\n\n", "file_path": "verifier/src/prime_field.rs", "rank": 55, "score": 167233.54426313407 }, { "content": " ------------------ */\n\npub fn get_k_modulus() -> Uint256 {\n\n return get_uint256(\"800000000000011000000000000000000000000000000000000000000000001\");\n\n}\n\n\n", "file_path": "verifier/src/prime_field.rs", "rank": 56, "score": 167233.54426313407 }, { "content": "// Returns a Uint256 from string containg appropriate hex value\n\npub fn get_uint256(str: &str) -> Uint256 {\n\n let mut string_even = String::from(str);\n\n if str.len() % 2 != 0 { //If length is odd, prepend a 0\n\n let mut zero_string = String::from(\"0\");\n\n zero_string.push_str(str);\n\n string_even = zero_string.clone();\n\n }\n\n\n\n let val_bytes = hex::decode(string_even).expect(\"Whoops problem encoding str to hex: \");\n\n return Uint256::from_bytes_be(&val_bytes);\n\n}\n\n\n\n\n\n/* Takes lower 8 bits of val in little endian form and return a usize value*/\n", "file_path": "verifier/src/uint256_ops.rs", "rank": 58, "score": 165701.54813872481 }, { "content": "pub fn get_generator_val() -> Uint256 {\n\n return get_uint256(\"3\");\n\n}\n\n\n", "file_path": "verifier/src/prime_field.rs", "rank": 59, "score": 163717.5840505184 }, { "content": "pub fn get_gen_1024_val() -> Uint256 {\n\n return get_uint256(\"659d83946a03edd72406af6711825f5653d9e35dc125289a206c054ec89c4f1\");\n\n}\n\n\n\n\n\n\n\n\n\n/* ---------------\n\n Operations within the finite field\n\n defined by modulo PRIME, where PRIME is a large prime number\n", "file_path": "verifier/src/prime_field.rs", "rank": 60, "score": 163717.5840505184 }, { "content": "pub fn get_k_mod_mask() -> Uint256 {\n\n return get_uint256(\"0fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff\");\n\n}\n\n\n", "file_path": "verifier/src/prime_field.rs", "rank": 61, "score": 163717.5840505184 }, { "content": "pub fn get_one_val() -> Uint256 {\n\n return get_uint256(\"1\");\n\n}\n\n\n", "file_path": "verifier/src/prime_field.rs", "rank": 62, "score": 163717.5840505184 }, { "content": "pub fn get_k_montgomery_r_inv() -> Uint256 {\n\n return get_uint256(\"40000000000001100000000000012100000000000000000000000000000000\");\n\n}\n\n\n", "file_path": "verifier/src/prime_field.rs", "rank": 63, "score": 163717.5840505184 }, { "content": "// The format of the public input, starting at OFFSET_PUBLIC_MEMORY is as follows:\n\n// * For each page:\n\n// * First address in the page (this field is not included for the first page).\n\n// * Page size.\n\n// * Page hash.\n\n// # All data above this line, appears in the initial seed of the proof.\n\n// * For each page:\n\n// * Cumulative product.\n\npub fn get_offset_page_size(page_id: usize) -> usize {\n\n\treturn OFFSET_PUBLIC_MEMORY + 3 * page_id;\n\n}\n\n\n", "file_path": "verifier/src/public_input_offsets.rs", "rank": 64, "score": 157531.20599505756 }, { "content": "pub fn 
get_offset_page_addr(page_id: usize) -> usize {\n\n\tassert!(page_id >= 1); //Address of page 0 is not part of the public input\n\n\treturn OFFSET_PUBLIC_MEMORY + 3 * page_id - 1;\n\n}\n\n\n", "file_path": "verifier/src/public_input_offsets.rs", "rank": 65, "score": 157523.9282172327 }, { "content": "pub fn get_offset_page_hash(page_id: usize) -> usize {\n\n\treturn OFFSET_PUBLIC_MEMORY + 3 * page_id + 1;\n\n}\n\n\n", "file_path": "verifier/src/public_input_offsets.rs", "rank": 66, "score": 157523.9282172327 }, { "content": "fn get_prng_ptr(channel_idx: usize) -> usize {\n\n\treturn channel_idx + 1;\n\n}\n\n\n", "file_path": "verifier/src/verifier_channel.rs", "rank": 67, "score": 156254.11968261388 }, { "content": "pub fn get_offset_page_prod(page_id: usize, n_pages: usize) -> usize {\n\n\treturn OFFSET_PUBLIC_MEMORY + 3 * n_pages - 1 + page_id;\n\n}\n\n\n", "file_path": "verifier/src/public_input_offsets.rs", "rank": 68, "score": 152277.56722147562 }, { "content": "pub fn send_random_queries(\n\n\tchannel_idx: usize, count: usize, mask: Uint256, queries_out_idx: usize, stride: usize, ctx: &mut Vec<Uint256>\n\n) -> Uint256 {\n\n\n\n\tlet mut shift = 0;\n\n\tlet mut end_idx = queries_out_idx;\n\n\tlet mut val = uint256_ops::get_uint256(\"0\");\n\n\n\n\tfor _ in 0..count {\n\n\t\tif shift == 0 {\n\n\t\t\tval = get_random_bytes( get_prng_ptr(channel_idx), ctx );\n\n\t\t\tshift = 8;\n\n\t\t}\n\n\t\t\n\n\t\tshift -= 2;\n\n\t\tlet r_shift = val.clone() / prime_field::fpow( &uint256_ops::get_uint256(\"2\"), &uint256_ops::from_usize(shift) ); // val >> shift\n\n\t\tlet query_idx = uint256_ops::bitwise_and( &mask, &r_shift );\n\n\t\t\n\n\t\t// Insert new query_idx in the correct place like insertion sort.\n\n\t\tlet mut idx_cpy = end_idx;\n", "file_path": "verifier/src/verifier_channel.rs", "rank": 69, "score": 151636.162829686 }, { "content": "fn bit_reverse(num: Uint256, num_of_bits: usize) -> usize {\n\n\tassert!( num_of_bits == 256 || num < prime_field::fpow( &uint256_ops::get_uint256(\"2\"), &uint256_ops::from_usize(num_of_bits) ) ); // Make sure number size is correctly specified\n\n\n\n\tlet mut r = 0;\n\n\tlet mut n = uint256_ops::to_usize(&num);\n\n\tfor _ in 0..num_of_bits {\n\n\t\tr = (r * 2) | (n % 2);\n\n\t\tn = n / 2;\n\n\t}\n\n\n\n\treturn r;\n\n}\n\n\n", "file_path": "verifier/src/fri.rs", "rank": 70, "score": 135721.73362121248 }, { "content": "fn sub(a: Uint256, b: Uint256) -> Uint256 {\n\n\treturn a-b;\n\n}", "file_path": "verifier/src/polynomial_contrainsts.rs", "rank": 71, "score": 131894.789012529 }, { "content": "pub fn verify_proof(\n\n proof_params: Vec<Uint256>,\n\n proof: Vec<Uint256>,\n\n task_meta_data: Vec<Uint256>,\n\n cairo_aux_input: Vec<Uint256>,\n\n cairo_verifier_id: Uint256,\n\n) {\n\n\n\n /* ------------ GPS Statement Verifier ----------- */\n\n\n\n /* Auxiliary inputs check */\n\n assert!(cairo_aux_input.len() > pub_input::OFFSET_N_PUBLIC_MEMORY_PAGES); //Invalid cairoAuxInput length\n\n let n_pages = uint256_ops::to_usize( &cairo_aux_input[pub_input::OFFSET_N_PUBLIC_MEMORY_PAGES] );\n\n assert!(cairo_aux_input.len() == pub_input::get_public_input_length( n_pages ) + 2); //Invalid cairoAuxInput length\n\n\n\n /* Transform cairo_aux_input -> cairoPublic input (- z, alpha) */\n\n // The values z and alpha are used only for the fact registration of the main page.\n\n // They are not needed in the auxiliary input of CpuVerifier as they are computed there.\n\n // Create a copy of cairo_aux_input without z and alpha.\n\n let mut cairo_pub_input: Vec<Uint256> = 
vec![ uint256_ops::get_uint256(\"0\"); cairo_aux_input.len()-2 ];\n", "file_path": "verifier/src/verify_proof.rs", "rank": 72, "score": 122025.26214602642 }, { "content": "pub fn horner_eval(\n\n\tcoefs_start: usize, point: Uint256, n_coefs: usize, ctx: & Vec<Uint256>\n\n) -> Uint256 {\n\n\tlet mut result = get_uint256(\"0\");\n\n\n\n\tassert!( n_coefs % 8 == 0 ); // Number of polynomial coefficients must be divisible by 8\n\n\tassert!( n_coefs < 4096 ); // No more than 4096 coefficients are supported\n\n\n\n\tlet mut coefs_idx = coefs_start + n_coefs;\n\n\n\n\twhile coefs_idx > coefs_start {\n\n\t\t// Reduce coefs_idx by 8 field elements.\n\n\t\tcoefs_idx -= 8;\n\n\n\n\t\t// Apply 4 Horner steps (result := result * point + coef).\n\n\t\tresult = make_copy(&ctx[coefs_idx + 4]) + prime_field::fmul(\n\n\n\n\t\t\t\tmake_copy(&ctx[coefs_idx + 5]) + prime_field::fmul(\n\n\t\t\t\t\tmake_copy(&ctx[coefs_idx + 6]) + prime_field::fmul(\n\n\t\t\t\t\t\tmake_copy(&ctx[coefs_idx + 7]) + prime_field::fmul(\n", "file_path": "verifier/src/horner_eval.rs", "rank": 73, "score": 120307.56586074477 }, { "content": "pub fn compute_next_layer(\n\n\tchannel_idx: usize, fri_queue_idx: usize, merkle_queue_idx: usize, n_queries: usize,\n\n\tfri_eval_point: Uint256, fri_coset_size: usize, fri_ctx: usize, ctx: &mut Vec<Uint256>\n\n) -> usize {\n\n\t\n\n\tlet fri_queue_end = fri_queue_idx + 3 * n_queries;\n\n\tlet mut fri_queue_tail = fri_queue_idx;\n\n\tlet mut fri_queue_head = fri_queue_idx;\n\n\tlet mut merkle_queue_tail = merkle_queue_idx;\n\n\n\n\t//Do: get coset inputs and do fri steps while still in fri queue\n\n\n\n\tlet (new_queue_head0, index0, coset_offset0) = gather_coset_inputs(\n\n\t\tchannel_idx, fri_ctx, fri_queue_head, fri_coset_size, ctx\n\n\t);\n\n\tdo_fri_steps(\n\n fri_ctx, fri_queue_tail, coset_offset0, fri_eval_point.clone(), fri_coset_size, index0, merkle_queue_tail, ctx\n\n\t);\n\n\n\n\tmerkle_queue_tail += 2;\n", "file_path": "verifier/src/fri.rs", "rank": 74, "score": 120307.56586074474 }, { "content": "pub fn register_public_memory_main_page(\n\n\ttask_meta_data: & Vec<Uint256>, cairo_aux_input: & Vec<Uint256>, registry: &mut HashMap<Uint256, bool>\n\n) -> (usize, Uint256, Uint256) {\n\n\tlet prime = prime_field::get_k_modulus();\n\n\tlet n_tasks = uint256_ops::to_usize(&task_meta_data[0].clone());\n\n\tassert!( n_tasks < 2usize.pow(30) ); //Invalid number of tasks\n\n\n\n\t// Public memory length\n\n\tlet pub_mem_len = \n\n\t\tcairo_bootloader::PROGRAM_SIZE + /*return fp and pc*/2 + N_MAIN_ARGS + N_MAIN_RETURN_VALUES + /*Number of tasks cell*/1 + 2 * n_tasks;\n\n\n\n\tlet mut public_memory: Vec<Uint256> = vec![uint256_ops::get_uint256(\"0\"); pub_mem_len * public_input_offsets::N_WORDS_PER_PUBLIC_MEMORY_ENTRY];\n\n\n\n\tlet mut offset = 0;\n\n\n\n\t// Write public memory, which is a list of pairs (address, value).\n\n\t// Copy program segment to public memory\n\n\tlet bootloader_prog: Vec<Uint256> = cairo_bootloader::get_bootload_program();\n\n\tfor i in 0..bootloader_prog.len() {\n\n\t\t// Force that memory[i + INITIAL_PC] = bootloaderProgram[i].\n", "file_path": "verifier/src/memory_fact_registry.rs", "rank": 75, "score": 111065.21394142884 }, { "content": "fn get_bootload_program_bigint() -> Vec<BigUint> {\n\n\treturn vec![\n\n BigUint::parse_bytes(b\"290341444919459839\", 10).unwrap(),\n\n BigUint::parse_bytes(b\"4\", 10).unwrap(),\n\n BigUint::parse_bytes(b\"1226245742482522112\", 10).unwrap(),\n\n BigUint::parse_bytes(b\"166\", 10).unwrap(),\n\n 
BigUint::parse_bytes(b\"74168662805676031\", 10).unwrap(),\n\n BigUint::parse_bytes(b\"0\", 10).unwrap(),\n\n BigUint::parse_bytes(b\"146226256843603965\", 10).unwrap(),\n\n BigUint::parse_bytes(b\"4\", 10).unwrap(),\n\n BigUint::parse_bytes(b\"5191102238658887680\", 10).unwrap(),\n\n BigUint::parse_bytes(b\"2345108766317314046\", 10).unwrap(),\n\n BigUint::parse_bytes(b\"290341444919459839\", 10).unwrap(),\n\n BigUint::parse_bytes(b\"3\", 10).unwrap(),\n\n BigUint::parse_bytes(b\"4632937381316558848\", 10).unwrap(),\n\n BigUint::parse_bytes(b\"4612671182992932865\", 10).unwrap(),\n\n BigUint::parse_bytes(b\"4612671182992998402\", 10).unwrap(),\n\n BigUint::parse_bytes(b\"146226256843603968\", 10).unwrap(),\n\n BigUint::parse_bytes(b\"4\", 10).unwrap(),\n\n BigUint::parse_bytes(b\"74168662805676031\", 10).unwrap(),\n", "file_path": "verifier/src/cairo_bootloader.rs", "rank": 76, "score": 106617.10140397408 }, { "content": "fn gather_coset_inputs(\n\n\tchannel_idx: usize, fri_ctx: usize, fri_queue_head_input: usize, coset_size: usize, ctx: & mut Vec<Uint256>\n\n) -> (usize, Uint256, Uint256) {\n\n\n\n\tlet mut evals_on_coset_idx = fri_ctx + FRI_CTX_TO_COSET_EVALUATIONS_OFFSET;\n\n\tlet fri_group_idx = fri_ctx + FRI_CTX_TO_FRI_GROUP_OFFSET;\n\n\tlet mut fri_queue_head = fri_queue_head_input; //mutable copy of input\n\n\n\n\tlet mut queue_item_idx = ctx[fri_queue_head].clone();\n\n\n\n\t// The coset index is represented by the most significant bits of the queue item index.\n\n\tlet negated: Uint256 = uint256_ops::bitwise_not( uint256_ops::from_usize(coset_size-1) );\n\n\tlet coset_idx = uint256_ops::bitwise_and( &queue_item_idx.clone(), &negated );\n\n\tlet next_coset_idx = coset_idx.clone() + uint256_ops::from_usize(coset_size);\n\n\t\n\n\n\n\t// Get the algebraic coset offset:\n\n\t// I.e. 
given c*g^(-k) compute c, where\n\n\t// g is the generator of the coset group.\n\n\t// k is bitReverse(offsetWithinCoset, log2(cosetSize)).\n", "file_path": "verifier/src/fri.rs", "rank": 77, "score": 101637.30964908874 }, { "content": "#[test]\n\nfn test_honest_verification_layout1() {\n\n\t//Taken from https://ropsten.etherscan.io/tx/0xa70a21fd3bec7820417f382a7d22de7c51805632a5bd3f05d4e921e04235c560\n\n\tlet proof_params = vec![\n\n\t\tcairo_verifier::uint256_ops::get_uint256(\"000000000000000000000000000000000000000000000000000000000000000a\"),\n\n\t\tcairo_verifier::uint256_ops::get_uint256(\"0000000000000000000000000000000000000000000000000000000000000005\"),\n\n\t\tcairo_verifier::uint256_ops::get_uint256(\"000000000000000000000000000000000000000000000000000000000000001e\"),\n\n\t\tcairo_verifier::uint256_ops::get_uint256(\"0000000000000000000000000000000000000000000000000000000000000005\"),\n\n\t\tcairo_verifier::uint256_ops::get_uint256(\"0000000000000000000000000000000000000000000000000000000000000007\"),\n\n\t\tcairo_verifier::uint256_ops::get_uint256(\"0000000000000000000000000000000000000000000000000000000000000000\"),\n\n\t\tcairo_verifier::uint256_ops::get_uint256(\"0000000000000000000000000000000000000000000000000000000000000003\"),\n\n\t\tcairo_verifier::uint256_ops::get_uint256(\"0000000000000000000000000000000000000000000000000000000000000003\"),\n\n\t\tcairo_verifier::uint256_ops::get_uint256(\"0000000000000000000000000000000000000000000000000000000000000003\"),\n\n\t\tcairo_verifier::uint256_ops::get_uint256(\"0000000000000000000000000000000000000000000000000000000000000003\"),\n\n\t\tcairo_verifier::uint256_ops::get_uint256(\"0000000000000000000000000000000000000000000000000000000000000003\"),\n\n\t\tcairo_verifier::uint256_ops::get_uint256(\"0000000000000000000000000000000000000000000000000000000000000002\")\n\n\t];\n\n\tlet proof = vec![\n\n\t\tcairo_verifier::uint256_ops::get_uint256(\"ded9ec65e8b9ee0721ee7b5bb43f73aae66568e9000000000000000000000000\"),\n\n\t\tcairo_verifier::uint256_ops::get_uint256(\"50b8f85ab021231715a77775840f93630974d769000000000000000000000000\"),\n\n\t\tcairo_verifier::uint256_ops::get_uint256(\"dfbd1e874bbadd381b1e7063df6392bd2fe04b85000000000000000000000000\"),\n", "file_path": "verifier/tests/integration_test.rs", "rank": 78, "score": 96376.7384570305 }, { "content": "fn bit_reverse(num: usize, num_of_bits: usize) -> usize {\n\n\tassert!( num_of_bits == 256 || num < uint256_ops::to_usize(&prime_field::fpow( &uint256_ops::get_uint256(\"2\"), &uint256_ops::from_usize(num_of_bits) )) ); // Make sure number size is correctly specified\n\n\n\n\tlet mut r = 0;\n\n\tlet mut n = num;\n\n\tfor _ in 0..num_of_bits {\n\n\t\tr = (r * 2) | (n % 2);\n\n\t\tn = n / 2;\n\n\t}\n\n\n\n\treturn r;\n\n}\n\n\n\n\n\n\n\n\n\n\n\n\n\n/*\n\n Checks that the trace and the compostion agree at oodsPoint, assuming the prover provided us\n\n with the proper evaluations.\n\n\n\n Later, we will use boundery constraints to check that those evaluations are actully consistent\n\n with the commited trace and composition ploynomials.\n\n*/\n", "file_path": "verifier/src/verify_proof.rs", "rank": 79, "score": 94963.28252597 }, { "content": "fn verify_fri(\n\n\tproof: Vec<Uint256>, fri_queue: &mut Vec<Uint256>, evaluation_point: Uint256, fri_step_size: usize, expected_root: Uint256\n\n) {\n\n\tassert!(fri_step_size <= FRI_MAX_FRI_STEP); //FRI step size too large\n\n\t/*\n\n\t\tThe fri_queue should have of 3*n_queries + 1 elements, beginning with n_queries triplets\n\n\t\tof the 
form (query_index, FRI_value, FRI_inverse_point), and ending with a single buffer\n\n\t\tcell set to 0, which is accessed and read during the computation of the FRI layer.\n\n\t*/\n\n\tassert!( fri_queue.len() % 3 == 1); //FRI Queue must be composed of triplets plus one delimiter cell\n\n\tassert!( fri_queue.len() >= 4 ); //No query to process\n\n\tlet mut n_queries = fri_queue.len() / 3;\n\n\tfri_queue[3*n_queries] = uint256_ops::get_uint256(\"0\"); \n\n\n\n\t// Verify evaluation point within valid range.\n\n\tassert!(evaluation_point < prime_field::get_k_modulus()); //INVALID_EVAL_POINT\n\n\n\n\t// Queries need to be in the range [2**height .. 2**(height+1)-1] strictly incrementing.\n\n\t// i.e. we need to check that Qi+1 > Qi for each i,\n\n\t// but regarding the height range - it's sufficient to check that\n", "file_path": "verifier/src/fri.rs", "rank": 80, "score": 68990.62669331771 }, { "content": "fn do_fri_steps(\n\n\tfri_ctx: usize, fri_queue_tail: usize, coset_offset_input: Uint256, fri_eval_point: Uint256,\n\n\tfri_coset_size: usize, index: Uint256, merkle_queue_idx: usize, ctx: & mut Vec<Uint256>\n\n) {\n\n\n\n\tlet evals_on_coset_idx = fri_ctx + FRI_CTX_TO_COSET_EVALUATIONS_OFFSET;\n\n\tlet fri_half_inv_group_idx = fri_ctx + FRI_CTX_TO_FRI_HALF_INV_GROUP_OFFSET;\n\n\n\n\tlet mut fri_val = uint256_ops::get_uint256(\"0\");\n\n\tlet mut coset_offset = coset_offset_input;\n\n\n\n\n\n\t// Compare to expected FRI step sizes in order of likelihood, step size 3 being most common.\n\n\tif fri_coset_size == 8 {\n\n\t\tlet (fri_val_tmp, coset_offset_tmp) = do_3_fri_steps( fri_half_inv_group_idx, evals_on_coset_idx, coset_offset, fri_eval_point, ctx );\n\n\t\tfri_val = fri_val_tmp;\n\n\t\tcoset_offset = coset_offset_tmp;\n\n\t}else if fri_coset_size == 4 {\n\n\t\tlet (fri_val_tmp, coset_offset_tmp) = do_2_fri_steps( fri_half_inv_group_idx, evals_on_coset_idx, coset_offset, fri_eval_point, ctx );\n\n\t\tfri_val = fri_val_tmp;\n", "file_path": "verifier/src/fri.rs", "rank": 81, "score": 67883.72699823324 }, { "content": "fn do_2_fri_steps(\n\n\tfri_half_inv_group_idx: usize, evals_on_coset_idx: usize, coset_offset_input: Uint256, fri_eval_point: Uint256, ctx: & mut Vec<Uint256>\n\n) -> (Uint256, Uint256) {\n\n\n\n\tlet fri_eval_point_divbyx = prime_field::fmul( fri_eval_point.clone(), coset_offset_input.clone() );\n\n\n\n\tlet mut f0 = ctx[evals_on_coset_idx].clone();\n\n\tlet f1 = ctx[evals_on_coset_idx + 1].clone();\n\n\t// f0 < 3P ( = 1 + 1 + 1).\n\n\tf0 = (f0.clone() + f1.clone()) + prime_field::fmul( fri_eval_point_divbyx.clone(), f0.clone() + (prime_field::get_k_modulus() - f1.clone()) );\n\n\n\n\tlet mut f2 = ctx[evals_on_coset_idx + 2].clone();\n\n\tlet f3 = ctx[evals_on_coset_idx + 3].clone();\n\n\tf2 = prime_field::fadd( \n\n\t\tf2.clone() + f3.clone(), \n\n\t\tprime_field::fmul(\n\n\t\t\tf2.clone() + (prime_field::get_k_modulus() - f3.clone()), \n\n\t\t\tprime_field::fmul(\n\n\t\t\t\tctx[fri_half_inv_group_idx + 1].clone(), fri_eval_point_divbyx.clone()\n\n\t\t\t)\n", "file_path": "verifier/src/fri.rs", "rank": 82, "score": 67883.72699823324 }, { "content": "fn do_3_fri_steps(\n\n\tfri_half_inv_group_idx: usize, evals_on_coset_idx: usize, coset_offset_input: Uint256, fri_eval_point: Uint256, ctx: & mut Vec<Uint256>\n\n) -> (Uint256, Uint256) {\n\n\tlet prime = prime_field::get_k_modulus();\n\n\tlet m_prime = uint256_ops::get_uint256(\"8000000000000110000000000000000000000000000000000000000000000010\");\n\n\tlet mut f0 = ctx[evals_on_coset_idx].clone();\n\n\t\n\n\tlet 
fri_eval_point_divbyx = prime_field::fmul( fri_eval_point.clone(), coset_offset_input.clone() );\n\n\tlet fri_eval_point_divbyx_squared = prime_field::fmul( fri_eval_point_divbyx.clone(), fri_eval_point_divbyx.clone() );\n\n\tlet imaginary_unit = ctx[fri_half_inv_group_idx + 1].clone();\n\n\n\n\tlet f1 = ctx[evals_on_coset_idx + 1].clone();\n\n\t// f0 < 3P ( = 1 + 1 + 1).\n\n\tf0 = (f0.clone() + f1.clone()) + prime_field::fmul( fri_eval_point_divbyx.clone(), f0.clone() + (prime.clone() - f1.clone()) );\n\n\n\n\tlet mut f2 = ctx[evals_on_coset_idx + 2].clone();\n\n\tlet f3 = ctx[evals_on_coset_idx + 3].clone();\n\n\tf2 = (f2.clone() + f3.clone()) + \n\n\t\tprime_field::fmul(\n\n\t\t\tf2.clone() + (prime.clone() - f3.clone()), \n", "file_path": "verifier/src/fri.rs", "rank": 83, "score": 67883.72699823324 }, { "content": "fn do_4_fri_steps(\n\n\tfri_half_inv_group_idx: usize, evals_on_coset_idx: usize, coset_offset_input: Uint256, fri_eval_point: Uint256, ctx: & mut Vec<Uint256>\n\n) -> (Uint256, Uint256) {\n\n\n\n\tlet prime = prime_field::get_k_modulus();\n\n\tlet m_prime = uint256_ops::get_uint256(\"8000000000000110000000000000000000000000000000000000000000000010\");\n\n\tlet mut f0 = ctx[evals_on_coset_idx].clone();\n\n\t\n\n\tlet fri_eval_point_divbyx = prime_field::fmul( fri_eval_point.clone(), coset_offset_input.clone() );\n\n\tlet imaginary_unit = ctx[fri_half_inv_group_idx + 1].clone();\n\n\n\n\tlet f1 = ctx[evals_on_coset_idx + 1].clone();\n\n\t// f0 < 3P ( = 1 + 1 + 1).\n\n\tf0 = (f0.clone() + f1.clone()) + prime_field::fmul( fri_eval_point_divbyx.clone(), f0.clone() + (prime.clone() - f1.clone()) );\n\n\n\n\tlet mut f2 = ctx[evals_on_coset_idx + 2].clone();\n\n\tlet f3 = ctx[evals_on_coset_idx + 3].clone();\n\n\tf2 = (f2.clone() + f3.clone()) + \n\n\t\tprime_field::fmul(\n\n\t\t\tf2.clone() + (prime.clone() - f3.clone()), \n", "file_path": "verifier/src/fri.rs", "rank": 84, "score": 67883.72699823324 }, { "content": "fn compute_fact_hash(\n\n\tmemory_pairs: &Vec<Uint256>, z: Uint256, alpha: Uint256, prime: Uint256\n\n) -> (Uint256, Uint256, Uint256) {\n\n\tlet mem_size = memory_pairs.len() / 2;\n\n\n\n\tlet mut prod = uint256_ops::get_uint256(\"1\");\n\n\n\n\t// Each value of memoryPairs is a pair: (address, value)\n\n\tlet last_idx = memory_pairs.len()-1;\n\n\n\n\tlet mut idx = 0;\n\n\twhile idx < last_idx {\n\n\t\t// Compute address + alpha * value\n\n\t\tlet addr_val_linear_comb = prime_field::fadd(\n\n\t\t\tmemory_pairs[idx].clone(), prime_field::fmul(\n\n\t\t\t\tmemory_pairs[idx+1].clone(), alpha.clone()\n\n\t\t\t)\n\n\t\t);\n\n\t\tprod = prime_field::fmul(\n\n\t\t\tprod.clone(), z.clone() + prime.clone() - addr_val_linear_comb.clone()\n", "file_path": "verifier/src/memory_fact_registry.rs", "rank": 85, "score": 63530.646384022315 }, { "content": "fn compute_pubic_memory_prod(\n\n\tcumulative_prods_ptr: usize, n_pub_memory_pages: usize, ctx: & Vec<Uint256>\n\n) -> Uint256 {\n\n\tlet last_ptr = cumulative_prods_ptr + n_pub_memory_pages;\n\n\tlet mut res = uint256_ops::get_uint256(\"1\");\n\n\tlet mut ptr = cumulative_prods_ptr;\n\n\twhile ptr < last_ptr {\n\n\t\tres = prime_field::fmul( res.clone(), ctx[ptr].clone() );\n\n\t\tptr += 1;\n\n\t}\n\n\treturn res;\n\n}", "file_path": "verifier/src/memory_fact_registry.rs", "rank": 86, "score": 62282.564816907936 }, { "content": "\t// prng.digest := keccak256(digest||nonce), nonce was written earlier.\n\n\tctx[channel_idx + 1] = uint256_ops::keccak_256(&bytes_bank[0..40]);\n\n\t// prng.counter := 0.\n\n\tctx[channel_idx + 2] = 
uint256_ops::get_uint256(\"0\");\n\n\tctx[channel_idx] += uint256_ops::get_uint256(\"1\"); //TODO: This is incorect since 0x8 is added to proofPtr, not 0x20\n\n\tctx[quarter_read_ptr] = uint256_ops::get_uint256(\"1\"); \n\n\t//TODO: Maybe we could modify copy of proof to add 24 0 bytes infront of data read\n\n\n\n\tlet pow_threshold = prime_field::fpow( &uint256_ops::get_uint256(\"2\"), &uint256_ops::from_usize(256 - pow_bits) ); // 1 << 256 - pow_bits\n\n\n\n\tprintln!(\"pow_threshold: {}, pow_digest: {}\", pow_threshold, pow_digest);\n\n\n\n\tassert!(pow_digest < pow_threshold); //Proof of work check failed\n\n}\n\n\n\n\n\n/*\n\n\tSends random queries and returns an array of queries sorted in ascending order.\n\n\tGenerates count queries in the range [0, mask] and returns the number of unique queries.\n\n\tNote that mask is of the form 2^k-1 (for some k).\n\n\tNote that queriesOutPtr may be (and is) inteleaved with other arrays. The stride parameter\n\n\tis passed to indicate the distance between every two entries to the queries array, i.e.\n\n\tstride = 0x20*(number of interleaved arrays).\n\n*/\n", "file_path": "verifier/src/verifier_channel.rs", "rank": 92, "score": 43355.71640833701 }, { "content": "\tfor i in 0..32 {\n\n\t\tbytes_bank[i] = hash_bytes[i];\n\n\t}\n\n\t\n\n\n\n\t//Do a second hash of keccak256(keccak256(0123456789abcded || digest || workBits) || nonce)\n\n\tlet proof_data = ctx[ uint256_ops::to_usize(&ctx[channel_idx]) ].clone();\n\n\tlet proof_data_bytes = uint256_ops::to_fixed_bytes(&proof_data);\n\n\t//println!(\"proof_data: {}, proof_data_bytes: {:?}\", proof_data, proof_data_bytes);\n\n\tfor i in 0..8 {\n\n\t\tbytes_bank[i + 32] = proof_data_bytes[i]; //TODO: Make sure are we writing upper bytes of lower bytes\n\n\t}\n\n\t// Keccak of 0123456789abcded || digest || workBits) || nonce\n\n\tlet pow_digest = uint256_ops::keccak_256(&bytes_bank[0..40]);\n\n\n\n\n\n\n\n\tfor i in 0..digest_bytes.len() {\n\n\t\tbytes_bank[i] = digest_bytes[i];\n\n\t}\n", "file_path": "verifier/src/verifier_channel.rs", "rank": 93, "score": 43354.19952236115 }, { "content": " \tcombined_data[i + 32] = counter_bytes[i];\n\n \t}\n\n\n\n\t\t\tfield_element = uint256_ops::bitwise_and( &mask, &uint256_ops::keccak_256(&combined_data) );\n\n\n\n\t\t\t//println!(\"fieldElement: {}\", field_element);\n\n\n\n\t\t\tctx[counter_idx] += uint256_ops::get_uint256(\"1\");\n\n\t\t}\n\n\n\n\t\tctx[target_idx] = prime_field::from_montgomery(field_element);\n\n\n\n\t\t//println!(\"target_idx: {}, ctx[target_idx]: {}\", target_idx, ctx[target_idx]);\n\n\t\t//println!(\"ctx[counter_idx]: {}\", ctx[counter_idx]);\n\n\n\n\t\ttarget_idx += 1;\n\n\t}\n\n}\n\n\n", "file_path": "verifier/src/verifier_channel.rs", "rank": 94, "score": 43347.07531755552 }, { "content": "use num256::uint256::Uint256 as Uint256;\n\n\n\nuse crate::uint256_ops;\n\nuse crate::prime_field;\n\n\n\n\n\n/* Note:\n\n\tWe store the state of the channel in uint256[3] as follows:\n\n\t[0] proof pointer.\n\n\t[1] prng digest.\n\n\t[2] prng counter.\n\n*/\n\n\n\n\n", "file_path": "verifier/src/verifier_channel.rs", "rank": 95, "score": 43344.887979411316 }, { "content": "\t\t\t}\n\n\t\t}\n\n\n\n\t}\n\n\n\n\treturn uint256_ops::from_usize( (end_idx - queries_out_idx) / stride );\n\n\n\n}\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n/* --------------------\n\n\tPRNG (Randomness derived from public input) \n\n --------------------- */\n\n\n", "file_path": "verifier/src/verifier_channel.rs", "rank": 96, "score": 43343.42138744292 }, { "content": "\t\tlet mut curr 
= uint256_ops::get_uint256(\"0\");\n\n\t\twhile idx_cpy > queries_out_idx {\n\n\t\t\tcurr = ctx[idx_cpy - stride].clone();\n\n\n\n\t\t\tif query_idx >= curr {\n\n\t\t\t\tbreak;\n\n\t\t\t}\n\n\n\n\t\t\tctx[idx_cpy] = curr.clone();\n\n\t\t\tidx_cpy -= stride;\n\n\t\t}\n\n\n\n\t\tif query_idx != curr {\n\n\t\t\tctx[idx_cpy] = uint256_ops::make_copy( &query_idx );\n\n\t\t\tend_idx += stride;\n\n\t\t} else {\n\n\t\t\t// Revert right shuffling.\n\n\t\t\twhile idx_cpy < end_idx {\n\n\t\t\t\tctx[idx_cpy] = uint256_ops::make_copy( &ctx[idx_cpy + stride] );\n\n\t\t\t\tidx_cpy += stride;\n", "file_path": "verifier/src/verifier_channel.rs", "rank": 97, "score": 43342.49139419765 }, { "content": " assert_eq!( bitwise_xor(&val1, &val2) , get_uint256(\"1510\") ); // == 5392\n\n }\n\n\n\n //TODO: Finish writing this test and se this code in to_fixed_bytes representation\n\n #[test]\n\n fn test_keccak() {\n\n //Test 1 - Blank 0s\n\n let input_data: [u8; 64] = [0; 64];\n\n assert_eq!( keccak_256(&input_data), get_uint256(\"ad3228b676f7d3cd4284a5443f17f1962b36e491b30a40b2405849e597ba5fb5\") ); //hash obtained from remix - ad3228b676f7d3cd4284a5443f17f1962b36e491b30a40b2405849e597ba5fb5\n\n\n\n // Test 2 - Determine to use le or be representation\n\n let mut combined_data: [u8; 64] = [0; 64];\n\n let bytes_val1 = get_uint256(\"12345\").to_bytes_be();\n\n let bytes_val2 = get_uint256(\"6789A\").to_bytes_be();\n\n\n\n for i in 0..bytes_val1.len() {\n\n combined_data[32 - bytes_val1.len() + i] = bytes_val1[i];\n\n }\n\n for i in 0..bytes_val2.len() {\n\n combined_data[64 - bytes_val2.len() + i] = bytes_val2[i];\n", "file_path": "verifier/src/uint256_ops.rs", "rank": 98, "score": 41908.90543260116 }, { "content": "use num256::uint256::Uint256 as Uint256;\n\nuse sha3::Keccak256;\n\nuse sha3::Digest;\n\nuse num_bigint::BigUint;\n\n\n\n// Returns a Uint256 from string containg appropriate hex value\n", "file_path": "verifier/src/uint256_ops.rs", "rank": 99, "score": 41903.608097321674 } ]
Rust
src/watcher.rs
rakaly/desktop
4d16ef506704109c4300410a2f6fc12d2fef51e7
use anyhow::{anyhow, Context}; use flate2::bufread::GzEncoder; use flate2::Compression; use log::{debug, info, warn}; use notify::{watcher, DebouncedEvent, RecursiveMode, Watcher}; use std::fs::File; use std::io::{BufReader, Read}; use std::path::Path; use std::sync::mpsc::channel; use std::time::Duration; pub struct Client { pub username: String, pub api_key: String, pub api_url: String, } impl Client { fn upload_zip(&self, path: &Path) -> anyhow::Result<()> { let file = File::open(path).context("unable to open")?; let size = file.metadata().map(|m| m.len()).unwrap_or(0); let reader = BufReader::new(file); let resp = ureq::post(&self.api_url) .auth(&self.username, &self.api_key) .set("Content-Length", &size.to_string()) .set("Content-Type", "application/zip") .send(reader); if resp.ok() { Ok(()) } else { let err = resp .into_string() .context("unable to interpret eror server response")?; Err(anyhow!("server responded with an error: {}", err)) } } fn upload_txt(&self, path: &Path) -> anyhow::Result<()> { let file = File::open(path).context("unable to open")?; let reader = BufReader::new(file); let mut buffer = Vec::new(); let mut gz = GzEncoder::new(reader, Compression::new(4)); gz.read_to_end(&mut buffer).context("unable to compress")?; let resp = ureq::post(&self.api_url) .auth(&self.username, &self.api_key) .set("Content-Encoding", "gzip") .send_bytes(&buffer); if resp.ok() { Ok(()) } else { let err = resp .into_string() .context("unable to interpret eror server response")?; Err(anyhow!("server responded with an error: {}", err)) } } } pub fn core_loop(watch_dir: &Path, client: &Client) -> anyhow::Result<()> { let (tx, rx) = channel(); let mut watcher = watcher(tx, Duration::from_secs(5)) .with_context(|| "unable to create file watcher".to_string())?; watcher .watch(watch_dir, RecursiveMode::Recursive) .with_context(|| format!("unable to watch: {}", watch_dir.display()))?; info!("watching directory for save files: {}", watch_dir.display()); log::logger().flush(); loop { match rx.recv() { Ok(DebouncedEvent::Error(e, path)) => { if let Some(path) = path { warn!("watch error on {}: {:?}", path.as_path().display(), e); } else { warn!("watch error: {:?}", e); } } Ok(DebouncedEvent::Write(path)) | Ok(DebouncedEvent::Create(path)) => { if !path.as_path().extension().map_or(false, |x| x == "eu4") { continue; } let path_display = path.as_path().display(); info!("detected write: {}", path_display); match process_file(client, &path) { Ok(_) => info!("successfully uploaded {}", path_display), Err(e) => warn!("{:?}", e), } } Ok(event) => { debug!("{:?}", event); continue; } Err(e) => warn!("watch error: {:?}", e), } log::logger().flush(); } } fn process_file(client: &Client, path: &Path) -> anyhow::Result<()> { let path_display = path.display(); let magic = { let mut buffer = [0; 4]; let mut file = File::open(path).with_context(|| format!("unable to open: {}", path_display))?; file.read_exact(&mut buffer) .with_context(|| format!("unable to read: {}", path_display))?; buffer }; match magic { [0x50, 0x4b, 0x03, 0x04] => client .upload_zip(&path) .with_context(|| format!("unable to upload zip: {}", path_display)), [b'E', b'U', b'4', b't'] => client .upload_txt(&path) .with_context(|| format!("unable to upload txt: {}", path_display)), x => Err(anyhow!( "unexpected file signature: {:?} - {}", x, path_display )), } }
use anyhow::{anyhow, Context}; use flate2::bufread::GzEncoder; use flate2::Compression; use log::{debug, info, warn}; use notify::{watcher, DebouncedEvent, RecursiveMode, Watcher}; use std::fs::File; use std::io::{BufReader, Read}; use std::path::Path; use std::sync::mpsc::channel; use std::time::Duration; pub struct Client { pub username: String, pub api_key: String, pub api_url: String, } impl Client { fn upload_zip(&self, path: &Path) -> anyhow::Result<()> { let file = File::open(path).context("unable to open")?; let size = file.metadata().map(|m| m.len()).unwrap_or(0); let reader = BufReader::new(file); let resp = ureq::post(&self.api_url) .auth(&self.username, &self.api_key) .set("Content-Length", &size.to_string()) .set("Content-Type", "application/zip") .send(reader); if resp.ok() { Ok(()) } else { let err = resp .into_strin
fn upload_txt(&self, path: &Path) -> anyhow::Result<()> { let file = File::open(path).context("unable to open")?; let reader = BufReader::new(file); let mut buffer = Vec::new(); let mut gz = GzEncoder::new(reader, Compression::new(4)); gz.read_to_end(&mut buffer).context("unable to compress")?; let resp = ureq::post(&self.api_url) .auth(&self.username, &self.api_key) .set("Content-Encoding", "gzip") .send_bytes(&buffer); if resp.ok() { Ok(()) } else { let err = resp .into_string() .context("unable to interpret eror server response")?; Err(anyhow!("server responded with an error: {}", err)) } } } pub fn core_loop(watch_dir: &Path, client: &Client) -> anyhow::Result<()> { let (tx, rx) = channel(); let mut watcher = watcher(tx, Duration::from_secs(5)) .with_context(|| "unable to create file watcher".to_string())?; watcher .watch(watch_dir, RecursiveMode::Recursive) .with_context(|| format!("unable to watch: {}", watch_dir.display()))?; info!("watching directory for save files: {}", watch_dir.display()); log::logger().flush(); loop { match rx.recv() { Ok(DebouncedEvent::Error(e, path)) => { if let Some(path) = path { warn!("watch error on {}: {:?}", path.as_path().display(), e); } else { warn!("watch error: {:?}", e); } } Ok(DebouncedEvent::Write(path)) | Ok(DebouncedEvent::Create(path)) => { if !path.as_path().extension().map_or(false, |x| x == "eu4") { continue; } let path_display = path.as_path().display(); info!("detected write: {}", path_display); match process_file(client, &path) { Ok(_) => info!("successfully uploaded {}", path_display), Err(e) => warn!("{:?}", e), } } Ok(event) => { debug!("{:?}", event); continue; } Err(e) => warn!("watch error: {:?}", e), } log::logger().flush(); } } fn process_file(client: &Client, path: &Path) -> anyhow::Result<()> { let path_display = path.display(); let magic = { let mut buffer = [0; 4]; let mut file = File::open(path).with_context(|| format!("unable to open: {}", path_display))?; file.read_exact(&mut buffer) .with_context(|| format!("unable to read: {}", path_display))?; buffer }; match magic { [0x50, 0x4b, 0x03, 0x04] => client .upload_zip(&path) .with_context(|| format!("unable to upload zip: {}", path_display)), [b'E', b'U', b'4', b't'] => client .upload_txt(&path) .with_context(|| format!("unable to upload txt: {}", path_display)), x => Err(anyhow!( "unexpected file signature: {:?} - {}", x, path_display )), } }
g() .context("unable to interpret eror server response")?; Err(anyhow!("server responded with an error: {}", err)) } }
function_block-function_prefixed
[ { "content": "pub fn write_minimal_config<P: AsRef<Path>>(\n\n input: &UserInputConfig,\n\n destination: P,\n\n) -> anyhow::Result<UploaderConfig> {\n\n let path = destination.as_ref();\n\n let config_data =\n\n toml::ser::to_vec(&input).context(\"unable to serialize user input to a config\")?;\n\n\n\n std::fs::write(path, &config_data)\n\n .with_context(|| format!(\"Unable to write config file: {}\", path.display()))?;\n\n\n\n parse_config(&config_data)\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 2, "score": 75829.361849868 }, { "content": "pub fn read_config<P: AsRef<Path>>(location: P) -> anyhow::Result<UploaderConfig> {\n\n let path = location.as_ref();\n\n let config_data =\n\n std::fs::read(path).with_context(|| format!(\"Failed to read {}\", path.display()))?;\n\n let config = parse_config(&config_data)\n\n .with_context(|| format!(\"Malformatted config file: {}\", path.display()))?;\n\n Ok(config)\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 3, "score": 74279.5606035707 }, { "content": "fn default_api_url() -> String {\n\n String::from(\"https://rakaly.com/api/upload\")\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 4, "score": 59459.51102419711 }, { "content": "pub fn run() {\n\n service_dispatcher::start(\"myservice\", ffi_service_main).unwrap();\n\n}\n", "file_path": "src/service/windows.rs", "rank": 5, "score": 54849.91661778465 }, { "content": "#[cfg(feature = \"gui\")]\n\npub fn run() {\n\n gui::run();\n\n}\n", "file_path": "src/gui/mod.rs", "rank": 6, "score": 54849.91661778465 }, { "content": "pub fn print_help() {\n\n let _ = writeln!(io::stdout(), \"{}\", HELP);\n\n}\n", "file_path": "src/cli.rs", "rank": 7, "score": 54849.91661778465 }, { "content": "pub fn run() {\n\n // describe the main window\n\n let main_window = WindowDesc::new(build_root_widget)\n\n .title(WINDOW_TITLE)\n\n .window_size((400.0, 400.0));\n\n\n\n // create the initial app state\n\n let initial_state = HelloState {\n\n steam_name: \"\".into(),\n\n api_key: \"\".into(),\n\n };\n\n\n\n // start the application\n\n AppLauncher::with_window(main_window)\n\n .launch(initial_state)\n\n .expect(\"Failed to launch application\");\n\n}\n\n\n", "file_path": "src/gui/gui.rs", "rank": 8, "score": 54849.91661778465 }, { "content": "pub fn setup_logger(exec_path: &PathBuf, log_level: log::LevelFilter) -> anyhow::Result<()> {\n\n let log_file = exec_path.with_file_name(\"uploader.log\");\n\n fern::Dispatch::new()\n\n .format(|out, message, record| {\n\n out.finish(format_args!(\n\n \"[{}][{}][{}] {}\",\n\n chrono::Local::now().to_rfc3339(),\n\n record.level(),\n\n record.target(),\n\n message\n\n ))\n\n })\n\n .level(log_level)\n\n .chain(std::io::stdout())\n\n .chain(fern::log_file(log_file)?)\n\n .apply()\n\n .context(\"unable to setup logging\")?;\n\n Ok(())\n\n}\n", "file_path": "src/logging.rs", "rank": 9, "score": 54125.55847558376 }, { "content": "#[cfg(target_os = \"windows\")]\n\npub fn run_service() {\n\n windows::run()\n\n}\n", "file_path": "src/service/mod.rs", "rank": 10, "score": 53283.79672539033 }, { "content": "fn build_root_widget() -> impl Widget<HelloState> {\n\n let watch_dir = if let Some(user_dirs) = directories::UserDirs::new() {\n\n user_dirs\n\n .document_dir()\n\n .map(|x| {\n\n x.join(\"Paradox Interactive\")\n\n .join(\"Europa Universalis IV\")\n\n .join(\"save games\")\n\n })\n\n .map(|x| x.display().to_string())\n\n .unwrap_or_else(|| String::from(\"unknown\"))\n\n } else {\n\n String::from(\"unknown\")\n\n };\n\n\n\n let intro_text_1 = Label::new(\"When the 
\\\"Start\\\" button is pressed, Rakaly will\");\n\n let intro_text_2 = Label::new(\"automatically start watching the following directory\");\n\n let intro_text_3 = Label::new(\"for any changes:\");\n\n let intro_text_4 = Label::new(watch_dir);\n\n let intro_text_5 = Label::new(\"Rakaly will upload the new files to the server.\");\n", "file_path": "src/gui/gui.rs", "rank": 11, "score": 50687.366156598015 }, { "content": "pub fn get_user_input() -> UserInputConfig {\n\n let _ = writeln!(io::stdout(), \"Welcome to Rakaly.\");\n\n let _ = writeln!(\n\n io::stdout(),\n\n \"A config file was not detected, so we'll create one\"\n\n );\n\n let _ = write!(io::stdout(), \"Steam username: \");\n\n let _ = io::stdout().flush();\n\n let username: String = text_io::read!(\"{}\\n\");\n\n let _ = write!(io::stdout(), \"{}'s API key: \", username);\n\n let _ = io::stdout().flush();\n\n let api_key: String = text_io::read!(\"{}\\n\");\n\n UserInputConfig { username, api_key }\n\n}\n", "file_path": "src/config.rs", "rank": 12, "score": 47768.87201979541 }, { "content": "pub fn project_home() -> anyhow::Result<ProjectDirs> {\n\n if let Some(x) = ProjectDirs::from(\"com\", \"\", \"rakaly\") {\n\n Ok(x)\n\n } else {\n\n anyhow::bail!(\"unable to locate project directory\")\n\n }\n\n}\n\n\n", "file_path": "src/cli.rs", "rank": 13, "score": 45236.106985010236 }, { "content": "pub fn parse_args() -> anyhow::Result<ParsedArgs> {\n\n let mut args = pico_args::Arguments::from_env();\n\n\n\n if args.contains([\"-h\", \"--help\"]) {\n\n return Ok(ParsedArgs::Help);\n\n }\n\n\n\n if args.contains(\"--version\") {\n\n return Ok(ParsedArgs::Version);\n\n }\n\n\n\n let subcommand = args\n\n .subcommand()\n\n .context(\"unable to extract subcommand\")?\n\n .unwrap_or_else(|| String::from(\"gui\"));\n\n\n\n match subcommand.to_ascii_lowercase().as_str() {\n\n \"gui\" => Ok(ParsedArgs::Gui {\n\n config: get_config_path(&mut args)?,\n\n }),\n\n \"run\" => Ok(ParsedArgs::Run {\n\n config: get_config_path(&mut args)?,\n\n }),\n\n \"install-service\" => Ok(ParsedArgs::InstallService),\n\n \"run-service\" => Ok(ParsedArgs::RunService),\n\n \"uninstall-service\" => Ok(ParsedArgs::UninstallService),\n\n x => anyhow::bail!(\"unrecognized subcommand: {}\", x),\n\n }\n\n}\n\n\n", "file_path": "src/cli.rs", "rank": 14, "score": 45236.106985010236 }, { "content": "pub fn parse_config(data: &[u8]) -> anyhow::Result<UploaderConfig> {\n\n toml::de::from_slice(data).context(\"unable to deserialize toml config\")\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 15, "score": 41157.933987913195 }, { "content": "#[derive(Clone, Data, Lens)]\n\nstruct HelloState {\n\n steam_name: String,\n\n api_key: String,\n\n}\n\n\n", "file_path": "src/gui/gui.rs", "rank": 16, "score": 40316.837392737536 }, { "content": "fn get_config_path(args: &mut Arguments) -> anyhow::Result<PathBuf> {\n\n let cli_config: Option<PathBuf> = args.opt_value_from_str([\"-c\", \"--config\"])?;\n\n match cli_config {\n\n Some(path) => Ok(path),\n\n None => project_home().map(|x| x.config_dir().join(\"config.toml\")),\n\n }\n\n}\n\n\n", "file_path": "src/cli.rs", "rank": 17, "score": 37266.203028955584 }, { "content": "fn my_service_main(_arguments: Vec<OsString>) {\n\n // The entry point where execution will start on a background thread after a call to\n\n // `service_dispatcher::start` from `main`.\n\n}\n\n\n", "file_path": "src/service/windows.rs", "rank": 18, "score": 35437.12706818279 }, { "content": "#[cfg(unix)]\n\nfn main() {}\n", "file_path": "build.rs", "rank": 19, 
"score": 25916.74649555509 }, { "content": "#[cfg(windows)]\n\nfn main() {\n\n let mut res = winres::WindowsResource::new();\n\n res.set_icon(\"assets/icon.ico\");\n\n res.compile().unwrap();\n\n}\n\n\n", "file_path": "build.rs", "rank": 20, "score": 25916.74649555509 }, { "content": "fn main() -> anyhow::Result<()> {\n\n fn run() -> anyhow::Result<()> {\n\n let exec_path = env::current_exe()?;\n\n let args = cli::parse_args()?;\n\n match args {\n\n ParsedArgs::Help => {\n\n cli::print_help();\n\n Ok(())\n\n }\n\n ParsedArgs::Version => {\n\n let _ = writeln!(io::stdout(), \"{}\", env!(\"CARGO_PKG_VERSION\"));\n\n Ok(())\n\n }\n\n ParsedArgs::Gui { .. } => {\n\n gui::run();\n\n Ok(())\n\n }\n\n ParsedArgs::Run {\n\n config: config_path,\n\n } => {\n", "file_path": "src/main.rs", "rank": 25, "score": 22117.35215552355 }, { "content": "fn default_log_level() -> log::LevelFilter {\n\n log::LevelFilter::Info\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 26, "score": 20068.99814786423 }, { "content": "<h1 align=\"center\">\n\n <img src=\"assets/screenshot.png?raw=true\">\n\n<br/>\n\nRakaly Desktop\n\n</h1>\n\n\n\n**A WIP prototype desktop GUI for Rakaly functionality**\n\n\n\nNot ready for consumption\n\n\n\nGoals:\n\n\n\n- **Lightweight**: Rakaly Desktop must use a minimum of resources (CPU and RAM) so as to not impact gameplay.\n\n- **Cross platform**: Any platform that can play EU4 can use Rakaly Desktop\n\n- **Service**: The app should be able to tie into a windows service so that it can start on boot\n\n\n\nRight now there are two subcommands implemented:\n\n\n\n- `run`: Watches the configured directory (or if not configured, the current working directory) for EU4 file changes and then upload to an endpoint. If the EU4 file is a zip file, it is uploaded verbatim, else text files are gzipped before upload.\n\n- `gui`: playground for the same thing but in GUI form.\n", "file_path": "README.md", "rank": 27, "score": 14681.189308975105 }, { "content": "use anyhow::Context;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::{\n\n io::{self, Write},\n\n path::{Path, PathBuf},\n\n};\n\n\n\n#[derive(Serialize)]\n\npub struct UserInputConfig {\n\n pub username: String,\n\n pub api_key: String,\n\n}\n\n\n\n#[derive(Deserialize)]\n\npub struct UploaderConfig {\n\n pub username: String,\n\n pub api_key: String,\n\n pub watch_directory: Option<PathBuf>,\n\n\n\n #[serde(default = \"default_api_url\")]\n\n pub api_url: String,\n\n\n\n #[serde(default = \"default_log_level\")]\n\n pub log_level: log::LevelFilter,\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 28, "score": 9.845027178360255 }, { "content": "use anyhow::Context;\n\nuse std::path::PathBuf;\n\n\n", "file_path": "src/logging.rs", "rank": 29, "score": 6.660485159134654 }, { "content": " let config = if !config_path.as_path().exists() {\n\n let user_input = config::get_user_input();\n\n config::write_minimal_config(&user_input, &config_path)?\n\n } else {\n\n config::read_config(&config_path)?\n\n };\n\n\n\n logging::setup_logger(&exec_path, config.log_level)?;\n\n\n\n let client = watcher::Client {\n\n username: config.username,\n\n api_key: config.api_key,\n\n api_url: config.api_url,\n\n };\n\n\n\n let watch_path = config\n\n .watch_directory\n\n .as_deref()\n\n .unwrap_or_else(|| exec_path.parent().unwrap_or_else(|| exec_path.as_path()));\n\n\n", "file_path": "src/main.rs", "rank": 30, "score": 6.634135689486293 }, { "content": " watcher::core_loop(&watch_path, &client)\n\n }\n\n ParsedArgs::RunService => {\n\n 
service::run_service();\n\n Ok(())\n\n }\n\n ParsedArgs::InstallService => {\n\n service::run_service();\n\n Ok(())\n\n }\n\n ParsedArgs::UninstallService => {\n\n service::run_service();\n\n Ok(())\n\n }\n\n }\n\n }\n\n\n\n let res = run();\n\n if let Err(ref e) = res {\n\n if log_enabled!(Level::Debug) {\n", "file_path": "src/main.rs", "rank": 31, "score": 6.433220504313218 }, { "content": "use anyhow::Context;\n\nuse directories::ProjectDirs;\n\nuse pico_args::Arguments;\n\nuse std::{\n\n io::{self, Write},\n\n path::PathBuf,\n\n};\n\n\n\npub enum ParsedArgs {\n\n Help,\n\n Version,\n\n Gui { config: PathBuf },\n\n Run { config: PathBuf },\n\n InstallService,\n\n UninstallService,\n\n RunService,\n\n}\n\n\n\nconst HELP: &str = r#\"\n\nAutomatically upload saves to rakaly.com when a new file is detected\n", "file_path": "src/cli.rs", "rank": 32, "score": 5.977310638732692 }, { "content": "mod cli;\n\nmod config;\n\nmod gui;\n\nmod logging;\n\nmod service;\n\nmod watcher;\n\n\n\nuse cli::ParsedArgs;\n\nuse log::{error, log_enabled, Level};\n\nuse std::env;\n\nuse std::io::{self, Write};\n\n\n", "file_path": "src/main.rs", "rank": 33, "score": 3.562413385241477 }, { "content": "use std::ffi::OsString;\n\nuse windows_service::service_dispatcher;\n\n\n\nwindows_service::define_windows_service!(ffi_service_main, my_service_main);\n\n\n", "file_path": "src/service/windows.rs", "rank": 34, "score": 3.4809924652084465 }, { "content": "use druid::widget::{Align, Button, Flex, Label, Padding, TextBox};\n\nuse druid::{AppLauncher, Data, Lens, LocalizedString, Widget, WidgetExt, WindowDesc};\n\n\n\nconst VERTICAL_WIDGET_SPACING: f64 = 20.0;\n\nconst HORIZTONAL_WIDGET_SPACING: f64 = 8.0;\n\nconst WINDOW_TITLE: LocalizedString<HelloState> = LocalizedString::new(\"Rakaly\");\n\n\n\n#[derive(Clone, Data, Lens)]\n", "file_path": "src/gui/gui.rs", "rank": 35, "score": 3.3443865781476556 } ]
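The config helpers shown in the context items above (write_minimal_config, read_config, parse_config from src/config.rs) round-trip a TOML file through serde with anyhow's Context for error messages. The sketch below is a minimal, self-contained illustration of that same round trip; it is not part of the dataset row. The MinimalConfig struct, the sample values, and the use of toml::ser::to_vec / toml::de::from_slice (the toml 0.5-era API the snippets themselves call) are assumptions made only for this example.

use anyhow::Context;
use serde::{Deserialize, Serialize};

// Illustrative stand-in for the UserInputConfig / UploaderConfig pair above.
#[derive(Serialize, Deserialize)]
struct MinimalConfig {
    username: String,
    api_key: String,
}

fn main() -> anyhow::Result<()> {
    // Serialize user-provided values to TOML bytes, as write_minimal_config does.
    let input = MinimalConfig {
        username: "player-one".to_string(), // hypothetical sample value
        api_key: "secret".to_string(),      // hypothetical sample value
    };
    let bytes = toml::ser::to_vec(&input).context("unable to serialize config")?;

    // Parse the bytes straight back, as parse_config / read_config do.
    let parsed: MinimalConfig =
        toml::de::from_slice(&bytes).context("unable to deserialize config")?;
    assert_eq!(parsed.username, "player-one");
    Ok(())
}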
Rust
src/compile.rs
shino16/cargo-auto-bundle
1cbd9fe1a3a8ea7f6ca499372a6d9eb7ca0edd8a
use super::ModPath; use anyhow::Result; use itertools::Itertools; use proc_macro2::{Ident, Span}; use quote::ToTokens; use std::{ collections::BTreeMap, path::{Path, PathBuf}, }; pub fn compile( crate_name: &str, paths: &[ModPath], file_paths: &[PathBuf], mod_visibility: BTreeMap<ModPath, String>, macros: &[String], ) -> Result<String> { let mut res = String::new(); let mut location = ModPath::new(); for (path, file_path) in paths.into_iter().zip(file_paths) { let base = location .iter() .zip(path.iter()) .take_while(|(a, b)| a == b) .count(); while location.len() > base { let p = location.pop().unwrap(); res += &format!("\n}} // mod {}\n", p); } while location.len() < path.len() { let name = &path[location.len()]; location.push(name.clone()); if mod_visibility .get(&location) .filter(|s| s.is_empty()) .is_some() { res += &format!("\nmod {} {{\n", name); } else { let vis = mod_visibility .get(&location) .cloned() .unwrap_or("pub".to_owned()); res += &format!("\n{} mod {} {{\n", vis, name); } } res += "\n"; res += &read_process(&file_path, crate_name, false, macros)?; } while let Some(p) = location.pop() { res += &format!("\n}} // mod {}\n", p); } Ok(reduce_newline(res)) } fn reduce_newline(mut s: String) -> String { let bytes = unsafe { s.as_bytes_mut() }; let mut j = 0; let mut newline_cnt = 0; for i in 0..bytes.len() { if bytes[i] == b'\n' { newline_cnt += 1; } else { newline_cnt = 0; } if newline_cnt <= 2 { bytes[j] = bytes[i]; j += 1; } } s.truncate(j); s } pub fn compile_entry(path: &Path, crate_name: &str, macros: &[String]) -> Result<String> { Ok(read_process(path, crate_name, true, macros)?) } fn read_process<'a>( file_path: &Path, crate_name: &'a str, external: bool, macros: &[String], ) -> Result<String> { use syn::visit::Visit; struct Visitor<'ast, 'a, 'b> { use_spans: Vec<(&'ast Ident, Span)>, remove_spans: Vec<(Span, Span)>, crate_name: &'a str, macros: &'b [String], } impl<'ast, 'a, 'b> Visit<'ast> for Visitor<'ast, 'a, 'b> { fn visit_item_use(&mut self, item: &'ast syn::ItemUse) { if let syn::UseTree::Path(ref path) = item.tree { if let syn::UseTree::Name(ref name) = *path.tree { if path.ident == self.crate_name && self.macros.contains(&name.ident.to_string()) { if path.ident != "crate" { let mut iter = item.to_token_stream().into_iter(); let start = iter.next().unwrap().span(); let end = iter.last().unwrap().span(); self.remove_spans.push((start, end)); } return; } } self.use_spans.push((&path.ident, path.ident.span())); }; } fn visit_item_mod(&mut self, item: &'ast syn::ItemMod) { if item.semi.is_some() { let mut iter = item.to_token_stream().into_iter(); let start = iter.next().unwrap().span(); let end = iter.last().unwrap().span(); self.remove_spans.push((start, end)); } syn::visit::visit_item_mod(self, item); } } let content = std::fs::read_to_string(file_path)?; let file = syn::parse_file(&content)?; let mut visitor = Visitor { use_spans: Vec::new(), remove_spans: Vec::new(), crate_name: if external { crate_name } else { "crate" }, macros, }; visitor.visit_file(&file); let mut targets = Vec::new(); for (ident, span) in visitor.use_spans { if !external && ident.to_string() == "crate" { targets.push((span.end(), span.end(), format!("::{}", crate_name))); } if external && ident.to_string() == crate_name { targets.push((span.start(), span.start(), "crate::".to_owned())); } } for (start, end) in visitor.remove_spans { targets.push((start.start(), end.end(), "".to_owned())); } targets.sort_unstable(); let lines = content.lines().collect_vec(); if lines.is_empty() { return 
Ok("".to_owned()); } let (mut line_pos, mut col_pos) = (0, 0); let mut res = String::new(); for (start, end, pat) in targets { while line_pos < start.line - 1 { res += &lines[line_pos][col_pos..]; res += "\n"; line_pos += 1; col_pos = 0; } if pat.is_empty() && lines[start.line - 1][..start.column] .chars() .all(|c| c.is_ascii_whitespace()) && lines[end.line - 1][end.column..] .chars() .all(|c| c.is_ascii_whitespace()) { line_pos = end.line; col_pos = 0; } else { res += &lines[line_pos][..start.column]; res += &pat; line_pos = end.line - 1; col_pos = end.column; } } if line_pos < lines.len() { res += &lines[line_pos][col_pos..]; res += "\n"; lines[line_pos + 1..].into_iter().for_each(|line| { res += line; res += "\n"; }); } Ok(res) }
use super::ModPath; use anyhow::Result; use itertools::Itertools; use proc_macro2::{Ident, Span}; use quote::ToTokens; use std::{ collections::BTreeMap, path::{Path, PathBuf}, }; pub fn compile( crate_name: &str, paths: &[ModPath], file_paths: &[PathBuf], mod_visibility: BTreeMap<ModPath, String>, macros: &[String], ) -> Result<String> { let mut res = String::new(); let mut location = ModPath::new(); for (path, file_path) in paths.into_iter().zip(file_paths) { let base = location .iter() .zip(path.iter()) .take_while(|(a, b)| a == b) .count(); while location.len() > base { let p = location.pop().unwrap(); res += &format!("\n}} // mod {}\n", p); } while location.len() < path.len() { let name = &path[location.len()]; location.push(name.clone()); if mod_visibility .get(&location) .filter(|s| s.is_empty()) .is_some() { res += &format!("\nmod {} {{\n", name); } else { let vis = mod_visibility .get(&location) .cloned() .unwrap_or("pub".to_owned()); res += &format!("\n{} mod {} {{\n", vis, name); } } res += "\n"; res += &read_process(&file_path, crate_name, false, macros)?; } while let Some(p) = location.pop() { res += &format!("\n}} // mod {}\n", p); } Ok(reduce_newline(res)) } fn reduce_newline(mut s: String) -> String { let bytes = unsafe { s.as_bytes_mut() }; let mut j = 0; let mut newline_cnt = 0; for i in 0..bytes.len() { if bytes[i] == b'\n' { newline_cnt += 1; } else { newline_cnt = 0; } if newline_cnt <= 2 { bytes[j] = bytes[i]; j += 1; } } s.truncate(j); s } pub fn compile_entry(path: &Path, crate_name: &str, macros: &[String]) -> Result<String> { Ok(read_process(path, crate_name, true, macros)?) } fn read_process<'a>( file_path: &Path, crate_name: &'a str, external: bool, macros: &[String], ) -> Result<String> { use syn::visit::Visit; struct Visitor<'ast, 'a, 'b> { use_spans: Vec<(&'ast Ident, Span)>, remove_spans: Vec<(Span, Span)>, crate_name: &'a str, macros: &'b [String], } impl<'ast, 'a, 'b> Visit<'ast> for Visitor<'ast, 'a, 'b> { fn visit_item_use(&mut self, item: &'ast syn::ItemUse) { if let syn::UseTree::Path(ref path) = item.tree { if let syn::UseTree::Name(ref name) = *path.tree { if path.ident == self.crate_name && self.macros.contains(&name.ident.to_string()) { if path.ident != "crate" { let mut iter = item.to_token_stream().into_iter(); let start = iter.next().unwrap().span(); let end = iter.last().unwrap().span(); self.remove_spans.push((start, end)); } return; } } self.use_spans.push((&path.ident, path.ident.span())); }; }
fn visit_item_mod(&mut self, item: &'ast syn::ItemMod) { if item.semi.is_some() { let mut iter = item.to_token_stream().into_iter(); let start = iter.next().unwrap().span(); let end = iter.last().unwrap().span(); self.remove_spans.push((start, end)); } syn::visit::visit_item_mod(self, item); } } let content = std::fs::read_to_string(file_path)?; let file = syn::parse_file(&content)?; let mut visitor = Visitor { use_spans: Vec::new(), remove_spans: Vec::new(), crate_name: if external { crate_name } else { "crate" }, macros, }; visitor.visit_file(&file); let mut targets = Vec::new(); for (ident, span) in visitor.use_spans { if !external && ident.to_string() == "crate" { targets.push((span.end(), span.end(), format!("::{}", crate_name))); } if external && ident.to_string() == crate_name { targets.push((span.start(), span.start(), "crate::".to_owned())); } } for (start, end) in visitor.remove_spans { targets.push((start.start(), end.end(), "".to_owned())); } targets.sort_unstable(); let lines = content.lines().collect_vec(); if lines.is_empty() { return Ok("".to_owned()); } let (mut line_pos, mut col_pos) = (0, 0); let mut res = String::new(); for (start, end, pat) in targets { while line_pos < start.line - 1 { res += &lines[line_pos][col_pos..]; res += "\n"; line_pos += 1; col_pos = 0; } if pat.is_empty() && lines[start.line - 1][..start.column] .chars() .all(|c| c.is_ascii_whitespace()) && lines[end.line - 1][end.column..] .chars() .all(|c| c.is_ascii_whitespace()) { line_pos = end.line; col_pos = 0; } else { res += &lines[line_pos][..start.column]; res += &pat; line_pos = end.line - 1; col_pos = end.column; } } if line_pos < lines.len() { res += &lines[line_pos][col_pos..]; res += "\n"; lines[line_pos + 1..].into_iter().for_each(|line| { res += line; res += "\n"; }); } Ok(res) }
function_block-function_prefix_line
[ { "content": "fn visit_use_file(path: &Path) -> Result<Vec<ModPath>> {\n\n use syn::UseTree::{self, *};\n\n fn dfs(tree: &UseTree, prefix: &mut ModPath, buf: &mut Vec<ModPath>) {\n\n match tree {\n\n Path(path) => {\n\n prefix.push(path.ident.to_string());\n\n dfs(&*path.tree, prefix, buf);\n\n prefix.pop().unwrap();\n\n }\n\n Name(name) => {\n\n prefix.push(name.ident.to_string());\n\n buf.push(prefix.clone());\n\n prefix.pop();\n\n }\n\n Rename(rename) => {\n\n prefix.push(rename.ident.to_string());\n\n buf.push(prefix.clone());\n\n prefix.pop();\n\n }\n\n Glob(_) => {\n", "file_path": "src/traverse.rs", "rank": 2, "score": 73849.67746343982 }, { "content": "fn main() -> Result<()> {\n\n let Opt::AutoBundle {\n\n crate_path,\n\n entry_point,\n\n list_deps,\n\n } = Opt::from_args();\n\n\n\n let crate_path = crate_path.canonicalize()?;\n\n\n\n let toml_path = crate_path.join(\"Cargo.toml\").canonicalize()?;\n\n let manifest = cargo_toml::Manifest::from_path(&toml_path)?;\n\n let crate_name = if let Some(lib) = manifest.lib {\n\n lib.name.unwrap()\n\n } else {\n\n panic!(\"No lib package found.\");\n\n };\n\n\n\n let crate_root = crate_path.join(\"src\").canonicalize()?;\n\n\n\n let (paths, file_paths, mods_visibility, macros) =\n", "file_path": "src/main.rs", "rank": 5, "score": 24855.060557523146 }, { "content": " }\n\n impl<'ast> Visit<'ast> for Visitor {\n\n fn visit_item_macro(&mut self, item: &'ast syn::ItemMacro) {\n\n if item.attrs.contains(&syn::parse_quote!(#[macro_export])) {\n\n if let Some(ref ident) = item.ident {\n\n self.macros.push(ident.to_string());\n\n }\n\n }\n\n syn::visit::visit_item_macro(self, item);\n\n }\n\n fn visit_item_mod(&mut self, item: &'ast syn::ItemMod) {\n\n self.path.push(item.ident.to_string());\n\n if item.content.is_some() {\n\n self.mods.push(self.path.clone());\n\n }\n\n syn::visit::visit_item_mod(self, item);\n\n self.path.pop();\n\n }\n\n }\n\n\n", "file_path": "src/traverse.rs", "rank": 13, "score": 16.615931780528054 }, { "content": " Ok(())\n\n }\n\n\n\n pub fn run(\n\n &mut self,\n\n ) -> Result<(\n\n Vec<ModPath>,\n\n Vec<PathBuf>,\n\n BTreeMap<ModPath, String>,\n\n Vec<String>,\n\n )> {\n\n self.scan_mods(\n\n &mut self.crate_root.clone(),\n\n &mut vec![self.crate_name.clone()],\n\n )?;\n\n\n\n let mut result = Vec::new();\n\n self.todo = self\n\n .todo\n\n .iter()\n", "file_path": "src/traverse.rs", "rank": 14, "score": 16.52955314483785 }, { "content": "use anyhow::{Error, Result};\n\nuse itertools::Itertools;\n\nuse std::{\n\n collections::BTreeMap,\n\n path::{Path, PathBuf},\n\n};\n\n\n\nuse syn::visit::Visit;\n\n\n\nuse super::ModPath;\n\n\n\npub struct Traverse {\n\n crate_root: PathBuf,\n\n crate_name: String,\n\n entry_path: ModPath,\n\n todo: Vec<ModPath>,\n\n exclude: PathBuf,\n\n mods_location: BTreeMap<ModPath, (PathBuf, ModPath)>,\n\n mods_visibility: BTreeMap<ModPath, String>,\n\n exported_macros: Vec<String>,\n", "file_path": "src/traverse.rs", "rank": 15, "score": 16.275100625486246 }, { "content": " entry_path,\n\n todo: use_paths\n\n .into_iter()\n\n .filter(|p| if [crate_name, \"crate\", \"self\", \"super\"].contains(&(&*p[0])) {\n\n true\n\n } else {\n\n if p[0] != \"std\" && p[0] != \"core\" {\n\n eprintln!(\"[warning] skipping `{}`\", p[0]);\n\n }\n\n false\n\n })\n\n .collect(),\n\n exclude: crate_root.join(\"bin\"),\n\n mods_location: BTreeMap::new(),\n\n mods_visibility: BTreeMap::new(),\n\n exported_macros: Vec::new(),\n\n })\n\n }\n\n\n\n fn canonicalize(&self, path: &ModPath, at: &ModPath) -> ModPath {\n", 
"file_path": "src/traverse.rs", "rank": 16, "score": 15.997817058058233 }, { "content": "use anyhow::Result;\n\nuse std::path::PathBuf;\n\nuse structopt::StructOpt;\n\n\n\npub type ModPath = Vec<String>;\n\n\n\nmod compile;\n\nmod traverse;\n\n\n\n#[derive(StructOpt)]\n\n#[structopt(bin_name(\"cargo\"))]\n", "file_path": "src/main.rs", "rank": 17, "score": 15.560459369644207 }, { "content": " traverse::Traverse::new(&crate_root, &crate_name, &entry_point)?.run()?;\n\n\n\n if list_deps {\n\n for file_path in file_paths {\n\n println!(\"{}\", file_path.to_string_lossy());\n\n }\n\n return Ok(());\n\n }\n\n\n\n let mut result = compile::compile_entry(&entry_point, &crate_name, &macros)?;\n\n let compiled = compile::compile(&crate_name, &paths, &file_paths, mods_visibility, &macros)?;\n\n if !compiled.is_empty() {\n\n result += \"\\n\";\n\n result += &compiled;\n\n }\n\n\n\n let mut s = 0;\n\n while result.as_bytes().get(s) == Some(&b'\\n') {\n\n s += 1;\n\n }\n\n print!(\"{}\", &result[s..]);\n\n\n\n Ok(())\n\n}\n", "file_path": "src/main.rs", "rank": 18, "score": 14.697499130737215 }, { "content": " buf.push(prefix.clone());\n\n }\n\n Group(group) => {\n\n group.items.iter().for_each(|tree| dfs(tree, prefix, buf));\n\n }\n\n }\n\n }\n\n\n\n #[derive(Default)]\n\n struct Visitor {\n\n paths: Vec<ModPath>,\n\n }\n\n impl<'ast> Visit<'ast> for Visitor {\n\n fn visit_item_use(&mut self, item: &'ast syn::ItemUse) {\n\n dfs(&item.tree, &mut Vec::new(), &mut self.paths);\n\n }\n\n }\n\n\n\n let content = std::fs::read_to_string(path)?;\n\n let mut visitor = Visitor::default();\n\n visitor.visit_file(&syn::parse_file(&content)?);\n\n\n\n Ok(visitor.paths)\n\n}\n", "file_path": "src/traverse.rs", "rank": 19, "score": 14.641198304856836 }, { "content": " .filter(|p| if [&self.crate_name, \"crate\", \"self\", \"super\"].contains(&(&*p[0])) {\n\n true\n\n } else {\n\n if p[0] != \"std\" && p[0] != \"core\" {\n\n eprintln!(\"[warning] skipping `{}`\", p[0]);\n\n }\n\n false\n\n })\n\n .filter(|p| p.len() != 2 || !self.exported_macros.contains(&p[1]))\n\n .map(|p| self.find_mod_file(&p, &path).map(|(_, p)| p.to_owned()))\n\n .collect();\n\n\n\n Ok(canonical?.into_iter().unique().collect())\n\n }\n\n\n\n fn scan_mods(&mut self, file_path: &mut PathBuf, path: &mut ModPath) -> Result<()> {\n\n struct Visitor {\n\n mods: Vec<ModPath>,\n\n macros: Vec<String>,\n\n path: ModPath,\n", "file_path": "src/traverse.rs", "rank": 20, "score": 14.473630094019136 }, { "content": " .insert(path.to_owned(), (file_path.to_owned(), path.to_owned()));\n\n let content = std::fs::read_to_string(&file_path)?;\n\n let file = syn::parse_file(&content)?;\n\n let mut visitor = Visitor {\n\n mods: Vec::new(),\n\n macros: Vec::new(),\n\n path: path.to_owned(),\n\n };\n\n visitor.visit_file(&file);\n\n for mod_path in visitor.mods {\n\n self.mods_location\n\n .insert(mod_path, (file_path.clone(), path.clone())); // TODO\n\n }\n\n self.exported_macros.extend_from_slice(&visitor.macros);\n\n if name_str != \"mod\" && name_str != \"lib\" {\n\n path.pop();\n\n }\n\n }\n\n file_path.pop();\n\n }\n", "file_path": "src/traverse.rs", "rank": 21, "score": 14.292561203525233 }, { "content": "}\n\n\n\nimpl Traverse {\n\n pub fn new(crate_root: &Path, crate_name: &str, entry_point: &Path) -> Result<Self> {\n\n let entry_point = entry_point.canonicalize()?;\n\n let use_paths = visit_use_file(&entry_point)?;\n\n let mut entry_path: Vec<String> = entry_point\n\n .parent()\n\n .unwrap()\n\n .join(entry_point.file_stem().unwrap())\n\n 
.strip_prefix(crate_root)\n\n .unwrap_or(&Path::new(\"\"))\n\n .into_iter()\n\n .map(|p| p.to_string_lossy().to_string())\n\n .collect();\n\n entry_path.insert(0, crate_name.to_owned());\n\n\n\n Ok(Traverse {\n\n crate_root: crate_root.to_owned(),\n\n crate_name: crate_name.to_owned(),\n", "file_path": "src/traverse.rs", "rank": 22, "score": 14.046402142106633 }, { "content": " for entry in std::fs::read_dir(&file_path)? {\n\n let entry = entry?;\n\n if entry.path() == self.exclude {\n\n continue;\n\n }\n\n let name = entry.file_name();\n\n file_path.push(name.clone());\n\n let name_string = name\n\n .into_string()\n\n .map_err(|_| Error::msg(format!(\"Cannot open {:?}\", file_path)))?;\n\n if entry.metadata()?.is_dir() {\n\n path.push(name_string);\n\n self.scan_mods(file_path, path)?;\n\n path.pop();\n\n } else {\n\n let name_str = &name_string[..name_string.len() - 3];\n\n if name_str != \"mod\" && name_str != \"lib\" {\n\n path.push(name_str.to_owned());\n\n }\n\n self.mods_location\n", "file_path": "src/traverse.rs", "rank": 23, "score": 13.186490684605497 }, { "content": "## 詳細\n\n\n\n* `<crate>/Cargo.toml` をパースし、クレート名を取得します。\n\n* `<entry-point>` ファイルを起点に、対象クレート内のモジュールや構造体・トレイト等に対する `use` 宣言を辿り、依存するファイルを列挙します。\n\n* `--list-deps` が渡されたとき、これらのファイルへのパスを一行ずつ出力します。\n\n* そうでない場合は、これらのファイルを `<entry-point>` ファイルとまとめて出力します。このとき、\n\n * `<entry-point>` ファイルの中身が先に出力されます。\n\n * ファイル構造は `(公開性) mod (モジュール名) { ... }` という形で反映されます。\n\n * `<entry-point>` ファイル中の `use (クレート名)::...` と `<crate>` 内のファイルの `use crate::...` は、ともに `use crate::(クレート名)::...` で置き換えられます。(マクロの中身を除く)\n\n * `#[macro_export]` 属性が付された単一のマクロに対する `use` 宣言に対しては、特別な処理を行います。`use` 宣言のパスが `crate::` から始まる場合は何の処理も行わず、`(クレート名)::` から始まる場合は宣言ごと削除します。\n\n * 例えばモジュール `a` 内でマクロ `x` を `#[macro_export]` 付きで定義した場合、モジュール `a` 内に `pub use crate::x;` と書いて、これを使うときは `use (クレート名)::a::*;` とするよいと思います。\n\n * より簡単なのは、クレートのトップレベル(`<crate>/lib.rs`)でマクロを定義することです。これを直接 `use (クレート名)::x;` とすればうまくいきます。\n\n * `<crate>` 中の `(pub) mod (モジュール名);` は削除されます。\n\n\n", "file_path": "README.md", "rank": 24, "score": 10.103958984377337 }, { "content": " let mut res = if path[0] == \"self\" || path[0] == \"super\" {\n\n at.to_owned()\n\n } else {\n\n ModPath::new()\n\n };\n\n for p in path {\n\n match p as &str {\n\n \"crate\" => {\n\n res.push(self.crate_name.to_owned());\n\n }\n\n \"self\" => {}\n\n \"super\" => {\n\n res.pop().unwrap();\n\n }\n\n p => {\n\n res.push(p.to_owned());\n\n }\n\n }\n\n }\n\n res\n", "file_path": "src/traverse.rs", "rank": 25, "score": 9.553612473200133 }, { "content": " .map(|path| {\n\n self.find_mod_file(path, &self.entry_path)\n\n .map(|(_, p)| p.to_owned())\n\n })\n\n .try_collect()?;\n\n let mut pushed = self.todo.clone();\n\n while let Some(path) = self.todo.pop() {\n\n result.push(path.clone());\n\n let uses = self.visit_use(&path)?;\n\n for path in uses {\n\n if !pushed.contains(&path) {\n\n self.todo.push(path.clone());\n\n pushed.push(path);\n\n }\n\n }\n\n }\n\n result.sort();\n\n result.dedup();\n\n let paths = result\n\n .iter()\n", "file_path": "src/traverse.rs", "rank": 26, "score": 9.508460264834536 }, { "content": " }\n\n\n\n fn find_mod_file(&self, mod_path: &ModPath, at: &ModPath) -> Result<&(PathBuf, ModPath)> {\n\n let mut i = mod_path.len();\n\n while i != 0 && !self.mods_location.contains_key(&mod_path[..i]) {\n\n i -= 1;\n\n }\n\n if i != 0 {\n\n Ok(self.mods_location.get(&mod_path[..i]).unwrap())\n\n } else {\n\n self.find_mod_file(&self.canonicalize(mod_path, at), at)\n\n }\n\n }\n\n\n\n fn visit_use(&self, path: &ModPath) -> 
Result<Vec<ModPath>> {\n\n let paths = visit_use_file(&self.find_mod_file(&path, &path)?.0)?;\n\n\n\n let canonical: Result<Vec<_>, _> = paths\n\n .into_iter()\n\n .map(|p| self.canonicalize(&p, path))\n", "file_path": "src/traverse.rs", "rank": 27, "score": 9.349687627142504 }, { "content": " .map(|path| self.find_mod_file(path, path).map(|(p, _)| p.to_owned()))\n\n .try_collect()?;\n\n Ok((\n\n result,\n\n paths,\n\n std::mem::take(&mut self.mods_visibility),\n\n std::mem::take(&mut self.exported_macros),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "src/traverse.rs", "rank": 28, "score": 8.946515345885205 }, { "content": "## 注意\n\n\n\n1. 相対パスによる `use` 宣言への対応が不完全です。使うときはファイル内のトップレベルに置いてください。\n\n1. `use` 宣言されていないモジュールは、使われていたとしても走査対象になりません。例えば `let inv = my_library::math::modpow(n, MOD - 2, MOD);` のような記述があっても、`use my_library::math;` のような記述がなければ `math` モジュールは出力に反映されません。\n\n1. `use` 宣言以外の場所で `crate::...` と書かれていても、`crate::(クレート名)` には置き換えられません。(`use` 宣言は `rust-analyzer` が勝手に入れてくれるので、さぼっています)\n\n1. `<crate>` がバイナリクレートであった場合の動作は未確認です。\n\n1. `use` されていないモジュールの `impl` やマクロ定義は補足できません。\n\n1. マクロ定義・呼び出しの中身に含まれる `use` 宣言は無視されます。\n\n1. `(公開性) mod XXX;` に付された属性は無視され、出力に含まれません。\n\n1. 手続きマクロの展開は一切行いません。\n\n1. [自分のライブラリ](https://github.com/shino16/cpr) でそれっぽく動くことしか確認していません。本プログラムの動作に関連して発生したいかなる結果(CE、WA、TLE、…)について責任を負いません。\n", "file_path": "README.md", "rank": 29, "score": 7.176062030463317 }, { "content": "#[derive(StructOpt)]\n\n#[structopt(bin_name(\"cargo\"))]\n\nenum Opt {\n\n AutoBundle {\n\n #[structopt(short, long = \"crate\", default_value = \".\")]\n\n crate_path: PathBuf,\n\n #[structopt(short, long, default_value = \"src/main.rs\")]\n\n entry_point: PathBuf,\n\n #[structopt(short, long)]\n\n list_deps: bool,\n\n },\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 30, "score": 4.158290937676316 }, { "content": "# cargo-auto-bundle\n\n\n\nRustライブラリのコピペ作業やライブラリ管理を補助する競プロ用ツールです。\n\n\n\n## インストール\n\n\n\n```bash\n\n$ cargo install --git https://github.com/shino16/cargo-auto-bundle\n\n```\n\n\n\n## 使い方\n\n\n\n```bash\n\n$ cargo auto-bundle [--crate <crate [default: .]>] [--entry-point <entry-point [default: src/main.rs]>] [--list-deps]\n\n```\n\n\n\n## 使用例\n\n\n\n`lib`(クレート名)は任意のクレート名で置き換えてください。\n\n\n\n```rust\n\nuse lib::ds::fenwick::*;\n\nuse proconio::*;\n\n\n\n#[fastout]\n\nfn main() {\n\n input! 
{\n\n n: usize, q: usize,\n\n a: [u32; n],\n\n txy: [(u32, usize, usize); q],\n\n }\n\n let mut fwk = FenwickTree::new(a, GroupImpl(|| 0, |a, b| a ^ b, |a| a));\n\n for (t, x, y) in txy {\n\n match t {\n\n 1 => fwk.add(x - 1, y as u32),\n\n _ => println!(\"{}\", fwk.ask(x - 1, y)),\n\n }\n\n }\n\n}\n\n\n\n```\n\n\n\nこのコードを `lib` クレート中の `src/main.rs` に置き、次を実行します:\n\n\n\n```bash\n\n$ cargo auto-bundle > tmp.rs\n\n```\n\n\n\nこれを提出⇒[AC](https://atcoder.jp/contests/abc185/submissions/20195269)\n\n\n\nこのコードが依存する `lib::ds::fenwick` モジュールと、`lib::ds::fenwick` が依存する `lib::alg`、`lib::bits` の中身がモジュール構造を保って展開されています。\n\n\n\nまた、これは [`online-judge-tools/verification-helper`](https://github.com/online-judge-tools/verification-helper) と組合せて使うことができます。\n\n\n\n例:[`.github/workflows/ci.yml`](https://github.com/shino16/cpr/blob/master/.github/workflows/ci.yml) / [`.verify-helper/config.toml`](https://github.com/shino16/cpr/blob/master/.verify-helper/config.toml)\n\n\n\nなお、[`online-judge-tools/verification-helper`](https://github.com/online-judge-tools/verification-helper) には [Rustサポートが追加された](https://github.com/online-judge-tools/verification-helper/pull/346) ので、少し [書きかえ](https://github.com/shino16/verification-helper/commit/ac15e8072a522833c4dad69fa1414edd23beade9) が必要です。\n\n\n", "file_path": "README.md", "rank": 31, "score": 3.2316523503239956 } ]
Rust
fabric_contract/src/dataapi/wirebuffer.rs
wtllc/fabric-contract-api-rust
6cf261d7795f1e26169934757422bf13772c9589
/* * SPDX-License-Identifier: Apache-2.0 */ use super::TypeSchema; use std::fmt::Debug; pub struct WireBuffer { pub buffer: Option<Vec<u8>>, pub schema: TypeSchema, } impl WireBuffer { pub fn new( buffer: Vec<u8>, schema: TypeSchema, /*, converter: Box<dyn Converter>*/ ) -> Self { Self { buffer: Some(buffer), schema, } } pub fn new_unfilled(schema: TypeSchema /*, converter: Box<dyn Converter>*/) -> Self { Self { buffer: Option::None, schema, } } } impl Debug for WireBuffer { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match &self.buffer { Some(b) =>{ write!(f, "WireBuffer: {:?}", b.as_slice()) }, None => { write!(f, "WireBuffer: <emptry>") } } } } impl From<&WireBuffer> for String { fn from(wb: &WireBuffer) -> Self { match &wb.buffer { Some(buffer) => std::str::from_utf8(&buffer).unwrap().to_string(), None => "".to_string(), } } } impl From<&WireBuffer> for i32 { fn from(wb: &WireBuffer) -> Self { match &wb.buffer { Some(buffer) => { match std::str::from_utf8(&buffer) { Ok(a) => i32::from_str_radix(a,10).unwrap_or(0), _ => unreachable!(), } } None => 0, } } } impl From<&WireBuffer> for u32 { fn from(wb: &WireBuffer) -> Self { match &wb.buffer { Some(buffer) => { match std::str::from_utf8(&buffer) { Ok(a) => u32::from_str_radix(a,10).unwrap_or(0), _ => unreachable!(), } } None => 0, } } } pub trait WireBufferFromReturnType<T> { fn from_rt(self: &mut Self, _: T); } impl WireBufferFromReturnType<String> for WireBuffer { fn from_rt(self: &mut Self, s: String) { self.buffer = Some(s.into_bytes()); } } impl WireBufferFromReturnType<()> for WireBuffer { fn from_rt(self: &mut Self, _: ()) { self.buffer = None; } } impl WireBufferFromReturnType<bool> for WireBuffer { fn from_rt(self: &mut Self, b: bool) { self.buffer = match b { true => Some(b"true".to_vec()), false => Some(b"false".to_vec()), }; } } impl WireBufferFromReturnType<i8> for WireBuffer { fn from_rt(self: &mut Self, s: i8) { self.buffer = Some(s.to_string().into_bytes()); } } impl WireBufferFromReturnType<i16> for WireBuffer { fn from_rt(self: &mut Self, s: i16) { self.buffer = Some(s.to_string().into_bytes()); } } impl WireBufferFromReturnType<i32> for WireBuffer { fn from_rt(self: &mut Self, s: i32) { self.buffer = Some(s.to_string().into_bytes()); } } impl WireBufferFromReturnType<i64> for WireBuffer { fn from_rt(self: &mut Self, s: i64) { self.buffer = Some(s.to_string().into_bytes()); } } impl WireBufferFromReturnType<isize> for WireBuffer { fn from_rt(self: &mut Self, s: isize) { self.buffer = Some(s.to_string().into_bytes()); } } impl WireBufferFromReturnType<u8> for WireBuffer { fn from_rt(self: &mut Self, s: u8) { self.buffer = Some(s.to_string().into_bytes()); } } impl WireBufferFromReturnType<u16> for WireBuffer { fn from_rt(self: &mut Self, s: u16) { self.buffer = Some(s.to_string().into_bytes()); } } impl WireBufferFromReturnType<u32> for WireBuffer { fn from_rt(self: &mut Self, s: u32) { self.buffer = Some(s.to_string().into_bytes()); } } impl WireBufferFromReturnType<u64> for WireBuffer { fn from_rt(self: &mut Self, s: u64) { self.buffer = Some(s.to_string().into_bytes()); } } impl WireBufferFromReturnType<usize> for WireBuffer { fn from_rt(self: &mut Self, s: usize) { self.buffer = Some(s.to_string().into_bytes()); } } impl WireBufferFromReturnType<f32> for WireBuffer { fn from_rt(self: &mut Self, s: f32) { self.buffer = Some(s.to_string().into_bytes()); } } impl WireBufferFromReturnType<f64> for WireBuffer { fn from_rt(self: &mut Self, s: f64) { self.buffer = Some(s.to_string().into_bytes()); } }
/* * SPDX-License-Identifier: Apache-2.0 */ use super::TypeSchema; use std::fmt::Debug; pub struct WireBuffer { pub buffer: Option<Vec<u8>>, pub schema: TypeSchema, } impl WireBuffer { pub fn new( buffer: Vec<u8>, schema: TypeSchema, /*, converter: Box<dyn Converter>*/ ) -> Self { Self { buffer: Some(buffer), schema, } } pub fn new_unfilled(schema: TypeSchema /*, converter: Box<dyn Converter>*/) -> Self { Self { buffer: Option::None, schema, } } } impl Debug for WireBuffer { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match &self.buffer { Some(b) =>{ write!(f, "WireBuffer: {:?}", b.as_slice()) }, None => { write!(f, "WireBuffer: <emptry>") } } } } impl From<&WireBuffer> for String { fn from(wb: &WireBuffer) -> Self { match &wb.buffer { Some(buffer) => std::str::from_utf8(&buffer).unwrap().to_string(), None => "".to_string(), } } } impl From<&WireBuffer> for i32 { fn from(wb: &WireBuffer) -> Self { match &wb.buffer { Some(buffer) => { match std::str::from_utf8(&buffer) { Ok(a) => i32::from_str_radix(a,10).unwrap_or(0), _ => unreachable!(), } } None => 0, } } } impl From<&WireBuffer> for u32 { fn from(wb: &WireBuffer) -> Self { match &wb.buffer { Some(buffer) => { match std::str::from_utf8(&buffer) { Ok(a) => u32::from_str_radix(a,10).unwrap_or(0), _ => unreachable!(), } } None => 0, } } } pub trait WireBufferFromReturnType<T> { fn from_rt(self: &mut Self, _: T); } impl WireBufferFromReturnType<String> for WireBuffer { fn from_rt(self: &mut Self, s: String) { self.buffer = Some(s.into_bytes()); } } impl WireBufferFromReturnType<()> for WireBuffer { fn from_rt(self: &mut Self, _: ()) { self.buffer = None; } } impl WireBufferFromReturnType<bool> for WireBuffer {
} impl WireBufferFromReturnType<i8> for WireBuffer { fn from_rt(self: &mut Self, s: i8) { self.buffer = Some(s.to_string().into_bytes()); } } impl WireBufferFromReturnType<i16> for WireBuffer { fn from_rt(self: &mut Self, s: i16) { self.buffer = Some(s.to_string().into_bytes()); } } impl WireBufferFromReturnType<i32> for WireBuffer { fn from_rt(self: &mut Self, s: i32) { self.buffer = Some(s.to_string().into_bytes()); } } impl WireBufferFromReturnType<i64> for WireBuffer { fn from_rt(self: &mut Self, s: i64) { self.buffer = Some(s.to_string().into_bytes()); } } impl WireBufferFromReturnType<isize> for WireBuffer { fn from_rt(self: &mut Self, s: isize) { self.buffer = Some(s.to_string().into_bytes()); } } impl WireBufferFromReturnType<u8> for WireBuffer { fn from_rt(self: &mut Self, s: u8) { self.buffer = Some(s.to_string().into_bytes()); } } impl WireBufferFromReturnType<u16> for WireBuffer { fn from_rt(self: &mut Self, s: u16) { self.buffer = Some(s.to_string().into_bytes()); } } impl WireBufferFromReturnType<u32> for WireBuffer { fn from_rt(self: &mut Self, s: u32) { self.buffer = Some(s.to_string().into_bytes()); } } impl WireBufferFromReturnType<u64> for WireBuffer { fn from_rt(self: &mut Self, s: u64) { self.buffer = Some(s.to_string().into_bytes()); } } impl WireBufferFromReturnType<usize> for WireBuffer { fn from_rt(self: &mut Self, s: usize) { self.buffer = Some(s.to_string().into_bytes()); } } impl WireBufferFromReturnType<f32> for WireBuffer { fn from_rt(self: &mut Self, s: f32) { self.buffer = Some(s.to_string().into_bytes()); } } impl WireBufferFromReturnType<f64> for WireBuffer { fn from_rt(self: &mut Self, s: f64) { self.buffer = Some(s.to_string().into_bytes()); } }
fn from_rt(self: &mut Self, b: bool) { self.buffer = match b { true => Some(b"true".to_vec()), false => Some(b"false".to_vec()), }; }
function_block-full_function
[ { "content": "pub trait Converter {\n\n fn into_string(&self, buffer: &[u8], ts: &TypeSchema) -> String;\n\n}\n\n\n\n#[derive(Debug, Clone, Copy)]\n\npub struct JSONConverter {}\n\n\n\nimpl Converter for JSONConverter {\n\n // straight conversion\n\n fn into_string(&self, buffer: &[u8], ts: &TypeSchema) -> String {\n\n match ts.contract_type {\n\n CTString => String::from_utf8(buffer.to_vec()).unwrap(),\n\n _ => \"\".to_string(),\n\n }\n\n }\n\n}\n", "file_path": "fabric_contract/src/dataapi/serializer.rs", "rank": 1, "score": 137331.55126405164 }, { "content": "#[proc_macro_attribute]\n\npub fn contract_impl(\n\n _args: proc_macro::TokenStream,\n\n input: proc_macro::TokenStream,\n\n) -> proc_macro::TokenStream {\n\n // parse the incoming AST, we don't wish to modify the existing code so immediately\n\n // quote! this to ensure it's in the output.\n\n let ty = parse_macro_input!(input as syn::ItemImpl);\n\n let existing = quote! {#ty};\n\n\n\n // Need to navigate down to the indentifier of the struct\n\n // Honestly... not quite sure how this actually works as the self_ty structure is\n\n // quite a few structs/enums deep so how this can be directly accessed ??\n\n let type_name = match *ty.self_ty {\n\n syn::Type::Path(ref path) => &path.path,\n\n _ => panic!(),\n\n };\n\n\n\n // we need to have the names of the methods, both as literal strings\n\n // and identifiers to call\n\n let mut method_names = Vec::new();\n", "file_path": "fabric_contract_macros/src/lib.rs", "rank": 2, "score": 134858.2451606296 }, { "content": "/// The API calls made by the users contract implementation via the Collection, State interfaces etc..\n\n/// get rounted to here. They are then passed over the Wasm boundary to be sent off (somehow) to the peer\n\n///\n\npub fn runtime_host_call(service: String, cmd: String, data: Vec<u8>) -> Result<Vec<u8>> {\n\n trace!(\n\n \"Making host call {}::{}::len={}::\",\n\n service,\n\n cmd,\n\n data.len()\n\n );\n\n match host_call(\"wapc\", &service[..], &cmd[..], &data) {\n\n Ok(v) => Ok(v),\n\n Err(e) => {\n\n debug!(\"{:?}\", e);\n\n Err(e)\n\n }\n\n }\n\n}\n\n\n", "file_path": "fabric_contract/src/runtimeapi/wapc.rs", "rank": 3, "score": 113674.92729676371 }, { "content": "#[proc_macro_attribute]\n\npub fn transaction(\n\n args: proc_macro::TokenStream,\n\n input: proc_macro::TokenStream,\n\n) -> proc_macro::TokenStream {\n\n let psitem = parse_macro_input!(input as ItemFn);\n\n let txargs = parse_macro_input!(args as AttributeArgs);\n\n\n\n\n\n\n\n\n\n\n\n let name = psitem.sig.ident.clone();\n\n let classname = syn::Ident::new(&format!(\"{}{}\", \"invoke_\", name), psitem.sig.ident.span());\n\n let metadata = syn::Ident::new(&format!(\"{}{}\", \"md_\", name), psitem.sig.ident.span());\n\n let name_as_literal = ident_to_litstr(&name);\n\n\n\n let ret_type = match psitem.sig.output.clone() {\n\n syn::ReturnType::Default => Box::new(syn::parse_quote!(())),\n\n syn::ReturnType::Type(_, ret_type) => ret_type,\n\n };\n", "file_path": "fabric_contract_macros/src/lib.rs", "rank": 4, "score": 103658.36622385081 }, { "content": "#[proc_macro_attribute]\n\npub fn property(\n\n _args: proc_macro::TokenStream,\n\n input: proc_macro::TokenStream,\n\n) -> proc_macro::TokenStream {\n\n input\n\n}\n\n\n\n///\n\n/// Use this to mark the structs that serve as complex data types\n\n/// Need to provide example\n\n///\n", "file_path": "fabric_contract_macros/src/lib.rs", "rank": 5, "score": 103658.36622385081 }, { "content": "// trait that is implemented by macro for each struct that does 
the final step in the routing to\n\n// the transaction functions\n\npub trait Routing {\n\n fn route3(\n\n &self,\n\n tx_fn: String,\n\n args: Vec<WireBuffer>,\n\n return_wb: TypeSchema,\n\n ) -> Result<WireBuffer, ContractError>;\n\n}\n\n\n", "file_path": "fabric_contract/src/contractapi/contract.rs", "rank": 6, "score": 103553.55302491013 }, { "content": "pub trait Metadata {\n\n /// Gets the detail of the functions, which is vector of strings\n\n fn get_fn_metadata(&self) -> Vec<TransactionFn>;\n\n}\n\n\n", "file_path": "fabric_contract/src/contractapi/contract.rs", "rank": 7, "score": 103546.03532489917 }, { "content": "/// Called from the register contract macro.\n\n///\n\n/// Initalize the settings of the logger etc.\n\npub fn init_logger() {\n\n log::set_logger(&LOGGER).unwrap();\n\n log::set_max_level(LevelFilter::Trace);\n\n\n\n // configure the panic hook, otherwise any panics\n\n // when running in Wasm will be lost\n\n panic::set_hook(Box::new(hook));\n\n}\n\n\n", "file_path": "fabric_contract/src/runtimeapi/logger.rs", "rank": 8, "score": 101926.61702942109 }, { "content": "pub fn get_context() -> Context {\n\n CONTEXT.with(|ctx| ctx.borrow().clone())\n\n}\n", "file_path": "fabric_contract/src/runtimeapi/wapc.rs", "rank": 9, "score": 95387.68159440385 }, { "content": "/// Collection Iterator\n\n///\n\n/// Standard Rust iterator over the returned states\n\npub trait CollectionIterator: Iterator {\n\n /// sets the paging size\n\n fn set_paging_size(pagesize: u32);\n\n\n\n /// number of fetched states\n\n fn get_fetched_count() -> u32;\n\n\n\n /// set the bookmark to a previous returned value\n\n fn set_bookmark(bookmark: String);\n\n\n\n /// get currentmark\n\n fn get_bookmark() -> String;\n\n\n\n // close\n\n // hope this can be done automatiacally....\n\n //\n\n}\n", "file_path": "fabric_contract/src/ledgerapi/collection.rs", "rank": 10, "score": 95257.47246895172 }, { "content": "pub trait DataType: Default {\n\n /// Converts into a state that can be handled and put into\n\n /// the ledger and private collections\n\n fn to_state(&self) -> State;\n\n\n\n ///\n\n fn get_key(&self) -> String;\n\n\n\n ///\n\n fn build_from_state(state: State) -> Self;\n\n\n\n ///\n\n fn form_key(k: &String) -> String;\n\n}\n", "file_path": "fabric_contract/src/ledgerapi/datatype.rs", "rank": 11, "score": 95257.47246895172 }, { "content": "pub trait VerifyHashConsistency<T> {\n\n fn verify_consistent(&self, o: T) -> Result<bool, LedgerError>;\n\n}\n\n\n\nimpl VerifyHashConsistency<String> for State {\n\n fn verify_consistent(&self, o: String) -> Result<bool, LedgerError> {\n\n todo!()\n\n }\n\n}\n\n\n\nimpl VerifyHashConsistency<State> for State {\n\n fn verify_consistent(&self, o: State) -> Result<bool, LedgerError> {\n\n todo!()\n\n }\n\n}\n\n\n\nimpl<T: DataType> VerifyHashConsistency<T> for State {\n\n fn verify_consistent(&self, o: T) -> Result<bool, LedgerError> {\n\n todo!()\n\n }\n\n}", "file_path": "fabric_contract/src/ledgerapi/state.rs", "rank": 12, "score": 93702.92879532909 }, { "content": "#[inline(never)] // not sure why this is not inlined?\n\npub fn log(s: &str) {\n\n console_log(s);\n\n}\n\n\n", "file_path": "fabric_contract/src/runtimeapi/wapc.rs", "rank": 13, "score": 93292.01002745544 }, { "content": "/// Trait that is implemented for each contract\n\n/// Default implementations here\n\npub trait Contract: Routing + Metadata {\n\n fn name(&self) -> String;\n\n // fn before_transaction(&self, ctx: Context);\n\n // fn after_transaction(&self, _ctx: Context) {\n\n // 
println!(\"Default After Tranasction\");\n\n // }\n\n\n\n /// Verify the client MSPID and the Peers MSPID are the same\n\n fn get_verified_client_org(&self) -> Result<String, ContractError> {\n\n let tx = Transaction::current_transaction();\n\n\n\n let peers_msp = tx.get_peer_mspid();\n\n let client_msp = tx.get_submitting_identity()?.get_mspid();\n\n if peers_msp != client_msp {\n\n Err(ContractError::from(\n\n \"Mismatch of Organization names\".to_string(),\n\n ))\n\n } else {\n\n Ok(client_msp)\n\n }\n\n }\n\n}\n", "file_path": "fabric_contract/src/contractapi/contract.rs", "rank": 14, "score": 91492.30338165347 }, { "content": "/// Hook function to capture the panic and route it\n\n/// to the logger\n\npub fn hook(info: &panic::PanicInfo) {\n\n let msg = info.to_string();\n\n\n\n // Finally, log the panic via waPC\n\n error!(\"[Panic]{}[/Panic]\", msg);\n\n}\n", "file_path": "fabric_contract/src/runtimeapi/logger.rs", "rank": 15, "score": 85643.71266314018 }, { "content": "///\n\n/// Map to the ContractService\n\n///\n\n/// ```ignore\n\n/// service ContractService {\n\n/// rpc GetMetadata (GetMetadataRequest) returns (GetMetadataResponse);\n\n/// rpc InvokeTransaction (InvokeTransactionRequest) returns (InvokeTransactionResponse);\n\n/// rpc RegisterPeer (RegisterPeerRequest) returns (RegisterPeerResponse);\n\n/// }\n\n/// ```\n\n///\n\npub fn handle_wapc(operation: &str, msg: &[u8]) -> CallResult {\n\n log(\">> handle_wapc\");\n\n match operation {\n\n \"InvokeTransaction\" => handle_tx_invoke(msg),\n\n \"GetMetadata\" => todo!(\"GetMetadata\"),\n\n \"RegisterPeer\" => todo!(\"RegisterPeer\"),\n\n _ => Err(\"Unknown operation being called\".into()),\n\n }\n\n}\n\n\n", "file_path": "fabric_contract/src/runtimeapi/wapc.rs", "rank": 16, "score": 77216.77402459568 }, { "content": "#[proc_macro_derive(DataTypeMacro)]\n\npub fn data_type_derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n // Construct a representation of Rust code as a syntax tree\n\n // that we can manipulate\n\n let ast = syn::parse(input).unwrap();\n\n\n\n // Build the trait implementation\n\n impl_hello_macro(&ast)\n\n}\n\n\n", "file_path": "fabric_contract_macros/src/lib.rs", "rank": 17, "score": 72573.56676117895 }, { "content": "(function() {var implementors = {};\n\nimplementors[\"asset_transfer_rs\"] = [{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"fabric_contract/dataapi/wirebuffer/trait.WireBufferFromReturnType.html\\\" title=\\\"trait fabric_contract::dataapi::wirebuffer::WireBufferFromReturnType\\\">WireBufferFromReturnType</a>&lt;<a class=\\\"struct\\\" href=\\\"asset_transfer_rs/struct.Asset.html\\\" title=\\\"struct asset_transfer_rs::Asset\\\">Asset</a>&gt; for <a class=\\\"struct\\\" href=\\\"fabric_contract/dataapi/wirebuffer/struct.WireBuffer.html\\\" title=\\\"struct fabric_contract::dataapi::wirebuffer::WireBuffer\\\">WireBuffer</a>\",\"synthetic\":false,\"types\":[\"fabric_contract::dataapi::wirebuffer::WireBuffer\"]}];\n", "file_path": "docs/apidoc/implementors/fabric_contract/dataapi/wirebuffer/trait.WireBufferFromReturnType.js", "rank": 18, "score": 66875.3908674591 }, { "content": "fn impl_hello_macro(ast: &syn::DeriveInput) -> proc_macro::TokenStream {\n\n let name = &ast.ident;\n\n let gen = quote! {\n\n impl DataType for #name {\n\n fn hello_macro() {\n\n println!(\"Hello, Macro! 
My name is {}!\", stringify!(#name));\n\n }\n\n }\n\n };\n\n gen.into()\n\n}\n\n\n", "file_path": "fabric_contract_macros/src/lib.rs", "rank": 19, "score": 65721.7756304141 }, { "content": "struct RuntimeLogger {\n\n level: Level,\n\n}\n\n\n\n/// Use the log crate for internal logging, and contract logging\n\n///\n\n/// following the example at https://docs.rs/log/0.4.8/log/fn.set_logger.html\n\nimpl log::Log for RuntimeLogger {\n\n fn enabled(&self, metadata: &Metadata) -> bool {\n\n metadata.level() <= self.level\n\n }\n\n\n\n fn log(&self, record: &Record) {\n\n if self.enabled(record.metadata()) {\n\n log(&format!(\"{} - {}\", record.level(), record.args())[..]);\n\n }\n\n }\n\n\n\n fn flush(&self) {}\n\n}\n\n\n", "file_path": "fabric_contract/src/runtimeapi/logger.rs", "rank": 20, "score": 56340.10822855805 }, { "content": " // First trait to implement on C\n\n trait Metadata {\n\n fn get_fn_metadata(&self) -> Vec<TransactionFn>;\n\n }\n", "file_path": "fabric_contract/src/contractapi/contractdefn.rs", "rank": 21, "score": 56092.71050920848 }, { "content": " // Second trait to implement on C\n\n trait Routing {\n\n fn route3(\n\n &self,\n\n tx_fn: String,\n\n args: Vec<WireBuffer>,\n\n return_wb: TypeSchema,\n\n ) -> Result<WireBuffer, ContractError>;\n\n }\n\n }\n\n\n\n impl Contract for MockTestContract {\n\n fn name(&self) -> String {\n\n \"TestContract\".to_string()\n\n } \n\n }\n\n\n\n #[test]\n\n fn new_defn() {\n\n\n\n let contract = MockTestContract::new();\n", "file_path": "fabric_contract/src/contractapi/contractdefn.rs", "rank": 22, "score": 56092.71050920848 }, { "content": "fn extract_arg_pat(a: FnArg) -> Pat {\n\n match a {\n\n FnArg::Typed(p) => *p.pat,\n\n _ => panic!(),\n\n }\n\n}\n", "file_path": "fabric_contract_macros/src/lib.rs", "rank": 23, "score": 53680.646025476904 }, { "content": "(function() {var implementors = {};\n\nimplementors[\"basic_contract_rs\"] = [{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/core/convert/trait.From.html\\\" title=\\\"trait core::convert::From\\\">From</a>&lt;<a class=\\\"struct\\\" href=\\\"fabric_contract/ledgerapi/state/struct.State.html\\\" title=\\\"struct fabric_contract::ledgerapi::state::State\\\">State</a>&gt; for <a class=\\\"struct\\\" href=\\\"basic_contract_rs/struct.MyAsset.html\\\" title=\\\"struct basic_contract_rs::MyAsset\\\">MyAsset</a>\",\"synthetic\":false,\"types\":[\"basic_contract_rs::types::myasset::MyAsset\"]}];\n\nimplementors[\"fabric_contract\"] = [{\"text\":\"impl&lt;'_&gt; <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/core/convert/trait.From.html\\\" title=\\\"trait core::convert::From\\\">From</a>&lt;&amp;'_ <a class=\\\"struct\\\" href=\\\"fabric_contract/data/struct.WireBuffer.html\\\" title=\\\"struct fabric_contract::data::WireBuffer\\\">WireBuffer</a>&gt; for <a class=\\\"struct\\\" href=\\\"https://doc.rust-lang.org/nightly/alloc/string/struct.String.html\\\" title=\\\"struct alloc::string::String\\\">String</a>\",\"synthetic\":false,\"types\":[\"alloc::string::String\"]},{\"text\":\"impl&lt;'_&gt; <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/core/convert/trait.From.html\\\" title=\\\"trait core::convert::From\\\">From</a>&lt;&amp;'_ <a class=\\\"struct\\\" href=\\\"fabric_contract/data/struct.WireBuffer.html\\\" title=\\\"struct fabric_contract::data::WireBuffer\\\">WireBuffer</a>&gt; for <a class=\\\"primitive\\\" 
href=\\\"https://doc.rust-lang.org/nightly/std/primitive.i32.html\\\">i32</a>\",\"synthetic\":false,\"types\":[]},{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/core/convert/trait.From.html\\\" title=\\\"trait core::convert::From\\\">From</a>&lt;<a class=\\\"struct\\\" href=\\\"https://doc.rust-lang.org/nightly/alloc/string/struct.String.html\\\" title=\\\"struct alloc::string::String\\\">String</a>&gt; for <a class=\\\"struct\\\" href=\\\"fabric_contract/contract/struct.ContractError.html\\\" title=\\\"struct fabric_contract::contract::ContractError\\\">ContractError</a>\",\"synthetic\":false,\"types\":[\"fabric_contract::error::ContractError\"]},{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/core/convert/trait.From.html\\\" title=\\\"trait core::convert::From\\\">From</a>&lt;<a class=\\\"primitive\\\" href=\\\"https://doc.rust-lang.org/nightly/std/primitive.tuple.html\\\">(</a><a class=\\\"struct\\\" href=\\\"https://doc.rust-lang.org/nightly/alloc/string/struct.String.html\\\" title=\\\"struct alloc::string::String\\\">String</a>, <a class=\\\"struct\\\" href=\\\"fabric_contract/contract/struct.LedgerError.html\\\" title=\\\"struct fabric_contract::contract::LedgerError\\\">LedgerError</a><a class=\\\"primitive\\\" href=\\\"https://doc.rust-lang.org/nightly/std/primitive.tuple.html\\\">)</a>&gt; for <a class=\\\"struct\\\" href=\\\"fabric_contract/contract/struct.ContractError.html\\\" title=\\\"struct fabric_contract::contract::ContractError\\\">ContractError</a>\",\"synthetic\":false,\"types\":[\"fabric_contract::error::ContractError\"]},{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/core/convert/trait.From.html\\\" title=\\\"trait core::convert::From\\\">From</a>&lt;<a class=\\\"struct\\\" href=\\\"fabric_contract/contract/struct.LedgerError.html\\\" title=\\\"struct fabric_contract::contract::LedgerError\\\">LedgerError</a>&gt; for <a class=\\\"struct\\\" href=\\\"fabric_contract/contract/struct.ContractError.html\\\" title=\\\"struct fabric_contract::contract::ContractError\\\">ContractError</a>\",\"synthetic\":false,\"types\":[\"fabric_contract::error::ContractError\"]},{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/core/convert/trait.From.html\\\" title=\\\"trait core::convert::From\\\">From</a>&lt;<a class=\\\"struct\\\" href=\\\"https://doc.rust-lang.org/nightly/alloc/string/struct.String.html\\\" title=\\\"struct alloc::string::String\\\">String</a>&gt; for <a class=\\\"struct\\\" href=\\\"fabric_contract/contract/struct.LedgerError.html\\\" title=\\\"struct fabric_contract::contract::LedgerError\\\">LedgerError</a>\",\"synthetic\":false,\"types\":[\"fabric_contract::error::LedgerError\"]},{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/core/convert/trait.From.html\\\" title=\\\"trait core::convert::From\\\">From</a>&lt;<a class=\\\"primitive\\\" href=\\\"https://doc.rust-lang.org/nightly/std/primitive.unit.html\\\">()</a>&gt; for <a class=\\\"struct\\\" href=\\\"fabric_contract/contract/struct.State.html\\\" title=\\\"struct fabric_contract::contract::State\\\">State</a>\",\"synthetic\":false,\"types\":[\"fabric_contract::ledgerapi::state::State\"]},{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/core/convert/trait.From.html\\\" title=\\\"trait core::convert::From\\\">From</a>&lt;<a class=\\\"primitive\\\" 
href=\\\"https://doc.rust-lang.org/nightly/std/primitive.tuple.html\\\">(</a><a class=\\\"struct\\\" href=\\\"https://doc.rust-lang.org/nightly/alloc/string/struct.String.html\\\" title=\\\"struct alloc::string::String\\\">String</a>, <a class=\\\"struct\\\" href=\\\"https://doc.rust-lang.org/nightly/alloc/vec/struct.Vec.html\\\" title=\\\"struct alloc::vec::Vec\\\">Vec</a>&lt;<a class=\\\"primitive\\\" href=\\\"https://doc.rust-lang.org/nightly/std/primitive.u8.html\\\">u8</a>&gt;<a class=\\\"primitive\\\" href=\\\"https://doc.rust-lang.org/nightly/std/primitive.tuple.html\\\">)</a>&gt; for <a class=\\\"struct\\\" href=\\\"fabric_contract/contract/struct.State.html\\\" title=\\\"struct fabric_contract::contract::State\\\">State</a>\",\"synthetic\":false,\"types\":[\"fabric_contract::ledgerapi::state::State\"]},{\"text\":\"impl&lt;'_&gt; <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/core/convert/trait.From.html\\\" title=\\\"trait core::convert::From\\\">From</a>&lt;&amp;'_ State&gt; for <a class=\\\"struct\\\" href=\\\"fabric_contract/contract/struct.State.html\\\" title=\\\"struct fabric_contract::contract::State\\\">State</a>\",\"synthetic\":false,\"types\":[\"fabric_contract::ledgerapi::state::State\"]},{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/core/convert/trait.From.html\\\" title=\\\"trait core::convert::From\\\">From</a>&lt;State&gt; for <a class=\\\"struct\\\" href=\\\"fabric_contract/contract/struct.State.html\\\" title=\\\"struct fabric_contract::contract::State\\\">State</a>\",\"synthetic\":false,\"types\":[\"fabric_contract::ledgerapi::state::State\"]},{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/core/convert/trait.From.html\\\" title=\\\"trait core::convert::From\\\">From</a>&lt;<a class=\\\"struct\\\" href=\\\"fabric_contract/contract/struct.State.html\\\" title=\\\"struct fabric_contract::contract::State\\\">State</a>&gt; for State\",\"synthetic\":false,\"types\":[\"fabric_ledger_protos::ledger_messages::State\"]}];\n", "file_path": "docs/apidoc/implementors/core/convert/trait.From.js", "rank": 24, "score": 49774.122994571866 }, { "content": "fn extract_arg_idents(fn_args: Punctuated<FnArg, syn::token::Comma>) -> Vec<Pat> {\n\n fn_args\n\n .into_iter()\n\n .skip(1)\n\n .map(extract_arg_pat)\n\n .collect::<Vec<_>>()\n\n}\n\n\n", "file_path": "fabric_contract_macros/src/lib.rs", "rank": 25, "score": 49678.6496683926 }, { "content": "(function() {var implementors = {};\n\nimplementors[\"asset_transfer_rs\"] = [{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/core/fmt/trait.Debug.html\\\" title=\\\"trait core::fmt::Debug\\\">Debug</a> for <a class=\\\"struct\\\" href=\\\"asset_transfer_rs/struct.Asset.html\\\" title=\\\"struct asset_transfer_rs::Asset\\\">Asset</a>\",\"synthetic\":false,\"types\":[\"asset_transfer_rs::types::asset::Asset\"]}];\n\nimplementors[\"fabric_contract\"] = [{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/core/fmt/trait.Debug.html\\\" title=\\\"trait core::fmt::Debug\\\">Debug</a> for <a class=\\\"struct\\\" href=\\\"fabric_contract/prelude/struct.TransactionFn.html\\\" title=\\\"struct fabric_contract::prelude::TransactionFn\\\">TransactionFn</a>\",\"synthetic\":false,\"types\":[\"fabric_contract::contractapi::transaction::TransactionFn\"]},{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/core/fmt/trait.Debug.html\\\" title=\\\"trait 
core::fmt::Debug\\\">Debug</a> for <a class=\\\"struct\\\" href=\\\"fabric_contract/prelude/struct.TransactionFnBuilder.html\\\" title=\\\"struct fabric_contract::prelude::TransactionFnBuilder\\\">TransactionFnBuilder</a>\",\"synthetic\":false,\"types\":[\"fabric_contract::contractapi::transaction::TransactionFnBuilder\"]},{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/core/fmt/trait.Debug.html\\\" title=\\\"trait core::fmt::Debug\\\">Debug</a> for <a class=\\\"struct\\\" href=\\\"fabric_contract/data/struct.TypeSchema.html\\\" title=\\\"struct fabric_contract::data::TypeSchema\\\">TypeSchema</a>\",\"synthetic\":false,\"types\":[\"fabric_contract::dataapi::typeschema::TypeSchema\"]},{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/core/fmt/trait.Debug.html\\\" title=\\\"trait core::fmt::Debug\\\">Debug</a> for <a class=\\\"struct\\\" href=\\\"fabric_contract/data/struct.WireBuffer.html\\\" title=\\\"struct fabric_contract::data::WireBuffer\\\">WireBuffer</a>\",\"synthetic\":false,\"types\":[\"fabric_contract::dataapi::wirebuffer::WireBuffer\"]},{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/core/fmt/trait.Debug.html\\\" title=\\\"trait core::fmt::Debug\\\">Debug</a> for <a class=\\\"struct\\\" href=\\\"fabric_contract/contract/struct.ContractError.html\\\" title=\\\"struct fabric_contract::contract::ContractError\\\">ContractError</a>\",\"synthetic\":false,\"types\":[\"fabric_contract::error::ContractError\"]},{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/core/fmt/trait.Debug.html\\\" title=\\\"trait core::fmt::Debug\\\">Debug</a> for <a class=\\\"struct\\\" href=\\\"fabric_contract/contract/struct.LedgerError.html\\\" title=\\\"struct fabric_contract::contract::LedgerError\\\">LedgerError</a>\",\"synthetic\":false,\"types\":[\"fabric_contract::error::LedgerError\"]}];\n", "file_path": "docs/apidoc/implementors/core/fmt/trait.Debug.js", "rank": 26, "score": 48840.75779713187 }, { "content": "fn set_context(name: Context) {\n\n CONTEXT.with(|ctx| *ctx.borrow_mut() = name);\n\n}\n\n\n", "file_path": "fabric_contract/src/runtimeapi/wapc.rs", "rank": 27, "score": 47754.71335931331 }, { "content": "(function() {var implementors = {};\n\nimplementors[\"fabric_contract\"] = [];\n", "file_path": "docs/apidoc/implementors/fabric_contract/data/trait.WireBufferFromReturnType.js", "rank": 28, "score": 45307.186854800486 }, { "content": "/// handle_tx_invoke called with the buffer that contains the request\n\n/// of what transaction function should be invoked\n\nfn handle_tx_invoke(msg: &[u8]) -> CallResult {\n\n trace!(\"handler_tx_invoke>>\");\n\n\n\n // decode the message and arguments\n\n let invoke_request = parse_from_bytes::<InvokeTransactionRequest>(&msg).unwrap();\n\n let fn_name = invoke_request.get_transaction_name();\n\n let args = invoke_request.get_args();\n\n let transient_args = invoke_request.get_transient_args();\n\n let request_ctx = invoke_request.get_context();\n\n set_context(Context::new(request_ctx));\n\n let ctx = get_context();\n\n\n\n // pass over to the contract manager to route\n\n trace!(\n\n \"making the routing call tx::{} fn::{}\",\n\n request_ctx.get_transaction_id(),\n\n fn_name\n\n );\n\n\n\n let mut response_msg = InvokeTransactionResponse::new();\n", "file_path": "fabric_contract/src/runtimeapi/wapc.rs", "rank": 29, "score": 43646.097246319056 }, { "content": "/// Convert from syn::Ident to literal string\n\nfn 
asset_transfer_rs::AssetTransferQuery\\\">AssetTransferQuery</a>\",\"synthetic\":true,\"types\":[\"asset_transfer_rs::contracts::assettransferquery::AssetTransferQuery\"]},{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/std/panic/trait.RefUnwindSafe.html\\\" title=\\\"trait std::panic::RefUnwindSafe\\\">RefUnwindSafe</a> for <a class=\\\"struct\\\" href=\\\"asset_transfer_rs/struct.Asset.html\\\" title=\\\"struct asset_transfer_rs::Asset\\\">Asset</a>\",\"synthetic\":true,\"types\":[\"asset_transfer_rs::types::asset::Asset\"]}];\n\nimplementors[\"basic_contract_rs\"] = [{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/std/panic/trait.RefUnwindSafe.html\\\" title=\\\"trait std::panic::RefUnwindSafe\\\">RefUnwindSafe</a> for <a class=\\\"struct\\\" href=\\\"basic_contract_rs/struct.AssetContract.html\\\" title=\\\"struct basic_contract_rs::AssetContract\\\">AssetContract</a>\",\"synthetic\":true,\"types\":[\"basic_contract_rs::contracts::assetcontract::AssetContract\"]},{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/std/panic/trait.RefUnwindSafe.html\\\" title=\\\"trait std::panic::RefUnwindSafe\\\">RefUnwindSafe</a> for <a class=\\\"struct\\\" href=\\\"basic_contract_rs/struct.MyAsset.html\\\" title=\\\"struct basic_contract_rs::MyAsset\\\">MyAsset</a>\",\"synthetic\":true,\"types\":[\"basic_contract_rs::types::myasset::MyAsset\"]}];\n\nimplementors[\"fabric_contract\"] = [{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/std/panic/trait.RefUnwindSafe.html\\\" title=\\\"trait std::panic::RefUnwindSafe\\\">RefUnwindSafe</a> for <a class=\\\"struct\\\" href=\\\"fabric_contract/blockchain/struct.ClientIdentity.html\\\" title=\\\"struct fabric_contract::blockchain::ClientIdentity\\\">ClientIdentity</a>\",\"synthetic\":true,\"types\":[\"fabric_contract::blockchainapi::clientidentity::ClientIdentity\"]},{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/std/panic/trait.RefUnwindSafe.html\\\" title=\\\"trait std::panic::RefUnwindSafe\\\">RefUnwindSafe</a> for <a class=\\\"struct\\\" href=\\\"fabric_contract/blockchain/struct.Transaction.html\\\" title=\\\"struct fabric_contract::blockchain::Transaction\\\">Transaction</a>\",\"synthetic\":true,\"types\":[\"fabric_contract::blockchainapi::transaction::Transaction\"]},{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/std/panic/trait.RefUnwindSafe.html\\\" title=\\\"trait std::panic::RefUnwindSafe\\\">RefUnwindSafe</a> for <a class=\\\"struct\\\" href=\\\"fabric_contract/contract/struct.Context.html\\\" title=\\\"struct fabric_contract::contract::Context\\\">Context</a>\",\"synthetic\":true,\"types\":[\"fabric_contract::contractapi::context::Context\"]},{\"text\":\"impl !<a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/std/panic/trait.RefUnwindSafe.html\\\" title=\\\"trait std::panic::RefUnwindSafe\\\">RefUnwindSafe</a> for <a class=\\\"struct\\\" href=\\\"fabric_contract/prelude/struct.ContractDefn.html\\\" title=\\\"struct fabric_contract::prelude::ContractDefn\\\">ContractDefn</a>\",\"synthetic\":true,\"types\":[\"fabric_contract::contractapi::contractdefn::ContractDefn\"]},{\"text\":\"impl !<a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/std/panic/trait.RefUnwindSafe.html\\\" title=\\\"trait std::panic::RefUnwindSafe\\\">RefUnwindSafe</a> for <a class=\\\"struct\\\" 
href=\\\"fabric_contract/contract/struct.ContractManager.html\\\" title=\\\"struct fabric_contract::contract::ContractManager\\\">ContractManager</a>\",\"synthetic\":true,\"types\":[\"fabric_contract::contractapi::contractmanager::ContractManager\"]},{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/std/panic/trait.RefUnwindSafe.html\\\" title=\\\"trait std::panic::RefUnwindSafe\\\">RefUnwindSafe</a> for <a class=\\\"struct\\\" href=\\\"fabric_contract/prelude/struct.TransactionFn.html\\\" title=\\\"struct fabric_contract::prelude::TransactionFn\\\">TransactionFn</a>\",\"synthetic\":true,\"types\":[\"fabric_contract::contractapi::transaction::TransactionFn\"]},{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/std/panic/trait.RefUnwindSafe.html\\\" title=\\\"trait std::panic::RefUnwindSafe\\\">RefUnwindSafe</a> for <a class=\\\"struct\\\" href=\\\"fabric_contract/prelude/struct.TransactionFnBuilder.html\\\" title=\\\"struct fabric_contract::prelude::TransactionFnBuilder\\\">TransactionFnBuilder</a>\",\"synthetic\":true,\"types\":[\"fabric_contract::contractapi::transaction::TransactionFnBuilder\"]},{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/std/panic/trait.RefUnwindSafe.html\\\" title=\\\"trait std::panic::RefUnwindSafe\\\">RefUnwindSafe</a> for <a class=\\\"struct\\\" href=\\\"fabric_contract/data/struct.TypeSchema.html\\\" title=\\\"struct fabric_contract::data::TypeSchema\\\">TypeSchema</a>\",\"synthetic\":true,\"types\":[\"fabric_contract::dataapi::typeschema::TypeSchema\"]},{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/std/panic/trait.RefUnwindSafe.html\\\" title=\\\"trait std::panic::RefUnwindSafe\\\">RefUnwindSafe</a> for <a class=\\\"struct\\\" href=\\\"fabric_contract/data/struct.WireBuffer.html\\\" title=\\\"struct fabric_contract::data::WireBuffer\\\">WireBuffer</a>\",\"synthetic\":true,\"types\":[\"fabric_contract::dataapi::wirebuffer::WireBuffer\"]},{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/std/panic/trait.RefUnwindSafe.html\\\" title=\\\"trait std::panic::RefUnwindSafe\\\">RefUnwindSafe</a> for <a class=\\\"struct\\\" href=\\\"fabric_contract/contract/struct.ContractError.html\\\" title=\\\"struct fabric_contract::contract::ContractError\\\">ContractError</a>\",\"synthetic\":true,\"types\":[\"fabric_contract::error::ContractError\"]},{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/std/panic/trait.RefUnwindSafe.html\\\" title=\\\"trait std::panic::RefUnwindSafe\\\">RefUnwindSafe</a> for <a class=\\\"struct\\\" href=\\\"fabric_contract/contract/struct.LedgerError.html\\\" title=\\\"struct fabric_contract::contract::LedgerError\\\">LedgerError</a>\",\"synthetic\":true,\"types\":[\"fabric_contract::error::LedgerError\"]},{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/std/panic/trait.RefUnwindSafe.html\\\" title=\\\"trait std::panic::RefUnwindSafe\\\">RefUnwindSafe</a> for <a class=\\\"struct\\\" href=\\\"fabric_contract/contract/struct.Collection.html\\\" title=\\\"struct fabric_contract::contract::Collection\\\">Collection</a>\",\"synthetic\":true,\"types\":[\"fabric_contract::ledgerapi::collection::Collection\"]},{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/std/panic/trait.RefUnwindSafe.html\\\" title=\\\"trait std::panic::RefUnwindSafe\\\">RefUnwindSafe</a> for <a class=\\\"enum\\\" 
href=\\\"fabric_contract/contract/enum.CollectionName.html\\\" title=\\\"enum fabric_contract::contract::CollectionName\\\">CollectionName</a>\",\"synthetic\":true,\"types\":[\"fabric_contract::ledgerapi::collection::CollectionName\"]},{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/std/panic/trait.RefUnwindSafe.html\\\" title=\\\"trait std::panic::RefUnwindSafe\\\">RefUnwindSafe</a> for <a class=\\\"enum\\\" href=\\\"fabric_contract/contract/enum.KeyQueryHandler.html\\\" title=\\\"enum fabric_contract::contract::KeyQueryHandler\\\">KeyQueryHandler</a>\",\"synthetic\":true,\"types\":[\"fabric_contract::ledgerapi::collection::KeyQueryHandler\"]},{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/std/panic/trait.RefUnwindSafe.html\\\" title=\\\"trait std::panic::RefUnwindSafe\\\">RefUnwindSafe</a> for <a class=\\\"struct\\\" href=\\\"fabric_contract/contract/struct.Ledger.html\\\" title=\\\"struct fabric_contract::contract::Ledger\\\">Ledger</a>\",\"synthetic\":true,\"types\":[\"fabric_contract::ledgerapi::ledger::Ledger\"]},{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/std/panic/trait.RefUnwindSafe.html\\\" title=\\\"trait std::panic::RefUnwindSafe\\\">RefUnwindSafe</a> for <a class=\\\"struct\\\" href=\\\"fabric_contract/contract/struct.State.html\\\" title=\\\"struct fabric_contract::contract::State\\\">State</a>\",\"synthetic\":true,\"types\":[\"fabric_contract::ledgerapi::state::State\"]},{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/std/panic/trait.RefUnwindSafe.html\\\" title=\\\"trait std::panic::RefUnwindSafe\\\">RefUnwindSafe</a> for <a class=\\\"struct\\\" href=\\\"fabric_contract/contract/struct.StateQueryList.html\\\" title=\\\"struct fabric_contract::contract::StateQueryList\\\">StateQueryList</a>\",\"synthetic\":true,\"types\":[\"fabric_contract::ledgerapi::statequerylist::StateQueryList\"]}];\n", "file_path": "docs/apidoc/implementors/std/panic/trait.RefUnwindSafe.js", "rank": 60, "score": 23272.48683257784 }, { "content": "(function() {var implementors = {};\n\nimplementors[\"asset_transfer_rs\"] = [{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"fabric_contract/contractapi/contract/trait.Metadata.html\\\" title=\\\"trait fabric_contract::contractapi::contract::Metadata\\\">Metadata</a> for <a class=\\\"struct\\\" href=\\\"asset_transfer_rs/struct.AssetTransfer.html\\\" title=\\\"struct asset_transfer_rs::AssetTransfer\\\">AssetTransfer</a>\",\"synthetic\":false,\"types\":[\"asset_transfer_rs::contracts::assettransfer::AssetTransfer\"]},{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"fabric_contract/contractapi/contract/trait.Metadata.html\\\" title=\\\"trait fabric_contract::contractapi::contract::Metadata\\\">Metadata</a> for <a class=\\\"struct\\\" href=\\\"asset_transfer_rs/struct.AssetTransferQuery.html\\\" title=\\\"struct asset_transfer_rs::AssetTransferQuery\\\">AssetTransferQuery</a>\",\"synthetic\":false,\"types\":[\"asset_transfer_rs::contracts::assettransferquery::AssetTransferQuery\"]}];\n\nimplementors[\"basic_contract_rs\"] = [{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"fabric_contract/contractapi/contract/trait.Metadata.html\\\" title=\\\"trait fabric_contract::contractapi::contract::Metadata\\\">Metadata</a> for <a class=\\\"struct\\\" href=\\\"basic_contract_rs/struct.AssetContract.html\\\" title=\\\"struct 
basic_contract_rs::AssetContract\\\">AssetContract</a>\",\"synthetic\":false,\"types\":[\"basic_contract_rs::contracts::assetcontract::AssetContract\"]}];\n", "file_path": "docs/apidoc/implementors/fabric_contract/contractapi/contract/trait.Metadata.js", "rank": 61, "score": 23272.48683257784 }, { "content": "(function() {var implementors = {};\n\nimplementors[\"asset_transfer_rs\"] = [{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"fabric_contract/contractapi/contract/trait.Contract.html\\\" title=\\\"trait fabric_contract::contractapi::contract::Contract\\\">Contract</a> for <a class=\\\"struct\\\" href=\\\"asset_transfer_rs/struct.AssetTransfer.html\\\" title=\\\"struct asset_transfer_rs::AssetTransfer\\\">AssetTransfer</a>\",\"synthetic\":false,\"types\":[\"asset_transfer_rs::contracts::assettransfer::AssetTransfer\"]},{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"fabric_contract/contractapi/contract/trait.Contract.html\\\" title=\\\"trait fabric_contract::contractapi::contract::Contract\\\">Contract</a> for <a class=\\\"struct\\\" href=\\\"asset_transfer_rs/struct.AssetTransferQuery.html\\\" title=\\\"struct asset_transfer_rs::AssetTransferQuery\\\">AssetTransferQuery</a>\",\"synthetic\":false,\"types\":[\"asset_transfer_rs::contracts::assettransferquery::AssetTransferQuery\"]}];\n\nimplementors[\"basic_contract_rs\"] = [{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"fabric_contract/contractapi/contract/trait.Contract.html\\\" title=\\\"trait fabric_contract::contractapi::contract::Contract\\\">Contract</a> for <a class=\\\"struct\\\" href=\\\"basic_contract_rs/struct.AssetContract.html\\\" title=\\\"struct basic_contract_rs::AssetContract\\\">AssetContract</a>\",\"synthetic\":false,\"types\":[\"basic_contract_rs::contracts::assetcontract::AssetContract\"]}];\n", "file_path": "docs/apidoc/implementors/fabric_contract/contractapi/contract/trait.Contract.js", "rank": 62, "score": 23272.48683257784 }, { "content": "(function() {var implementors = {};\n\nimplementors[\"fabric_contract\"] = [{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/core/marker/trait.StructuralPartialEq.html\\\" title=\\\"trait core::marker::StructuralPartialEq\\\">StructuralPartialEq</a> for <a class=\\\"struct\\\" href=\\\"fabric_contract/runtime/runtime/items/struct.Arguments.html\\\" title=\\\"struct fabric_contract::runtime::runtime::items::Arguments\\\">Arguments</a>\",\"synthetic\":false,\"types\":[\"fabric_contract::runtime::runtime::items::Arguments\"]},{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/core/marker/trait.StructuralPartialEq.html\\\" title=\\\"trait core::marker::StructuralPartialEq\\\">StructuralPartialEq</a> for <a class=\\\"struct\\\" href=\\\"fabric_contract/runtime/runtime/items/struct.Return.html\\\" title=\\\"struct fabric_contract::runtime::runtime::items::Return\\\">Return</a>\",\"synthetic\":false,\"types\":[\"fabric_contract::runtime::runtime::items::Return\"]}];\n", "file_path": "docs/apidoc/implementors/core/marker/trait.StructuralPartialEq.js", "rank": 63, "score": 23272.48683257784 }, { "content": "(function() {var implementors = {};\n\nimplementors[\"asset_transfer_rs\"] = [{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"fabric_contract/contractapi/contract/trait.Routing.html\\\" title=\\\"trait fabric_contract::contractapi::contract::Routing\\\">Routing</a> for <a class=\\\"struct\\\" href=\\\"asset_transfer_rs/struct.AssetTransfer.html\\\" title=\\\"struct 
asset_transfer_rs::AssetTransfer\\\">AssetTransfer</a>\",\"synthetic\":false,\"types\":[\"asset_transfer_rs::contracts::assettransfer::AssetTransfer\"]},{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"fabric_contract/contractapi/contract/trait.Routing.html\\\" title=\\\"trait fabric_contract::contractapi::contract::Routing\\\">Routing</a> for <a class=\\\"struct\\\" href=\\\"asset_transfer_rs/struct.AssetTransferQuery.html\\\" title=\\\"struct asset_transfer_rs::AssetTransferQuery\\\">AssetTransferQuery</a>\",\"synthetic\":false,\"types\":[\"asset_transfer_rs::contracts::assettransferquery::AssetTransferQuery\"]}];\n\nimplementors[\"basic_contract_rs\"] = [{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"fabric_contract/contractapi/contract/trait.Routing.html\\\" title=\\\"trait fabric_contract::contractapi::contract::Routing\\\">Routing</a> for <a class=\\\"struct\\\" href=\\\"basic_contract_rs/struct.AssetContract.html\\\" title=\\\"struct basic_contract_rs::AssetContract\\\">AssetContract</a>\",\"synthetic\":false,\"types\":[\"basic_contract_rs::contracts::assetcontract::AssetContract\"]}];\n", "file_path": "docs/apidoc/implementors/fabric_contract/contractapi/contract/trait.Routing.js", "rank": 64, "score": 23272.48683257784 }, { "content": "(function() {var implementors = {};\n\nimplementors[\"asset_transfer_rs\"] = [{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"fabric_contract/ledgerapi/datatype/trait.DataType.html\\\" title=\\\"trait fabric_contract::ledgerapi::datatype::DataType\\\">DataType</a> for <a class=\\\"struct\\\" href=\\\"asset_transfer_rs/struct.Asset.html\\\" title=\\\"struct asset_transfer_rs::Asset\\\">Asset</a>\",\"synthetic\":false,\"types\":[\"asset_transfer_rs::types::asset::Asset\"]}];\n\nimplementors[\"basic_contract_rs\"] = [{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"fabric_contract/ledgerapi/datatype/trait.DataType.html\\\" title=\\\"trait fabric_contract::ledgerapi::datatype::DataType\\\">DataType</a> for <a class=\\\"struct\\\" href=\\\"basic_contract_rs/struct.MyAsset.html\\\" title=\\\"struct basic_contract_rs::MyAsset\\\">MyAsset</a>\",\"synthetic\":false,\"types\":[\"basic_contract_rs::types::myasset::MyAsset\"]}];\n", "file_path": "docs/apidoc/implementors/fabric_contract/ledgerapi/datatype/trait.DataType.js", "rank": 65, "score": 22847.13843769631 }, { "content": "\n\nimpl WireBufferFromReturnType<Asset> for WireBuffer {\n\n fn from_rt(self: &mut Self, s: Asset) {\n\n // we've got a wire buffer object and we need to set the bytes here from the string\n\n let json = serde_json::to_string(&s).unwrap();\n\n debug!(\"wire buffer returning the value {}\",json.as_str());\n\n let buffer = json.into_bytes();\n\n self.buffer = Some(buffer);\n\n }\n\n}\n\n\n\nimpl From<&WireBuffer> for Asset {\n\n fn from(wb: &WireBuffer) -> Self {\n\n match &wb.buffer {\n\n Some(buffer) => {\n\n match std::str::from_utf8(&buffer) {\n\n Ok(a) => serde_json::from_str(a).unwrap(),\n\n _ => unreachable!(),\n\n }\n\n }\n\n None => panic!(),\n\n }\n\n }\n\n}", "file_path": "asset_transfer_secure_private_rs/src/types/asset.rs", "rank": 66, "score": 26.452029680684284 }, { "content": " fn from_rt(self: &mut Self, s: TradeAgreement) {\n\n // we've got a wire buffer object and we need to set the bytes here from the string\n\n let json = serde_json::to_string(&s).unwrap();\n\n debug!(\"wire buffer returning the value {}\", json.as_str());\n\n let buffer = json.into_bytes();\n\n self.buffer = Some(buffer);\n\n }\n\n}\n\n\n\nimpl From<&WireBuffer> for TradeAgreement {\n\n fn 
from(wb: &WireBuffer) -> Self {\n\n match &wb.buffer {\n\n Some(buffer) => match std::str::from_utf8(&buffer) {\n\n Ok(a) => serde_json::from_str(a).unwrap(),\n\n _ => unreachable!(),\n\n },\n\n None => panic!(),\n\n }\n\n }\n\n}\n", "file_path": "asset_transfer_secure_private_rs/src/types/trade_agreement.rs", "rank": 67, "score": 26.310777624766306 }, { "content": "}\n\n\n\nimpl WireBufferFromReturnType<AssetPrivate> for WireBuffer {\n\n fn from_rt(self: &mut Self, s: AssetPrivate) {\n\n // we've got a wire buffer object and we need to set the bytes here from the string\n\n let json = serde_json::to_string(&s).unwrap();\n\n debug!(\"wire buffer returning the value {}\",json.as_str());\n\n let buffer = json.into_bytes();\n\n self.buffer = Some(buffer);\n\n }\n\n}\n\n\n\nimpl From<&WireBuffer> for AssetPrivate {\n\n fn from(wb: &WireBuffer) -> Self {\n\n match &wb.buffer {\n\n Some(buffer) => {\n\n match std::str::from_utf8(&buffer) {\n\n Ok(a) => serde_json::from_str(a).unwrap(),\n\n _ => unreachable!(),\n\n }\n\n }\n\n None => panic!(),\n\n }\n\n }\n\n}", "file_path": "asset_transfer_secure_private_rs/src/types/asset_private.rs", "rank": 68, "score": 26.06826185692731 }, { "content": "\n\nimpl WireBufferFromReturnType<TransferReceipt> for WireBuffer {\n\n fn from_rt(self: &mut Self, s: TransferReceipt) {\n\n // we've got a wire buffer object and we need to set the bytes here from the string\n\n let json = serde_json::to_string(&s).unwrap();\n\n debug!(\"wire buffer returning the value {}\", json.as_str());\n\n let buffer = json.into_bytes();\n\n self.buffer = Some(buffer);\n\n }\n\n}\n\n\n\nimpl From<&WireBuffer> for TransferReceipt {\n\n fn from(wb: &WireBuffer) -> Self {\n\n match &wb.buffer {\n\n Some(buffer) => match std::str::from_utf8(&buffer) {\n\n Ok(a) => serde_json::from_str(a).unwrap(),\n\n _ => unreachable!(),\n\n },\n\n None => panic!(),\n\n }\n\n }\n\n}\n", "file_path": "asset_transfer_secure_private_rs/src/types/transfer_receipt.rs", "rank": 69, "score": 26.06826185692731 }, { "content": "}\n\n\n\nimpl WireBufferFromReturnType<MyAsset> for WireBuffer {\n\n fn from_rt(self: &mut Self, s: MyAsset) {\n\n // we've got a wire buffer object and we need to set the bytes here from the string\n\n let json = serde_json::to_string(&s).unwrap();\n\n \n\n let buffer = json.into_bytes();\n\n self.buffer = Some(buffer);\n\n }\n\n}\n\n\n\nimpl From<&WireBuffer> for MyAsset {\n\n fn from(wb: &WireBuffer) -> Self {\n\n match &wb.buffer {\n\n Some(buffer) => {\n\n match std::str::from_utf8(&buffer) {\n\n Ok(a) => serde_json::from_str(a).unwrap(),\n\n _ => unreachable!(),\n\n }\n\n }\n\n None => panic!(),\n\n }\n\n }\n\n}\n", "file_path": "basic_contract_rs/src/types/myasset.rs", "rank": 70, "score": 24.95250277210546 }, { "content": " pub transient: bool,\n\n}\n\n\n\nimpl std::fmt::Display for ParameterDefn {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> fmt::Result {\n\n write!(f, \"{:?} {:?}\", self.name, self.type_schema)\n\n }\n\n}\n\n\n\nimpl std::convert::From<&str> for ParameterDefn {\n\n fn from(tx: &str) -> Self {\n\n // parse out the contract_name here\n\n let type_name: String;\n\n let arg_name: String;\n\n match tx.find(':') {\n\n Some(s) => {\n\n arg_name = tx[..s - 1].to_string();\n\n type_name = tx[s + 2..].to_string();\n\n }\n\n None => panic!(\"Code is not correct\"),\n", "file_path": "fabric_contract/src/contractapi/transaction.rs", "rank": 71, "score": 24.58870102060363 }, { "content": " let json = serde_json::to_string(&s).unwrap();\n\n debug!(\"wire buffer returning 
the value {}\",json.as_str());\n\n let buffer = json.into_bytes();\n\n self.buffer = Some(buffer);\n\n }\n\n}\n\n\n\nimpl From<&WireBuffer> for Asset {\n\n fn from(wb: &WireBuffer) -> Self {\n\n match &wb.buffer {\n\n Some(buffer) => {\n\n match std::str::from_utf8(&buffer) {\n\n Ok(a) => serde_json::from_str(a).unwrap(),\n\n _ => unreachable!(),\n\n }\n\n }\n\n None => panic!(),\n\n }\n\n }\n\n}", "file_path": "asset_transfer_rs/src/types/asset.rs", "rank": 72, "score": 24.112414571952144 }, { "content": "}\n\n\n\nimpl fmt::Display for TransactionFn {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"TxFn <{}> {:?}\", self.name, self.parameters)\n\n }\n\n}\n\n\n\nimpl TransactionFn {\n\n pub fn new(name: &str) -> TransactionFn {\n\n TransactionFn {\n\n name: String::from(name),\n\n return_type: TypeSchema {\n\n contract_type: ContractType::CTString,\n\n format: Option::None,\n\n },\n\n parameters: vec![],\n\n }\n\n }\n\n\n", "file_path": "fabric_contract/src/contractapi/transaction.rs", "rank": 73, "score": 24.06374224530101 }, { "content": " // we've got a wire buffer object and we need to set the bytes here from the string\n\n let json = serde_json::to_string(&s).unwrap();\n\n debug!(\"wire buffer returning the value {}\",json.as_str());\n\n let buffer = json.into_bytes();\n\n self.buffer = Some(buffer);\n\n }\n\n}\n\n\n\nimpl From<&WireBuffer> for PriceAgreement {\n\n fn from(wb: &WireBuffer) -> Self {\n\n match &wb.buffer {\n\n Some(buffer) => {\n\n match std::str::from_utf8(&buffer) {\n\n Ok(a) => serde_json::from_str(a).unwrap(),\n\n _ => unreachable!(),\n\n }\n\n }\n\n None => panic!(),\n\n }\n\n }\n\n}", "file_path": "asset_transfer_secure_private_rs/src/types/price_agreement.rs", "rank": 74, "score": 23.66648704165213 }, { "content": "/*\n\n * SPDX-License-Identifier: Apache-2.0\n\n */\n\n\n\npub mod typeschema;\n\npub use typeschema::TypeSchema;\n\n\n\npub mod wirebuffer;\n\npub use wirebuffer::WireBuffer;\n\npub use wirebuffer::WireBufferFromReturnType;\n\n\n\npub mod serializer;\n\npub use serializer::Converter;\n", "file_path": "fabric_contract/src/dataapi.rs", "rank": 75, "score": 23.638115296098686 }, { "content": "/*\n\n * SPDX-License-Identifier: Apache-2.0\n\n */\n\nuse fabric_contract::contract::*;\n\nuse fabric_contract::data::*;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::str::from_utf8;\n\n\n\n// Use the log crate to support logging\n\nuse log::debug;\n\n\n\n#[derive(Serialize, Deserialize, Debug, Default)]\n\npub struct TradeAgreement {\n\n id: String,\n\n price: u32,\n\n trade_id: String,\n\n}\n\n\n\nimpl TradeAgreement {\n\n pub fn new(id: String, price: u32, trade_id: String) -> TradeAgreement {\n", "file_path": "asset_transfer_secure_private_rs/src/types/trade_agreement.rs", "rank": 76, "score": 22.171426873677724 }, { "content": "/*\n\n * SPDX-License-Identifier: Apache-2.0\n\n */\n\nuse fabric_contract::contract::*;\n\nuse fabric_contract::data::*;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::str::from_utf8;\n\n\n\n// Use the log crate to support logging\n\nuse log::{debug};\n\n\n\n\n\n#[derive(Serialize, Deserialize, Debug, Default)]\n\npub struct PriceAgreement {\n\n id: String,\n\n price: u32\n\n}\n\n\n\nimpl PriceAgreement {\n\n pub fn new(id: String, price: u32) -> PriceAgreement {\n", "file_path": "asset_transfer_secure_private_rs/src/types/price_agreement.rs", "rank": 77, "score": 22.13799581617637 }, { "content": "\n\n cd\n\n }\n\n\n\n pub fn add_tx_fn(self: &mut ContractDefn, tx: transaction::TransactionFn) 
{\n\n self.methods.insert(tx.get_name(), tx);\n\n }\n\n\n\n pub fn add_new_method(self: &mut ContractDefn, name: &str, func: fn(&str) -> bool) {\n\n let tx = transaction::TransactionFn::new(name);\n\n debug!(\"{:?}\", tx);\n\n self.methods.insert(String::from(name), tx);\n\n }\n\n\n\n pub fn get_txfn(\n\n self: &ContractDefn,\n\n name: &str,\n\n ) -> Result<&transaction::TransactionFn, String> {\n\n match self.methods.get(&String::from(name)) {\n\n Some(t) => Ok(t),\n", "file_path": "fabric_contract/src/contractapi/contractdefn.rs", "rank": 78, "score": 21.87759702149596 }, { "content": "impl TransactionFnBuilder {\n\n pub fn new() -> TransactionFnBuilder {\n\n TransactionFnBuilder::default()\n\n }\n\n\n\n pub fn name(&mut self, name: &str) {\n\n self.name = name.to_string();\n\n }\n\n\n\n pub fn return_type(&mut self, return_type: TypeSchema) {\n\n self.return_type = return_type;\n\n }\n\n\n\n pub fn add_arg(&mut self, arg: &str) {\n\n self.parameters.push(ParameterDefn::from(arg));\n\n }\n\n\n\n pub fn build(self) -> TransactionFn {\n\n TransactionFn {\n\n name: self.name,\n\n return_type: self.return_type,\n\n parameters: self.parameters,\n\n }\n\n }\n\n}\n", "file_path": "fabric_contract/src/contractapi/transaction.rs", "rank": 79, "score": 21.74207599927983 }, { "content": "/*\n\n * SPDX-License-Identifier: Apache-2.0\n\n */\n\nuse fabric_contract::contract::*;\n\nuse fabric_contract::{blockchain::Transaction, data::*};\n\nuse serde::{Deserialize, Serialize};\n\nuse std::str::from_utf8;\n\n\n\n// Use the log crate to support logging\n\nuse log::debug;\n\n\n\n#[derive(Serialize, Deserialize, Debug, Default)]\n\npub struct TransferReceipt {\n\n id: String,\n\n txid: String,\n\n timestamp: String, \n\n}\n\n\n\nimpl TransferReceipt {\n\n pub fn new(id: String) -> Self {\n", "file_path": "asset_transfer_secure_private_rs/src/types/transfer_receipt.rs", "rank": 80, "score": 21.687995903617157 }, { "content": " p.type_schema, /*,Box::new(JSONConverter {})*/\n\n ));\n\n }\n\n\n\n let buffer = self.contract.route3(name, updated_args, txfn.get_return())?;\n\n debug!(\"Returned buffer {:?}\",&buffer);\n\n Ok(buffer)\n\n }\n\n}\n\n\n\n// Test section\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use mockall::{automock, mock, predicate::*};\n\n use crate::contractapi::transaction::*;\n\n use crate::data::TypeSchema;\n\n use claim::assert_ok;\n\n \n\n mock! 
{\n\n TestContract {}\n\n // First trait to implement on C\n", "file_path": "fabric_contract/src/contractapi/contractdefn.rs", "rank": 81, "score": 21.520351721453636 }, { "content": "\n\nimpl AssetPrivate {\n\n pub fn new(id: String, appraised_value: u32,colour: String,itemcode:String, description: String) -> AssetPrivate {\n\n AssetPrivate {\n\n id,\n\n appraised_value,\n\n itemcode,\n\n description\n\n }\n\n }\n\n}\n\n\n\n/// Very important to implement the DataType Trait for the Asset\n\n///\n\n/// This provides the ability to store the data in the ledger\n\nimpl DataType for AssetPrivate {\n\n fn to_state(&self) -> State {\n\n let json = serde_json::to_string(self).unwrap();\n\n debug!(\"ToState::{}\",&json.as_str());\n\n let buffer = json.into_bytes();\n", "file_path": "asset_transfer_secure_private_rs/src/types/asset_private.rs", "rank": 82, "score": 21.4444768102513 }, { "content": " converter: Box<dyn Converter + Send>,\n\n}\n\n\n\nimpl ContractDefn {\n\n pub fn new(c: Box<dyn Contract + Send>) -> ContractDefn {\n\n let mut cd = ContractDefn {\n\n name: c.name(),\n\n methods: HashMap::new(),\n\n contract: c,\n\n converter: Box::new(JSONConverter {}),\n\n };\n\n\n\n let fns = cd.contract.get_fn_metadata();\n\n for t in fns {\n\n debug!(\"Function {:?}\", t);\n\n cd.add_tx_fn(t);\n\n }\n\n\n\n // last thing that we need to do is setup the data converter that\n\n // is required for this contract\n", "file_path": "fabric_contract/src/contractapi/contractdefn.rs", "rank": 83, "score": 21.313725347190584 }, { "content": " impl Routing for #type_name {\n\n\n\n fn route3(&self, tx_fn: String, args: Vec<WireBuffer>, return_wb: TypeSchema) -> Result<WireBuffer,ContractError> {\n\n log::debug!(\"Inside the contract (route3) {} {:?}\",tx_fn,args);\n\n match &tx_fn[..] 
{\n\n\n\n #(#method_names =>\n\n {\n\n log::debug!(\"calling\");\n\n self.#method_fns(args,return_wb)\n\n }\n\n\n\n , )*\n\n _ => Err(ContractError::from(String::from(\"Unknown transaction fn \")))\n\n }\n\n\n\n }\n\n }\n\n\n\n };\n\n\n\n // Hand the output tokens back to the compiler.\n\n output.into()\n\n}\n\n\n", "file_path": "fabric_contract_macros/src/lib.rs", "rank": 84, "score": 21.186700599253925 }, { "content": " }\n\n\n\n debug!(\"{} -> {} {}\",tx,arg_name,type_name);\n\n \n\n Self {\n\n name: arg_name,\n\n transient: false,\n\n type_schema: TypeSchema {\n\n contract_type: ContractType::from_str(&type_name[..]).unwrap(),\n\n format: Option::None,\n\n },\n\n }\n\n }\n\n}\n\n\n\n#[derive(Default, Debug, Clone)]\n\npub struct TransactionFn {\n\n name: String,\n\n return_type: TypeSchema,\n\n parameters: Vec<ParameterDefn>,\n", "file_path": "fabric_contract/src/contractapi/transaction.rs", "rank": 85, "score": 20.950167429032923 }, { "content": "\n\n fn build_from_state(state: State) -> Self {\n\n let b = state.value();\n\n\n\n let str = match from_utf8(&b) {\n\n Ok(a) => a,\n\n Err(_) => panic!(\"Err\"),\n\n };\n\n debug!(\"build_from_state:: {}\",&str);\n\n serde_json::from_str(str).unwrap()\n\n }\n\n\n\n fn form_key(k: &String) -> String {\n\n format!(\"Asset::{}\",k)\n\n }\n\n}\n\n\n\nimpl WireBufferFromReturnType<Asset> for WireBuffer {\n\n fn from_rt(self: &mut Self, s: Asset) {\n\n // we've got a wire buffer object and we need to set the bytes here from the string\n", "file_path": "asset_transfer_rs/src/types/asset.rs", "rank": 86, "score": 20.903611682150583 }, { "content": "/// api, eg, if a state can be found\n\n#[derive(Debug)]\n\npub struct LedgerError {\n\n msg: String,\n\n}\n\n\n\nimpl error::Error for LedgerError {\n\n fn source(&self) -> Option<&(dyn error::Error + 'static)> {\n\n Some(self)\n\n }\n\n}\n\n\n\nimpl std::convert::From<String> for LedgerError {\n\n fn from(msg: String) -> Self {\n\n Self { msg }\n\n }\n\n}\n\n\n\nimpl fmt::Display for LedgerError {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{}\", self.msg)\n\n }\n\n}\n", "file_path": "fabric_contract/src/error.rs", "rank": 87, "score": 20.891617897445236 }, { "content": " }\n\n\n\n fn build_from_state(state: State) -> Self {\n\n let b = state.value();\n\n\n\n let str = match from_utf8(&b) {\n\n Ok(a) => a,\n\n Err(_) => panic!(\"Err\"),\n\n };\n\n debug!(\"build_from_state:: {}\",&str);\n\n serde_json::from_str(str).unwrap()\n\n }\n\n \n\n fn form_key(k: &String) -> String {\n\n format!(\"PriceAgreement#{}\",k)\n\n }\n\n}\n\n\n\nimpl WireBufferFromReturnType<PriceAgreement> for WireBuffer {\n\n fn from_rt(self: &mut Self, s: PriceAgreement) {\n", "file_path": "asset_transfer_secure_private_rs/src/types/price_agreement.rs", "rank": 88, "score": 20.718581634572438 }, { "content": "// the following (left for reference) is what the [contract_impl] macro would logically add\n\n// to this code\n\n\n\n// impl Routing for AssetContract { \n\n \n\n// fn route2(&self, ctx: Context, tx_fn: String, args: Vec<String>) -> Result<String,String>{\n\n// ctx.log(format!(\"Inside the contract {} {:?}\",tx_fn,args));\n\n// let _r = match &tx_fn[..] 
{\n\n// \"create_asset\" => {\n\n \n\n// let a0 = match args.get(0) {\n\n// Some(a) => Ok(a),\n\n// None => Err(String::from(\"Missing argument 0\")),\n\n// };\n\n \n\n// let a1 = match args.get(1) {\n\n// Some(a) => Ok(a),\n\n// None => Err(String::from(\"Missing argument 1\")),\n\n// };\n\n \n\n// let _r=self.create_asset(ctx, a0.unwrap().to_string(), a1.unwrap().to_string());\n\n// Ok(String::from(\"\"))\n\n// },\n\n// \"read_asset\" => {\n\n\n\n// let a0 = match args.get(0) {\n\n// Some(a) => Ok(a),\n\n// None => Err(String::from(\"Missing argument 0\")),\n\n// };\n\n\n\n// let _r=self.read_asset(ctx, a0.unwrap().to_string());\n\n// Ok(String::from(\"\"))\n\n// },\n\n// _ => Err(String::from(\"Unknown transaction fn \"))\n\n// };\n\n\n\n// Ok(String::from(\"200\"))\n\n// }\n", "file_path": "asset_transfer_rs/src/routing.md", "rank": 89, "score": 20.486280964295823 }, { "content": "// the following (left for reference) is what the [contract_impl] macro would logically add\n\n// to this code\n\n\n\n// impl Routing for AssetContract { \n\n \n\n// fn route2(&self, ctx: Context, tx_fn: String, args: Vec<String>) -> Result<String,String>{\n\n// ctx.log(format!(\"Inside the contract {} {:?}\",tx_fn,args));\n\n// let _r = match &tx_fn[..] {\n\n// \"create_asset\" => {\n\n \n\n// let a0 = match args.get(0) {\n\n// Some(a) => Ok(a),\n\n// None => Err(String::from(\"Missing argument 0\")),\n\n// };\n\n \n\n// let a1 = match args.get(1) {\n\n// Some(a) => Ok(a),\n\n// None => Err(String::from(\"Missing argument 1\")),\n\n// };\n\n \n\n// let _r=self.create_asset(ctx, a0.unwrap().to_string(), a1.unwrap().to_string());\n\n// Ok(String::from(\"\"))\n\n// },\n\n// \"read_asset\" => {\n\n\n\n// let a0 = match args.get(0) {\n\n// Some(a) => Ok(a),\n\n// None => Err(String::from(\"Missing argument 0\")),\n\n// };\n\n\n\n// let _r=self.read_asset(ctx, a0.unwrap().to_string());\n\n// Ok(String::from(\"\"))\n\n// },\n\n// _ => Err(String::from(\"Unknown transaction fn \"))\n\n// };\n\n\n\n// Ok(String::from(\"200\"))\n\n// }\n", "file_path": "asset_transfer_secure_private_rs/src/routing.md", "rank": 90, "score": 20.486280964295823 }, { "content": "// the following (left for reference) is what the [contract_impl] macro would logically add\n\n// to this code\n\n\n\n// impl Routing for AssetContract { \n\n \n\n// fn route2(&self, ctx: Context, tx_fn: String, args: Vec<String>) -> Result<String,String>{\n\n// ctx.log(format!(\"Inside the contract {} {:?}\",tx_fn,args));\n\n// let _r = match &tx_fn[..] 
{\n\n// \"create_asset\" => {\n\n \n\n// let a0 = match args.get(0) {\n\n// Some(a) => Ok(a),\n\n// None => Err(String::from(\"Missing argument 0\")),\n\n// };\n\n \n\n// let a1 = match args.get(1) {\n\n// Some(a) => Ok(a),\n\n// None => Err(String::from(\"Missing argument 1\")),\n\n// };\n\n \n\n// let _r=self.create_asset(ctx, a0.unwrap().to_string(), a1.unwrap().to_string());\n\n// Ok(String::from(\"\"))\n\n// },\n\n// \"read_asset\" => {\n\n\n\n// let a0 = match args.get(0) {\n\n// Some(a) => Ok(a),\n\n// None => Err(String::from(\"Missing argument 0\")),\n\n// };\n\n\n\n// let _r=self.read_asset(ctx, a0.unwrap().to_string());\n\n// Ok(String::from(\"\"))\n\n// },\n\n// _ => Err(String::from(\"Unknown transaction fn \"))\n\n// };\n\n\n\n// Ok(String::from(\"200\"))\n\n// }\n", "file_path": "basic_contract_rs/src/routing.md", "rank": 91, "score": 20.486280964295823 }, { "content": "/*\n\n * SPDX-License-Identifier: Apache-2.0\n\n */\n\n\n\nuse std::error;\n\nuse std::fmt;\n\n\n\n/// Contract Error is what the contract will return to indicate an error\n\n/// Typically this would be for contract developers to use to mark that a failure\n\n/// has occured, and give information about what that means in the context\n\n/// of their implementation\n\n#[derive(Debug)]\n\npub struct ContractError {\n\n msg: String,\n\n ledger_error: Option<LedgerError>,\n\n}\n\n\n\nimpl std::convert::From<String> for ContractError {\n\n fn from(msg: String) -> Self {\n\n Self {\n", "file_path": "fabric_contract/src/error.rs", "rank": 92, "score": 20.428923932301362 }, { "content": "pub mod prelude {\n\n pub use crate::runtimeapi::wapc::handle_wapc;\n\n pub use crate::runtimeapi::wapc::log as host_log;\n\n pub use wapc_guest::prelude::*;\n\n\n\n pub use crate::contractapi::contractdefn::ContractDefn;\n\n pub use crate::contractapi::transaction::TransactionFn;\n\n pub use crate::contractapi::transaction::TransactionFnBuilder;\n\n}\n\n\n\n/// Module to use to define the complex datatypes\n\npub mod data {\n\n pub use crate::dataapi::typeschema::TypeSchema;\n\n pub use crate::dataapi::wirebuffer::WireBuffer;\n\n pub use crate::dataapi::wirebuffer::WireBufferFromReturnType;\n\n pub use crate::ledgerapi::datatype::DataType;\n\n pub use fabric_contract_macros::property as Property;\n\n}\n\n\n\n/// Module to provide 'runtime' services.\n", "file_path": "fabric_contract/src/lib.rs", "rank": 93, "score": 20.007757396247854 }, { "content": "}\n\n\n\nimpl Asset {\n\n pub fn new(id: String, color: String, size: i32, owner: String, appraised_value: i32) -> Asset {\n\n Asset {\n\n id,\n\n color,\n\n size,\n\n owner,\n\n appraised_value,\n\n }\n\n }\n\n\n\n pub fn update_owner(&mut self, owner: String) -> () {\n\n self.owner = owner;\n\n }\n\n\n\n pub fn get_color(&self) -> String {\n\n return self.color.clone();\n\n }\n", "file_path": "asset_transfer_rs/src/types/asset.rs", "rank": 94, "score": 19.71112212164548 }, { "content": " None => Err(String::from(\"Unable to find tx\")),\n\n }\n\n }\n\n\n\n pub fn invoke(\n\n self: &ContractDefn,\n\n ctx: &Context,\n\n name: String,\n\n args: &[Vec<u8>],\n\n ) -> Result<WireBuffer, ContractError> {\n\n // trace!(\">> invoke {} {:#?}\",name, args);\n\n debug!(\"Invoking tx fn {} {:#?} {}\", name, args, args.len());\n\n\n\n let txfn = self.get_txfn(&name[..])?;\n\n let mut updated_args = Vec::<WireBuffer>::new();\n\n // got the tx fn, now to loop over the supplied args\n\n for (pos, p) in txfn.get_parameters().iter().enumerate() {\n\n debug!(\"{} {:?}\",pos,p);\n\n 
updated_args.push(WireBuffer::new(\n\n args[pos].clone(),\n", "file_path": "fabric_contract/src/contractapi/contractdefn.rs", "rank": 95, "score": 19.132463810118757 }, { "content": "impl MyAsset {\n\n\n\n pub fn new(id: String, value: String) -> MyAsset {\n\n MyAsset {\n\n id,\n\n value,\n\n }\n\n }\n\n\n\n pub fn get_value(&self) -> String {\n\n self.value.clone()\n\n }\n\n}\n\n\n\n/// The DataType trait must be implemented for this struct to be handled\n\n/// by the contract\n\nimpl DataType for MyAsset {\n\n fn to_state(&self) -> State {\n\n let json = serde_json::to_string(self).unwrap();\n\n let buffer = json.into_bytes();\n", "file_path": "basic_contract_rs/src/types/myasset.rs", "rank": 96, "score": 18.862672496174373 }, { "content": "\n\n let ret = match ContractManager::route(&ctx, fn_name.to_string(), args, transient_args) {\n\n Ok(r) => {\n\n let buffer = match r.buffer {\n\n Some(r) => r,\n\n None => Vec::new(),\n\n };\n\n response_msg.set_payload(buffer)\n\n }\n\n Err(e) => response_msg.set_payload(e.to_string().into_bytes()),\n\n };\n\n\n\n let buffer: Vec<u8> = response_msg.write_to_bytes()?;\n\n trace!(\"handler_tx_invoke<<\");\n\n Ok(buffer)\n\n}\n\n\n\nthread_local! {\n\n pub static CONTEXT: RefCell<Context>\n\n = RefCell::new( Default::default() );\n\n}\n\n\n", "file_path": "fabric_contract/src/runtimeapi/wapc.rs", "rank": 97, "score": 18.365416517456463 }, { "content": "/*\n\n * SPDX-License-Identifier: Apache-2.0\n\n */\n\n#![allow(dead_code)]\n\n#![allow(unused_imports)]\n\nuse crate::dataapi::{typeschema::ContractType, typeschema::Format, TypeSchema};\n\nuse std::fmt;\n\nuse std::str::FromStr;\n\nuse Format::Other;\n\nuse log::{debug, trace};\n\n#[derive(Debug, Clone, Copy)]\n\n/// Should this transaction be submitted or evaluated?\n\npub enum TxType {\n\n Submit,\n\n Evaluate,\n\n}\n\n#[derive(Debug, Clone)]\n\npub struct ParameterDefn {\n\n pub name: String,\n\n pub type_schema: TypeSchema,\n", "file_path": "fabric_contract/src/contractapi/transaction.rs", "rank": 98, "score": 17.97247348583516 }, { "content": " RangeTo(String),\n\n\n\n /// RangeAll(), All composite keys. use with caution\n\n RangeAll()\n\n}\n\n\n\n/// Specify the Rich Query Handler\n\npub enum RichQueryHandler {\n\n /// The query string to pass to the state database (currently only supported for CouchDB)\n\n Query(String),\n\n}\n\n\n\npub struct Collection{\n\n name: CollectionName,\n\n}\n\n\n\nimpl Collection {\n\n pub fn new(name: CollectionName) -> Self {\n\n Collection {\n\n name,\n", "file_path": "fabric_contract/src/ledgerapi/collection.rs", "rank": 99, "score": 17.6889479893286 } ]
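The fabric_contract snippets retrieved above all repeat one JSON round-trip pattern for contract data types (serialize to bytes for the wire buffer, rebuild from UTF-8 JSON on the way back in). A minimal sketch of that pattern follows, assuming a hypothetical `ExampleAsset` type and the same serde/serde_json dependencies the snippets already use; it is not code from any of the fabric_contract crates, and the import path `fabric_contract::data` is taken from the lib.rs snippet above.

```rust
// Hypothetical ExampleAsset illustrating the WireBuffer JSON round-trip
// pattern shown in the snippets above; not a type from the fabric_contract repos.
use fabric_contract::data::{WireBuffer, WireBufferFromReturnType};
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug, Default)]
pub struct ExampleAsset {
    id: String,
    value: String,
}

// Outbound: serialize the return value to JSON and hand the bytes to the buffer.
impl WireBufferFromReturnType<ExampleAsset> for WireBuffer {
    fn from_rt(self: &mut Self, s: ExampleAsset) {
        let json = serde_json::to_string(&s).unwrap();
        self.buffer = Some(json.into_bytes());
    }
}

// Inbound: rebuild the value from the UTF-8 JSON bytes carried by the buffer.
impl From<&WireBuffer> for ExampleAsset {
    fn from(wb: &WireBuffer) -> Self {
        match &wb.buffer {
            Some(buffer) => match std::str::from_utf8(buffer) {
                Ok(json) => serde_json::from_str(json).unwrap(),
                _ => unreachable!(),
            },
            None => panic!("empty wire buffer"),
        }
    }
}
```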
Rust
src/imapw.rs
mordak/runt
03ac329c54d61a05d2390f28c963f2a7d8c9a590
use crate::config::Account; use imap::extensions::idle; use imap::types::{Fetch, Flag, Mailbox, Name, Uid, UnsolicitedResponse, ZeroCopy}; use imap::Session; use imap::{Client, ClientBuilder}; use rustls_connector::TlsStream as RustlsStream; use std::convert::From; use std::net::TcpStream; use std::ops::Deref; use std::time::Duration; use std::vec::Vec; pub enum FetchResult<'a> { Uid(UidResult<'a>), Other(&'a Fetch), } #[derive(Debug)] pub struct UidResult<'a> { fetch: &'a Fetch, } impl<'a> UidResult<'a> { pub fn uid(&self) -> Uid { self.fetch.uid.unwrap() } pub fn size(&self) -> u32 { self.fetch.size.unwrap() } pub fn internal_date_millis(&self) -> i64 { self.fetch.internal_date().unwrap().timestamp_millis() } pub fn flags(&self) -> &[Flag] { self.fetch.flags() } } impl<'a> From<&'a Fetch> for FetchResult<'a> { fn from(fetch: &'a Fetch) -> FetchResult<'a> { if fetch.uid.is_some() && fetch.size.is_some() && fetch.internal_date().is_some() { FetchResult::Uid(UidResult { fetch }) } else { FetchResult::Other(fetch) } } } pub struct Imap { session: Session<RustlsStream<TcpStream>>, mailbox: Option<String>, qresync: bool, } impl Imap { pub fn new(config: &Account) -> Result<Imap, String> { let client = Imap::connect(config)?; let mut session = client .login(config.username.as_str(), config.password.as_ref().unwrap()) .map_err(|e| format!("Login failed: {:?}", e.0))?; let capabilities = session .capabilities() .map_err(|e| format!("CAPABILITIES Error: {}", e))?; let mut missing = Vec::new(); if !capabilities.deref().has_str("ENABLE") { missing.push("ENABLE"); } if !capabilities.deref().has_str("UIDPLUS") { missing.push("UIDPLUS"); } if !capabilities.deref().has_str("IDLE") { missing.push("IDLE"); } if !missing.is_empty() { return Err(format!("Missing capability: {}", missing.join(" "))); } Ok(Imap { session, mailbox: None, qresync: capabilities.deref().has_str("QRESYNC"), }) } #[allow(dead_code)] pub fn debug(&mut self, enable: bool) { self.session.debug = enable; } fn connect(config: &Account) -> Result<Client<RustlsStream<TcpStream>>, String> { ClientBuilder::new(&config.server, config.port.unwrap()) .rustls() .map_err(|e| format!("Connection to {:?} failed: {}", &config.server, e)) } pub fn list( &mut self, reference_name: Option<&str>, mailbox_pattern: Option<&str>, ) -> Result<ZeroCopy<Vec<Name>>, String> { self.session .list(reference_name, mailbox_pattern) .map_err(|e| format!("LIST failed: {}", e)) } pub fn idle(&mut self) -> Result<(), String> { /* IDLE Builder - not released yet self.session .idle() .timeout(Duration::from_secs(10 * 60)) .wait_while(idle::stop_on_any) .map_err(|e| format!("{}", e)) .map(|_| ()) */ self.session .idle() .map_err(|e| format!("{}", e)) .and_then(|mut i| { i.set_keepalive(Duration::from_secs(10 * 60)); i.wait_keepalive_while(idle::stop_on_any) .map_err(|e| format!("{}", e)) }) .map(|_| ()) } pub fn fetch_uid(&mut self, uid: u32) -> Result<ZeroCopy<Vec<Fetch>>, String> { self.session .uid_fetch( format!("{}", uid), "(UID RFC822.SIZE INTERNALDATE FLAGS BODY.PEEK[])", ) .map_err(|e| format!("UID FETCH failed: {}", e)) } pub fn fetch_uid_meta(&mut self, uid: u32) -> Result<ZeroCopy<Vec<Fetch>>, String> { self.session .uid_fetch(format!("{}", uid), "(UID RFC822.SIZE INTERNALDATE FLAGS)") .map_err(|e| format!("UID FETCH failed: {}", e)) } pub fn fetch_uids( &mut self, first: u32, last: Option<u32>, changedsince: Option<u64>, ) -> Result<ZeroCopy<Vec<Fetch>>, String> { let range = match last { None => format!("{}:*", first), Some(n) if n > first => 
format!("{}:{}", first, n), _ => return Err(format!("Invalid range {}:{}", first, last.unwrap())), }; let qresync = match changedsince { None => "".to_string(), Some(n) => format!(" (CHANGEDSINCE {} VANISHED)", n), }; self.session .uid_fetch( range, format!("(UID RFC822.SIZE INTERNALDATE FLAGS){}", qresync), ) .map_err(|e| format!("UID FETCH failed: {}", e)) } pub fn enable_qresync(&mut self) -> Result<(), String> { self.session .run_command_and_check_ok("ENABLE QRESYNC") .map_err(|e| format!("ENABLE QRESYNC Error: {}", e)) } pub fn can_qresync(&self) -> bool { self.qresync } pub fn select_mailbox(&mut self, mailbox: &str) -> Result<Mailbox, String> { self.session .select(mailbox) .map_err(|e| format!("SELECT {} failed: {}", mailbox, e)) .map(|mbox| { self.mailbox = Some(mailbox.to_string()); mbox }) } pub fn logout(&mut self) -> Result<(), String> { self.session .logout() .map_err(|e| format!("LOGOUT failed: {}", e)) } pub fn delete_uid(&mut self, uid: u32) -> Result<(), String> { self.session .uid_store(format!("{}", uid), "+FLAGS (\\Deleted)") .map_err(|e| format!("STORE UID {} +Deleted failed: {}", uid, e))?; self.session .uid_expunge(format!("{}", uid)) .map_err(|e| format!("EXPUNGE UID {} failed: {}", uid, e))?; Ok(()) } pub fn append(&mut self, body: &[u8], flags: &[Flag]) -> Result<(), String> { if self.mailbox.is_none() { return Err("No mailbox selected".to_string()); } let r = self .session .append(self.mailbox.as_ref().unwrap(), body) .flags(flags.iter().cloned()) .finish() .map_err(|e| e.to_string()); r } /* pub fn replace_uid(&mut self, uid: u32, body: &[u8]) -> Result<(), String> { // Fetch the current flags so we can copy them to the new message. let zc_vec_fetch = self.fetch_uid_meta(uid)?; let mut uidres: Option<UidResult> = None; for fetch in zc_vec_fetch.deref() { if let FetchResult::Uid(res) = FetchResult::from(fetch) { if res.uid() == uid { uidres.replace(res); break; } } } if uidres.is_none() { return Err(format!("UID {} not found on server", uid)); } // Append first so if it fails we don't delete the original self.append(body, uidres.unwrap().flags())?; self.delete_uid(uid) } */ pub fn add_flags_for_uid(&mut self, uid: u32, flags: &[Flag]) -> Result<(), String> { let flagstr = flags .iter() .map(|f| f.to_string()) .collect::<Vec<String>>() .join(" "); self.session .uid_store(format!("{}", uid), format!("+FLAGS ({})", flagstr)) .map_err(|e| format!("STORE UID {} +FLAGS failed: {}", uid, e)) .map(|_| ()) } pub fn remove_flags_for_uid(&mut self, uid: u32, flags: &[Flag]) -> Result<(), String> { let flagstr = flags .iter() .map(|f| f.to_string()) .collect::<Vec<String>>() .join(" "); self.session .uid_store(format!("{}", uid), format!("-FLAGS ({})", flagstr)) .map_err(|e| format!("STORE UID {} -FLAGS failed: {}", uid, e)) .map(|_| ()) } pub fn for_each_unsolicited_response<F>(&mut self, mut f: F) where F: FnMut(UnsolicitedResponse), { while let Ok(u) = self.session.unsolicited_responses.try_recv() { f(u) } } }
use crate::config::Account; use imap::extensions::idle; use imap::types::{Fetch, Flag, Mailbox, Name, Uid, UnsolicitedResponse, ZeroCopy}; use imap::Session; use imap::{Client, ClientBuilder}; use rustls_connector::TlsStream as RustlsStream; use std::convert::From; use std::net::TcpStream; use std::ops::Deref; use std::time::Duration; use std::vec::Vec; pub enum FetchResult<'a> { Uid(UidResult<'a>), Other(&'a Fetch), } #[derive(Debug)] pub struct UidResult<'a> { fetch: &'a Fetch, } impl<'a> UidResult<'a> { pub fn uid(&self) -> Uid { self.fetch.uid.unwrap() } pub fn size(&self) -> u32 { self.fetch.size.unwrap() } pub fn internal_date_millis(&self) -> i64 { self.fetch.internal_date().unwrap().timestamp_millis() } pub fn flags(&self) -> &[Flag] { self.fetch.flags() } } impl<'a> From<&'a Fetch> for FetchResult<'a> { fn from(fetch: &'a Fetch) -> FetchResult<'a> { if fetch.uid.is_some() && fetch.size.is_some() && fetch.internal_date().is_some() { FetchResult::Uid(UidResult { fetch }) } else { FetchResult::Other(fetch) } } } pub struct Imap { session: Session<RustlsStream<TcpStream>>, mailbox: Option<String>, qresync: bool, } impl Imap { pub fn new(config: &Account) -> Result<Imap, String> { let client = Imap::connect(config)?; let mut session = client .login(config.username.as_str(), config.password.as_ref().unwrap()) .map_err(|e| format!("Login failed: {:?}", e.0))?; let capabilities = session .capabilities() .map_err(|e| format!("CAPABILITIES Error: {}", e))?; let mut missing = Vec::new(); if !capabilities.deref().has_str("ENABLE") { missing.push("ENABLE"); } if !capabilities.deref().has_str("UIDPLUS") { missing.push("UIDPLUS"); } if !capabilities.deref().has_str("IDLE") { missing.push("IDLE"); } if !missing.is_empty() { return Err(format!("Missing capability: {}", missing.join(" "))); } Ok(Imap { session, mailbox: None, qresync: capabilities.deref().has_str("QRESYNC"), }) } #[allow(dead_code)] pub fn debug(&mut self, enable: bool) { self.session.debug = enable; } fn connect(config: &Account) -> Result<Client<RustlsStream<TcpStream>>, String> { ClientBuilder::new(&config.server, config.port.unwrap()) .rustls() .map_err(|e| format!("Connection to {:?} failed: {}", &config.server, e)) } pub fn list( &mut self, reference_name: Option<&str>, mailbox_pattern: Option<&str>, ) -> Result<ZeroCopy<Vec<Name>>, String> { self.session .list(reference_name, mailbox_pattern) .map_err(|e| format!("LIST failed: {}", e)) } pub fn idle(&mut self) -> Result<(), String> { /* IDLE Builder - not released yet self.session .idle() .timeout(Duration::from_secs(10 * 60)) .wait_whi
last.unwrap())), }; let qresync = match changedsince { None => "".to_string(), Some(n) => format!(" (CHANGEDSINCE {} VANISHED)", n), }; self.session .uid_fetch( range, format!("(UID RFC822.SIZE INTERNALDATE FLAGS){}", qresync), ) .map_err(|e| format!("UID FETCH failed: {}", e)) } pub fn enable_qresync(&mut self) -> Result<(), String> { self.session .run_command_and_check_ok("ENABLE QRESYNC") .map_err(|e| format!("ENABLE QRESYNC Error: {}", e)) } pub fn can_qresync(&self) -> bool { self.qresync } pub fn select_mailbox(&mut self, mailbox: &str) -> Result<Mailbox, String> { self.session .select(mailbox) .map_err(|e| format!("SELECT {} failed: {}", mailbox, e)) .map(|mbox| { self.mailbox = Some(mailbox.to_string()); mbox }) } pub fn logout(&mut self) -> Result<(), String> { self.session .logout() .map_err(|e| format!("LOGOUT failed: {}", e)) } pub fn delete_uid(&mut self, uid: u32) -> Result<(), String> { self.session .uid_store(format!("{}", uid), "+FLAGS (\\Deleted)") .map_err(|e| format!("STORE UID {} +Deleted failed: {}", uid, e))?; self.session .uid_expunge(format!("{}", uid)) .map_err(|e| format!("EXPUNGE UID {} failed: {}", uid, e))?; Ok(()) } pub fn append(&mut self, body: &[u8], flags: &[Flag]) -> Result<(), String> { if self.mailbox.is_none() { return Err("No mailbox selected".to_string()); } let r = self .session .append(self.mailbox.as_ref().unwrap(), body) .flags(flags.iter().cloned()) .finish() .map_err(|e| e.to_string()); r } /* pub fn replace_uid(&mut self, uid: u32, body: &[u8]) -> Result<(), String> { // Fetch the current flags so we can copy them to the new message. let zc_vec_fetch = self.fetch_uid_meta(uid)?; let mut uidres: Option<UidResult> = None; for fetch in zc_vec_fetch.deref() { if let FetchResult::Uid(res) = FetchResult::from(fetch) { if res.uid() == uid { uidres.replace(res); break; } } } if uidres.is_none() { return Err(format!("UID {} not found on server", uid)); } // Append first so if it fails we don't delete the original self.append(body, uidres.unwrap().flags())?; self.delete_uid(uid) } */ pub fn add_flags_for_uid(&mut self, uid: u32, flags: &[Flag]) -> Result<(), String> { let flagstr = flags .iter() .map(|f| f.to_string()) .collect::<Vec<String>>() .join(" "); self.session .uid_store(format!("{}", uid), format!("+FLAGS ({})", flagstr)) .map_err(|e| format!("STORE UID {} +FLAGS failed: {}", uid, e)) .map(|_| ()) } pub fn remove_flags_for_uid(&mut self, uid: u32, flags: &[Flag]) -> Result<(), String> { let flagstr = flags .iter() .map(|f| f.to_string()) .collect::<Vec<String>>() .join(" "); self.session .uid_store(format!("{}", uid), format!("-FLAGS ({})", flagstr)) .map_err(|e| format!("STORE UID {} -FLAGS failed: {}", uid, e)) .map(|_| ()) } pub fn for_each_unsolicited_response<F>(&mut self, mut f: F) where F: FnMut(UnsolicitedResponse), { while let Ok(u) = self.session.unsolicited_responses.try_recv() { f(u) } } }
le(idle::stop_on_any) .map_err(|e| format!("{}", e)) .map(|_| ()) */ self.session .idle() .map_err(|e| format!("{}", e)) .and_then(|mut i| { i.set_keepalive(Duration::from_secs(10 * 60)); i.wait_keepalive_while(idle::stop_on_any) .map_err(|e| format!("{}", e)) }) .map(|_| ()) } pub fn fetch_uid(&mut self, uid: u32) -> Result<ZeroCopy<Vec<Fetch>>, String> { self.session .uid_fetch( format!("{}", uid), "(UID RFC822.SIZE INTERNALDATE FLAGS BODY.PEEK[])", ) .map_err(|e| format!("UID FETCH failed: {}", e)) } pub fn fetch_uid_meta(&mut self, uid: u32) -> Result<ZeroCopy<Vec<Fetch>>, String> { self.session .uid_fetch(format!("{}", uid), "(UID RFC822.SIZE INTERNALDATE FLAGS)") .map_err(|e| format!("UID FETCH failed: {}", e)) } pub fn fetch_uids( &mut self, first: u32, last: Option<u32>, changedsince: Option<u64>, ) -> Result<ZeroCopy<Vec<Fetch>>, String> { let range = match last { None => format!("{}:*", first), Some(n) if n > first => format!("{}:{}", first, n), _ => return Err(format!("Invalid range {}:{}", first,
random
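To make the range and CHANGEDSINCE construction in the fetch_uids code above concrete: a call such as fetch_uids(1, None, Some(12345)) — where 12345 stands in for a previously cached HIGHESTMODSEQ — ends up issuing roughly the following command, with the exact wire framing left to the imap crate:

UID FETCH 1:* (UID RFC822.SIZE INTERNALDATE FLAGS) (CHANGEDSINCE 12345 VANISHED)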
[ { "content": "// FIXME: Move this to imapw?\n\n/// Convert imap flags to maildir flags\n\npub fn maildir_flags_from_imap(inflags: &[Flag]) -> String {\n\n let syncflags = SyncFlags::from(inflags);\n\n syncflags.to_string()\n\n}\n\n\n", "file_path": "src/cache/mod.rs", "rank": 0, "score": 125069.83149681691 }, { "content": "/// Determine if the given cache db entry for the message and the maildir\n\n/// entry for the message are equivalent.\n\nfn meta_equal(maildir_meta: &MailEntry, cache_meta: &MessageMeta) -> Result<bool, String> {\n\n if let Ok(fs_metadata) = maildir_meta.path().metadata() {\n\n if fs_metadata.len() != cache_meta.size() as u64 {\n\n return Ok(false);\n\n }\n\n } else {\n\n return Err(format!(\n\n \"Could not get filesystem meta for {}\",\n\n maildir_meta.id()\n\n ));\n\n }\n\n\n\n if maildir_meta.flags() != cache_meta.flags() {\n\n return Ok(false);\n\n }\n\n Ok(true)\n\n}\n\n\n\nimpl Maildir {\n\n /// Make a new Maildir for the given root directory, account, and mailbox.\n", "file_path": "src/maildirw.rs", "rank": 1, "score": 81837.87765901518 }, { "content": "/// Path to .state file for given account and mailbox\n\nfn statefile(account: &str, mailbox: &str) -> PathBuf {\n\n let mut cachefile = self::path(account, mailbox);\n\n cachefile.push(\"state\");\n\n cachefile\n\n}\n\n\n\npub struct Cache {\n\n db: Db,\n\n state: StateFile,\n\n}\n\n\n\nimpl Cache {\n\n pub fn new(account: &str, mailbox: &str) -> Result<Cache, String> {\n\n let db = Db::from_file(&self::db_path(account, mailbox))?;\n\n let state = StateFile::new(&self::statefile(account, mailbox))?;\n\n Ok(Cache { db, state })\n\n }\n\n\n\n pub fn is_valid(&self, mailbox: &Mailbox) -> bool {\n\n self.state.uid_validity() == mailbox.uid_validity.expect(\"No UIDVALIDITY in Mailbox\")\n", "file_path": "src/cache/mod.rs", "rank": 2, "score": 70412.77116561025 }, { "content": "/// Path to the cache directory for given account and mailbox\n\nfn path(account: &str, mailbox: &str) -> PathBuf {\n\n let mut cachefile = Config::dir();\n\n cachefile.push(\"cache\");\n\n cachefile.push(account);\n\n cachefile.push(mailbox);\n\n // Create the cache path if it doesn't exist\n\n std::fs::create_dir_all(&cachefile).ok();\n\n cachefile\n\n}\n\n\n", "file_path": "src/cache/mod.rs", "rank": 3, "score": 70412.77116561025 }, { "content": "/// Path to the db file for this cache\n\nfn db_path(account: &str, mailbox: &str) -> PathBuf {\n\n let mut dbfile = self::path(account, mailbox);\n\n dbfile.push(\"db.sqlite\");\n\n dbfile\n\n}\n\n\n", "file_path": "src/cache/mod.rs", "rank": 4, "score": 68266.11314300168 }, { "content": "struct SyncFlagsVisitor;\n\n\n\nimpl<'de> Visitor<'de> for SyncFlagsVisitor {\n\n type Value = SyncFlags;\n\n\n\n fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n formatter.write_str(r#\"maildir: \"DFRST\" where all letters are optional\"#)\n\n }\n\n\n\n fn visit_str<E>(self, value: &str) -> Result<SyncFlags, E>\n\n where\n\n E: de::Error,\n\n {\n\n Ok(SyncFlags::from(value))\n\n }\n\n}\n\n\n\nimpl<'de> Deserialize<'de> for SyncFlags {\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n", "file_path": "src/cache/syncflags.rs", "rank": 5, "score": 54789.311249716666 }, { "content": "fn main() {\n\n // set up signal handler for Ctrl-C\n\n unsafe {\n\n libc::signal(SIGINT, handle_sigint as usize);\n\n }\n\n\n\n let mut threads = vec![];\n\n let mut notifications = vec![];\n\n\n\n // Parse out config and set up sync jobs\n\n let configs = Config::new();\n\n 
for config in configs.accounts {\n\n let mut imap = Imap::new(&config).unwrap();\n\n let mut idle_mailboxes = Vec::new();\n\n let mut pool_mailboxes = Vec::new();\n\n match imap.list(None, Some(\"*\")) {\n\n Ok(listing) => {\n\n for mailbox in listing.iter() {\n\n if !mailbox\n\n .attributes()\n", "file_path": "src/main.rs", "rank": 6, "score": 31071.58743429742 }, { "content": "#[allow(dead_code)]\n\nfn handle_sigint(_signal: i32) {\n\n println!(\"Shutting down...\");\n\n SHUTDOWN.store(true, Ordering::Relaxed);\n\n}\n", "file_path": "src/main.rs", "rank": 7, "score": 25965.305921722225 }, { "content": " home\n\n }\n\n}\n\n\n\nimpl Account {\n\n /// Is this mailbox excluded from synchronization?\n\n pub fn is_mailbox_excluded(&self, name: &str) -> bool {\n\n if let Some(exclude) = &self.exclude {\n\n exclude.contains(&name.to_string())\n\n } else {\n\n false\n\n }\n\n }\n\n\n\n /// Is this mailbox one we want to IDLE on?\n\n /// If the account has a `idle` member, then only mailboxes\n\n /// in that list are IDLEd. Otherwise everything that is not\n\n /// `exclude`d is IDLEd.\n\n pub fn is_mailbox_idled(&self, name: &str) -> bool {\n\n if let Some(idle) = &self.idle {\n\n idle.contains(&name.to_string())\n\n } else {\n\n true\n\n }\n\n }\n\n}\n", "file_path": "src/config.rs", "rank": 15, "score": 24.078999226342017 }, { "content": "use imap::types::{Flag, Uid};\n\n\n\nuse super::syncflags::{FlagValue, SyncFlags};\n\nuse crate::imapw::UidResult;\n\n\n\n#[derive(Debug, Deserialize, Serialize)]\n\npub struct MessageMeta {\n\n id: String,\n\n size: u32,\n\n flags: SyncFlags,\n\n uid: Uid,\n\n internal_date_millis: i64,\n\n}\n\n\n\nimpl MessageMeta {\n\n pub fn new(\n\n id: &str,\n\n size: u32,\n\n flags: SyncFlags,\n\n uid: Uid,\n", "file_path": "src/cache/messagemeta.rs", "rank": 17, "score": 22.744376691133287 }, { "content": "# runt\n\n\n\nSynchronize IMAP and Maildir.\n\n\n\nBy default checks `~/.runt/config` for a toml formatted config file that specifies\n\none or more accounts.\n\n\n\nA config file looks like the following:\n\n\n\n```toml\n\n[[accounts]]\n\n# The account name. This is just a local identifier (\"work\", \"home\", etc.)\n\naccount = \"example\"\n\n\n\n# The imap server name and port\n\nserver = \"mail.example.com\"\n\nport = 993\n\n\n\n# The account username.\n\nusername = \"user\"\n\n\n\n# The password, either directly or using a program to fetch it from a password manager\n\n# Only one of password or password_command is required\n\npassword = \"accountpassword\"\n\npassword_command = \"pass mail.example.com\"\n\n\n\n# The path to where you want the maildir for this account\n\nmaildir = \"/path/to/your/maildir\"\n\n\n\n# Optional: Mailbox names to exclude from synchronization\n\nexclude = [\"Skip\", \"These\", \"Mailboxes\"]\n\n\n\n# Optional: Maximum number of threads to use for synchronization\n\nmax_concurrency = 8\n\n\n\n# Optional: Mailboxes to IDLE and monitor for changes.\n\n# All mailboxes not in the `exclude` list will be synchronized on startup\n\n# but only mailboxes in the `idle` list will be continuously monitored.\n\n# If not present, then all synchronized mailboxes will be monitored.\n\nidle = [\"INBOX\", \"Other\"]\n\n```\n\n\n\nMultiple `[[accounts]]` sections can be present to synchronize multiple IMAP\n\naccounts.\n\n\n\nOnce the config file is set up just execute the program to synchronize the IMAP\n\naccount to local maildir. 
Leave the program running and it will keep the Maildir\n\nand IMAP server in sync using IDLE and file system monitoring.\n\n\n\n# Requirements\n\n\n\nThe server must support the `UIDPLUS`, `IDLE` and `ENABLE` capabilities.\n\nIf one of these is missing, runt will exit with an error.\n\n\n\nIf the server supports the `QRESYNC` capability, then it will be used to synchronize\n\nquickly. Dovecot supports this capability, but Gmail does not.\n", "file_path": "README.md", "rank": 22, "score": 21.662772177670718 }, { "content": " /// Used to fetch new messages from the server.\n\n fn cache_message_for_uid(&mut self, imap: &mut Imap, uid: Uid) -> Result<(), String> {\n\n imap.fetch_uid(uid).and_then(|zc_vec_fetch| {\n\n for fetch in zc_vec_fetch.deref() {\n\n self.log(&format!(\"Fetching UID {}: {:?}\", uid, fetch.flags()));\n\n if let Err(e) = self.save_message_in_maildir(fetch) {\n\n return Err(format!(\"Save UID {} in maildir failed: {}\", uid, e));\n\n }\n\n }\n\n Ok(())\n\n })\n\n }\n\n\n\n /// Compare the given cache MessageMeta and IMAP UidResult, and decide if the\n\n /// cache version needs to be updated. If so, fetch the updated message and save\n\n /// it in the Maildir.\n\n ///\n\n /// Used to update cache entries for messages we already know about.\n\n fn update_cache_for_uid(\n\n &mut self,\n", "file_path": "src/syncdir.rs", "rank": 24, "score": 19.856004462448038 }, { "content": " }\n\n\n\n pub fn get_uid(&self, uid: u32) -> anyhow::Result<MessageMeta> {\n\n self.db.get_uid(uid)\n\n }\n\n\n\n pub fn delete_uid(&self, uid: u32) -> Result<(), String> {\n\n self.db.delete_uid(uid)\n\n }\n\n\n\n pub fn get_id(&self, id: &str) -> Result<MessageMeta, String> {\n\n self.db.get_id(id)\n\n }\n\n\n\n // FIXME: Clean up the expect() in here to just return Err\n\n pub fn add(&mut self, id: &str, fetch: &Fetch) -> Result<MessageMeta, String> {\n\n let uid = fetch.uid.expect(\"No UID in FETCH response\");\n\n let size = fetch.size.expect(\"No SIZE in FETCH response\");\n\n let flags = fetch.flags();\n\n let internal_date = fetch\n", "file_path": "src/cache/mod.rs", "rank": 25, "score": 19.73427888951895 }, { "content": " .contains(&imap::types::NameAttribute::NoSelect)\n\n && !config.is_mailbox_excluded(mailbox.name())\n\n {\n\n // select it and sync\n\n match SyncDir::new(&config, mailbox.name().to_string()) {\n\n Err(e) => panic!(\"Sync failed: {}\", e),\n\n Ok(sd) => {\n\n notifications.push(sd.sender.clone());\n\n if sd.should_idle() {\n\n idle_mailboxes.push(sd);\n\n } else {\n\n pool_mailboxes.push(sd);\n\n }\n\n }\n\n }\n\n }\n\n }\n\n }\n\n Err(e) => println!(\"Error getting listing: {}\", e),\n\n };\n", "file_path": "src/main.rs", "rank": 27, "score": 18.13976791602895 }, { "content": " }\n\n\n\n pub fn update_imap_state(&mut self, mailbox: &Mailbox) -> Result<(), String> {\n\n self.state.update_imap(\n\n mailbox.uid_validity.expect(\"No UIDVALIDITY in Mailbox\"),\n\n mailbox.uid_next.expect(\"No UIDNEXT in Mailbox\"),\n\n mailbox\n\n .highest_mod_seq\n\n .expect(\"No HIGHESTMODSEQ in Mailbox\"),\n\n )\n\n }\n\n\n\n pub fn get_last_seen_uid(&self) -> u32 {\n\n self.state.last_seen_uid()\n\n }\n\n\n\n pub fn get_highest_mod_seq(&self) -> u64 {\n\n self.state.highest_mod_seq()\n\n }\n\n\n", "file_path": "src/cache/mod.rs", "rank": 28, "score": 17.858371198791055 }, { "content": " /// removes deleted items on the server and downloads new messages.\n\n ///\n\n /// This is the main Server -> Local routine for UIDs. 
After this completes\n\n /// anything on the server will be in the cache db and in the Maildir.\n\n fn quick_sync_cache_from_imap(\n\n &mut self,\n\n imap: &mut Imap,\n\n mailbox: &Mailbox,\n\n ) -> Result<(), String> {\n\n let modseq = if self.cache.is_valid(mailbox) {\n\n Some(self.cache.get_highest_mod_seq())\n\n } else {\n\n self.delete_imap_cache()?;\n\n None\n\n };\n\n\n\n imap.fetch_uids(1, None, modseq)\n\n .and_then(|zc_vec_fetch| self.cache_uids_from_imap(imap, &zc_vec_fetch))?;\n\n\n\n self.check_unsolicited_for_vanished(imap).map(|vanished| {\n", "file_path": "src/syncdir.rs", "rank": 29, "score": 17.839235255412756 }, { "content": " };\n\n\n\n // Updating existing cache entries\n\n imap.fetch_uids(1, end, None).and_then(|zc_vec_fetch| {\n\n if !self.cache.is_valid(mailbox) {\n\n // We have a new state, so delete the existing one\n\n self.delete_imap_cache()?;\n\n }\n\n self.cache_uids_from_imap(imap, &zc_vec_fetch)?;\n\n self.remove_imap_deleted_messages(&zc_vec_fetch)\n\n })?;\n\n\n\n // Fetch new messgaes\n\n imap.fetch_uids(last_seen_uid + 1, None, None)\n\n .and_then(|zc_vec_fetch| self.cache_uids_from_imap(imap, &zc_vec_fetch))?;\n\n\n\n self.cache.update_imap_state(mailbox)\n\n }\n\n\n\n /// Use QRESYNC to update the cache. This updates existing cache entries,\n", "file_path": "src/syncdir.rs", "rank": 30, "score": 17.81753476639448 }, { "content": " fn cache_uids_from_imap(\n\n &mut self,\n\n imap: &mut Imap,\n\n zc_vec_fetch: &ZeroCopy<Vec<Fetch>>,\n\n ) -> Result<(), String> {\n\n let mut err = false;\n\n for fetch in zc_vec_fetch.deref() {\n\n match FetchResult::from(fetch) {\n\n FetchResult::Uid(uidres) => {\n\n let uid = uidres.uid();\n\n let res = if let Ok(meta) = self.cache.get_uid(uid) {\n\n self.update_cache_for_uid(imap, &meta, &uidres)\n\n } else {\n\n self.cache_message_for_uid(imap, uid)\n\n };\n\n if let Err(e) = res {\n\n self.elog(&format!(\"Cache UID {} failed: {}\", uid, e));\n\n err = true;\n\n }\n\n }\n", "file_path": "src/syncdir.rs", "rank": 31, "score": 17.73841711491599 }, { "content": "use crate::cache::maildir_flags_from_imap;\n\nuse crate::cache::Cache;\n\nuse crate::cache::MessageMeta;\n\nuse crate::cache::SyncFlags;\n\nuse crate::config::Account;\n\nuse crate::imapw::{FetchResult, Imap, UidResult};\n\nuse crate::maildirw::Maildir;\n\nuse chrono::prelude::*;\n\nuse imap::types::{Fetch, Mailbox, Uid, UnsolicitedResponse, ZeroCopy};\n\nuse notify::{watcher, RecursiveMode, Watcher};\n\nuse std::collections::HashSet;\n\nuse std::fs;\n\nuse std::ops::Deref;\n\nuse std::sync::mpsc::{channel, Receiver, RecvError, Sender, TryRecvError};\n\nuse std::thread::{sleep, spawn, JoinHandle};\n\nuse std::time::Duration;\n\nuse std::vec::Vec;\n\n\n\n/// A enum used to pass messages between threads.\n\n#[derive(Debug)]\n", "file_path": "src/syncdir.rs", "rank": 32, "score": 17.480355612024145 }, { "content": " FetchResult::Other(f) => self.log(&format!(\"Got Other FETCH response: {:?}\", f)),\n\n }\n\n }\n\n if err {\n\n Err(\"Cache failed\".to_string())\n\n } else {\n\n Ok(())\n\n }\n\n }\n\n\n\n /// Delete messages by UID from the cache and from the maildir.\n\n fn remove_uids_from_cache(&mut self, uids: &[u32]) -> Result<(), String> {\n\n for uid in uids {\n\n // Errors deleting from local usually mean the uid was not found\n\n // which can happen under some dual-edit conditions or when\n\n // we are told about a deleted message that we never downloded.\n\n if let Err(e) = self.delete_message_from_maildir(*uid) {\n\n self.elog(&format!(\"Error deleting UID 
{}: {}\", uid, e));\n\n }\n\n }\n", "file_path": "src/syncdir.rs", "rank": 33, "score": 17.237791103057976 }, { "content": "\n\nimpl SyncDir {\n\n /// Make a new SyncDir from the given config and mailbox name\n\n pub fn new(config: &Account, mailbox: String) -> Result<SyncDir, String> {\n\n let myconfig = config.clone();\n\n let cache = Cache::new(&myconfig.account, &mailbox).unwrap();\n\n let maildir = Maildir::new(&myconfig.maildir, &myconfig.account, &mailbox)?;\n\n let (sender, receiver) = channel();\n\n Ok(SyncDir {\n\n config: myconfig,\n\n mailbox,\n\n sender,\n\n receiver,\n\n cache,\n\n maildir,\n\n idlethread: None,\n\n fsthread: None,\n\n })\n\n }\n\n\n", "file_path": "src/syncdir.rs", "rank": 34, "score": 17.17443020387035 }, { "content": "mod db;\n\nmod messagemeta;\n\nmod statefile;\n\nmod syncflags;\n\n\n\nuse self::db::Db;\n\npub use self::messagemeta::MessageMeta;\n\nuse self::statefile::StateFile;\n\npub use self::syncflags::SyncFlags;\n\nuse crate::config::Config;\n\nuse crate::imapw::UidResult;\n\nuse imap::types::{Fetch, Flag, Mailbox};\n\nuse std::collections::{HashMap, HashSet};\n\nuse std::path::PathBuf;\n\n\n\n// FIXME: Move this to imapw?\n\n/// Convert imap flags to maildir flags\n", "file_path": "src/cache/mod.rs", "rank": 35, "score": 17.171299431470338 }, { "content": " for range in vanished {\n\n for uid in range {\n\n if let Err(e) = self.delete_message_from_maildir(uid) {\n\n self.elog(&format!(\"Error deleting UID {}: {}\", uid, e));\n\n }\n\n }\n\n }\n\n })?;\n\n\n\n self.cache.update_imap_state(mailbox)\n\n }\n\n\n\n /// Delete the cache of the imap state.\n\n ///\n\n /// This is used when we have a cache validation failure, such as when\n\n /// the UIDVALIDITY does not match anymore.\n\n fn delete_imap_cache(&mut self) -> Result<(), String> {\n\n self.log(\"Deleting Cache of all IMAP messages\");\n\n self.remove_uids_from_cache(\n\n &self\n", "file_path": "src/syncdir.rs", "rank": 36, "score": 16.558718778358894 }, { "content": " } else {\n\n Ok(())\n\n }\n\n })\n\n }\n\n\n\n /// Perform a sync from IMAP to the cache. This updates existing cache entries,\n\n /// removes messages deleted on the server, and downloads new messages.\n\n ///\n\n /// This is the main Server -> Local routine for UIDs. 
After this completes anything\n\n /// on the server will be in the cache db and in the Maildir.\n\n fn slow_sync_cache_from_imap(\n\n &mut self,\n\n imap: &mut Imap,\n\n mailbox: &Mailbox,\n\n ) -> Result<(), String> {\n\n let last_seen_uid = self.cache.get_last_seen_uid();\n\n let end: Option<u32> = match last_seen_uid {\n\n 0 => None,\n\n x => Some(x),\n", "file_path": "src/syncdir.rs", "rank": 37, "score": 16.37726939645045 }, { "content": "use std::io::Write;\n\nuse std::path::{Path, PathBuf};\n\n\n\npub struct StateFile {\n\n path: PathBuf,\n\n state: StateFileFields,\n\n}\n\n\n\n#[derive(Deserialize, Serialize)]\n\npub struct StateFileFields {\n\n version: u64,\n\n imap_last: i64,\n\n maildir_last: i64,\n\n uid_validity: u32,\n\n uid_next: u32,\n\n last_seen_uid: u32,\n\n highest_mod_seq: u64,\n\n}\n\n\n\nimpl StateFile {\n", "file_path": "src/cache/statefile.rs", "rank": 38, "score": 16.34030143656839 }, { "content": " fn delete_message_from_maildir(&self, uid: u32) -> Result<(), String> {\n\n // It is ok if we can't find the message in our maildir, it\n\n // may be deleted from both sides.\n\n match self.cache.get_uid(uid) {\n\n Ok(meta) => {\n\n self.log(&format!(\"Deleting UID {} from maildir\", uid));\n\n if let Err(why) = self.maildir.delete_message(meta.id()) {\n\n self.elog(&format!(\"Error deleting UID {}: {}\", uid, why));\n\n }\n\n self.cache.delete_uid(uid)\n\n }\n\n Err(e) => match e.downcast_ref::<rusqlite::Error>() {\n\n Some(rusqlite::Error::QueryReturnedNoRows) => Ok(()),\n\n _ => Err(e.to_string()),\n\n },\n\n }\n\n }\n\n\n\n /// Fetch the given UID from IMAP and save it in the Maildir.\n\n ///\n", "file_path": "src/syncdir.rs", "rank": 39, "score": 15.66496534649588 }, { "content": "use imap::types::Flag;\n\nuse serde::de::{self, Deserialize, Deserializer, Visitor};\n\nuse serde::ser::{Serialize, Serializer};\n\n\n\n#[derive(Serialize, Deserialize, PartialEq, Clone, Copy, Debug)]\n\npub enum FlagValue {\n\n NoFlag = 0,\n\n Draft = 0x44,\n\n Flagged = 0x46,\n\n Replied = 0x52,\n\n Seen = 0x53,\n\n Trashed = 0x54,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct SyncFlags {\n\n maildir: [FlagValue; 5],\n\n}\n\n\n\nimpl Serialize for SyncFlags {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n serializer.serialize_str(&self.to_string())\n\n }\n\n}\n\n\n", "file_path": "src/cache/syncflags.rs", "rank": 40, "score": 15.5497611223813 }, { "content": " }\n\n\n\n /// Spawn a thread on this mailbox and IDLE it. 
When the IDLE\n\n /// ends, the thread will send a message to the main sync thread.\n\n fn idle(&self) -> Result<JoinHandle<()>, String> {\n\n let mut imap = Imap::new(&self.config)?;\n\n imap.select_mailbox(&self.mailbox.as_str())?;\n\n //imap.debug(true);\n\n let sender = self.sender.clone();\n\n let handle = spawn(move || {\n\n if let Err(why) = imap.idle() {\n\n sender.send(SyncMessage::ImapError(why)).ok();\n\n }\n\n imap.logout().ok();\n\n sender.send(SyncMessage::ImapChanged).ok();\n\n });\n\n Ok(handle)\n\n }\n\n\n\n /// Check if we want to IDLE this mailbox\n", "file_path": "src/syncdir.rs", "rank": 41, "score": 15.506635734892445 }, { "content": " internal_date_millis: i64,\n\n ) -> MessageMeta {\n\n MessageMeta {\n\n id: id.to_string(),\n\n size,\n\n flags,\n\n uid,\n\n internal_date_millis,\n\n }\n\n }\n\n\n\n pub fn from_fields(\n\n uid: u32,\n\n size: u32,\n\n internal_date_millis: i64,\n\n flags: String,\n\n id: String,\n\n ) -> MessageMeta {\n\n MessageMeta {\n\n id,\n", "file_path": "src/cache/messagemeta.rs", "rank": 42, "score": 15.35322985980177 }, { "content": " pub fn new(root: &str, account: &str, mailbox: &str) -> Result<Maildir, String> {\n\n let mut maildirpath = PathBuf::from(root);\n\n maildirpath.push(account);\n\n maildirpath.push(mailbox);\n\n let maildir = SubMaildir::from(maildirpath);\n\n maildir\n\n .create_dirs()\n\n .map_err(|e| format!(\"Could not create maildir structure: {}\", e))?;\n\n Ok(Maildir { maildir })\n\n }\n\n\n\n /// Get the path to the Maildir\n\n pub fn path(&self) -> PathBuf {\n\n self.maildir.path().to_path_buf()\n\n }\n\n\n\n /// Save a message in the maildir. On success, returns the ID of the new message.\n\n pub fn save_message(&mut self, body: &[u8], flags: &str) -> Result<String, String> {\n\n if flags.contains('S') {\n\n self.maildir.store_cur_with_flags(body, flags)\n", "file_path": "src/maildirw.rs", "rank": 43, "score": 15.302532176502025 }, { "content": " size,\n\n flags: SyncFlags::from(flags.as_str()),\n\n uid,\n\n internal_date_millis,\n\n }\n\n }\n\n\n\n pub fn update(&mut self, uidres: &UidResult) {\n\n self.uid = uidres.uid();\n\n self.size = uidres.size();\n\n self.internal_date_millis = uidres.internal_date_millis();\n\n self.flags = SyncFlags::from(uidres.flags());\n\n }\n\n\n\n pub fn flags_equal(&self, flags: &[Flag]) -> bool {\n\n let diff = self.flags.diff(SyncFlags::from(flags));\n\n diff.add.empty() && diff.sub.empty()\n\n }\n\n\n\n pub fn is_equal(&self, uidres: &UidResult) -> bool {\n", "file_path": "src/cache/messagemeta.rs", "rank": 44, "score": 15.163570486421523 }, { "content": " \"UPDATE v1 SET uid = (?1),\n\n size = (?2),\n\n internal_date_millis = (?3),\n\n flags = (?4),\n\n id = (?5)\n\n WHERE uid = (?1)\",\n\n params![\n\n meta.uid(),\n\n meta.size(),\n\n meta.internal_date_millis(),\n\n meta.flags(),\n\n meta.id()\n\n ],\n\n )\n\n })\n\n .map(|_| ())\n\n .map_err(|e| format!(\"UPDATE FAILED: {}\", e))\n\n }\n\n\n\n pub fn delete_uid(&self, uid: u32) -> Result<(), String> {\n", "file_path": "src/cache/db.rs", "rank": 45, "score": 15.114108840065438 }, { "content": " self.state.imap_last = chrono::offset::Utc::now().timestamp_millis();\n\n self.state.uid_validity = uid_validity;\n\n self.state.uid_next = uid_next;\n\n self.state.highest_mod_seq = highest_mod_seq;\n\n self.save()\n\n }\n\n\n\n pub fn update_maildir(&mut self) -> Result<(), String> {\n\n self.state.maildir_last = chrono::offset::Utc::now().timestamp_millis();\n\n self.save()\n\n }\n\n\n\n pub fn set_last_seen_uid(&mut self, uid: u32) -> 
Result<(), String> {\n\n self.state.last_seen_uid = uid;\n\n self.save()\n\n }\n\n\n\n /*\n\n pub fn set_highest_mod_seq(&mut self, seq: u64) -> Result<(), String> {\n\n self.state.highest_mod_seq = seq;\n", "file_path": "src/cache/statefile.rs", "rank": 46, "score": 15.101639781414137 }, { "content": " self.uid == uidres.uid()\n\n && self.size == uidres.size()\n\n && self.internal_date_millis == uidres.internal_date_millis()\n\n && self.flags_equal(uidres.flags())\n\n }\n\n\n\n pub fn needs_refetch(&self, uidres: &UidResult) -> bool {\n\n self.size != uidres.size() || self.internal_date_millis != uidres.internal_date_millis()\n\n }\n\n\n\n pub fn needs_move_from_new_to_cur(&self, uidres: &UidResult) -> bool {\n\n !self.flags.contains(FlagValue::Seen) && uidres.flags().contains(&Flag::Seen)\n\n }\n\n\n\n pub fn uid(&self) -> u32 {\n\n self.uid\n\n }\n\n\n\n pub fn id(&self) -> &str {\n\n &self.id\n", "file_path": "src/cache/messagemeta.rs", "rank": 47, "score": 15.082042661052208 }, { "content": "pub enum SyncMessage {\n\n Exit,\n\n ImapChanged,\n\n ImapError(String),\n\n MaildirChanged,\n\n MaildirError(String),\n\n}\n\n\n\n/// A struct representing a single mailbox to synchronize\n\n/// including the IMAP side and corresponding Maildir\n\npub struct SyncDir {\n\n pub config: Account,\n\n pub mailbox: String,\n\n pub sender: Sender<SyncMessage>,\n\n receiver: Receiver<SyncMessage>,\n\n cache: Cache,\n\n maildir: Maildir,\n\n idlethread: Option<JoinHandle<()>>,\n\n fsthread: Option<JoinHandle<()>>,\n\n}\n", "file_path": "src/syncdir.rs", "rank": 48, "score": 14.883823257910546 }, { "content": " }\n\n\n\n /// Run loop for the sync engine. Performs a full sync then waits on change\n\n /// events from the IMAP server or the Maildir.\n\n ///\n\n /// On each change, performs a sync between the server and the Mailfir.\n\n /// Each sync does a UID sync between the IMAP server and the cache db and\n\n /// Maildir. Then does a Maildir ID sync between the cache db and the IMAP\n\n /// server. The IMAP server knows about UIDs, and the Maildir knows about\n\n /// IDs. 
The cache db holds the mapping between these sets, and allows the\n\n /// sync engine to identify new and changed elements between each set.\n\n fn do_sync(&mut self) -> Result<(), String> {\n\n loop {\n\n let mut imap = Imap::new(&self.config)?;\n\n //imap.debug(true);\n\n if imap.can_qresync() {\n\n imap.enable_qresync().unwrap();\n\n }\n\n let mailbox = imap.select_mailbox(&self.mailbox.as_str())?;\n\n //imap.debug(false);\n", "file_path": "src/syncdir.rs", "rank": 49, "score": 14.701743055385677 }, { "content": " let sflags = SyncFlags::from(mail_v.flags());\n\n let flags = if let Some(f) = sflags.as_imap_flags() {\n\n f\n\n } else {\n\n Vec::new()\n\n };\n\n\n\n // Push to the server first, then delete the local copy\n\n imap.append(&fs::read(mail_v.path()).map_err(|e| e.to_string())?, &flags)?;\n\n // These will come back to us on the idle loop,\n\n // at which time they will get cache entries.\n\n self.maildir.delete_message(&id)?;\n\n }\n\n\n\n for uid in refetch {\n\n imap.fetch_uid_meta(uid)\n\n .and_then(|zc_vec_fetch| self.cache_uids_from_imap(imap, &zc_vec_fetch))?;\n\n }\n\n\n\n self.cache.update_maildir_state()\n", "file_path": "src/syncdir.rs", "rank": 50, "score": 14.528131668639102 }, { "content": "\n\n /// Compare the given IMAP FETCH results with the cache, and remove any entries\n\n /// from the cache that are no longer on the server.\n\n ///\n\n /// Called after processing the given fetch results and updating the\n\n /// cache db and Maildir. Any UIDs remaining in the cache db must have\n\n /// been deleted on the server and should be deleted from the cache db\n\n /// and the Maildir.\n\n fn remove_imap_deleted_messages(\n\n &mut self,\n\n zc_vec_fetch: &ZeroCopy<Vec<Fetch>>,\n\n ) -> Result<(), String> {\n\n let mut err = false;\n\n self.cache.get_known_uids().and_then(|mut cached_uids| {\n\n // Remove all the fetched uids from the cached values\n\n // leaving only uids that are in the cache but not on\n\n // the server anymore.\n\n for fetch in zc_vec_fetch.deref() {\n\n match FetchResult::from(fetch) {\n\n FetchResult::Uid(uidres) => {\n", "file_path": "src/syncdir.rs", "rank": 51, "score": 14.483774474176967 }, { "content": " }\n\n\n\n pub fn flags(&self) -> String {\n\n self.flags.to_string()\n\n }\n\n\n\n pub fn size(&self) -> u32 {\n\n self.size\n\n }\n\n\n\n pub fn internal_date_millis(&self) -> i64 {\n\n self.internal_date_millis\n\n }\n\n}\n", "file_path": "src/cache/messagemeta.rs", "rank": 52, "score": 14.276899386130268 }, { "content": " /*\n\n pub fn set_highest_mod_seq(&mut self, seq: u64) -> Result<(), String> {\n\n if seq > self.state.highest_mod_seq() {\n\n self.state.set_highest_mod_seq(seq)\n\n } else {\n\n Ok(())\n\n }\n\n }\n\n */\n\n\n\n pub fn get_known_uids(&self) -> Result<HashSet<u32>, String> {\n\n self.db.get_uids()\n\n }\n\n\n\n pub fn get_known_ids(&self) -> Result<HashMap<String, MessageMeta>, String> {\n\n self.db.get_ids()\n\n }\n\n\n\n pub fn update_maildir_state(&mut self) -> Result<(), String> {\n\n self.state.update_maildir()\n", "file_path": "src/cache/mod.rs", "rank": 53, "score": 14.192476519394297 }, { "content": "#[derive(Deserialize, Clone)]\n\npub struct Config {\n\n pub accounts: Vec<Account>,\n\n}\n\n\n\nimpl Config {\n\n pub fn new() -> Config {\n\n let mut dir = Config::dir();\n\n dir.push(\"config\");\n\n let mut f = File::open(dir).unwrap();\n\n let mut buf: String = String::new();\n\n f.read_to_string(&mut buf).unwrap();\n\n let mut configs: Config = toml::from_str(&buf).unwrap();\n\n for config in &mut 
configs.accounts {\n\n if config.port.is_none() {\n\n config.port = Some(993);\n\n }\n\n if config.password_command.is_some() {\n\n let password = Command::new(\"sh\")\n\n .arg(\"-c\")\n", "file_path": "src/config.rs", "rank": 54, "score": 14.137543754091595 }, { "content": " self.save()\n\n }\n\n */\n\n\n\n pub fn save(&self) -> Result<(), String> {\n\n std::fs::File::create(&self.path)\n\n .and_then(|mut f| {\n\n f.write_all(\n\n &serde_json::to_string_pretty(&self.state)\n\n .unwrap()\n\n .as_bytes(),\n\n )\n\n })\n\n .map_err(|e| format!(\"{}\", e))\n\n }\n\n\n\n /*\n\n pub fn imap_last(&self) -> i64 {\n\n self.state.imap_last\n\n }\n", "file_path": "src/cache/statefile.rs", "rank": 55, "score": 14.10808844275746 }, { "content": " };\n\n blank.save().map(|_| blank)\n\n }\n\n\n\n fn from_file(path: &Path) -> Result<StateFile, String> {\n\n std::fs::read_to_string(path)\n\n .map_err(|e| format!(\"{}\", e))\n\n .and_then(|buf| serde_json::from_str(&buf).map_err(|e| format!(\"{}\", e)))\n\n .map(|state| StateFile {\n\n path: path.to_path_buf(),\n\n state,\n\n })\n\n }\n\n\n\n pub fn update_imap(\n\n &mut self,\n\n uid_validity: u32,\n\n uid_next: u32,\n\n highest_mod_seq: u64,\n\n ) -> Result<(), String> {\n", "file_path": "src/cache/statefile.rs", "rank": 56, "score": 14.095898833020785 }, { "content": " 100\n\n }\n\n }\n\n\n\n pub fn get_uids(&self) -> Result<HashSet<u32>, String> {\n\n let mut v = HashSet::with_capacity(self.expected_entries());\n\n let conn = Connection::open(&self.dbpath).map_err(|e| format!(\"Open DB: {}\", e))?;\n\n\n\n let mut stmt = conn\n\n .prepare(\"SELECT uid FROM v1\")\n\n .map_err(|e| format!(\"SELECT FAILED: {}\", e))?;\n\n\n\n let rows = stmt\n\n .query_map(params![], |r| r.get(0))\n\n .map_err(|e| format!(\"query_map: {}\", e))?;\n\n\n\n for r in rows {\n\n v.insert(r.map_err(|e| format!(\"fetch row: {}\", e))?);\n\n }\n\n Ok(v)\n", "file_path": "src/cache/db.rs", "rank": 57, "score": 13.969232395520022 }, { "content": " Ok(())\n\n }\n\n\n\n /// Check for VANISHED messages in the unsolicited responses channel\n\n fn check_unsolicited_for_vanished(\n\n &mut self,\n\n imap: &mut Imap,\n\n ) -> Result<Vec<std::ops::RangeInclusive<u32>>, String> {\n\n let mut vanished = Vec::new();\n\n imap.for_each_unsolicited_response(|u| {\n\n if let UnsolicitedResponse::Vanished {\n\n earlier: _,\n\n mut uids,\n\n } = u\n\n {\n\n vanished.append(&mut uids);\n\n }\n\n });\n\n Ok(vanished)\n\n }\n", "file_path": "src/syncdir.rs", "rank": 58, "score": 13.758482678318913 }, { "content": " imap: &mut Imap,\n\n meta: &MessageMeta,\n\n uidres: &UidResult,\n\n ) -> Result<(), String> {\n\n // Check if anything has changed\n\n if meta.is_equal(uidres) {\n\n return Ok(());\n\n }\n\n\n\n if meta.needs_refetch(uidres) {\n\n // Pull down a whole new copy of the message.\n\n self.delete_message_from_maildir(meta.uid())?;\n\n self.cache_message_for_uid(imap, meta.uid())\n\n } else {\n\n self.log(&format!(\n\n \"Updating UID {}: {:?} -> {:?}\",\n\n uidres.uid(),\n\n meta.flags(),\n\n uidres.flags()\n\n ));\n", "file_path": "src/syncdir.rs", "rank": 59, "score": 13.689839814446536 }, { "content": " } else {\n\n self.maildir.store_new(body)\n\n }\n\n .map_err(|e| format!(\"Message store failed: {}\", e))\n\n }\n\n\n\n /// Move a message ID to the cur Maildir directory and set its flags.\n\n pub fn move_message_to_cur(&mut self, id: &str, flags: &str) -> Result<(), String> {\n\n self.maildir\n\n .move_new_to_cur_with_flags(id, flags)\n\n .map_err(|e| format!(\"Move message to cur failed for 
id{}: {}\", id, e))\n\n }\n\n\n\n /// Set the flags for the given message ID.\n\n pub fn set_flags_for_message(&mut self, id: &str, flags: &str) -> Result<(), String> {\n\n self.maildir\n\n .set_flags(id, flags)\n\n .map_err(|e| format!(\"Setting flags failed for id {}: {}\", id, e))\n\n }\n\n\n", "file_path": "src/maildirw.rs", "rank": 60, "score": 13.636702570198645 }, { "content": "use std::fs::File;\n\nuse std::io::Read;\n\nuse std::path::PathBuf;\n\nuse std::process::Command;\n\nuse std::vec::Vec;\n\n\n\n#[derive(Deserialize, Clone)]\n\npub struct Account {\n\n pub account: String,\n\n pub server: String,\n\n pub port: Option<u16>,\n\n pub username: String,\n\n pub maildir: String,\n\n pub password_command: Option<String>,\n\n pub password: Option<String>,\n\n pub exclude: Option<Vec<String>>,\n\n pub idle: Option<Vec<String>>,\n\n pub max_concurrency: Option<usize>,\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 61, "score": 13.565825808988526 }, { "content": "\n\n self.log(&format!(\n\n \"Synchronizing ({})\",\n\n if imap.can_qresync() { \"quick\" } else { \"slow\" }\n\n ));\n\n let res = if imap.can_qresync() {\n\n self.quick_sync_cache_from_imap(&mut imap, &mailbox)\n\n .and_then(|_| self.sync_cache_from_maildir(&mut imap))\n\n .and_then(|_| imap.logout())\n\n } else {\n\n self.slow_sync_cache_from_imap(&mut imap, &mailbox)\n\n .and_then(|_| self.sync_cache_from_maildir(&mut imap))\n\n .and_then(|_| imap.logout())\n\n };\n\n\n\n self.log(\"Done\");\n\n\n\n if let Err(e) = res {\n\n break Err(format!(\"Error syncing: {}\", e));\n\n };\n", "file_path": "src/syncdir.rs", "rank": 62, "score": 13.519045850471072 }, { "content": " fetch\n\n .body()\n\n .ok_or_else(|| \"No BODY in FETCH result\".to_string())\n\n .and_then(|body| {\n\n self.maildir\n\n .save_message(body, &maildir_flags_from_imap(fetch.flags()))\n\n })\n\n .and_then(|id| self.cache.add(&id, &fetch))\n\n }\n\n\n\n /// Delete a given UID from the Maildir and clear its entry from cache.\n\n ///\n\n /// Unconditionally deletes the cache db entry for this message after\n\n /// attempting to delete the message from the maildir. The most common\n\n /// cause of delete errors is the message already being deleted from the\n\n /// Maildir, so erroring prevents the cache db from being updated. In the\n\n /// event that deleting the message fails for some other reason, it will\n\n /// appear to be a new message in the Maildir and will be resynced on\n\n /// next sync. This might annoy the user, but errs on the side of caution\n\n /// when things go wrong.\n", "file_path": "src/syncdir.rs", "rank": 63, "score": 13.499234927873367 }, { "content": "use crate::cache::messagemeta::MessageMeta;\n\nuse rusqlite::{params, Connection};\n\nuse std::collections::{HashMap, HashSet};\n\nuse std::path::{Path, PathBuf};\n\n\n\npub struct Db {\n\n dbpath: PathBuf,\n\n}\n\n\n\nimpl Db {\n\n fn init_db(path: &Path) -> Result<(), String> {\n\n let conn = Connection::open(path)\n\n .map_err(|e| format!(\"DB Open failed at {}: {}\", path.display(), e))?;\n\n\n\n conn.execute(\n\n \"CREATE TABLE v1 (\n\n uid INTEGER PRIMARY KEY,\n\n size INTEGER,\n\n internal_date_millis INTEGER,\n\n flags TEXT,\n", "file_path": "src/cache/db.rs", "rank": 64, "score": 13.087364427637667 }, { "content": " imap.logout().ok();\n\n\n\n // Handle if the user has specified some maximum number of threads\n\n // to run with. 
We have to allocate one thread for every idle\n\n // mailbox, and remaining threads do all of the sync-once mailboxes.\n\n let mut pool_size = pool_mailboxes.len();\n\n if let Some(max_threads) = config.max_concurrency {\n\n if let Some(pool) = max_threads.checked_sub(idle_mailboxes.len()) {\n\n pool_size = pool;\n\n } else {\n\n pool_size = 0;\n\n }\n\n\n\n if pool_size == 0 && !pool_mailboxes.is_empty() {\n\n println!(\"Account {}.max_concurrency ({}) is too small for the number of idle mailboxes ({}) and non-idle mailboxes.\", config.account, max_threads, idle_mailboxes.len(), );\n\n println!(\"You may see errors from the server and some mailboxes may not be synchronized.\\nTo fix this, specify a number of mailboxes to idle that is smaller that max_concurrency, or increase max_concurrency if possible.\");\n\n pool_size = 1;\n\n }\n\n }\n\n\n", "file_path": "src/main.rs", "rank": 65, "score": 12.522496091303278 }, { "content": "\n\n pub fn update(&mut self, uidres: &UidResult) -> Result<MessageMeta, String> {\n\n let uid = uidres.uid();\n\n match self.get_uid(uid) {\n\n Ok(mut meta) => {\n\n if !meta.is_equal(uidres) {\n\n meta.update(uidres);\n\n self.db.update(&meta).map(|_| meta)\n\n } else {\n\n Ok(meta)\n\n }\n\n }\n\n Err(e) => Err(e.to_string()),\n\n }\n\n }\n\n}\n", "file_path": "src/cache/mod.rs", "rank": 66, "score": 12.339701413634941 }, { "content": " /// Delete a message ID.\n\n pub fn delete_message(&self, id: &str) -> Result<(), String> {\n\n self.maildir\n\n .delete(id)\n\n .map_err(|e| format!(\"Maildir delete failed for ID {}: {}\", id, e))\n\n }\n\n\n\n /// For the given cached entries map (id -> meta), remove entries\n\n /// that have not changed, and return a vector of new ids not present\n\n /// in the cache.\n\n pub fn get_updates(\n\n &self,\n\n cache: &mut HashMap<String, MessageMeta>,\n\n ) -> Result<(Vec<String>, Vec<String>), String> {\n\n let mut new = Vec::new();\n\n let mut changed = Vec::new();\n\n for mailentry_res in self.maildir.list_new().chain(self.maildir.list_cur()) {\n\n let mailentry = mailentry_res.map_err(|e| e.to_string())?;\n\n\n\n if let Some(cache_meta) = cache.get(mailentry.id()) {\n", "file_path": "src/maildirw.rs", "rank": 67, "score": 12.248919982669292 }, { "content": " }\n\n s\n\n }\n\n}\n\n\n\nimpl SyncFlags {\n\n pub fn contains(&self, other: FlagValue) -> bool {\n\n for flag in &self.maildir {\n\n if *flag == other {\n\n return true;\n\n }\n\n }\n\n false\n\n }\n\n\n\n pub fn diff(&self, other: SyncFlags) -> SyncFlagsDiff {\n\n let mut diff = SyncFlagsDiff::new();\n\n for i in 0..self.maildir.len() {\n\n match (self.maildir[i], other.maildir[i]) {\n\n (FlagValue::NoFlag, FlagValue::NoFlag) => (),\n", "file_path": "src/cache/syncflags.rs", "rank": 68, "score": 12.204365009280226 }, { "content": " .map_err(|e| format!(\"query_map: {}\", e))?;\n\n\n\n for meta in rows.flatten() {\n\n h.insert(meta.id().to_string(), meta);\n\n }\n\n Ok(h)\n\n }\n\n\n\n pub fn get_uid(&self, uid: u32) -> anyhow::Result<MessageMeta> {\n\n let conn = Connection::open(&self.dbpath)?;\n\n\n\n let mut stmt = conn.prepare(\n\n \"SELECT uid, size, internal_date_millis, flags, id\n\n FROM v1 WHERE uid = (?)\",\n\n )?;\n\n\n\n let res = stmt.query_row(params![uid], |r| {\n\n Ok(MessageMeta::from_fields(\n\n r.get_unwrap(0),\n\n r.get_unwrap(1),\n", "file_path": "src/cache/db.rs", "rank": 69, "score": 12.193651793444479 }, { "content": " Connection::open(&self.dbpath)\n\n .and_then(|conn| conn.execute(\"DELETE from v1 WHERE uid = (?1)\", params![uid]))\n\n 
.map(|_| ())\n\n .map_err(|e| format!(\"DELETE FAILED {}: {}\", uid, e))\n\n }\n\n\n\n pub fn num_entries(&self) -> Result<i64, String> {\n\n let conn = Connection::open(&self.dbpath).map_err(|e| format!(\"Open DB: {}\", e))?;\n\n let mut stmt = conn\n\n .prepare(\"SELECT count(uid) from v1\")\n\n .map_err(|e| format!(\"SELECT: {}\", e))?;\n\n\n\n stmt.query_row(params![], |r| Ok(r.get_unwrap(0)))\n\n .map_err(|e| format!(\"query_row: {}\", e))\n\n }\n\n\n\n pub fn expected_entries(&self) -> usize {\n\n if let Ok(n) = self.num_entries() {\n\n n as usize\n\n } else {\n", "file_path": "src/cache/db.rs", "rank": 70, "score": 11.95990928121534 }, { "content": " (FlagValue::NoFlag, x) => diff.add.maildir[i] = x,\n\n (x, FlagValue::NoFlag) => diff.sub.maildir[i] = x,\n\n _ => (),\n\n }\n\n }\n\n diff\n\n }\n\n\n\n pub fn empty(&self) -> bool {\n\n for flag in &self.maildir {\n\n if *flag != FlagValue::NoFlag {\n\n return false;\n\n }\n\n }\n\n true\n\n }\n\n\n\n pub fn as_imap_flags(&self) -> Option<Vec<Flag>> {\n\n let mut res = Vec::<Flag>::with_capacity(self.maildir.len());\n\n for flag in &self.maildir {\n", "file_path": "src/cache/syncflags.rs", "rank": 71, "score": 11.888876017044007 }, { "content": " idle_mailboxes.into_iter().for_each(|mut sd| {\n\n threads.push(spawn(move || sd.sync()));\n\n });\n\n\n\n if !pool_mailboxes.is_empty() {\n\n if let Ok(pool) = rayon::ThreadPoolBuilder::new()\n\n .num_threads(pool_size)\n\n .build()\n\n {\n\n pool_mailboxes.into_iter().for_each(|mut sd| {\n\n pool.spawn(move || {\n\n if let Err(e) = sd.sync() {\n\n eprintln!(\"Synchronize-once for mailbox {} failed: {}\", sd.mailbox, e);\n\n }\n\n })\n\n });\n\n }\n\n }\n\n }\n\n\n", "file_path": "src/main.rs", "rank": 72, "score": 11.689739451496695 }, { "content": " pub fn maildir_last(&self) -> i64 {\n\n self.state.maildir_last\n\n }\n\n */\n\n pub fn uid_validity(&self) -> u32 {\n\n self.state.uid_validity\n\n }\n\n /*\n\n pub fn uid_next(&self) -> u32 {\n\n self.state.uid_next\n\n }\n\n */\n\n pub fn last_seen_uid(&self) -> u32 {\n\n self.state.last_seen_uid\n\n }\n\n\n\n pub fn highest_mod_seq(&self) -> u64 {\n\n self.state.highest_mod_seq\n\n }\n\n}\n", "file_path": "src/cache/statefile.rs", "rank": 73, "score": 11.658740654193238 }, { "content": " self.cache.update(uidres).and_then(|newmeta| {\n\n if meta.needs_move_from_new_to_cur(uidres)\n\n && self.maildir.message_is_in_new(meta.id())?\n\n {\n\n self.maildir\n\n .move_message_to_cur(meta.id(), &newmeta.flags())\n\n } else {\n\n self.maildir\n\n .set_flags_for_message(newmeta.id(), &newmeta.flags())\n\n }\n\n })\n\n }\n\n }\n\n\n\n /// For the given IMAP FETCH results, update the cache. Existing messages\n\n /// are updated if needed, and new messages are downloaded.\n\n ///\n\n /// Used to process a full set of IMAP FETCH results. 
Since the IMAP\n\n /// server is the source of truth, anything in the given FETCH results\n\n /// must be either existing / known or new and need to be downloaded.\n", "file_path": "src/syncdir.rs", "rank": 74, "score": 11.658023720410526 }, { "content": " conn.execute(\n\n \"INSERT INTO v1 (uid, size, internal_date_millis, flags, id)\n\n VALUES (?1, ?2, ?3, ?4, ?5)\",\n\n params![\n\n meta.uid(),\n\n meta.size(),\n\n meta.internal_date_millis(),\n\n meta.flags(),\n\n meta.id()\n\n ],\n\n )\n\n })\n\n .map(|_| ())\n\n .map_err(|e| format!(\"INSERT FAILED: {}\", e))\n\n }\n\n\n\n pub fn update(&self, meta: &MessageMeta) -> Result<(), String> {\n\n Connection::open(&self.dbpath)\n\n .and_then(|conn| {\n\n conn.execute(\n", "file_path": "src/cache/db.rs", "rank": 75, "score": 11.599607818237134 }, { "content": " }\n\n\n\n pub fn get_ids(&self) -> Result<HashMap<String, MessageMeta>, String> {\n\n let conn = Connection::open(&self.dbpath).map_err(|e| format!(\"Open DB: {}\", e))?;\n\n\n\n let mut stmt = conn\n\n .prepare(\"SELECT uid, size, internal_date_millis, flags, id FROM v1\")\n\n .map_err(|e| format!(\"SELECT FAILED: {}\", e))?;\n\n\n\n let mut h = HashMap::with_capacity(self.expected_entries());\n\n let rows = stmt\n\n .query_map(params![], |r| {\n\n Ok(MessageMeta::from_fields(\n\n r.get_unwrap(0),\n\n r.get_unwrap(1),\n\n r.get_unwrap(2),\n\n r.get_unwrap(3),\n\n r.get_unwrap(4),\n\n ))\n\n })\n", "file_path": "src/cache/db.rs", "rank": 76, "score": 11.364530433111142 }, { "content": "\n\n // If we are not IDLEing, then we're done\n\n if !self.should_idle() {\n\n break Ok(());\n\n }\n\n\n\n if self.idlethread.is_none() {\n\n match self.idle() {\n\n Ok(handle) => self.idlethread = Some(handle),\n\n Err(why) => {\n\n break Err(format!(\"Error in IDLE: {}\", why));\n\n }\n\n }\n\n }\n\n\n\n if self.fsthread.is_none() {\n\n match self.fswait() {\n\n Ok(handle) => self.fsthread = Some(handle),\n\n Err(why) => {\n\n break Err(format!(\"Error in watching file system: {}\", why));\n", "file_path": "src/syncdir.rs", "rank": 77, "score": 11.3085852285671 }, { "content": " .cache\n\n .get_known_uids()?\n\n .iter()\n\n .cloned()\n\n .collect::<Vec<u32>>(),\n\n )\n\n }\n\n\n\n /// Sync the Maildir with the cache. Locally deleted messages are deleted from\n\n /// the server, local changes are pushed to the server, and new messages are\n\n /// uploaded to the server.\n\n ///\n\n /// This is the main Local -> Server routine for Maildir IDs. 
Maildir entries\n\n /// are compared with the cache db and any changes in the Maildir are propagated\n\n /// to the server.\n\n fn sync_cache_from_maildir(&mut self, imap: &mut Imap) -> Result<(), String> {\n\n let mut ids = self.cache.get_known_ids()?;\n\n let (new, changed) = self.maildir.get_updates(&mut ids)?;\n\n let mut refetch = HashSet::<u32>::new();\n\n\n", "file_path": "src/syncdir.rs", "rank": 78, "score": 11.183005876895283 }, { "content": " let uid = uidres.uid();\n\n if !cached_uids.remove(&uid) {\n\n self.elog(&format!(\"UID {} exists on server but not in cache\", uid));\n\n err = true;\n\n }\n\n }\n\n FetchResult::Other(f) => self.log(&format!(\"Got Other: {:?}\", f)),\n\n }\n\n }\n\n\n\n // Remove uids from cache that have been removed on the server\n\n for uid in cached_uids {\n\n if let Err(e) = self.delete_message_from_maildir(uid) {\n\n self.elog(&format!(\"Error deleting UID {}: {}\", uid, e));\n\n err = true;\n\n }\n\n }\n\n\n\n if err {\n\n Err(\"Error removing absent UIDs\".to_string())\n", "file_path": "src/syncdir.rs", "rank": 79, "score": 10.876661813656531 }, { "content": "use crate::cache::MessageMeta;\n\nuse maildir::MailEntry;\n\nuse maildir::Maildir as SubMaildir;\n\nuse std::collections::HashMap;\n\nuse std::path::PathBuf;\n\n//use std::time::SystemTime;\n\n\n\n/// A wrapper around a maildir implementation\n\npub struct Maildir {\n\n maildir: SubMaildir,\n\n}\n\n\n\n/// A struct representing a mail message in the Maildir.\n\npub struct IdResult {\n\n //id: String,\n\n flags: String,\n\n size: u64,\n\n //modified_millis: u128,\n\n path: PathBuf,\n\n}\n", "file_path": "src/maildirw.rs", "rank": 80, "score": 10.700532985073647 }, { "content": " // If the meta is different then add it to the changed list\n\n if !meta_equal(&mailentry, &cache_meta)? {\n\n changed.push(mailentry.id().to_string());\n\n }\n\n\n\n // Remove the entry from the cachemap since it is still on disk.\n\n if cache.remove(mailentry.id()).is_none() {\n\n return Err(format!(\"Cache id mismatch: {}\", mailentry.id()));\n\n }\n\n } else {\n\n new.push(mailentry.id().to_string());\n\n }\n\n }\n\n Ok((new, changed))\n\n }\n\n\n\n /// Determine if a given message ID is in the Maildir 'new' folder.\n\n pub fn message_is_in_new(&self, id: &str) -> Result<bool, String> {\n\n for mailentry_res in self.maildir.list_new() {\n\n let mailentry = mailentry_res.map_err(|e| e.to_string())?;\n", "file_path": "src/maildirw.rs", "rank": 81, "score": 10.69841085357081 }, { "content": " pub fn should_idle(&self) -> bool {\n\n self.config.is_mailbox_idled(&self.mailbox)\n\n }\n\n\n\n /// Spawn a thread on this Maildir and wait for changes. 
On change,\n\n /// a message is sent to the parent the main sync thread.\n\n fn fswait(&self) -> Result<JoinHandle<()>, String> {\n\n let sender = self.sender.clone();\n\n let path = self.maildir.path();\n\n let handle = spawn(move || {\n\n let (tx, rx) = channel();\n\n let mut watcher = watcher(tx, Duration::from_secs(10)).unwrap();\n\n watcher.watch(path, RecursiveMode::Recursive).unwrap();\n\n loop {\n\n match rx.recv() {\n\n Ok(event) => {\n\n match event {\n\n notify::DebouncedEvent::Write(path) if path.is_dir() => {\n\n // trigger on dir writes only, which cover everything else\n\n sender.send(SyncMessage::MaildirChanged).ok();\n", "file_path": "src/syncdir.rs", "rank": 82, "score": 10.67145681080315 }, { "content": " // If we need to update flags then send changes.\n\n let cache_flags = SyncFlags::from(cache_v.flags().as_str());\n\n let maildir_flags = SyncFlags::from(mail_v.flags());\n\n let flags_diff = cache_flags.diff(maildir_flags);\n\n if let Some(flags) = flags_diff.add.as_imap_flags() {\n\n imap.add_flags_for_uid(cache_v.uid(), &flags)?;\n\n refetch.insert(cache_v.uid());\n\n }\n\n if let Some(flags) = flags_diff.sub.as_imap_flags() {\n\n imap.remove_flags_for_uid(cache_v.uid(), &flags)?;\n\n refetch.insert(cache_v.uid());\n\n }\n\n\n\n // If we need to push a new body.\n\n if cache_v.size() as u64 != mail_v.size() {\n\n // Sometimes we see the SIZE field in a fetch response to be\n\n // different from the BODY length.\n\n // When this happens, we end up constantly replacing the\n\n // message on server because it looks like it has changed.\n\n // Since IMAP requires messages to be immutable, and Maildir\n", "file_path": "src/syncdir.rs", "rank": 83, "score": 10.556450257648025 }, { "content": " }\n\n _ => (),\n\n }\n\n }\n\n Err(e) => {\n\n sender\n\n .send(SyncMessage::MaildirError(format!(\"{:?}\", e)))\n\n .ok();\n\n }\n\n }\n\n }\n\n });\n\n Ok(handle)\n\n }\n\n\n\n /// Save the given message in the Maildir.\n\n ///\n\n /// Updates the cache db on success. On failure, then we will\n\n /// refetch on the next loop.\n\n fn save_message_in_maildir(&mut self, fetch: &Fetch) -> Result<MessageMeta, String> {\n", "file_path": "src/syncdir.rs", "rank": 84, "score": 10.383931666608072 }, { "content": " pub fn new(path: &Path) -> Result<StateFile, String> {\n\n if path.exists() {\n\n StateFile::from_file(&path)\n\n } else {\n\n StateFile::make_new(&path)\n\n }\n\n }\n\n\n\n fn make_new(path: &Path) -> Result<StateFile, String> {\n\n let blank = StateFile {\n\n path: path.to_path_buf(),\n\n state: StateFileFields {\n\n version: 1,\n\n imap_last: 0,\n\n maildir_last: 0,\n\n uid_validity: 0,\n\n uid_next: 0,\n\n last_seen_uid: 0,\n\n highest_mod_seq: 0,\n\n },\n", "file_path": "src/cache/statefile.rs", "rank": 85, "score": 10.132719118968467 }, { "content": " Flag::Draft => flags.maildir[0] = FlagValue::Draft,\n\n _ => (),\n\n }\n\n }\n\n flags\n\n }\n\n}\n\n\n\nimpl ToString for SyncFlags {\n\n fn to_string(&self) -> String {\n\n let mut s = String::with_capacity(5);\n\n for i in 0..self.maildir.len() {\n\n match self.maildir[i] {\n\n FlagValue::Draft => s.push('D'),\n\n FlagValue::Flagged => s.push('F'),\n\n FlagValue::Replied => s.push('R'),\n\n FlagValue::Seen => s.push('S'),\n\n FlagValue::Trashed => s.push('T'),\n\n _ => (),\n\n }\n", "file_path": "src/cache/syncflags.rs", "rank": 86, "score": 9.977262453526977 }, { "content": "\n\n /// Public interface for the sync engine. 
Runs a sync loop until it exits.\n\n /// If the sync loop exited with an error, then it will respawn after a\n\n /// short delay.\n\n pub fn sync(&mut self) -> Result<(), String> {\n\n loop {\n\n match self.do_sync() {\n\n Err(why) => {\n\n self.elog(&format!(\"Sync exited with error: {}\", why));\n\n // sleep 10 to throttle retries\n\n sleep(Duration::from_secs(10));\n\n }\n\n Ok(_) => break Ok(()),\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/syncdir.rs", "rank": 87, "score": 9.775051304967572 }, { "content": "\n\nimpl IdResult {\n\n /*\n\n pub fn id(&self) -> &str {\n\n &self.id\n\n }\n\n */\n\n pub fn flags(&self) -> &str {\n\n &self.flags\n\n }\n\n pub fn size(&self) -> u64 {\n\n self.size\n\n }\n\n /*\n\n pub fn modified_millis(&self) -> u128 {\n\n self.modified_millis\n\n }\n\n */\n\n pub fn path(&self) -> &PathBuf {\n\n &self.path\n\n }\n\n}\n\n\n\n/// Determine if the given cache db entry for the message and the maildir\n\n/// entry for the message are equivalent.\n", "file_path": "src/maildirw.rs", "rank": 88, "score": 9.672127339682639 }, { "content": " r.get_unwrap(2),\n\n r.get_unwrap(3),\n\n r.get_unwrap(4),\n\n ))\n\n })?;\n\n Ok(res)\n\n }\n\n\n\n pub fn get_id(&self, id: &str) -> Result<MessageMeta, String> {\n\n let conn = Connection::open(&self.dbpath).map_err(|e| format!(\"Open DB: {}\", e))?;\n\n\n\n let mut stmt = conn\n\n .prepare(\n\n \"SELECT uid, size, internal_date_millis, flags, id\n\n FROM v1 WHERE id = (?)\",\n\n )\n\n .map_err(|e| format!(\"SELECT: {}\", e))?;\n\n\n\n stmt.query_row(params![id], |r| {\n\n Ok(MessageMeta::from_fields(\n", "file_path": "src/cache/db.rs", "rank": 89, "score": 9.182059834928173 }, { "content": "use config::Config;\n\nuse imapw::Imap;\n\nuse libc::SIGINT;\n\nuse std::sync::atomic::{AtomicBool, Ordering};\n\nuse std::thread::{sleep, spawn};\n\nuse std::time;\n\nuse syncdir::{SyncDir, SyncMessage};\n\n\n\nstatic SHUTDOWN: AtomicBool = AtomicBool::new(false);\n\n\n", "file_path": "src/main.rs", "rank": 90, "score": 9.000469323853036 }, { "content": " b'F' => flags.maildir[1] = FlagValue::Flagged,\n\n b'R' => flags.maildir[2] = FlagValue::Replied,\n\n b'S' => flags.maildir[3] = FlagValue::Seen,\n\n b'T' => flags.maildir[4] = FlagValue::Trashed,\n\n _ => (),\n\n }\n\n }\n\n flags\n\n }\n\n}\n\n\n\nimpl From<&[Flag<'_>]> for SyncFlags {\n\n fn from(imap_flags: &[Flag]) -> SyncFlags {\n\n let mut flags = SyncFlags::new();\n\n for f in imap_flags {\n\n match f {\n\n Flag::Seen => flags.maildir[3] = FlagValue::Seen,\n\n Flag::Answered => flags.maildir[2] = FlagValue::Replied,\n\n Flag::Flagged => flags.maildir[1] = FlagValue::Flagged,\n\n Flag::Deleted => flags.maildir[4] = FlagValue::Trashed,\n", "file_path": "src/cache/syncflags.rs", "rank": 91, "score": 8.86131770982739 }, { "content": " D: Deserializer<'de>,\n\n {\n\n deserializer.deserialize_str(SyncFlagsVisitor)\n\n }\n\n}\n\n\n\nimpl SyncFlags {\n\n fn new() -> SyncFlags {\n\n SyncFlags {\n\n maildir: [FlagValue::NoFlag; 5],\n\n }\n\n }\n\n}\n\n\n\nimpl From<&str> for SyncFlags {\n\n fn from(s: &str) -> SyncFlags {\n\n let mut flags = SyncFlags::new();\n\n for b in s.bytes() {\n\n match b {\n\n b'D' => flags.maildir[0] = FlagValue::Draft,\n", "file_path": "src/cache/syncflags.rs", "rank": 92, "score": 8.256710250103602 }, { "content": " match *flag {\n\n FlagValue::NoFlag => (),\n\n FlagValue::Draft => res.push(Flag::Draft),\n\n FlagValue::Flagged => res.push(Flag::Flagged),\n\n FlagValue::Replied => res.push(Flag::Answered),\n\n FlagValue::Seen => res.push(Flag::Seen),\n\n 
FlagValue::Trashed => res.push(Flag::Deleted),\n\n }\n\n }\n\n if !res.is_empty() {\n\n Some(res)\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n\npub struct SyncFlagsDiff {\n\n pub add: SyncFlags,\n\n pub sub: SyncFlags,\n", "file_path": "src/cache/syncflags.rs", "rank": 93, "score": 8.069818746329565 }, { "content": " if mailentry.id() == id {\n\n return Ok(true);\n\n }\n\n }\n\n Ok(false)\n\n }\n\n\n\n /// Fetch the Maildir meta for the given message ID.\n\n pub fn get_id(&self, id: &str) -> Result<IdResult, String> {\n\n if let Some(entry) = self.maildir.find(id) {\n\n let meta = entry.path().metadata().map_err(|e| e.to_string())?;\n\n\n\n let size = meta.len();\n\n /*\n\n let modified_millis = meta\n\n .modified()\n\n .map_err(|e| e.to_string())?\n\n .duration_since(SystemTime::UNIX_EPOCH)\n\n .map_err(|e| e.to_string())?\n\n .as_millis();\n", "file_path": "src/maildirw.rs", "rank": 94, "score": 8.04563349728866 }, { "content": " */\n\n\n\n Ok(IdResult {\n\n //id: entry.id().to_string(),\n\n flags: entry.flags().to_string(),\n\n size,\n\n //modified_millis,\n\n path: entry.path().clone(),\n\n })\n\n } else {\n\n Err(format!(\"Not found: {}\", id))\n\n }\n\n }\n\n}\n", "file_path": "src/maildirw.rs", "rank": 95, "score": 7.645042013691542 }, { "content": " .internal_date()\n\n .expect(\"No INTERNALDATE in FETCH response\");\n\n\n\n let meta = MessageMeta::new(\n\n id,\n\n size,\n\n SyncFlags::from(flags),\n\n uid,\n\n internal_date.timestamp_millis(),\n\n );\n\n\n\n self.db.add(&meta).and_then(|_| {\n\n // We only remember the last seen uid after we have saved it\n\n if uid > self.state.last_seen_uid() {\n\n self.state.set_last_seen_uid(uid).map(|_| meta)\n\n } else {\n\n Ok(meta)\n\n }\n\n })\n\n }\n", "file_path": "src/cache/mod.rs", "rank": 96, "score": 7.643375001330425 }, { "content": " /// Log a message to the console\n\n fn log(&self, msg: &str) {\n\n println!(\n\n \"{} {}/{}: {}\",\n\n Local::now().format(\"%Y-%m-%d %H:%M:%S\"),\n\n self.config.account,\n\n self.mailbox,\n\n msg\n\n );\n\n }\n\n\n\n /// Log an error message to the console\n\n fn elog(&self, msg: &str) {\n\n eprintln!(\n\n \"{} {}/{}: {}\",\n\n Local::now().format(\"%Y-%m-%d %H:%M:%S\"),\n\n self.config.account,\n\n self.mailbox,\n\n msg\n\n );\n", "file_path": "src/syncdir.rs", "rank": 97, "score": 7.6348825377221585 }, { "content": " // mail clients typically treat the messages as immutable also\n\n // we choose here to ignore when the on disk size and the cache\n\n // size from the fatch response are different, in order to avoid\n\n // looping files over and over again.\n\n /*\n\n imap.replace_uid(\n\n cache_v.uid(),\n\n &fs::read(mail_v.path()).map_err(|e| e.to_string())?,\n\n )?;\n\n self.maildir.delete_message(&id)?;\n\n self.cache.delete_uid(cache_v.uid())?;\n\n */\n\n refetch.remove(&cache_v.uid());\n\n }\n\n }\n\n\n\n // new contains maildir entries that are on the file system\n\n // but not in the cache. 
These need to be sent to the server.\n\n for id in new {\n\n let mail_v = self.maildir.get_id(&id)?;\n", "file_path": "src/syncdir.rs", "rank": 98, "score": 7.439407123998099 }, { "content": " id TEXT\n\n )\",\n\n params![],\n\n )\n\n .map(|_| ())\n\n .map_err(|e| format!(\"CREATE TABLE: {}\", e))\n\n }\n\n\n\n pub fn from_file(path: &Path) -> Result<Db, String> {\n\n if !path.exists() {\n\n Db::init_db(path)?;\n\n }\n\n Ok(Db {\n\n dbpath: path.to_path_buf(),\n\n })\n\n }\n\n\n\n pub fn add(&self, meta: &MessageMeta) -> Result<(), String> {\n\n Connection::open(&self.dbpath)\n\n .and_then(|conn| {\n", "file_path": "src/cache/db.rs", "rank": 99, "score": 7.085615849129409 } ]
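The syncdir.rs snippet near the top of the context items above documents a sync loop that logs any error and retries after a short delay, only exiting once a pass completes cleanly. A minimal self-contained sketch of that retry pattern, assuming a hypothetical do_sync stand-in that fails twice before succeeding (this is not the repository's implementation, just an illustration of the documented behavior):

use std::thread::sleep;
use std::time::Duration;

// Hypothetical stand-in for the real do_sync: fails twice, then succeeds,
// so the retry path below is actually exercised.
fn do_sync(attempt: &mut u32) -> Result<(), String> {
    *attempt += 1;
    if *attempt < 3 {
        Err(format!("transient failure on attempt {}", *attempt))
    } else {
        Ok(())
    }
}

// Retry loop mirroring the pattern in the snippet above: log the error,
// sleep briefly to throttle retries, and break out only on a clean pass.
fn sync() -> Result<(), String> {
    let mut attempt = 0;
    loop {
        match do_sync(&mut attempt) {
            Err(why) => {
                eprintln!("Sync exited with error: {}", why);
                sleep(Duration::from_secs(1)); // the snippet above throttles with 10s
            }
            Ok(()) => break Ok(()),
        }
    }
}

fn main() {
    sync().expect("do_sync eventually succeeds in this sketch");
    println!("sync finished after retries");
}

Keeping the sleep in the outer loop rather than inside do_sync keeps the throttling policy in one place, which matches how the original snippet structures it.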
Rust
src/drivers/keyboard.rs
arbel03/os
ba061f795cc6e492dd752344e43d4d8e4896d5f3
use drivers::utils::inb; #[derive(Copy, Clone, PartialEq, Eq)] pub enum ScanCodeType { Digit(u8), Character(char), Shift, Backspace, Enter, Space, Quote, } pub struct ScanCode { pub released: bool, pub scan_code_type: ScanCodeType, } #[derive(PartialEq)] pub enum ScanCodeError { BackspaceScancode, InvalidScancode, } impl ScanCode { pub fn new(scan_code_type: ScanCodeType) -> Self { ScanCode { released: false, scan_code_type: scan_code_type, } } pub fn released(&self) -> Self { ScanCode { released: true, scan_code_type: self.scan_code_type.clone(), } } pub fn get_char(&self) -> Result<char, ScanCodeError> { let c = match self.scan_code_type { ScanCodeType::Digit(digit) => ('0' as u8 + digit) as char, ScanCodeType::Character(character) => { let character = character.to_string(); let character = if unsafe { IS_UPPERCASE } { character.to_uppercase() } else { character }; character.as_bytes()[0] as char }, ScanCodeType::Enter => '\n', ScanCodeType::Quote => if unsafe { IS_UPPERCASE } { '\"' } else { '\'' }, ScanCodeType::Space => ' ', ScanCodeType::Backspace => return Err(ScanCodeError::BackspaceScancode), _ => return Err(ScanCodeError::InvalidScancode), }; Ok(c) } } use core::fmt; use core::fmt::Write; use alloc::string::ToString; impl fmt::Display for ScanCode { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if let Ok(ch) = self.get_char() { f.write_char(ch as char) } else { Ok(()) } } } pub struct RawScanCode(u8); impl RawScanCode { pub fn get_scancode(&self) -> Option<ScanCode> { let scancode = match self.0 { 0x02 ... 0x0A => ScanCode::new(ScanCodeType::Digit(self.0 - 0x01)), 0x0B => ScanCode::new(ScanCodeType::Digit(0)), 0x0E => ScanCode::new(ScanCodeType::Backspace), 0x10 => ScanCode::new(ScanCodeType::Character('q')), 0x11 => ScanCode::new(ScanCodeType::Character('w')), 0x12 => ScanCode::new(ScanCodeType::Character('e')), 0x13 => ScanCode::new(ScanCodeType::Character('r')), 0x14 => ScanCode::new(ScanCodeType::Character('t')), 0x15 => ScanCode::new(ScanCodeType::Character('y')), 0x16 => ScanCode::new(ScanCodeType::Character('u')), 0x17 => ScanCode::new(ScanCodeType::Character('i')), 0x18 => ScanCode::new(ScanCodeType::Character('o')), 0x19 => ScanCode::new(ScanCodeType::Character('p')), 0x1E => ScanCode::new(ScanCodeType::Character('a')), 0x1F => ScanCode::new(ScanCodeType::Character('s')), 0x20 => ScanCode::new(ScanCodeType::Character('d')), 0x21 => ScanCode::new(ScanCodeType::Character('f')), 0x22 => ScanCode::new(ScanCodeType::Character('g')), 0x23 => ScanCode::new(ScanCodeType::Character('h')), 0x24 => ScanCode::new(ScanCodeType::Character('j')), 0x25 => ScanCode::new(ScanCodeType::Character('k')), 0x26 => ScanCode::new(ScanCodeType::Character('l')), 0x28 => ScanCode::new(ScanCodeType::Quote), 0x2A => ScanCode::new(ScanCodeType::Shift), 0x2B => ScanCode::new(ScanCodeType::Character('\\')), 0x2C => ScanCode::new(ScanCodeType::Character('z')), 0x2D => ScanCode::new(ScanCodeType::Character('x')), 0x2E => ScanCode::new(ScanCodeType::Character('c')), 0x2F => ScanCode::new(ScanCodeType::Character('v')), 0x30 => ScanCode::new(ScanCodeType::Character('b')), 0x31 => ScanCode::new(ScanCodeType::Character('n')), 0x32 => ScanCode::new(ScanCodeType::Character('m')), 0x33 => ScanCode::new(ScanCodeType::Character(',')), 0x34 => ScanCode::new(ScanCodeType::Character('.')), 0x35 => ScanCode::new(ScanCodeType::Character('/')), 0x0C => ScanCode::new(ScanCodeType::Character('-')), 0x36 => ScanCode::new(ScanCodeType::Shift), 0xAA => ScanCode::new(ScanCodeType::Shift).released(), 0xB6 => 
ScanCode::new(ScanCodeType::Shift).released(), 0x1C => ScanCode::new(ScanCodeType::Enter), 0x39 => ScanCode::new(ScanCodeType::Space), _ => return None, }; Some(scancode) } } static mut IS_UPPERCASE: bool = false; pub fn set_uppercased(is_uppercased: bool) { unsafe { IS_UPPERCASE = is_uppercased; } } pub fn get_scancode() -> Option<ScanCode> { let scancode_value = read_scancode_value(); let raw_scancode = RawScanCode(scancode_value); raw_scancode.get_scancode() } pub fn read_scancode_value() -> u8 { unsafe { while inb(0x64) & 1 != 1 {} inb(0x60) } } pub fn getc() -> usize { loop { if let Some(c) = get_scancode() { use drivers::keyboard::ScanCodeType; if ScanCodeType::Shift == c.scan_code_type { set_uppercased(!c.released); } else { match c.get_char() { Ok(character) => return character as usize, Err(scan_code_error) => { if scan_code_error == ScanCodeError::BackspaceScancode { return 0xffffffff; } } } } } } }
use drivers::utils::inb; #[derive(Copy, Clone, PartialEq, Eq)] pub enum ScanCodeType { Digit(u8), Character(char), Shift, Backspace, Enter, Space, Quote, } pub struct ScanCode { pub released: bool, pub scan_code_type: ScanCodeType, } #[derive(PartialEq)] pub enum ScanCodeError { BackspaceScancode, InvalidScancode, } impl ScanCode { pub fn new(scan_code_type: ScanCodeType) -> Self { ScanCode { released: false, scan_code_type: scan_code_type, } } pub fn released(&self) -> Self { ScanCode { released: true, scan_code_type: self.scan_code_type.clone(), } } pub fn get_char(&self) -> Result<char, ScanCodeError> { let c = match self.scan_code_type { ScanCodeType::Digit(digit) => ('0' as u8 + digit) as char, ScanCodeType::Character(character) => { let character = character.to_string(); let character = if unsafe { IS_UPPERCASE } { character.to_uppercase() } else { character }; character.as_bytes()[0] as char }, ScanCodeType::Enter => '\n', ScanCodeType::Quote => if unsafe { IS_UPPERCASE } { '\"' } else { '\'' }, ScanCodeType::Space => ' ', ScanCodeType::Backspace => return Err(ScanCodeError::BackspaceScancode), _ => return Err(ScanCodeError::InvalidScancode), }; Ok(c) } } use core::fmt; use core::fmt::Write; use alloc::string::ToString; impl fmt::Display for ScanCode { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if let Ok(ch) = self.get_char() { f.write_char(ch as char) } else { Ok(()) } } } pub struct RawScanCode(u8); impl RawScanCode { pub fn get_scancode(&self) -> Option<ScanCode> { let scancode = match self.0 { 0x02 ... 0x0A => ScanCode::new(ScanCodeType::Digit(self.0 - 0x01)), 0x0B => ScanCode::new(ScanCodeType::Digit(0)), 0x0E => ScanCode::new(ScanCodeType::Backspace), 0x10 => ScanCode::new(ScanCodeType::Character('q')), 0x11 => ScanCode::new(ScanCodeType::Character('w')), 0x12 => ScanCode::new(ScanCodeType::Character('e')), 0x13 => ScanCode::new(ScanCodeType::Character('r')), 0x14 => ScanCode::new(ScanCodeType::Character('t')), 0x15 => ScanCode::new(ScanCodeType::Character('y')), 0x16 => ScanCode::new(ScanCodeType::Character('u')), 0x17 => ScanCode::new(ScanCodeType::Character('i')), 0x18 => ScanCode::new(ScanCodeType::Character('o')), 0x19 => ScanCode::new(ScanCodeType::Character('p')), 0x1E => ScanCode::new(ScanCodeType::Character('a')), 0x1F => ScanCode::new(ScanCodeType::Character('s')), 0x20 => ScanCode::new(ScanCodeType::Character('d')), 0x21 => ScanCode::new(ScanCodeType::Character('f')), 0x22 => ScanCode::new(ScanCodeType::Character('g')), 0x23 => ScanCode::new(ScanCodeType::Character('h')), 0x24 => ScanCode::new(ScanCodeType::Character('j')), 0x25 => ScanCode::new(ScanCodeType::Character('k')), 0x26 => ScanCode::new(ScanCodeType::Character('l')), 0x28 => ScanCode::new(ScanCodeType::Quote), 0x2A => ScanCode::new(ScanCodeType::Shift), 0x2B => ScanCode::new(ScanCodeType::Character('\\')), 0x2C => ScanCode::new(ScanCodeType::Character('z')), 0x2D => ScanCode::new(ScanCodeType::Character('x')), 0x2E => ScanCode::new(ScanCodeType::Character('c')), 0x2F => ScanCode::new(ScanCodeType::Character('v')), 0x30 => ScanCode::new(ScanCodeType::Character('b')), 0x31 => ScanCode::new(ScanCodeType::Character('n')), 0x32 => ScanCode::new(ScanCodeType::Character('m')), 0x33 => ScanCode::new(ScanCodeType::Character(',')), 0x34 => ScanCode::new(ScanCodeType::Character('.')), 0x35 => ScanCode::new(ScanCodeType::Character('/')), 0x0C => ScanCode::new(ScanCodeType::Character('-')), 0x36 => ScanCode::new(ScanCodeType::Shift), 0xAA => ScanCode::new(ScanCodeType::Shift).released(), 0xB6 => 
ScanCode::new(ScanCodeType::Shift).released(), 0x1C => ScanCode::new(ScanCodeType::Enter), 0x39 => ScanCode::new(ScanCodeType::Space), _ => return None, }; Some(scancode) } } static mut IS_UPPERCASE: bool = false; pub fn set_uppercased(is_uppercased: bool) { unsafe { IS_UPPERCASE = is_uppercased; } } pub fn get_scancode() -> Option<ScanCode> { let scancode_value = read_scancode_value(); let raw_scancode = RawScanCode(scancode_value); raw_scancode.get_scancode() } pub fn read_scancode_value() -> u8 { unsafe { while inb(0x64) & 1 != 1 {} inb(0x60) } } pub fn getc() -> usize { loop { if let Some(c) = get_scancode() { use drivers::keyboard::ScanCodeType; if ScanCodeType::Shift == c.scan_code_type { set_uppercased(!c.released); } else {
} } } }
match c.get_char() { Ok(character) => return character as usize, Err(scan_code_error) => { if scan_code_error == ScanCodeError::BackspaceScancode { return 0xffffffff; } } }
if_condition
[ { "content": "pub fn proc_info(info_struct_ptr: *mut u8, proc_number: usize) -> usize {\n\n #[repr(packed)]\n\n #[derive(Debug)]\n\n pub struct ProcInfo {\n\n pub process_index: u32,\n\n pub process_name_length: u32,\n\n pub process_base: u32,\n\n pub process_total_size: u32,\n\n pub arguments_count: u32,\n\n pub process_stack_size: u32,\n\n }\n\n\n\n if let Some(process) = ::task::get_process_at_index(proc_number) {\n\n let load_request = process.get_load_information().get_load_request();\n\n let proc_info = ProcInfo {\n\n process_index: proc_number as u32,\n\n process_name_length: process.executable_file.get_process_name().len() as u32,\n\n process_base: process.get_load_information().process_base as u32,\n\n process_total_size: load_request.get_total_process_size() as u32,\n\n arguments_count: load_request.arguments_count as u32,\n", "file_path": "src/syscall/task.rs", "rank": 0, "score": 185992.5803125521 }, { "content": "pub fn read(fd: usize, buffer: &mut [u8]) -> usize {\n\n use syscalls::syscall::syscall3;\n\n // SYSCALL(FS_READ, fd, ptr, size)\n\n unsafe {\n\n syscall3(0x3, fd, buffer.as_ptr() as usize, buffer.len())\n\n }\n\n}\n\n\n", "file_path": "filesystem/std/src/syscalls/fs.rs", "rank": 1, "score": 171690.89368011386 }, { "content": "pub fn getc() -> usize {\n\n use syscalls::syscall::syscall0;\n\n // SYSCALL(IO_GETC)\n\n unsafe {\n\n syscall0(0x5)\n\n }\n\n}\n\n\n", "file_path": "filesystem/std/src/syscalls/io.rs", "rank": 3, "score": 153194.37341906416 }, { "content": "pub fn read_name(parent_directory: &str, read_buffer: &mut [u8], child_node: usize) {\n\n // SYSCALL(FS_DIR_NAME, ptr, size, read_buffer_ptr, read_buffer_size, child_node)\n\n use syscalls::syscall::syscall5;\n\n\n\n unsafe {\n\n syscall5(0x08, parent_directory.as_ptr() as usize, parent_directory.len(), read_buffer.as_ptr() as usize, read_buffer.len(), child_node);\n\n }\n\n}", "file_path": "filesystem/std/src/syscalls/fs.rs", "rank": 5, "score": 145183.44935840598 }, { "content": "#[no_mangle]\n\npub fn main(argc: usize, argv: *const *const u8) {\n\n let args = &unsafe { std::args::get_args(argc, argv) };\n\n if args.len() == 1 {\n\n println!(\"Usage:\\n\\t{0} -r\\n\\t\\tPrints the filesystem recursively.\\n\\t{0} FOLDER_NAME\\n\\t\\tPrints the folder contents.\", args[0]);\n\n return;\n\n }\n\n let second_param = args[1].trim().to_string().to_lowercase();\n\n if second_param == \"-r\" {\n\n println!(\"Printing filesystem recursively.\");\n\n recursive_ls(\".\", 0);\n\n } else {\n\n let stat = std::syscalls::stat(&second_param, 0);\n\n for i in 0..stat.child_nodes_count as usize {\n\n println!(\"{}\", read_name(&second_param, i).1);\n\n }\n\n }\n\n}\n\n\n", "file_path": "filesystem/ls.rs", "rank": 6, "score": 132723.5585615057 }, { "content": "#[no_mangle]\n\npub fn main(argc: usize, argv: *const *const u8) {\n\n let args = &unsafe { std::args::get_args(argc, argv) };\n\n loop {\n\n print!(\"{} $ \", args[0]);\n\n use alloc::string::ToString;\n\n let input = io::read_string();\n\n let command: Vec<&str> = input.trim().split(' ').collect();\n\n if command.len() > 0 {\n\n match command[0] {\n\n \"echo\" => {\n\n let command = input.trim();\n\n if command.len() >= 5 {\n\n println!(\"{}\", &command[5..]);\n\n }\n\n },\n\n \"help\" => {\n\n println!(\"Available commands:\\n\\t-echo\\n\\t-help\\n\\t-printheap\\n\\t-bin/ls\\n\\t-bin/cat\\n\\t-bin/stat\\n\\t-bin/ps\");\n\n },\n\n \"printheap\" => {\n\n println!(\"Enter heap read start and read size.\");\n", "file_path": "filesystem/shell.rs", "rank": 7, "score": 
132723.5585615057 }, { "content": "#[no_mangle]\n\npub fn main(argc: usize, argv: *const *const u8) {\n\n use core::str;\n\n\n\n let args = &unsafe { std::args::get_args(argc, argv) };\n\n if argc != 2 {\n\n let file_name = args[0];\n\n println!(\"Usage:\\n\\t{} <file_name>\", file_name);\n\n return;\n\n }\n\n\n\n let file_name = args[1];\n\n let fd = std::syscalls::open(file_name);\n\n if fd == 0xffffffff {\n\n println!(\"Error opening file \\\"{}\\\"\", file_name);\n\n } else if fd == 0xfffffffe {\n\n println!(\"Invalid argument - \\\"{}\\\" if a folder.\", file_name);\n\n } else {\n\n println!(\"Printing contents of \\\"{}\\\":\", file_name);\n\n let file_stat = std::syscalls::stat(file_name, 0);\n\n let mut vector = vec![0u8;file_stat.directory_size as usize];\n\n std::syscalls::read(fd, &mut vector);\n\n println!(\"{}\", unsafe { str::from_utf8_unchecked(&vector) });\n\n }\n\n}", "file_path": "filesystem/cat.rs", "rank": 8, "score": 132723.5585615057 }, { "content": "// PIC end of interrupt function\n\npub fn send_eoi(slave_irq: bool) {\n\n Pic::MASTER.send_eoi(); // send to master- always required\n\n if slave_irq {\n\n\t\tPic::SLAVE.send_eoi(); // send to slave\n\n }\n\n}", "file_path": "src/drivers/mod.rs", "rank": 10, "score": 121482.21846342646 }, { "content": "pub fn get_current_process<'a>() -> &'a mut Process {\n\n unsafe {\n\n PROCESS_LIST.as_mut().unwrap().last_mut().unwrap()\n\n }\n\n}\n\n\n", "file_path": "src/task/mod.rs", "rank": 11, "score": 115739.86849502547 }, { "content": "pub fn get_parent_process<'a>() -> Option<&'a mut Process> {\n\n let list = unsafe { PROCESS_LIST.as_mut().unwrap() };\n\n let n = list.len();\n\n if n >= 2 {\n\n Some(&mut list[n-2])\n\n } else {\n\n None\n\n }\n\n}\n\n\n\npub unsafe fn execv(file_name: &str, args: &[&str]) -> usize {\n\n set_old_process_state_wrapper();\n\n\n\n let process = match loader::create_process(file_name) {\n\n Ok(process) => process,\n\n Err(load_error) => match load_error {\n\n CreationError::ExecutableNotFound => return 0xffffffff,\n\n CreationError::InvalidElfHeader => return 0xfffffffe,\n\n }\n\n };\n", "file_path": "src/task/mod.rs", "rank": 12, "score": 110954.6749865464 }, { "content": "/// Align downwards. Returns the greatest x with alignment `align`\n\n/// so that x <= addr. The alignment must be a power of 2.\n\npub fn align_down(addr: usize, align: usize) -> usize {\n\n if align.is_power_of_two() {\n\n addr & !(align - 1)\n\n } else if align == 0 {\n\n addr\n\n } else {\n\n panic!(\"`align` must be a power of 2\");\n\n }\n\n}\n\n\n", "file_path": "bitmap_allocator/src/lib.rs", "rank": 13, "score": 104648.8258469039 }, { "content": "/// Align upwards. Returns the smallest x with alignment `align`\n\n/// so that x >= addr. 
The alignment must be a power of 2.\n\npub fn align_up(addr: usize, align: usize) -> usize {\n\n align_down(addr + align - 1, align)\n\n}", "file_path": "bitmap_allocator/src/lib.rs", "rank": 14, "score": 104648.8258469039 }, { "content": "#[derive(Clone, Copy)]\n\nstruct ColorCode(u8);\n\n\n\nimpl ColorCode {\n\n const fn new(foreground: Color, background: Color) -> ColorCode {\n\n ColorCode((background as u8) << 4 | (foreground as u8))\n\n }\n\n}\n\n\n", "file_path": "src/vga_buffer.rs", "rank": 15, "score": 104596.96543108983 }, { "content": "pub fn proc_memory_size() -> usize {\n\n use task::PROCESS_ALLOCATOR;\n\n unsafe {\n\n PROCESS_ALLOCATOR.get_block_size() * PROCESS_ALLOCATOR.get_block_count()\n\n }\n\n}", "file_path": "src/syscall/task.rs", "rank": 16, "score": 104164.81920612484 }, { "content": "pub fn input_number(prompt: &str) -> usize {\n\n loop {\n\n print!(\"{}: \", prompt);\n\n if let Ok(number) = io::read_string().parse() {\n\n return number;\n\n } else {\n\n println!(\"Please enter a valid number.\");\n\n }\n\n }\n\n}\n\n\n\npub unsafe fn print_heap(start: usize, size: usize) {\n\n use std::HEAP_SIZE;\n\n use std::HEAP_AREA;\n\n // use bitmap_allocator::CellState;\n\n // let allocator = &HEAP;\n\n // println!(\"Printing bitmap:\");\n\n // let bitmap_size = allocator.get_block_count();\n\n // for index in 0..bitmap_size {\n\n // let block = allocator.get_cell(index).clone();\n", "file_path": "filesystem/shell.rs", "rank": 17, "score": 100857.78219950461 }, { "content": "pub fn to_str<'a>(ptr: usize, size: usize) -> &'a str {\n\n unsafe {\n\n let slice = slice::from_raw_parts(ptr as *const u8, size);\n\n return str::from_utf8_unchecked(slice);\n\n }\n\n}\n\n\n\npub unsafe fn terminated_string<'a>(start: *const u8) -> &'a str {\n\n use core::{ str, slice, ptr };\n\n\n\n let mut length: isize = 0;\n\n loop {\n\n let current = start.offset(length);\n\n if ptr::read(current) == 0u8 {\n\n break;\n\n }\n\n length += 1;\n\n }\n\n return str::from_utf8_unchecked(slice::from_raw_parts(start, length as usize));\n\n}\n", "file_path": "src/syscall/mod.rs", "rank": 19, "score": 99975.58674695705 }, { "content": "pub fn recursive_ls(path: &str, level: usize) {\n\n let current_status = std::syscalls::stat(path, 0);\n\n for child in 0..current_status.child_nodes_count as usize {\n\n let (stat, name) = read_name(path, child);\n\n println!(\"{}{}\", \"\\t\".repeat(level) ,name);\n\n if stat.is_folder && name != \".\" && name != \"..\" {\n\n let mut current_path = if path == \".\" {\n\n String::new()\n\n } else {\n\n let mut new_path = path.to_string();\n\n new_path.push_str(\"/\");\n\n new_path\n\n };\n\n current_path.push_str(&name);\n\n recursive_ls(&current_path, level + 1);\n\n }\n\n }\n\n}\n\n\n", "file_path": "filesystem/ls.rs", "rank": 20, "score": 96732.80224856202 }, { "content": "pub fn get_proccess_area_size() -> usize {\n\n // SYSCALL(PROC_SIZE)\n\n use syscalls::syscall::syscall0;\n\n unsafe {\n\n syscall0(0x10)\n\n }\n\n}", "file_path": "filesystem/std/src/syscalls/task.rs", "rank": 21, "score": 96475.45191159127 }, { "content": "pub fn printf(string: &str) -> usize {\n\n use syscalls::syscall::syscall2;\n\n // SYSCALL(SYS_FOPEN, ptr, size)\n\n unsafe {\n\n syscall2(0x2, string.as_ptr() as usize, string.len())\n\n }\n\n}\n\n\n", "file_path": "filesystem/std/src/syscalls/io.rs", "rank": 22, "score": 95553.0223605988 }, { "content": "pub fn open(file_path: &str) -> usize {\n\n use syscalls::syscall::syscall2;\n\n // SYSCALL(FS_OPEN, ptr, size)\n\n unsafe {\n\n syscall2(0x1, 
file_path.as_ptr() as usize, file_path.len())\n\n }\n\n}\n\n\n", "file_path": "filesystem/std/src/syscalls/fs.rs", "rank": 23, "score": 93168.41490497104 }, { "content": "pub fn execv(file_name: &str, args: &[&str]) -> usize {\n\n unsafe {\n\n ::task::execv(file_name, args)\n\n }\n\n}\n\n\n", "file_path": "src/syscall/task.rs", "rank": 24, "score": 90560.06182730185 }, { "content": "pub fn get_process_at_index<'a>(index: usize) -> Option<&'a Process> {\n\n unsafe {\n\n let processes = PROCESS_LIST.as_ref().unwrap();\n\n if index >= processes.len() {\n\n None\n\n } else {\n\n Some(&processes[index])\n\n }\n\n } \n\n}\n\n\n", "file_path": "src/task/mod.rs", "rank": 25, "score": 89452.16981127503 }, { "content": "pub fn init() {\n\n unsafe {\n\n FILESYSTEM = Some(ManagedFilesystem::new(fat32::Fat32::new(&Ata::PRIMARY), &Ata::PRIMARY));\n\n };\n\n}", "file_path": "src/filesystem/mod.rs", "rank": 26, "score": 86791.56701789767 }, { "content": "pub fn init() {\n\n unsafe {\n\n // Exceptions\n\n IDT.exceptions.double_fault = define_interrupt_with_error_code!(double_fault, 0);\n\n IDT.exceptions.general_protection_fault = define_interrupt_with_error_code!(general_protection_fault, 0);\n\n IDT.exceptions.breakpoint = define_interrupt!(breakpoint_exception, 0);\n\n\n\n // Setup syscalls\n\n syscall::init();\n\n IDT.interrupts[0x42] = define_interrupt!(process_unwind_handler, 3);\n\n IDT.load();\n\n }\n\n}\n\n\n\n#[naked]\n\npub extern \"C\" fn process_unwind_handler(_stack_frame: &idt::ExceptionStackFrame) {\n\n unsafe {\n\n ::task::unwind_process();\n\n ::core::intrinsics::unreachable();\n\n }\n\n}\n\n\n", "file_path": "src/interrupts/mod.rs", "rank": 27, "score": 86791.56701789767 }, { "content": "pub fn configure() {\n\n // Initializing master PIC as master\n\n Pic::MASTER.init(0x20, true);\n\n Pic::SLAVE.init(0x28, false);\n\n\n\n Pic::MASTER.disable_irq(0); // Disable timer for now\n\n Pic::MASTER.disable_irq(1); // Keyboard\n\n Pic::MASTER.disable_irq(2); // Slave PIC\n\n}\n\n\n", "file_path": "src/drivers/mod.rs", "rank": 28, "score": 86791.56701789767 }, { "content": "pub fn disable() {\n\n unsafe {\n\n asm!(\"cli\");\n\n }\n\n}", "file_path": "src/interrupts/mod.rs", "rank": 29, "score": 86791.56701789767 }, { "content": "pub fn enable() {\n\n unsafe {\n\n asm!(\"sti\");\n\n }\n\n}\n\n\n", "file_path": "src/interrupts/mod.rs", "rank": 30, "score": 86791.56701789767 }, { "content": "pub fn execv(path_name: &str, args: &[&str]) -> usize {\n\n // SYSCALL(PROC_EXECV, path_ptr, path_len, args_ptr, args_len)\n\n use syscalls::syscall::syscall4;\n\n use alloc::{ Vec, String };\n\n let mut arguments: Vec<String> = Vec::new();\n\n let mut ptr_list: Vec<*const u8> = Vec::with_capacity(args.len());\n\n for arg in args {\n\n use alloc::string::ToString;\n\n let mut string_arg = arg.to_string();\n\n string_arg.push('\\x00');\n\n arguments.push(string_arg);\n\n ptr_list.push(arguments[arguments.len()-1].as_ptr() as *const u8);\n\n }\n\n\n\n unsafe {\n\n syscall4(0x07, path_name.as_ptr() as usize, path_name.len(), ptr_list.as_ptr() as usize, args.len())\n\n }\n\n}\n\n\n", "file_path": "filesystem/std/src/syscalls/task.rs", "rank": 31, "score": 86241.96377997586 }, { "content": "pub fn proc_info(proc_index: usize) -> Option<ProcInfo> {\n\n // SYSCALL(PROC_INFO, proc_info_ptr, proc_index)\n\n use syscalls::syscall::syscall2;\n\n\n\n let proc_info = ProcInfo::default();\n\n\n\n let result = unsafe { syscall2(0x09, &proc_info as *const ProcInfo as usize, proc_index) };\n\n if result == 0xffffffff {\n\n 
None\n\n } else {\n\n Some(proc_info)\n\n }\n\n}\n\n\n", "file_path": "filesystem/std/src/syscalls/task.rs", "rank": 32, "score": 85512.34686436312 }, { "content": "#[allow(dead_code)]\n\n#[derive(Clone, Copy)]\n\nstruct ScreenChar {\n\n ascii_character: u8,\n\n color_code: ColorCode,\n\n}\n\n\n\npub const BUFFER_HEIGHT: usize = 25;\n\npub const BUFFER_WIDTH: usize = 80;\n\n\n", "file_path": "src/vga_buffer.rs", "rank": 33, "score": 85292.20186608986 }, { "content": "pub fn read_name(parent_directory: &str, child_node: usize) -> (Stat, String) {\n\n let child_status = std::syscalls::stat(parent_directory, child_node+1);\n\n let mut name = vec![0u8;child_status.directory_name_length as usize];\n\n unsafe {\n\n std::syscalls::read_name(parent_directory, &mut name, child_node);\n\n let string = ::core::str::from_utf8_unchecked(&name);\n\n (child_status, string.to_string())\n\n }\n\n}", "file_path": "filesystem/ls.rs", "rank": 34, "score": 85134.07176394903 }, { "content": "pub fn clear_screen() {\n\n unsafe {\n\n for row in 0..BUFFER_HEIGHT {\n\n WRITER.clear_row(row);\n\n }\n\n WRITER.column_position = 0;\n\n WRITER.row_position = 0;\n\n }\n\n}", "file_path": "src/vga_buffer.rs", "rank": 35, "score": 84494.15003764498 }, { "content": "pub fn stat(parent_directory: &str, child_node: usize) -> Stat {\n\n // SYSCALL(FS_STAT, ptr, size, stat_structure_ptr, child_node)\n\n use syscalls::syscall::syscall4;\n\n\n\n let mut stat = Stat {\n\n directory_name_length: 0,\n\n directory_size: 0,\n\n is_folder: false,\n\n child_nodes_count: 0,\n\n };\n\n\n\n unsafe {\n\n syscall4(0x04, parent_directory.as_ptr() as usize, parent_directory.len(), &stat as *const Stat as usize, child_node);\n\n }\n\n stat\n\n}\n\n\n", "file_path": "filesystem/std/src/syscalls/fs.rs", "rank": 36, "score": 84280.89547810536 }, { "content": "pub fn delc() {\n\n use syscalls::syscall::syscall0;\n\n // SYSCALL(IO_DELC)\n\n unsafe {\n\n syscall0(0x6);\n\n }\n\n}", "file_path": "filesystem/std/src/syscalls/io.rs", "rank": 37, "score": 82375.40878989396 }, { "content": "pub fn pop_process() -> Process {\n\n unsafe {\n\n PROCESS_LIST.as_mut().unwrap().pop().unwrap()\n\n }\n\n}\n\n\n", "file_path": "src/task/mod.rs", "rank": 38, "score": 80365.85976600312 }, { "content": "pub fn get_free_areas(current_memory_map: &[MemoryArea; 10], occupied_area_iter: &mut ::core::slice::Iter<MemoryArea>) -> [MemoryArea; 10] {\n\n if let Some(current_occupied_area) = occupied_area_iter.next() {\n\n let mut new_memory_map = [MemoryArea::EMPTY; 10];\n\n let mut insertion_index = 0;\n\n for area in current_memory_map {\n\n let result = MemoryArea::from(area.clone()).subtract(current_occupied_area);\n\n if let Some(before) = result.0 {\n\n new_memory_map[insertion_index] = before;\n\n insertion_index += 1;\n\n }\n\n\n\n if let Some(after) = result.1 {\n\n new_memory_map[insertion_index] = after;\n\n insertion_index += 1;\n\n }\n\n\n\n if result.0.is_none() && result.1.is_none() {\n\n new_memory_map[insertion_index] = area.clone();\n\n insertion_index += 1;\n\n }\n\n // println!(\"{:#x} -> {:#x}\", (area.base) as u32, (area.base + area.size) as u32);\n\n }\n\n return get_free_areas(&new_memory_map, occupied_area_iter);\n\n } else {\n\n return current_memory_map.clone();\n\n }\n\n}\n\n\n\nuse alloc::Vec;\n", "file_path": "src/memory/mod.rs", "rank": 39, "score": 79197.75082323028 }, { "content": "pub fn read_string() -> String {\n\n use syscalls::{ getc,delc };\n\n use core::fmt::Write;\n\n\n\n let mut input_string = String::new();\n\n loop {\n\n let result 
= getc();\n\n let character = result as u8 as char;\n\n if character == '7' {\n\n continue;\n\n }\n\n if character == '\\n' {\n\n print!(\"\\n\");\n\n return input_string;\n\n } else if result == 0xffffffff {\n\n if input_string.len() != 0 {\n\n delc();\n\n input_string.pop();\n\n }\n\n } else {\n\n print!(\"{}\", character);\n\n input_string.write_char(character);\n\n }\n\n }\n\n return input_string;\n\n}", "file_path": "filesystem/std/src/io.rs", "rank": 40, "score": 78405.7303861521 }, { "content": "pub fn add_process(process: Process) {\n\n unsafe {\n\n PROCESS_LIST.as_mut().unwrap().push(process);\n\n }\n\n}\n\n\n", "file_path": "src/task/mod.rs", "rank": 41, "score": 76648.432075372 }, { "content": "pub fn init(free_memory_areas: Vec<MemoryArea>) {\n\n // Set up an allocator for the process area\n\n if free_memory_areas.len() > 0 {\n\n let process_area = free_memory_areas[0];\n\n println!(\"Allocating processes from {:#x} to {:#x}.\", process_area.base, process_area.base+process_area.size);\n\n unsafe {\n\n PROCESS_ALLOCATOR.set_bitmap_start(process_area.base);\n\n PROCESS_ALLOCATOR.set_block_size(process_area.size/100);\n\n PROCESS_ALLOCATOR.set_size(process_area.size);\n\n PROCESS_ALLOCATOR.init();\n\n PROCESS_LIST = Some(Vec::new());\n\n }\n\n } else {\n\n panic!(\"No space for process allocator.\");\n\n }\n\n}\n\n\n", "file_path": "src/task/mod.rs", "rank": 42, "score": 70009.8489696071 }, { "content": "pub fn init(bootloader_info: &BootloaderInfo) -> Vec<MemoryArea> {\n\n use HEAP;\n\n \n\n println!(\"Kernel loaded from {:#x} to {:#x}\", bootloader_info.kernel_start, bootloader_info.kernel_end);\n\n\n\n let kernel_area = MemoryArea::new(bootloader_info.kernel_start as usize, bootloader_info.kernel_end as usize - bootloader_info.kernel_start as usize);\n\n let memory_map = MemoryMap::new(bootloader_info);\n\n \n\n let mut free_memory_areas = [MemoryArea::EMPTY; 10];\n\n let mut current_index = 0;\n\n for entry in memory_map.memory_map.iter() {\n\n if entry.get_region_type() as u32 == MemoryAreaType::Free as u32 {\n\n free_memory_areas[current_index] = MemoryArea::from(entry.clone());\n\n current_index += 1;\n\n }\n\n }\n\n\n\n let occupied_areas = [kernel_area];\n\n let free_memory_areas = get_free_areas(&free_memory_areas, &mut occupied_areas.iter());\n\n\n", "file_path": "src/memory/mod.rs", "rank": 43, "score": 67157.56419900525 }, { "content": "fn run_exec(path_name: &str, args: &[&str]) -> usize {\n\n std::syscalls::execv(path_name, args)\n\n}\n\n\n", "file_path": "filesystem/shell.rs", "rank": 44, "score": 59070.57714594816 }, { "content": "#[allow(dead_code)]\n\n#[repr(u8)]\n\nenum Flags {\n\n Present = 0b10000000,\n\n DPL0 = 0b00000000,\n\n DPL1 = 0b00100000,\n\n DPL2 = 0b01000000,\n\n DPL3 = 0b01100000,\n\n Storage = 0b00010000,\n\n GateTask32 = 0x5,\n\n GateInterrupt16 = 0x6,\n\n GateTrap16 = 0x7,\n\n GateInterrupt32 = 0xE,\n\n GateTrap32 = 0xF,\n\n}\n\n\n\n#[repr(C, packed)]\n\n#[derive(Copy, Clone, Debug)]\n\npub struct IdtEntry {\n\n base_low: u16, // Lower address of ISR\n\n selector: u16,\n\n zero: u8,\n", "file_path": "src/interrupts/idt.rs", "rank": 45, "score": 57177.7776985463 }, { "content": "struct Buffer {\n\n chars: [[ScreenChar; BUFFER_WIDTH]; BUFFER_HEIGHT],\n\n}\n\n\n\npub struct Writer {\n\n column_position: usize,\n\n row_position: usize,\n\n color_code: ColorCode,\n\n}\n\n\n\nimpl Writer {\n\n pub fn write_byte(&mut self, byte: u8) {\n\n use core::fmt::Write;\n\n match byte {\n\n b'\\t' => {\n\n self.write_str(\" \");\n\n },\n\n b'\\n' => {\n\n 
self.new_line(); \n\n \n", "file_path": "src/vga_buffer.rs", "rank": 46, "score": 57002.102938829295 }, { "content": "#[derive(Copy, Clone)]\n\n#[repr(u16)]\n\n#[allow(dead_code)]\n\nenum RegisterType {\n\n ErrorInformation = 1,\n\n SectorCount = 2,\n\n LbaLow = 3,\n\n LbaMid = 4,\n\n LbaHigh = 5,\n\n Drive = 6,\n\n Command = 7,\n\n Status,\n\n}\n\n\n\nimpl Ata {\n\n pub const PRIMARY: Ata = Ata::new(PortRange::new(0x1F0, 0x1F7), 0x3F6);\n\n\n\n pub const fn new(control_ports: PortRange, status_port: u16) -> Self {\n\n Ata {\n\n control_ports: control_ports,\n\n status_port: status_port,\n\n }\n\n }\n", "file_path": "src/drivers/ata.rs", "rank": 47, "score": 55876.504082910455 }, { "content": "pub trait Gdt {\n\n fn init(&mut self);\n\n fn set_tss(&mut self, tss: &TaskStateSegment);\n\n fn set_ldt(&mut self, ldt: &SegmentDescriptorTable);\n\n fn get_selector(&self, segment_type: DescriptorType, privilege_level: usize) -> u16;\n\n fn set_descriptor(&mut self, segment_type: DescriptorType, descriptor: SegmentDescriptor);\n\n unsafe fn load(&self);\n\n}\n\n\n\n// Null\n\n// Code Segment- PL0 \n\n// Data Segment- PL0\n\n// TSS\n\n// LDT\n\n// Code Segment- PL3\n\n// Data Segment- PL3\n\nimpl Gdt for SegmentDescriptorTable {\n\n fn init(&mut self) {\n\n self.init_with_length(9);\n\n }\n", "file_path": "src/memory/gdt.rs", "rank": 48, "score": 43395.783508948836 }, { "content": "pub trait Filesystem {\n\n type EntryType: File;\n\n\n\n fn get_root_directory(&self) -> Self::EntryType;\n\n fn get_child_directories(&self, drive: &Disk, directory: &Self::EntryType) -> Vec<Self::EntryType>;\n\n fn get_directory(&self, drive: &Disk, directory_path: &str) -> Option<Self::EntryType>;\n\n fn get_file(&self, drive: &Disk, file_path: &str) -> Result<Self::EntryType, OpenError>;\n\n fn read_file(&self, drive: &Disk, file_pointer: &FilePointer<Self::EntryType>, buffer: &mut [u8]) -> Option<usize>;\n\n} \n\n\n\n// Fat Filesystem of the main disk.\n\npub static mut FILESYSTEM: Option<ManagedFilesystem<fat32::Fat32>> = None;\n\n\n", "file_path": "src/filesystem/mod.rs", "rank": 49, "score": 43395.783508948836 }, { "content": "// Have Ata implement this\n\npub trait Disk {\n\n unsafe fn read(&self, block: u64, buffer: &mut [u8]) -> Result<u8, &str>;\n\n unsafe fn write_at(&self, block: u64, buffer: &[u8]) -> Result<u8, &str>;\n\n}", "file_path": "src/filesystem/disk.rs", "rank": 50, "score": 43395.783508948836 }, { "content": "pub trait File {\n\n fn get_name(&self) -> String;\n\n fn get_size(&self) -> usize; \n\n}\n\n\n\npub struct FilePointer<T: File> {\n\n current: usize,\n\n file: T,\n\n}\n\n\n\n#[allow(dead_code)]\n\nimpl <T: File> FilePointer<T> {\n\n pub fn new(current: usize, file: T) -> Self {\n\n FilePointer {\n\n current: current,\n\n file: file,\n\n }\n\n }\n\n\n\n pub fn get_current(&self) -> usize {\n", "file_path": "src/filesystem/descriptor.rs", "rank": 51, "score": 43395.783508948836 }, { "content": "pub trait Encodable<T> {\n\n fn encode(&self) -> T;\n\n}\n\n\n\npub struct DescriptorTable<T: Default> {\n\n table: Option<Box<[T]>>\n\n}\n\n\n\nimpl <T: Default> DescriptorTable<T> {\n\n pub const fn new() -> Self {\n\n DescriptorTable { \n\n table: None \n\n }\n\n }\n\n\n\n pub fn insert<S: Encodable<T>>(&mut self, index: usize, element: S) {\n\n if index > self.table.as_ref().unwrap().len() {\n\n panic!(\"Index out of table range.\");\n\n }\n\n self.table.as_mut().unwrap()[index] = element.encode();\n", "file_path": "src/dtables.rs", "rank": 52, "score": 42391.00899700342 }, { "content": 
"#[test]\n\nfn test_multiple_allocation() {\n\n use alloc::allocator::Alloc;\n\n let mut heap = get_allocator();\n\n print_bitmap(&heap);\n\n let size = size_of::<usize>()*6;\n\n let mut addresses: Vec<usize> = Vec::new();\n\n let mut layouts: Vec<Layout> = Vec::new();\n\n for _ in 0..10 {\n\n let layout = Layout::from_size_align(size, align_of::<usize>()).unwrap();\n\n let addr = unsafe { Alloc::alloc(&mut heap, layout.clone()) };\n\n assert!(addr.is_ok());\n\n let addr = addr.unwrap() as usize;\n\n println!(\"allocated at {}, layout: {:?}\", addr, layout.clone());\n\n addresses.push(addr);\n\n layouts.push(layout);\n\n print_bitmap(&heap);\n\n }\n\n\n\n for i in 0..addresses.len() {\n\n unsafe { Alloc::dealloc(&mut heap, addresses[i] as *mut u8, layouts[i].clone()) };\n", "file_path": "bitmap_allocator/src/test.rs", "rank": 53, "score": 42347.86101183553 }, { "content": "#[test]\n\nfn test_single_allocation() {\n\n use alloc::allocator::Alloc;\n\n let mut heap = get_allocator();\n\n let size = size_of::<usize>();\n\n let layout = Layout::from_size_align(size, align_of::<usize>());\n\n let addr = unsafe { Alloc::alloc(&mut heap, layout.clone().unwrap()) };\n\n assert!(addr.is_ok());\n\n let addr = addr.unwrap() as usize;\n\n println!(\"\");\n\n println!(\"allocating layout: {:?}\", layout.clone().unwrap());\n\n println!(\"allocated at: {}\", addr);\n\n print_bitmap(&heap);\n\n println!(\"deallocating at: {}\", addr);\n\n unsafe { Alloc::dealloc(&mut heap, addr as *mut u8, layout.clone().unwrap()) };\n\n print_bitmap(&heap);\n\n assert!(addr == align_up(heap.get_data_start(), align_of::<usize>()));\n\n}\n\n\n", "file_path": "bitmap_allocator/src/test.rs", "rank": 54, "score": 42347.86101183553 }, { "content": "fn get_allocator() -> BitmapAllocator {\n\n const HEAP_SIZE: usize = 1000;\n\n let heap_space = Box::into_raw(Box::new([0u8; HEAP_SIZE]));\n\n let mut allocator = BitmapAllocator::new(heap_space as usize, HEAP_SIZE);\n\n allocator.init();\n\n return allocator;\n\n}\n\n\n", "file_path": "bitmap_allocator/src/test.rs", "rank": 55, "score": 40207.63970502147 }, { "content": "fn print_bitmap(allocator: &BitmapAllocator) {\n\n println!(\"Printing bitmap:\");\n\n let bitmap_size = allocator.block_count;\n\n for index in 0..bitmap_size {\n\n let block = allocator.get_cell(index).clone();\n\n let block_string = match block {\n\n CellState::Free => \"_\",\n\n CellState::Boundary => \"*\",\n\n CellState::Allocated => \">\",\n\n };\n\n print!(\"{} \", block_string);\n\n if (index+1) % 10 == 0 {\n\n print!(\"\\n\");\n\n }\n\n }\n\n print!(\"\\n\");\n\n}\n\n\n", "file_path": "bitmap_allocator/src/test.rs", "rank": 56, "score": 38293.52232041911 }, { "content": "use filesystem::FILESYSTEM;\n\nuse filesystem::File;\n\n\n\npub unsafe fn open(file_name: &str) -> usize {\n\n match FILESYSTEM.as_mut().unwrap().open_file(file_name) {\n\n Ok(file_descriptor) => file_descriptor,\n\n Err(open_error) => open_error as usize,\n\n }\n\n}\n\n\n\npub unsafe fn seek(fd: usize, new_current: usize) -> usize {\n\n FILESYSTEM.as_mut().unwrap().seek(fd, new_current);\n\n 1\n\n}\n\n\n\npub unsafe fn stat(directory_path: &str, stat_ptr: *mut u8, child_node: usize) -> usize {\n\n use core::ptr;\n\n use filesystem::File;\n\n let filesystem = FILESYSTEM.as_mut().unwrap();\n\n\n", "file_path": "src/syscall/fs.rs", "rank": 60, "score": 21.13814767578298 }, { "content": "\n\n unsafe fn poll<F>(&self, register: RegisterType, condition: F) -> u8 \n\n where F: Fn(u8) -> bool {\n\n \n\n let mut reg_value: u8;\n\n loop {\n\n 
reg_value = self.read_register(register);\n\n if condition(reg_value) {\n\n return reg_value;\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl Disk for Ata {\n\n unsafe fn read(&self, block: u64, buffer: &mut [u8]) -> Result<u8, &str> {\n\n // Transform buffer into byte array\n\n // let new_len = buffer.len() * size_of::<T>() / size_of::<u8>();\n\n // let buffer = slice::from_raw_parts_mut(buffer.as_ptr() as *mut u8, new_len);\n\n\n", "file_path": "src/drivers/ata.rs", "rank": 62, "score": 20.699166204125312 }, { "content": "pub static mut HEAP_AREA: [u8;HEAP_SIZE] = [0u8;HEAP_SIZE];\n\n\n\n#[global_allocator]\n\nstatic mut HEAP: BitmapAllocator = BitmapAllocator::new(0, HEAP_SIZE, ::core::mem::size_of::<usize>());\n\n\n\n#[no_mangle]\n\n#[start]\n\npub unsafe extern \"C\" fn _start(argc: isize, argv: *const *const u8) -> isize {\n\n HEAP.set_bitmap_start(&HEAP_AREA as *const u8 as usize);\n\n HEAP.init();\n\n\n\n extern \"Rust\" {\n\n fn main(argc: usize, args: *const *const u8);\n\n }\n\n\n\n main(argc as usize, argv);\n\n exit();\n\n 0\n\n}\n\n\n", "file_path": "filesystem/std/src/lib.rs", "rank": 63, "score": 18.618012819530527 }, { "content": "\n\n fn get_data_start(&self) -> usize {\n\n return self.block_count * mem::size_of::<CellState>() + self.bitmap_start as usize;\n\n }\n\n\n\n pub fn get_block_count(&self) -> usize {\n\n return self.block_count;\n\n }\n\n\n\n pub fn get_block_size(&self) -> usize {\n\n return self.block_size;\n\n }\n\n}\n\n\n\nunsafe impl<'a> Alloc for &'a BitmapAllocator {\n\n unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {\n\n let requested_size = layout.size() + layout.align();\n\n\n\n let mut cell_index: Option<usize> = None;\n\n let mut continuous_count = 0;\n", "file_path": "bitmap_allocator/src/lib.rs", "rank": 64, "score": 18.087118034198784 }, { "content": "use bitmap_allocator::BitmapAllocator;\n\nuse spin::Mutex;\n\nuse alloc::allocator::{ Layout, Alloc, AllocErr };\n\nuse core::ops::Deref;\n\n\n\npub struct Heap {\n\n heap: Mutex<BitmapAllocator>,\n\n}\n\n\n\nimpl Heap {\n\n pub const fn new(allocator: BitmapAllocator) -> Self {\n\n Heap {\n\n heap: Mutex::new(allocator),\n\n }\n\n }\n\n}\n\n\n\nunsafe impl <'a> Alloc for &'a Heap {\n\n unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {\n\n (&*self.heap.lock()).alloc(layout)\n", "file_path": "src/memory/heap.rs", "rank": 65, "score": 18.078086885256774 }, { "content": " pub base: usize,\n\n pub size: usize,\n\n}\n\n\n\nimpl MemoryArea {\n\n pub const EMPTY: MemoryArea = MemoryArea {\n\n base: 0,\n\n size: 0,\n\n };\n\n\n\n pub fn new(base: usize, size: usize) -> Self {\n\n MemoryArea {\n\n base: base,\n\n size: size,\n\n }\n\n }\n\n\n\n pub fn subtract(&self, other: &MemoryArea) -> (Option<MemoryArea>, Option<MemoryArea>) {\n\n let mut before = None;\n\n let mut after = None;\n", "file_path": "src/memory/memory_map.rs", "rank": 67, "score": 17.595836189317133 }, { "content": "use super::utils::outb;\n\n\n\npub struct Cursor;\n\n\n\nimpl Cursor {\n\n pub fn update_location(&self, row: usize, column: usize) {\n\n use vga_buffer::BUFFER_WIDTH;\n\n let pos = row * BUFFER_WIDTH + column;\n\n\n\n unsafe {\n\n outb(0x3D4, 0x0F);\n\n outb(0x3D5, (pos & 0xFF) as u8);\n\n outb(0x3D4, 0x0E);\n\n outb(0x3D5, ((pos >> 8) & 0xFF) as u8);\n\n }\n\n }\n\n}", "file_path": "src/drivers/cursor.rs", "rank": 68, "score": 17.56923544170402 }, { "content": "pub unsafe fn terminated_string<'a>(start: *const u8) -> &'a str {\n\n use core::{ str, slice, ptr };\n\n\n\n let mut length: isize 
= 0;\n\n loop {\n\n let current = start.offset(length);\n\n // println!(\"{:?} - {}\", current, ptr::read(current));\n\n if ptr::read(current) == 0u8 {\n\n break;\n\n }\n\n length += 1;\n\n }\n\n return str::from_utf8_unchecked(slice::from_raw_parts(start, length as usize));\n\n}\n\n\n\nuse alloc::Vec;\n\npub unsafe fn get_args<'a>(argc: usize, argv: *const *const u8) -> Vec<&'a str> {\n\n use core::slice;\n\n let str_pointer_slice = slice::from_raw_parts(argv, argc);\n\n let mut str_array: Vec<&str> = Vec::new();\n\n for str_pointer in str_pointer_slice.iter() {\n\n let string = terminated_string(str_pointer.clone());\n\n // println!(\"{} at {:?}\", string, str_pointer);\n\n str_array.push(string);\n\n }\n\n return str_array;\n\n}", "file_path": "filesystem/std/src/args.rs", "rank": 69, "score": 17.031221608142356 }, { "content": "use super::PROCESS_ALLOCATOR;\n\nuse super::elf::*;\n\nuse core::slice;\n\nuse alloc::Vec;\n\nuse alloc::allocator::{ Layout, Alloc };\n\nuse memory::segmentation::SegmentDescriptor;\n\nuse super::process::Process;\n\n\n\n#[derive(Debug)]\n\npub struct LoadInformation {\n\n pub process_base: *const u8,\n\n pub stack_pointer: *mut u8,\n\n pub argument_pointers_start: *const *const u8,\n\n pub arguments_count: usize,\n\n pub ldt_entries: Vec<SegmentDescriptor>,\n\n load_request: LoadRequest,\n\n}\n\n\n\nimpl LoadInformation {\n\n pub fn translate_virtual_to_physical_address(&self, address: *const u8) -> *const u8 {\n", "file_path": "src/task/loader.rs", "rank": 70, "score": 15.805906043909715 }, { "content": " : \"={eax}\"(a)\n\n : \"{eax}\"(a)\n\n : \"memory\"\n\n : \"intel\", \"volatile\");\n\n\n\n a\n\n}\n\n\n\npub unsafe fn syscall1(mut a: usize, b: usize) -> usize {\n\n asm!(\"int 0x80\"\n\n : \"={eax}\"(a)\n\n : \"{eax}\"(a), \"{ebx}\"(b)\n\n : \"memory\"\n\n : \"intel\", \"volatile\");\n\n\n\n a\n\n}\n\n\n\n// Clobbers all registers - special for clone\n\npub unsafe fn syscall1_clobber(mut a: usize, b: usize) -> usize {\n", "file_path": "filesystem/std/src/syscalls/syscall.rs", "rank": 71, "score": 15.795818718736323 }, { "content": " }\n\n\n\n pub unsafe fn read_elf_header(fd: usize) -> ElfHeader {\n\n use syscall::{ read, seek };\n\n use core::slice::from_raw_parts_mut;\n\n\n\n let mut header = ElfHeader::default();\n\n let read_buff = from_raw_parts_mut(&mut header as *mut ElfHeader as *mut u8, 52);\n\n seek(fd, 0);\n\n read(fd, read_buff);\n\n return header;\n\n }\n\n\n\n pub unsafe fn read_program_header_entries(file_descriptor: usize, header: &ElfHeader) -> Vec<ProgramHeaderEntry> {\n\n use syscall::{ read, seek };\n\n use core::slice::from_raw_parts_mut;\n\n\n\n let ph_entries = vec![ProgramHeaderEntry::empty(); header.phnum as usize];\n\n seek(file_descriptor, header.phoff as usize);\n\n \n\n let buff_slice = from_raw_parts_mut(ph_entries.as_ptr() as *mut u8, (header.phentsize*header.phnum) as usize);\n\n read(file_descriptor, buff_slice);\n\n\n\n return ph_entries;\n\n }\n\n}", "file_path": "src/task/elf.rs", "rank": 72, "score": 15.671816560130678 }, { "content": "pub mod process;\n\nmod loader;\n\nmod elf;\n\n\n\nuse BitmapAllocator;\n\nuse self::process::*;\n\nuse self::loader::*;\n\nuse memory::MemoryArea;\n\nuse alloc::Vec;\n\n\n\npub static mut PROCESS_ALLOCATOR: BitmapAllocator = BitmapAllocator::new(0x0, 0x0, 0x0);\n\npub static mut PROCESS_LIST: Option<Vec<Process>> = None;\n\n\n", "file_path": "src/task/mod.rs", "rank": 73, "score": 15.30592874951709 }, { "content": "#[derive(PartialEq, Debug, Copy, Clone)]\n\npub enum FatEntry {\n\n 
Node(Cluster),\n\n End,\n\n BadBlock,\n\n}\n\n\n\n#[derive(Debug, PartialEq, Copy, Clone)]\n\npub struct Cluster(pub usize);\n\n\n\npub struct ClusterChain<'a> {\n\n current_entry: FatEntry,\n\n fat: &'a Fat32,\n\n drive: &'a Disk,\n\n}\n\n\n\nimpl <'a> ClusterChain<'a> {\n\n pub const fn new(cluster: Cluster, fat: &'a Fat32, drive: &'a Disk) -> Self {\n\n ClusterChain {\n\n current_entry: FatEntry::Node(cluster),\n", "file_path": "src/filesystem/fat32/table.rs", "rank": 74, "score": 15.084054384428942 }, { "content": " pub fn is_folder(&self) -> bool {\n\n self.attributes as u8 & FileAttributes::Directory as u8 == FileAttributes::Directory as u8\n\n }\n\n\n\n pub unsafe fn get_long_name(&self) -> String {\n\n use core::slice;\n\n // Slice of 32 bytes\n\n let bytes = slice::from_raw_parts(self as *const FatDirectory as *const u8, 32);\n\n let name_first = slice::from_raw_parts(&bytes[1] as *const u8 as *const u16, 5);\n\n let name_middle = slice::from_raw_parts(&bytes[14] as *const u8 as *const u16, 6);\n\n let name_final = slice::from_raw_parts(&bytes[28] as *const u8 as *const u16, 2);\n\n\n\n let mut buff = vec![0u16; 13];\n\n buff[..5].clone_from_slice(name_first);\n\n buff[5..11].clone_from_slice(name_middle);\n\n buff[11..].clone_from_slice(name_final);\n\n\n\n let mut last_index = buff.len();\n\n for (index, b) in buff.iter().enumerate() {\n\n if *b == 0xffff || *b == 0 {\n\n last_index = index;\n\n break;\n\n }\n\n }\n\n\n\n String::from_utf16_lossy(&buff[..last_index])\n\n }\n\n}\n", "file_path": "src/filesystem/fat32/directory.rs", "rank": 75, "score": 14.938097636791472 }, { "content": "}\n\n\n\n// Reading contents of file to buffer\n\npub unsafe fn read(fd: usize, read_buffer: &mut [u8]) -> usize {\n\n FILESYSTEM.as_mut().unwrap().read_file(fd, read_buffer)\n\n}\n\n\n\npub unsafe fn read_dir_name(parent_dir_name: &str, name_buffer: &mut [u8], child_node: usize) -> usize {\n\n let filesystem = FILESYSTEM.as_mut().unwrap();\n\n let parent = if parent_dir_name == \".\" {\n\n filesystem.get_root_directory()\n\n } else if let Some(directory) = filesystem.get_directory(parent_dir_name) {\n\n directory\n\n } else {\n\n return 0xffffffff;\n\n };\n\n let dirs = FILESYSTEM.as_mut().unwrap().get_child_directories(&parent);\n\n let name = dirs[child_node].get_name();\n\n let name_bytes = name.as_bytes();\n\n &name_buffer.clone_from_slice(name_bytes);\n\n 0\n\n}", "file_path": "src/syscall/fs.rs", "rank": 76, "score": 14.758947616844905 }, { "content": "pub mod heap;\n\npub mod gdt;\n\npub mod segmentation;\n\npub mod utils;\n\nmod memory_map;\n\n\n\npub use self::memory_map::*;\n\nuse BootloaderInfo;\n\nuse dtables;\n\n\n\npub static mut GDT: gdt::SegmentDescriptorTable = dtables::DescriptorTable::new();\n\n\n\npub unsafe fn setup_descriptors(_bootloader_info: &BootloaderInfo) {\n\n use self::gdt::{ DescriptorType, Gdt };\n\n use self::segmentation::{ SegmentDescriptor, Flags, AccessFlags };\n\n GDT.init();\n\n \n\n // Dividing limit by 4K since granularity flag is on\n\n let flags = Flags::Size as u8; // | Flags::Granularity as u8;\n\n let limit = 0x13e9a3;\n", "file_path": "src/memory/mod.rs", "rank": 77, "score": 14.422238724416161 }, { "content": "}\n\n\n\nimpl <T: File> FileDescriptor<T> {\n\n pub fn new(id: usize, pointer: FilePointer<T>) -> Self {\n\n FileDescriptor {\n\n id: id, \n\n pointer: pointer\n\n }\n\n }\n\n pub fn get_id(&self) -> usize {\n\n self.id\n\n }\n\n\n\n pub fn get_pointer(&self) -> &FilePointer<T> {\n\n &self.pointer\n\n }\n\n\n\n pub fn get_pointer_mut(&mut 
self) -> &mut FilePointer<T> {\n\n &mut self.pointer\n\n }\n\n}", "file_path": "src/filesystem/descriptor.rs", "rank": 78, "score": 14.250688846211675 }, { "content": "\n\n#[allow(unused_variables)]\n\npub unsafe fn syscall(a: usize, b: usize, c: usize, d: usize, e: usize, f: usize) -> usize {\n\n let current_process = ::task::get_current_process();\n\n match a {\n\n FS_OPEN => { \n\n let ptr = current_process.get_load_information().translate_virtual_to_physical_address(b as *const u8);\n\n open(to_str(ptr as usize, c)) \n\n },\n\n FS_PRINT => {\n\n let ptr = current_process.get_load_information().translate_virtual_to_physical_address(b as *const u8);\n\n let string = to_str(ptr as usize, c);\n\n print!(\"{}\", string);\n\n 0\n\n },\n\n FS_READ => {\n\n let ptr = current_process.get_load_information().translate_virtual_to_physical_address(c as *const u8);\n\n let slice = slice::from_raw_parts_mut(ptr as *mut u8, d);\n\n read(b, slice)\n\n },\n", "file_path": "src/syscall/mod.rs", "rank": 79, "score": 14.158116342135251 }, { "content": "use syscall;\n\n\n\nextern {\n\n fn syscall_handler();\n\n}\n\n\n\npub unsafe fn init() {\n\n use interrupts::{ idt, IDT };\n\n // Set handler for interrupt 64,\n\n // Syscall Interrupt[0x80] - (Exceptions[32d] + Hardware Interrupts[32d]) = 0x40\n\n IDT.interrupts[0x40] = idt::IdtEntry::new(syscall_handler as u32, 3);\n\n}\n\n\n\n#[derive(Debug)]\n\n#[repr(packed, C)]\n\npub struct SyscallStack {\n\n pub edi: usize,\n\n pub esi: usize,\n\n pub edx: usize,\n\n pub ecx: usize,\n", "file_path": "src/interrupts/syscall.rs", "rank": 80, "score": 13.905313639629304 }, { "content": " }\n\n\n\n fn clear_row(&mut self, row: usize) {\n\n let blank = ScreenChar {\n\n ascii_character: b' ',\n\n color_code: self.color_code,\n\n };\n\n for col in 0..BUFFER_WIDTH {\n\n self.buffer().chars[row][col] = blank;\n\n }\n\n }\n\n\n\n fn update_cursor(&self) {\n\n ::drivers::cursor::Cursor.update_location(self.row_position, self.column_position);\n\n }\n\n\n\n pub fn delete_char(&mut self) {\n\n let blank = ScreenChar {\n\n ascii_character: b' ',\n\n color_code: self.color_code,\n", "file_path": "src/vga_buffer.rs", "rank": 81, "score": 13.821608223107413 }, { "content": " unsafe { &mut *(0xb8000 as *mut Buffer) }\n\n }\n\n\n\n fn new_line(&mut self) {\n\n if self.row_position == BUFFER_HEIGHT-1 {\n\n for row in 1..BUFFER_HEIGHT {\n\n for col in 0..BUFFER_WIDTH {\n\n let buffer = self.buffer();\n\n let character = buffer.chars[row][col];\n\n buffer.chars[row-1][col] = character;\n\n }\n\n }\n\n self.clear_row(BUFFER_HEIGHT-1);\n\n }\n\n\n\n use core::cmp::min;\n\n self.row_position = min(self.row_position+1, BUFFER_HEIGHT-1);\n\n self.column_position = 0;\n\n\n\n self.update_cursor();\n", "file_path": "src/vga_buffer.rs", "rank": 82, "score": 13.416160274682952 }, { "content": "pub struct PortRange {\n\n start: u16,\n\n end: u16,\n\n}\n\n\n\nimpl PortRange {\n\n pub const fn new(start: u16, end: u16) -> Self {\n\n PortRange { start: start, end: end }\n\n }\n\n\n\n pub fn get(&self, index: u16) -> u16 {\n\n if self.end-self.start < index {\n\n panic!(\"Port out of range.\");\n\n }\n\n self.start + index\n\n }\n\n}\n\n\n\npub unsafe fn inb(port: u16) -> u8 {\n\n let result: u8;\n", "file_path": "src/drivers/utils.rs", "rank": 83, "score": 13.348468642802482 }, { "content": " pub unsafe fn new(disk: &Disk) -> Self {\n\n let mut x: [u8;512] = [0u8;512];\n\n disk.read(0, &mut x).expect(\"Error reading EBPB from disk.\"); // Read the first sector into x\n\n let ebpb = (*(x.as_ptr() as 
*const Ebpb)).clone();\n\n Fat32 {\n\n ebpb: ebpb,\n\n }\n\n }\n\n\n\n fn get_first_data_sector(&self) -> u64 {\n\n self.ebpb.bpb.reserved_sectors_count as u64 + (self.ebpb.bpb.table_count as u32 * self.ebpb.sectors_per_fat) as u64\n\n }\n\n\n\n fn get_bytes_in_cluster(&self) -> usize {\n\n //self.ebpb.bpb.sectors_per_cluster as u32 * self.ebpb.bpb.bytes_per_sector as u32\n\n self.ebpb.bpb.sectors_per_cluster as usize * self.ebpb.bpb.bytes_per_sector as usize\n\n }\n\n\n\n fn first_sector_of_cluster(&self, cluster: usize) -> u64 {\n\n ((cluster-2) * (self.ebpb.bpb.sectors_per_cluster as usize)) as u64 + self.get_first_data_sector()\n", "file_path": "src/filesystem/fat32/mod.rs", "rank": 84, "score": 13.324648196641245 }, { "content": " }\n\n }\n\n\n\n pub fn get_short_name(&self) -> String {\n\n use alloc::string::ToString;\n\n String::from_utf8(self.name.to_vec()).expect(\"Invalid UTF-8.\").trim().to_string()\n\n }\n\n\n\n pub fn get_cluster(&self) -> u32 {\n\n (self.first_cluster_high as u32) << 16 | self.first_cluster_low as u32\n\n }\n\n\n\n // pub fn get_size(&self) -> usize {\n\n // self.file_size as usize\n\n // }\n\n\n\n pub fn is_lfn(&self) -> bool {\n\n self.attributes as u8 == FileAttributes::LongName as u8\n\n }\n\n\n", "file_path": "src/filesystem/fat32/directory.rs", "rank": 85, "score": 13.306974216717975 }, { "content": " return self.name.to_string();\n\n }\n\n\n\n fn get_size(&self) -> usize {\n\n self.fat_directory.file_size as usize\n\n }\n\n}\n\n\n\n#[repr(packed, C)]\n\n#[derive(Debug, Copy, Clone)]\n\npub struct FatDirectory {\n\n pub name: [u8; 11],\n\n attributes: u8,\n\n flags_nt: u8,\n\n creation_time_precise: u8,\n\n creation_time: u16,\n\n creation_date: u16,\n\n last_accessed: u16,\n\n first_cluster_high: u16,\n\n last_modified_time: u16,\n", "file_path": "src/filesystem/fat32/directory.rs", "rank": 86, "score": 13.268830475765432 }, { "content": " process_base: process_base as *const u8,\n\n stack_pointer: stack_pointer,\n\n argument_pointers_start: pointers_start as *const *const u8,\n\n arguments_count: args.len(),\n\n ldt_entries: ldt_entries,\n\n load_request: load_request,\n\n })\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct LoadRequest {\n\n pub process_area_size: usize,\n\n pub stack_area_size: usize,\n\n pub arguments_count: usize,\n\n pub arguments_area_size: usize,\n\n}\n\n\n\nimpl LoadRequest {\n\n pub fn get_total_process_size(&self) -> usize {\n\n use core::mem;\n", "file_path": "src/task/loader.rs", "rank": 87, "score": 13.245842347870882 }, { "content": " }\n\n\n\n fn read_directories_from_cluster(&self, drive: &Disk, cluster: Cluster, directories: &mut Vec<Directory>) {\n\n let mut temp_name: Option<String> = None;\n\n let mut buffer = vec![0u8; self.get_bytes_in_cluster() as usize];\n\n\n\n let sectors_read = unsafe { drive.read(self.first_sector_of_cluster(cluster.0), &mut buffer) }.expect(\"Error reading from disk.\") as usize;\n\n let directories_slice = unsafe { slice::from_raw_parts(buffer.as_ptr() as *const FatDirectory, (sectors_read * self.ebpb.bpb.bytes_per_sector as usize / 32) as usize) };\n\n\n\n for directory in directories_slice {\n\n // If the first byte of the directory entry is 0, there are no more directories.\n\n // If the first byte of the directory entry is 0xE5, the directory is not used.\n\n if directory.name[0] == 0 {\n\n break;\n\n } else if directory.name[0] == 0xE5 {\n\n continue;\n\n }\n\n\n\n if directory.is_lfn() {\n\n let long_file_name = unsafe { directory.get_long_name() };\n", "file_path": 
"src/filesystem/fat32/mod.rs", "rank": 88, "score": 13.180818183219419 }, { "content": "}\n\n\n\npub(in super) unsafe fn load_process(process: &Process, args: &[&str], load_request: LoadRequest) -> Result<LoadInformation, LoadError> { \n\n use syscall::{ read, seek };\n\n let process_size_total = load_request.get_total_process_size();\n\n \n\n // Allocating process\n\n let layout = Layout::from_size_align_unchecked(process_size_total, 1);\n\n let process_base: *mut u8;\n\n match (&PROCESS_ALLOCATOR).alloc(layout) {\n\n Ok(ptr) => process_base = ptr,\n\n Err(error) => {\n\n use alloc::allocator::AllocErr;\n\n match error {\n\n AllocErr::Exhausted { request } => return Err(LoadError::NoMemory),\n\n _ => panic!(\"Process allocator error.\"),\n\n };\n\n }\n\n };\n\n\n", "file_path": "src/task/loader.rs", "rank": 89, "score": 13.011176961827472 }, { "content": " asm!(\"int 0x80\"\n\n : \"={eax}\"(a)\n\n : \"{eax}\"(a), \"{ebx}\"(b)\n\n : \"memory\", \"ebx\", \"ecx\", \"edx\", \"esi\", \"edi\"\n\n : \"intel\", \"volatile\");\n\n\n\n a\n\n}\n\n\n\npub unsafe fn syscall2(mut a: usize, b: usize, c: usize) -> usize {\n\n asm!(\"int 0x80\"\n\n : \"={eax}\"(a)\n\n : \"{eax}\"(a), \"{ebx}\"(b), \"{ecx}\"(c)\n\n : \"memory\"\n\n : \"intel\", \"volatile\");\n\n\n\n a\n\n}\n\n\n\npub unsafe fn syscall3(mut a: usize, b: usize, c: usize, d: usize) -> usize {\n", "file_path": "filesystem/std/src/syscalls/syscall.rs", "rank": 90, "score": 12.939587250335581 }, { "content": "#![feature(alloc)]\n\n#![feature(start)]\n\n#![no_main]\n\n#![no_std]\n\n\n\n#[macro_use]\n\nextern crate std;\n\n\n\nuse std::io;\n\n\n\n#[no_mangle]\n\npub unsafe fn main(_argc: usize, _argv: *const *const u8) {\n\n let mut occupied_size = 0;\n\n for i in 0..100 {\n\n if let Some(proc_info) = std::syscalls::proc_info(i) {\n\n println!(\"Process number {}\", i);\n\n println!(\"\\tProcess start: {:#x}\", proc_info.process_base);\n\n println!(\"\\tProcess end: {:#x}\", proc_info.process_total_size);\n\n occupied_size += proc_info.process_total_size as usize;\n\n } else {\n", "file_path": "filesystem/ps.rs", "rank": 91, "score": 12.816492646615782 }, { "content": "\n\nunsafe fn read_args<'a>(args: &[*const u8]) -> Vec<&'a str> {\n\n let mut arguments: Vec<&str> = Vec::with_capacity(args.len());\n\n for ptr in args.iter().cloned() {\n\n arguments.push(terminated_string(ptr));\n\n }\n\n return arguments;\n\n}\n\n\n\nconst FS_OPEN: usize = 0x01;\n\nconst FS_PRINT: usize = 0x02;\n\nconst FS_READ: usize = 0x03;\n\nconst FS_STAT: usize = 0x04;\n\nconst IO_GETC: usize = 0x05;\n\nconst IO_DELC: usize = 0x06;\n\nconst PROC_EXECV: usize = 0x07;\n\nconst FS_DIR_NAME: usize = 0x08;\n\nconst PROC_INFO: usize = 0x09;\n\nconst PROC_SIZE: usize = 0x10;\n\nconst UNDEFINED_SYSCALL: usize = 0xff;\n", "file_path": "src/syscall/mod.rs", "rank": 92, "score": 12.773843748997953 }, { "content": " block_count: usize,\n\n block_size: usize,\n\n}\n\n\n\nimpl BitmapAllocator {\n\n // total_size = block_count * (cell_size + block_size)\n\n pub const fn new(start: usize, size: usize, block_size: usize) -> Self {\n\n BitmapAllocator {\n\n bitmap_start: start,\n\n block_count: size / (mem::size_of::<CellState>() + block_size),\n\n block_size: block_size,\n\n }\n\n }\n\n\n\n pub fn set_bitmap_start(&mut self, bitmap_start: usize) {\n\n self.bitmap_start = bitmap_start;\n\n }\n\n\n\n pub fn set_block_size(&mut self, block_size: usize) {\n\n self.block_size = block_size;\n", "file_path": "bitmap_allocator/src/lib.rs", "rank": 93, "score": 12.544840458481666 }, { "content": 
"use BootloaderInfo;\n\nuse core::slice;\n\nuse alloc::Vec;\n\nuse core::fmt;\n\n\n\n#[allow(dead_code)]\n\n#[repr(u32)]\n\n#[derive(Debug)]\n\npub enum MemoryAreaType {\n\n Free = 1,\n\n Reserved = 2,\n\n AcpiReclaimable = 3,\n\n AcpiNonVolatile = 4,\n\n Bad = 5,\n\n}\n\n\n\nimpl From<u32> for MemoryAreaType {\n\n fn from(x: u32) -> Self {\n\n match x {\n\n 1 => MemoryAreaType::Free,\n", "file_path": "src/memory/memory_map.rs", "rank": 94, "score": 12.372596647469729 }, { "content": "#[repr(u8)]\n\n#[derive(PartialEq, Clone, Copy)]\n\npub enum CellState { \n\n Free,\n\n Boundary,\n\n Allocated,\n\n}", "file_path": "bitmap_allocator/src/cell.rs", "rank": 95, "score": 12.259420295443514 }, { "content": " self.process_area_size + self.stack_area_size + self.arguments_area_size + self.arguments_count * mem::size_of::<*const u8>()\n\n }\n\n\n\n pub fn get_arguments_start(&self) -> *mut u8 {\n\n return (self.process_area_size + self.stack_area_size) as *mut u8;\n\n }\n\n\n\n pub fn get_pointer_array_start(&self) -> *mut *const u8 {\n\n return (self.process_area_size + self.stack_area_size + self.arguments_area_size) as *mut *const u8;\n\n }\n\n\n\n pub fn get_stack_pointer(&self) -> *mut u8 {\n\n (self.process_area_size + self.stack_area_size) as *mut u8\n\n }\n\n\n\n pub fn get_argument_count(&self) -> usize {\n\n self.arguments_count\n\n }\n\n}\n\n\n", "file_path": "src/task/loader.rs", "rank": 96, "score": 12.225553107242813 }, { "content": "use super::{ Disk, Fat32 };\n\n\n\n#[repr(packed, C)]\n\n#[derive(Debug, Clone, Copy)]\n\npub struct Bpb {\n\n pub skip_code: [u8; 3],\n\n pub oem_identifier: [u8;8],\n\n pub bytes_per_sector: u16,\n\n pub sectors_per_cluster: u8,\n\n pub reserved_sectors_count: u16,\n\n pub table_count: u8,\n\n pub root_entry_count: u16,\n\n pub total_sectors: u16,\n\n pub media_descriptor_type: u8,\n\n pub sectors_per_fat: u16,\n\n pub sectors_per_track: u16,\n\n pub head_size_count: u16,\n\n pub hidden_sectors_count: u32,\n\n pub total_sectors_large: u32,\n\n}\n", "file_path": "src/filesystem/fat32/table.rs", "rank": 97, "score": 12.167230739151329 }, { "content": " asm!(\"int 0x80\"\n\n : \"={eax}\"(a)\n\n : \"{eax}\"(a), \"{ebx}\"(b), \"{ecx}\"(c), \"{edx}\"(d)\n\n : \"memory\"\n\n : \"intel\", \"volatile\");\n\n\n\n a\n\n}\n\n\n\npub unsafe fn syscall4(mut a: usize, b: usize, c: usize, d: usize, e: usize) -> usize {\n\n asm!(\"int 0x80\"\n\n : \"={eax}\"(a)\n\n : \"{eax}\"(a), \"{ebx}\"(b), \"{ecx}\"(c), \"{edx}\"(d), \"{esi}\"(e)\n\n : \"memory\"\n\n : \"intel\", \"volatile\");\n\n\n\n a\n\n}\n\n\n\npub unsafe fn syscall5(mut a: usize, b: usize, c: usize, d: usize, e: usize, f: usize) -> usize {\n\n asm!(\"int 0x80\"\n\n : \"={eax}\"(a)\n\n : \"{eax}\"(a), \"{ebx}\"(b), \"{ecx}\"(c), \"{edx}\"(d), \"{esi}\"(e), \"{edi}\"(f)\n\n : \"memory\"\n\n : \"intel\", \"volatile\");\n\n a\n\n}", "file_path": "filesystem/std/src/syscalls/syscall.rs", "rank": 98, "score": 12.124277807410678 }, { "content": " };\n\n\n\n // Returning value\n\n if table_value >= 0x0FFFFFF8 {\n\n return FatEntry::End;\n\n } else if table_value == 0x0FFFFFF7 {\n\n return FatEntry::BadBlock;\n\n } else {\n\n // FatEntry pointing to the next index in the table\n\n return FatEntry::Node(Cluster(table_value as usize));\n\n }\n\n }\n\n}\n\n\n\nimpl <'a> Iterator for ClusterChain <'a> {\n\n type Item = Cluster;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n if self.current_entry == FatEntry::End || self.current_entry == FatEntry::BadBlock {\n\n return None;\n", "file_path": 
"src/filesystem/fat32/table.rs", "rank": 99, "score": 12.110305578316165 } ]
Rust
benches/hash.rs
flier/rust-t1ha
163460ad90b424e75a5480cfe3c94edafc4883ef
#![allow(deprecated)] #[macro_use] extern crate lazy_static; #[macro_use] extern crate criterion; use std::collections::hash_map::DefaultHasher; use std::hash::{Hasher, BuildHasher}; use std::hash::SipHasher; use std::io::BufReader; use std::mem; use std::slice; use criterion::{black_box, Criterion, ParameterizedBenchmark, Throughput}; use ahash::ABuildHasher; use farmhash::{hash32_with_seed as farmhash32, hash64_with_seed as farmhash64}; use fnv::FnvHasher; use fxhash::{hash32 as fxhash32, hash64 as fxhash64}; use meowhash::MeowHasher; use metrohash::{MetroHash128, MetroHash64}; use murmur3::{murmur3_32, murmur3_x64_128, murmur3_x86_128}; use rustc_hash::FxHasher; use seahash::hash_seeded as seahash64; use t1ha::{t1ha0_32, t1ha1, t1ha2_atonce, t1ha2_atonce128}; use twox_hash::{XxHash as XxHash64, XxHash32}; use xxhash2::{hash32 as xxhash32, hash64 as xxhash64}; #[cfg(target_feature = "aes")] use t1ha::t1ha0_ia32aes_noavx; #[cfg(not(target_feature = "aes"))] fn t1ha0_ia32aes_noavx(_data: &[u8], _seed: u64) -> u64 { 0 } #[cfg(target_feature = "avx2")] use t1ha::t1ha0_ia32aes_avx2; #[cfg(not(target_feature = "avx2"))] fn t1ha0_ia32aes_avx2(_data: &[u8], _seed: u64) -> u64 { 0 } #[cfg(target_feature = "avx")] use t1ha::t1ha0_ia32aes_avx; #[cfg(not(target_feature = "avx"))] fn t1ha0_ia32aes_avx(_data: &[u8], _seed: u64) -> u64 { 0 } const KB: usize = 1024; const SEED: u64 = 0x0123456789ABCDEF; const PARAMS: [usize; 7] = [7, 8, 32, 256, KB, 4 * KB, 16 * KB]; lazy_static! { static ref DATA: Vec<u8> = (0..16 * KB).map(|b| b as u8).collect::<Vec<_>>(); } fn bench_memory(c: &mut Criterion) { c.bench( "memory", ParameterizedBenchmark::new( "sum", move |b, &&size| { let s = unsafe { slice::from_raw_parts(DATA.as_ptr() as *mut u32, size / mem::size_of::<u32>()) }; b.iter(|| { black_box(s.iter().fold(0u64, |acc, &x| acc + x as u64)); }) }, &PARAMS, ) .throughput(|&&size| Throughput::Bytes(size as u32)), ); } fn bench_hash32(c: &mut Criterion) { c.bench( "hash32", ParameterizedBenchmark::new( "t1ha0_32", move |b, &&size| { b.iter(|| t1ha0_32(&DATA[..size], SEED)); }, &PARAMS, ) .with_function("murmur3_32", move |b, &&size| { b.iter(|| { let mut r = BufReader::new(&DATA[..size]); murmur3_32(&mut r, SEED as u32) }); }) .with_function("farmhash32", move |b, &&size| { b.iter(|| farmhash32(&DATA[..size], SEED as u32)); }) .with_function("xxhash32", move |b, &&size| { b.iter(|| xxhash32(&DATA[..size], SEED as u32)); }) .with_function("twox_hash::XxHash32", move |b, &&size| { b.iter(|| { let mut h = XxHash32::with_seed(SEED as u32); h.write(&DATA[..size]); h.finish() }); }) .with_function("fxhash32", move |b, &&size| { b.iter(|| fxhash32(&DATA[..size])); }) .throughput(|&&size| Throughput::Bytes(size as u32)), ); } fn bench_hash64(c: &mut Criterion) { let mut bench = ParameterizedBenchmark::new( "t1ha1", move |b, &&size| { b.iter(|| t1ha1(&DATA[..size], SEED)); }, &PARAMS, ) .with_function("t1ha2_atonce", move |b, &&size| { b.iter(|| t1ha2_atonce(&DATA[..size], SEED)); }); if cfg!(target_feature = "aes") { bench = bench.with_function("t1ha0_ia32aes_noavx", move |b, &&size| { b.iter(|| t1ha0_ia32aes_noavx(&DATA[..size], SEED)); }); } if cfg!(target_feature = "avx") { bench = bench.with_function("t1ha0_ia32aes_avx", move |b, &&size| { b.iter(|| t1ha0_ia32aes_avx(&DATA[..size], SEED)); }); } if cfg!(target_feature = "avx2") { bench = bench.with_function("t1ha0_ia32aes_avx2", move |b, &&size| { b.iter(|| t1ha0_ia32aes_avx2(&DATA[..size], SEED)); }); } c.bench( "hash64", bench 
.with_function("hash_map::DefaultHasher", move |b, &&size| { b.iter(|| { let mut h = DefaultHasher::new(); h.write(&DATA[..size]); h.finish() }); }) .with_function("siphash", move |b, &&size| { b.iter(|| { let mut h = SipHasher::new_with_keys(SEED, SEED); h.write(&DATA[..size]); h.finish() }); }) .with_function("metrohash64", move |b, &&size| { b.iter(|| { let mut h = MetroHash64::with_seed(SEED); h.write(&DATA[..size]); h.finish() }); }) .with_function("farmhash64", move |b, &&size| { b.iter(|| farmhash64(&DATA[..size], SEED)); }) .with_function("fnv64", move |b, &&size| { b.iter(|| { let mut h = FnvHasher::with_key(SEED); h.write(&DATA[..size]); h.finish() }); }) .with_function("xxhash64", move |b, &&size| { b.iter(|| xxhash64(&DATA[..size], SEED)); }) .with_function("twox_hash::XxHash", move |b, &&size| { b.iter(|| { let mut h = XxHash64::with_seed(SEED); h.write(&DATA[..size]); h.finish() }); }) .with_function("seahash", move |b, &&size| { b.iter(|| seahash64(&DATA[..size], SEED, SEED, SEED, SEED)); }) .with_function("fxhash64", move |b, &&size| { b.iter(|| fxhash64(&DATA[..size])); }) .with_function("ahash", move |b, &&size| { let builder = ABuildHasher::new(); b.iter(|| { let mut h = builder.build_hasher(); h.write(&DATA[..size]); h.finish() }); }) .with_function("rustc_hash::FxHasher", move |b, &&size| { b.iter(|| { let mut h = FxHasher::default(); h.write(&DATA[..size]); h.finish() }); }) .throughput(|&&size| Throughput::Bytes(size as u32)), ); } fn bench_hash128(c: &mut Criterion) { let mut bench = ParameterizedBenchmark::new( "t1ha2_atonce128", move |b, &&size| { b.iter(|| t1ha2_atonce128(&DATA[..size], SEED)); }, &PARAMS, ) .with_function("metrohash128", move |b, &&size| { b.iter(|| { let mut h = MetroHash128::with_seed(SEED); h.write(&DATA[..size]); h.finish128() }); }); if cfg!(target_arch = "x86_64") { bench = bench.with_function("murmur3_x64_128", move |b, &&size| { b.iter(|| { let mut r = BufReader::new(&DATA[..size]); let mut out = [0; 16]; murmur3_x64_128(&mut r, SEED as u32, &mut out); }); }); } if cfg!(target_arch = "x86") { bench = bench.with_function("murmur3_x86_128", move |b, &&size| { b.iter(|| { let mut r = BufReader::new(&DATA[..size]); let mut out = [0; 16]; murmur3_x86_128(&mut r, SEED as u32, &mut out); }); }); } if cfg!(target_feature = "aes") { bench = bench.with_function("meowhash128", move |b, &&size| { b.iter(|| MeowHasher::digest_with_seed(SEED as u128, &DATA[..size])); }); } c.bench( "hash128", bench.throughput(|&&size| Throughput::Bytes(size as u32)), ); } criterion_group!( benches, bench_memory, bench_hash32, bench_hash64, bench_hash128 ); criterion_main!(benches);
#![allow(deprecated)] #[macro_use] extern crate lazy_static; #[macro_use] extern crate criterion; use std::collections::hash_map::DefaultHasher; use std::hash::{Hasher, BuildHasher}; use std::hash::SipHasher; use std::io::BufReader; use std::mem; use std::slice; use criterion::{black_box, Criterion, ParameterizedBenchmark, Throughput}; use ahash::ABuildHasher; use farmhash::{hash32_with_seed as farmhash32, hash64_with_seed as farmhash64}; use fnv::FnvHasher; use fxhash::{hash32 as fxhash32, hash64 as fxhash64}; use meowhash::MeowHasher; use metrohash::{MetroHash128, MetroHash64}; use murmur3::{murmur3_32, murmur3_x64_128, murmur3_x86_128}; use rustc_hash::FxHasher; use seahash::hash_seeded as seahash64; use t1ha::{t1ha0_32, t1ha1, t1ha2_atonce, t1ha2_atonce128}; use twox_hash::{XxHash as XxHash64, XxHash32}; use xxhash2::{hash32 as xxhash32, hash64 as xxhash64}; #[cfg(target_feature = "aes")] use t1ha::t1ha0_ia32aes_noavx; #[cfg(not(target_feature = "aes"))] fn t1ha0_ia32aes_noavx(_data: &[u8], _seed: u64) -> u64 { 0 } #[cfg(target_feature = "avx2")] use t1ha::t1ha0_ia32aes_avx2; #[cfg(not(target_feature = "avx2"))] fn t1ha0_ia32aes_avx2(_data: &[u8], _seed: u64) -> u64 { 0 } #[cfg(target_feature = "avx")] use t1ha::t1ha0_ia32aes_avx; #[cfg(not(target_feature = "avx"))] fn t1ha0_ia32aes_avx(_data: &[u8], _seed: u64) -> u64 { 0 } const KB: usize = 1024; const SEED: u64 = 0x0123456789ABCDEF; const PARAMS: [usize; 7] = [7, 8, 32, 256, KB, 4 * KB, 16 * KB]; lazy_static! { static ref DATA: Vec<u8> = (0..16 * KB).map(|b| b as u8).collect::<Vec<_>>(); }
fn bench_hash32(c: &mut Criterion) { c.bench( "hash32", ParameterizedBenchmark::new( "t1ha0_32", move |b, &&size| { b.iter(|| t1ha0_32(&DATA[..size], SEED)); }, &PARAMS, ) .with_function("murmur3_32", move |b, &&size| { b.iter(|| { let mut r = BufReader::new(&DATA[..size]); murmur3_32(&mut r, SEED as u32) }); }) .with_function("farmhash32", move |b, &&size| { b.iter(|| farmhash32(&DATA[..size], SEED as u32)); }) .with_function("xxhash32", move |b, &&size| { b.iter(|| xxhash32(&DATA[..size], SEED as u32)); }) .with_function("twox_hash::XxHash32", move |b, &&size| { b.iter(|| { let mut h = XxHash32::with_seed(SEED as u32); h.write(&DATA[..size]); h.finish() }); }) .with_function("fxhash32", move |b, &&size| { b.iter(|| fxhash32(&DATA[..size])); }) .throughput(|&&size| Throughput::Bytes(size as u32)), ); } fn bench_hash64(c: &mut Criterion) { let mut bench = ParameterizedBenchmark::new( "t1ha1", move |b, &&size| { b.iter(|| t1ha1(&DATA[..size], SEED)); }, &PARAMS, ) .with_function("t1ha2_atonce", move |b, &&size| { b.iter(|| t1ha2_atonce(&DATA[..size], SEED)); }); if cfg!(target_feature = "aes") { bench = bench.with_function("t1ha0_ia32aes_noavx", move |b, &&size| { b.iter(|| t1ha0_ia32aes_noavx(&DATA[..size], SEED)); }); } if cfg!(target_feature = "avx") { bench = bench.with_function("t1ha0_ia32aes_avx", move |b, &&size| { b.iter(|| t1ha0_ia32aes_avx(&DATA[..size], SEED)); }); } if cfg!(target_feature = "avx2") { bench = bench.with_function("t1ha0_ia32aes_avx2", move |b, &&size| { b.iter(|| t1ha0_ia32aes_avx2(&DATA[..size], SEED)); }); } c.bench( "hash64", bench .with_function("hash_map::DefaultHasher", move |b, &&size| { b.iter(|| { let mut h = DefaultHasher::new(); h.write(&DATA[..size]); h.finish() }); }) .with_function("siphash", move |b, &&size| { b.iter(|| { let mut h = SipHasher::new_with_keys(SEED, SEED); h.write(&DATA[..size]); h.finish() }); }) .with_function("metrohash64", move |b, &&size| { b.iter(|| { let mut h = MetroHash64::with_seed(SEED); h.write(&DATA[..size]); h.finish() }); }) .with_function("farmhash64", move |b, &&size| { b.iter(|| farmhash64(&DATA[..size], SEED)); }) .with_function("fnv64", move |b, &&size| { b.iter(|| { let mut h = FnvHasher::with_key(SEED); h.write(&DATA[..size]); h.finish() }); }) .with_function("xxhash64", move |b, &&size| { b.iter(|| xxhash64(&DATA[..size], SEED)); }) .with_function("twox_hash::XxHash", move |b, &&size| { b.iter(|| { let mut h = XxHash64::with_seed(SEED); h.write(&DATA[..size]); h.finish() }); }) .with_function("seahash", move |b, &&size| { b.iter(|| seahash64(&DATA[..size], SEED, SEED, SEED, SEED)); }) .with_function("fxhash64", move |b, &&size| { b.iter(|| fxhash64(&DATA[..size])); }) .with_function("ahash", move |b, &&size| { let builder = ABuildHasher::new(); b.iter(|| { let mut h = builder.build_hasher(); h.write(&DATA[..size]); h.finish() }); }) .with_function("rustc_hash::FxHasher", move |b, &&size| { b.iter(|| { let mut h = FxHasher::default(); h.write(&DATA[..size]); h.finish() }); }) .throughput(|&&size| Throughput::Bytes(size as u32)), ); } fn bench_hash128(c: &mut Criterion) { let mut bench = ParameterizedBenchmark::new( "t1ha2_atonce128", move |b, &&size| { b.iter(|| t1ha2_atonce128(&DATA[..size], SEED)); }, &PARAMS, ) .with_function("metrohash128", move |b, &&size| { b.iter(|| { let mut h = MetroHash128::with_seed(SEED); h.write(&DATA[..size]); h.finish128() }); }); if cfg!(target_arch = "x86_64") { bench = bench.with_function("murmur3_x64_128", move |b, &&size| { b.iter(|| { let mut r = 
BufReader::new(&DATA[..size]); let mut out = [0; 16]; murmur3_x64_128(&mut r, SEED as u32, &mut out); }); }); } if cfg!(target_arch = "x86") { bench = bench.with_function("murmur3_x86_128", move |b, &&size| { b.iter(|| { let mut r = BufReader::new(&DATA[..size]); let mut out = [0; 16]; murmur3_x86_128(&mut r, SEED as u32, &mut out); }); }); } if cfg!(target_feature = "aes") { bench = bench.with_function("meowhash128", move |b, &&size| { b.iter(|| MeowHasher::digest_with_seed(SEED as u128, &DATA[..size])); }); } c.bench( "hash128", bench.throughput(|&&size| Throughput::Bytes(size as u32)), ); } criterion_group!( benches, bench_memory, bench_hash32, bench_hash64, bench_hash128 ); criterion_main!(benches);
fn bench_memory(c: &mut Criterion) { c.bench( "memory", ParameterizedBenchmark::new( "sum", move |b, &&size| { let s = unsafe { slice::from_raw_parts(DATA.as_ptr() as *mut u32, size / mem::size_of::<u32>()) }; b.iter(|| { black_box(s.iter().fold(0u64, |acc, &x| acc + x as u64)); }) }, &PARAMS, ) .throughput(|&&size| Throughput::Bytes(size as u32)), ); }
function_block-full_function
[ { "content": "#[cfg(not(feature = \"unaligned_access\"))]\n\npub fn t1ha1_be(data: &[u8], seed: u64) -> u64 {\n\n if !aligned_to::<u64, _>(data.as_ptr()) {\n\n unsafe { t1h1_body::<BigEndianUnaligned<u64>>(data, seed) }\n\n } else {\n\n unsafe { t1h1_body::<BigEndianAligned<u64>>(data, seed) }\n\n }\n\n}\n\n\n\n#[inline(always)]\n\nunsafe fn t1h1_body<T>(data: &[u8], seed: u64) -> u64\n\nwhere\n\n T: MemoryModel<Item = u64>,\n\n{\n\n let mut len = data.len() as u64;\n\n let mut a = seed;\n\n let mut b = len;\n\n let mut v = data.as_ptr() as *const u64;\n\n\n\n if unlikely(len > 32) {\n\n let mut c = rot64(len, 17).wrapping_add(seed);\n", "file_path": "src/t1ha1.rs", "rank": 0, "score": 218955.15203135682 }, { "content": "pub fn t1ha0_ia32aes_avx2(data: &[u8], seed: u64) -> u64 {\n\n let mut len = data.len();\n\n let mut a = seed;\n\n let mut b = len as u64;\n\n let mut p = data.as_ptr();\n\n\n\n unsafe {\n\n if unlikely(len > 32) {\n\n let mut x = _mm_set_epi64x(a as i64, b as i64);\n\n let mut y = _mm_aesenc_si128(x, _mm_set_epi64x(PRIME_0 as i64, PRIME_1 as i64));\n\n let mut v = p as *const __m128i;\n\n let detent = v.add(len >> 4);\n\n\n\n p = detent as *const _;\n\n\n\n if (len & 16) != 0 {\n\n x = _mm_add_epi64(x, _mm_loadu_si128(v));\n\n y = _mm_aesenc_si128(x, y);\n\n v = v.add(1);\n\n }\n", "file_path": "src/t1ha0_aes.rs", "rank": 1, "score": 215114.09729650943 }, { "content": "#[cfg(not(feature = \"unaligned_access\"))]\n\npub fn t1ha1_le(data: &[u8], seed: u64) -> u64 {\n\n if !aligned_to::<u64, _>(data.as_ptr()) {\n\n unsafe { t1h1_body::<LittenEndianUnaligned<u64>>(data, seed) }\n\n } else {\n\n unsafe { t1h1_body::<LittenEndianAligned<u64>>(data, seed) }\n\n }\n\n}\n\n\n\n/// The big-endian variant for 64-bit CPU.\n", "file_path": "src/t1ha1.rs", "rank": 2, "score": 213972.9816717637 }, { "content": "pub fn t1ha0_ia32aes(data: &[u8], seed: u64) -> u64 {\n\n let mut len = data.len();\n\n let mut a = seed;\n\n let mut b = len as u64;\n\n let mut p = data.as_ptr();\n\n\n\n unsafe {\n\n if unlikely(len > 32) {\n\n let mut x = _mm_set_epi64x(a as i64, b as i64);\n\n let mut y = _mm_aesenc_si128(x, _mm_set_epi64x(PRIME_5 as i64, PRIME_6 as i64));\n\n let mut v = p as *const __m128i;\n\n let detent = p as usize + len - 127;\n\n\n\n while likely((v as usize) < detent) {\n\n let v0 = _mm_loadu_si128(v.offset(0));\n\n let v1 = _mm_loadu_si128(v.offset(1));\n\n let v2 = _mm_loadu_si128(v.offset(2));\n\n let v3 = _mm_loadu_si128(v.offset(3));\n\n let v4 = _mm_loadu_si128(v.offset(4));\n\n let v5 = _mm_loadu_si128(v.offset(5));\n", "file_path": "src/t1ha0_aes.rs", "rank": 3, "score": 200408.4476646569 }, { "content": "#[cfg(not(target_feature = \"avx\"))]\n\nfn t1ha0_ia32aes_avx(_data: &[u8], _seed: u64) -> u64 {\n\n 0\n\n}\n\n\n\nconst KB: usize = 1024;\n\nconst SEED: u64 = 0x0123456789ABCDEF;\n\nconst PARAMS: [usize; 11] = [7, 8, 32, 64, 256, 512, KB, 2 * KB, 4 * KB, 8 * KB, 16 * KB];\n\n\n\nlazy_static! 
{\n\n static ref DATA: Vec<u8> = (0..16 * KB).map(|b| b as u8).collect::<Vec<_>>();\n\n}\n\n\n", "file_path": "benches/t1ha.rs", "rank": 5, "score": 192585.64909093783 }, { "content": "#[cfg(not(target_feature = \"avx2\"))]\n\nfn t1ha0_ia32aes_avx2(_data: &[u8], _seed: u64) -> u64 {\n\n 0\n\n}\n\n\n\n#[cfg(target_feature = \"avx\")]\n\nuse t1ha::t1ha0_ia32aes_avx;\n", "file_path": "benches/t1ha.rs", "rank": 6, "score": 192560.73842514286 }, { "content": "#[cfg(feature = \"unaligned_access\")]\n\npub fn t1ha2_atonce128(data: &[u8], seed: u64) -> u128 {\n\n t1ha2_atonce128_body::<LittenEndianUnaligned<u64>>(data, seed)\n\n}\n\n\n\n/// The at-once variant with 128-bit result.\n", "file_path": "src/t1ha2.rs", "rank": 8, "score": 191181.0475741045 }, { "content": "/// `t1ha` library offers the t1ha0() function as the fastest for current CPU.\n\n///\n\n/// But actual CPU's features/capabilities and may be significantly different,\n\n/// especially on x86 platform. Therefore, internally, t1ha0() may require\n\n/// dynamic dispatching for choice best implementation.\n\npub fn t1ha0(data: &[u8], seed: u64) -> u64 {\n\n prefer::HASH(data, seed)\n\n}\n\n\n\nmod prefer {\n\n cfg_if! {\n\n if #[cfg(target_pointer_width = \"64\")] {\n\n pub use super::t1ha1 as t1ha0_fallback;\n\n } else {\n\n pub use super::t1ha0_32 as t1ha0_fallback;\n\n }\n\n }\n\n\n\n cfg_if! {\n\n if #[cfg(all(any(target_arch = \"x86\", target_arch = \"x86_64\"), feature = \"runtime_select\", feature = \"std\"))] {\n\n use std::is_x86_feature_detected;\n\n\n\n use lazy_static::lazy_static;\n\n\n\n use super::*;\n", "file_path": "src/lib.rs", "rank": 9, "score": 189311.8527331804 }, { "content": "#[cfg(not(feature = \"unaligned_access\"))]\n\npub fn t1ha0_32be(data: &[u8], seed: u64) -> u64 {\n\n if !aligned_to::<u32, _>(data.as_ptr()) {\n\n unsafe { t1h0_body::<BigEndianUnaligned<u32>>(data, seed) }\n\n } else {\n\n unsafe { t1h0_body::<BigEndianAligned<u32>>(data, seed) }\n\n }\n\n}\n\n\n\n#[inline(always)]\n\nunsafe fn t1h0_body<T>(data: &[u8], seed: u64) -> u64\n\nwhere\n\n T: MemoryModel<Item = u32>,\n\n{\n\n let mut len = data.len();\n\n let mut a = rot32(len as u32, 17).wrapping_add(seed as u32);\n\n let mut b = (len as u32) ^ ((seed >> 32) as u32);\n\n let mut v = data.as_ptr() as *const u32;\n\n\n\n if unlikely(len > 16) {\n\n let mut c = !a;\n", "file_path": "src/t1ha0.rs", "rank": 10, "score": 189311.85273318042 }, { "content": "#[cfg(not(feature = \"unaligned_access\"))]\n\npub fn t1ha2_atonce(data: &[u8], seed: u64) -> u64 {\n\n if !aligned_to::<u64, _>(data.as_ptr()) {\n\n t1ha2_atonce_body::<LittenEndianUnaligned<u64>>(data, seed)\n\n } else {\n\n t1ha2_atonce_body::<LittenEndianAligned<u64>>(data, seed)\n\n }\n\n}\n\n\n", "file_path": "src/t1ha2.rs", "rank": 11, "score": 185178.0828277166 }, { "content": "#[cfg(not(feature = \"unaligned_access\"))]\n\npub fn t1ha0_32le(data: &[u8], seed: u64) -> u64 {\n\n if !aligned_to::<u32, _>(data.as_ptr()) {\n\n unsafe { t1h0_body::<LittenEndianUnaligned<u32>>(data, seed) }\n\n } else {\n\n unsafe { t1h0_body::<LittenEndianAligned<u32>>(data, seed) }\n\n }\n\n}\n\n\n\n/// The big-endian variant for 32-bit CPU.\n", "file_path": "src/t1ha0.rs", "rank": 12, "score": 185178.0828277166 }, { "content": "fn probe<H>(hash: &H, reference: u64, data: &[u8], seed: u64)\n\nwhere\n\n H: Fn(&[u8], u64) -> u64,\n\n{\n\n let h = hash(data, seed);\n\n\n\n assert_eq!(\n\n h, reference,\n\n \"hash(data = {:?}, seed = 0x{:x}) = 0x{:x}, right = 0x{:x}\",\n\n data, seed, h, reference\n\n 
);\n\n}\n\n\n", "file_path": "src/selfcheck.rs", "rank": 14, "score": 169303.18120104802 }, { "content": "fn t1ha2_atonce_body<T: MemoryModel<Item = u64>>(mut data: &[u8], seed: u64) -> u64 {\n\n let mut state = State::default();\n\n let len = data.len();\n\n\n\n state.init_ab(seed, len as u64);\n\n\n\n if unlikely(len > 32) {\n\n state.init_cd(seed, len as u64);\n\n unsafe {\n\n data = t1ha2_loop::<T>(&mut state, data);\n\n }\n\n state.squash();\n\n }\n\n unsafe { t1ha2_tail_ab::<T>(&mut state, data) }\n\n}\n\n\n\n/// The at-once variant with 128-bit result.\n", "file_path": "src/t1ha2.rs", "rank": 15, "score": 162510.48139126948 }, { "content": "fn t1ha2_atonce128_body<T: MemoryModel<Item = u64>>(mut data: &[u8], seed: u64) -> u128 {\n\n let mut state = State::default();\n\n let len = data.len();\n\n\n\n state.init_ab(seed, len as u64);\n\n state.init_cd(seed, len as u64);\n\n\n\n if unlikely(len > 32) {\n\n unsafe {\n\n data = t1ha2_loop::<T>(&mut state, data);\n\n }\n\n }\n\n unsafe { t1ha2_tail_abcd::<T>(&mut state, data) }\n\n}\n\n\n\nimpl T1ha2Hasher {\n\n pub fn with_seeds(seed_x: u64, seed_y: u64) -> Self {\n\n let mut h = Self::default();\n\n\n\n h.state.init_ab(seed_x, seed_y);\n", "file_path": "src/t1ha2.rs", "rank": 16, "score": 154808.41970385343 }, { "content": "#[inline(always)]\n\nfn final_weak_avalanche(a: u64, b: u64) -> u64 {\n\n mux64(rot64(a.wrapping_add(b), 17), PRIME_4).wrapping_add(mix64(a ^ b, PRIME_0))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::selfcheck::selfcheck;\n\n\n\n const T1HA_REFVAL_64LE: [u64; 81] = [\n\n 0,\n\n 0x6A580668D6048674,\n\n 0xA2FE904AFF0D0879,\n\n 0xE3AB9C06FAF4D023,\n\n 0x6AF1C60874C95442,\n\n 0xB3557E561A6C5D82,\n\n 0x0AE73C696F3D37C0,\n\n 0x5EF25F7062324941,\n\n 0x9B784F3B4CE6AF33,\n\n 0x6993BB206A74F070,\n", "file_path": "src/t1ha1.rs", "rank": 17, "score": 126418.02783806887 }, { "content": "#[cfg(feature = \"nightly\")]\n\n#[inline(always)]\n\npub fn prefetch<T>(data: *const T) {\n\n prefetch_read_data(data, 2) // locality (0) - no locality, to (3) - extremely local keep in cache.\n\n}\n\n\n", "file_path": "src/nightly.rs", "rank": 18, "score": 110250.57890377055 }, { "content": "#[inline(always)]\n\npub fn final64(a: u64, b: u64) -> u64 {\n\n let x = a.wrapping_add(rot64(b, 41)).wrapping_mul(PRIME_0);\n\n let y = rot64(a, 23).wrapping_add(b).wrapping_mul(PRIME_6);\n\n mux64(x ^ y, PRIME_5)\n\n}\n\n\n\n/// xor-mul-xor mixer\n", "file_path": "src/bits.rs", "rank": 19, "score": 104595.4836594435 }, { "content": "#[inline(always)]\n\nfn mul_64x64_128(a: u64, b: u64, h: &mut u64) -> u64 {\n\n let r = u128::from(a).wrapping_mul(u128::from(b));\n\n *h = (r >> 64) as u64;\n\n r as u64\n\n}\n", "file_path": "src/bits.rs", "rank": 20, "score": 102664.48786852189 }, { "content": "#[inline(always)]\n\npub fn mixup64(a: &mut u64, b: &mut u64, v: u64, prime: u64) {\n\n let mut h = 0;\n\n *a ^= mul_64x64_128(b.wrapping_add(v), prime, &mut h);\n\n *b = b.wrapping_add(h);\n\n}\n\n\n", "file_path": "src/bits.rs", "rank": 22, "score": 96134.1603182233 }, { "content": "#[cfg(not(feature = \"nightly\"))]\n\n#[inline(always)]\n\npub fn prefetch<T>(_data: *const T) {}\n", "file_path": "src/nightly.rs", "rank": 23, "score": 92129.93506680345 }, { "content": "#[inline(always)]\n\nfn mul_32x32_64(a: u32, b: u32) -> u64 {\n\n u64::from(a).wrapping_mul(u64::from(b))\n\n}\n\n\n", "file_path": "src/bits.rs", "rank": 24, "score": 86244.30509068523 }, { "content": "#[inline(always)]\n\npub fn mix64(v: u64, p: u64) -> u64 {\n\n let v = 
v.wrapping_mul(p);\n\n v ^ rot64(v, 41)\n\n}\n\n\n\n/// xor high and low parts of full 128-bit product\n", "file_path": "src/bits.rs", "rank": 25, "score": 85969.62545431327 }, { "content": "#[inline(always)]\n\npub fn final32(a: u32, b: u32) -> u64 {\n\n let mut l = u64::from(b ^ rot32(a, 13)) | u64::from(a) << 32;\n\n l = l.wrapping_mul(PRIME_0 as u64);\n\n l ^= l.wrapping_shr(41);\n\n l = l.wrapping_mul(PRIME_4 as u64);\n\n l ^= l.wrapping_shr(47);\n\n l = l.wrapping_mul(PRIME_6 as u64);\n\n l\n\n}\n\n\n", "file_path": "src/bits.rs", "rank": 26, "score": 84375.20737731038 }, { "content": "#[inline(always)]\n\npub fn mux64(v: u64, prime: u64) -> u64 {\n\n let mut h = 0;\n\n let l = mul_64x64_128(v, prime, &mut h);\n\n l ^ h\n\n}\n\n\n", "file_path": "src/bits.rs", "rank": 27, "score": 84195.72083902123 }, { "content": "#[inline(always)]\n\nfn can_read_underside<T>(ptr: *const T, size: usize) -> bool {\n\n ((PAGESIZE - size) & (ptr as usize)) != 0\n\n}\n\n\n", "file_path": "src/bits.rs", "rank": 28, "score": 80748.49595633795 }, { "content": "#[inline(always)]\n\npub fn rot64(v: u64, n: u32) -> u64 {\n\n v.rotate_right(n)\n\n}\n\n\n", "file_path": "src/bits.rs", "rank": 29, "score": 79086.3442647782 }, { "content": "fn bench_t1ha(c: &mut Criterion) {\n\n c.bench(\n\n \"memory\",\n\n ParameterizedBenchmark::new(\n\n \"sum\",\n\n move |b, &&size| {\n\n let s = unsafe {\n\n slice::from_raw_parts(DATA.as_ptr() as *mut u32, size / mem::size_of::<u32>())\n\n };\n\n\n\n b.iter(|| {\n\n black_box(s.iter().fold(0u64, |acc, &x| acc + x as u64));\n\n })\n\n },\n\n &PARAMS,\n\n )\n\n .throughput(|&&size| Throughput::Bytes(size as u32)),\n\n );\n\n\n\n let mut bench = ParameterizedBenchmark::new(\n", "file_path": "benches/t1ha.rs", "rank": 32, "score": 77229.44796135601 }, { "content": "pub fn aligned_to<T, P>(p: *const P) -> bool {\n\n (p as usize) % mem::size_of::<T>() == 0\n\n}\n\n\n\n// 'magic' primes\n\npub const PRIME_0: u64 = 0xEC99_BF0D_8372_CAAB;\n\npub const PRIME_1: u64 = 0x8243_4FE9_0EDC_EF39;\n\npub const PRIME_2: u64 = 0xD4F0_6DB9_9D67_BE4B;\n\npub const PRIME_3: u64 = 0xBD9C_ACC2_2C6E_9571;\n\npub const PRIME_4: u64 = 0x9C06_FAF4_D023_E3AB;\n\npub const PRIME_5: u64 = 0xC060_724A_8424_F345;\n\npub const PRIME_6: u64 = 0xCB5A_F53A_E3AA_AC31;\n\n\n", "file_path": "src/bits.rs", "rank": 34, "score": 66328.22246282696 }, { "content": "pub fn selfcheck<H>(hash: H, reference_values: &[u64])\n\nwhere\n\n H: Fn(&[u8], u64) -> u64,\n\n{\n\n let mut iter = reference_values.iter();\n\n\n\n probe(&hash, *iter.next().unwrap(), &[][..], 0);\n\n probe(&hash, *iter.next().unwrap(), &[][..], !0);\n\n probe(&hash, *iter.next().unwrap(), &T1HA_TEST_PATTERN[..], 0);\n\n\n\n for i in 1..64 {\n\n probe(\n\n &hash,\n\n *iter.next().unwrap(),\n\n &T1HA_TEST_PATTERN[..i],\n\n 1 << i - 1,\n\n );\n\n }\n\n\n\n for i in 1..=7 {\n", "file_path": "src/selfcheck.rs", "rank": 35, "score": 60426.942359658744 }, { "content": "#[cfg(not(feature = \"nightly\"))]\n\n#[inline(always)]\n\npub fn unlikely(b: bool) -> bool {\n\n b\n\n}\n\n\n", "file_path": "src/nightly.rs", "rank": 36, "score": 50927.50411317864 }, { "content": "#[cfg(not(feature = \"nightly\"))]\n\n#[inline(always)]\n\npub fn likely(b: bool) -> bool {\n\n b\n\n}\n\n\n", "file_path": "src/nightly.rs", "rank": 37, "score": 50927.50411317864 }, { "content": "#[inline(always)]\n\npub fn mixup32(a: &mut u32, b: &mut u32, v: u32, prime: u32) {\n\n let l = mul_32x32_64(b.wrapping_add(v), prime);\n\n *a ^= l as u32;\n\n *b = b.wrapping_add((l >> 32) as 
u32);\n\n}\n\n\n", "file_path": "src/bits.rs", "rank": 38, "score": 39371.17156959705 }, { "content": "fn main() {\n\n if version_meta().unwrap().channel == Channel::Nightly {\n\n println!(\"cargo:rustc-cfg=nightly\");\n\n }\n\n\n\n if cfg!(target_arch = \"x86\") || cfg!(target_arch = \"x86_64\") {\n\n println!(\"cargo:rustc-cfg=unaligned_access\");\n\n }\n\n}\n", "file_path": "build.rs", "rank": 39, "score": 37721.95708394229 }, { "content": "//!\n\n//! t1ha1 = 64-bit, BASELINE FAST PORTABLE HASH:\n\n//!\n\n//! - Runs faster on 64-bit platforms in other cases may runs slowly.\n\n//! - Portable and stable, returns same 64-bit result\n\n//! on all architectures and CPUs.\n\n//! - Unfortunately it fails the \"strict avalanche criteria\",\n\n//! see test results at https://github.com/demerphq/smhasher.\n\n//!\n\n//! This flaw is insignificant for the t1ha1() purposes and imperceptible\n\n//! from a practical point of view.\n\n//! However, nowadays this issue has resolved in the next t1ha2(),\n\n//! that was initially planned to providing a bit more quality.\n\n#![allow(clippy::cast_ptr_alignment, clippy::many_single_char_names)]\n\n\n\nuse crate::{bits::*, nightly::*};\n\n\n\n/// The little-endian variant for 64-bit CPU.\n\n#[cfg(feature = \"unaligned_access\")]\n", "file_path": "src/t1ha1.rs", "rank": 40, "score": 29655.516730422485 }, { "content": " let mut d = len ^ rot64(seed, 17);\n\n let detent = v as usize + len as usize - 31;\n\n\n\n while likely((v as usize) < detent) {\n\n let w0 = T::fetch(v.offset(0));\n\n let w1 = T::fetch(v.offset(1));\n\n let w2 = T::fetch(v.offset(2));\n\n let w3 = T::fetch(v.offset(3));\n\n v = v.add(4);\n\n prefetch(v);\n\n\n\n let d02 = w0 ^ rot64(w2.wrapping_add(d), 17);\n\n let c13 = w1 ^ rot64(w3.wrapping_add(c), 17);\n\n d = d.wrapping_sub(b ^ rot64(w1, 31));\n\n c = c.wrapping_add(a ^ rot64(w0, 41));\n\n b ^= PRIME_0.wrapping_mul(c13.wrapping_add(w2));\n\n a ^= PRIME_1.wrapping_mul(d02.wrapping_add(w3));\n\n }\n\n\n\n a ^= PRIME_6.wrapping_mul(rot64(c, 17).wrapping_add(d));\n", "file_path": "src/t1ha1.rs", "rank": 41, "score": 29654.694364016024 }, { "content": " 0x338E1CBB4F858226,\n\n 0xFC6B5C5CF7A8D806,\n\n 0x8973CAADDE8DA50C,\n\n 0x9C6D47AE32EBAE72,\n\n 0x1EBF1F9F21D26D78,\n\n 0x80A9704B8E153859,\n\n 0x6AFD20A939F141FB,\n\n 0xC35F6C2B3B553EEF,\n\n 0x59529E8B0DC94C1A,\n\n 0x1569DF036EBC4FA1,\n\n 0xDA32B88593C118F9,\n\n 0xF01E4155FF5A5660,\n\n 0x765A2522DCE2B185,\n\n 0xCEE95554128073EF,\n\n 0x60F072A5CA51DE2F,\n\n ];\n\n\n\n #[test]\n\n fn test_t1ha1_le() {\n\n selfcheck(t1ha1_le, &T1HA_REFVAL_64LE[..])\n\n }\n\n\n\n #[test]\n\n fn test_t1ha1_be() {\n\n selfcheck(t1ha1_be, &T1HA_REFVAL_64BE[..])\n\n }\n\n}\n", "file_path": "src/t1ha1.rs", "rank": 42, "score": 29653.529151348055 }, { "content": " 0x7BAD25A859D87B5D,\n\n 0xAD645ADCF7414F1D,\n\n 0xB07F517E88D7AFB3,\n\n 0xB321C06FB5FFAB5C,\n\n 0xD50F162A1EFDD844,\n\n 0x1DFD3D1924FBE319,\n\n 0xDFAEAB2F09EF7E78,\n\n 0xA7603B5AF07A0B1E,\n\n 0x41CD044C0E5A4EE3,\n\n 0xF64D2F86E813BF33,\n\n 0xFF9FDB99305EB06A,\n\n ];\n\n\n\n const T1HA_REFVAL_64BE: [u64; 81] = [\n\n 0,\n\n 0x6A580668D6048674,\n\n 0xDECC975A0E3B8177,\n\n 0xE3AB9C06FAF4D023,\n\n 0xE401FA8F1B6AF969,\n\n 0x67DB1DAE56FB94E3,\n", "file_path": "src/t1ha1.rs", "rank": 43, "score": 29652.128727567935 }, { "content": " 0x1106266A09B7A073,\n\n 0x550339B1EF2C7BBB,\n\n 0x290A2BAF590045BB,\n\n 0xA182C1258C09F54A,\n\n 0x137D53C34BE7143A,\n\n 0xF6D2B69C6F42BEDC,\n\n 0x39643EAF2CA2E4B4,\n\n 0x22A81F139A2C9559,\n\n 0x5B3D6AEF0AF33807,\n\n 
0x56E3F80A68643C08,\n\n 0x9E423BE502378780,\n\n 0xCDB0986F9A5B2FD5,\n\n 0xD5B3C84E7933293F,\n\n 0xE5FB8C90399E9742,\n\n 0x5D393C1F77B2CF3D,\n\n 0xC8C82F5B2FF09266,\n\n 0xACA0230CA6F7B593,\n\n 0xCB5805E2960D1655,\n\n 0x7E2AD5B704D77C95,\n\n 0xC5E903CDB8B9EB5D,\n", "file_path": "src/t1ha1.rs", "rank": 44, "score": 29650.11097891588 }, { "content": " b ^= PRIME_5.wrapping_mul(c.wrapping_add(rot64(d, 17)));\n\n\n\n len &= 31;\n\n }\n\n\n\n match len {\n\n 32 | 31 | 30 | 29 | 28 | 27 | 26 | 25 => {\n\n b = b.wrapping_add(mux64(T::fetch(v.offset(0)), PRIME_4));\n\n a = a.wrapping_add(mux64(T::fetch(v.offset(1)), PRIME_3));\n\n b = b.wrapping_add(mux64(T::fetch(v.offset(2)), PRIME_2));\n\n a = a.wrapping_add(mux64(T::tail(v.offset(3), len as isize), PRIME_1));\n\n final_weak_avalanche(a, b)\n\n }\n\n 24 | 23 | 22 | 21 | 20 | 19 | 18 | 17 => {\n\n a = a.wrapping_add(mux64(T::fetch(v.offset(0)), PRIME_3));\n\n b = b.wrapping_add(mux64(T::fetch(v.offset(1)), PRIME_2));\n\n a = a.wrapping_add(mux64(T::tail(v.offset(2), len as isize), PRIME_1));\n\n final_weak_avalanche(a, b)\n\n }\n\n 16 | 15 | 14 | 13 | 12 | 11 | 10 | 9 => {\n", "file_path": "src/t1ha1.rs", "rank": 45, "score": 29650.11097891588 }, { "content": " 0x5B3E60EEB51DDCD8,\n\n 0x0A7C717017756FE7,\n\n 0xA73773805CA31934,\n\n 0x4DBD6BB7A31E85FD,\n\n 0x24F619D3D5BC2DB4,\n\n 0x3E4AF35A1678D636,\n\n 0x84A1A8DF8D609239,\n\n 0x359C862CD3BE4FCD,\n\n 0xCF3A39F5C27DC125,\n\n 0xC0FF62F8FD5F4C77,\n\n 0x5E9F2493DDAA166C,\n\n 0x17424152BE1CA266,\n\n 0xA78AFA5AB4BBE0CD,\n\n 0x7BFB2E2CEF118346,\n\n 0x647C3E0FF3E3D241,\n\n 0x0352E4055C13242E,\n\n 0x6F42FC70EB660E38,\n\n 0x0BEBAD4FABF523BA,\n\n 0x9269F4214414D61D,\n\n 0x1CA8760277E6006C,\n", "file_path": "src/t1ha1.rs", "rank": 46, "score": 29650.11097891588 }, { "content": " 0xF1E95DF109076C4C,\n\n 0x4E1EB70C58E48540,\n\n 0x5FDD7649D8EC44E4,\n\n 0x559122C706343421,\n\n 0x380133D58665E93D,\n\n 0x9CE74296C8C55AE4,\n\n 0x3556F9A5757AB6D0,\n\n 0xF62751F7F25C469E,\n\n 0x851EEC67F6516D94,\n\n 0xED463EE3848A8695,\n\n 0xDC8791FEFF8ED3AC,\n\n 0x2569C744E1A282CF,\n\n 0xF90EB7C1D70A80B9,\n\n 0x68DFA6A1B8050A4C,\n\n 0x94CCA5E8210D2134,\n\n 0xF5CC0BEABC259F52,\n\n 0x40DBC1F51618FDA7,\n\n 0x0807945BF0FB52C6,\n\n 0xE5EF7E09DE70848D,\n\n 0x63E1DF35FEBE994A,\n", "file_path": "src/t1ha1.rs", "rank": 47, "score": 29650.11097891588 }, { "content": " 0x4CC7D0D21CC03511,\n\n 0x8385DF382CFB3E93,\n\n 0xF17699D0564D348A,\n\n 0xF77EE7F8274A4C8D,\n\n 0xB9D8CEE48903BABE,\n\n 0xFE0EBD2A82B9CFE9,\n\n 0xB49FB6397270F565,\n\n 0x173735C8C342108E,\n\n 0xA37C7FBBEEC0A2EA,\n\n 0xC13F66F462BB0B6E,\n\n 0x0C04F3C2B551467E,\n\n 0x76A9CB156810C96E,\n\n 0x2038850919B0B151,\n\n 0xCEA19F2B6EED647B,\n\n 0x6746656D2FA109A4,\n\n 0xF05137F221007F37,\n\n 0x892FA9E13A3B4948,\n\n 0x4D57B70D37548A32,\n\n 0x1A7CFB3D566580E6,\n\n 0x7CB30272A45E3FAC,\n", "file_path": "src/t1ha1.rs", "rank": 48, "score": 29650.11097891588 }, { "content": " b = b.wrapping_add(mux64(T::fetch(v.offset(0)), PRIME_2));\n\n a = a.wrapping_add(mux64(T::tail(v.offset(1), len as isize), PRIME_1));\n\n final_weak_avalanche(a, b)\n\n }\n\n 8 | 7 | 6 | 5 | 4 | 3 | 2 | 1 => {\n\n a = a.wrapping_add(mux64(T::tail(v, len as isize), PRIME_1));\n\n final_weak_avalanche(a, b)\n\n }\n\n 0 => final_weak_avalanche(a, b),\n\n _ => unreachable!(),\n\n }\n\n}\n\n\n", "file_path": "src/t1ha1.rs", "rank": 49, "score": 29650.11097891588 }, { "content": " 0x137CCFFD9D51423F,\n\n 0xB87D96F3B82DF266,\n\n 0x33349AEE7472ED37,\n\n 0x5CC0D3C99555BC07,\n\n 0x4A8F4FA196D964EF,\n\n 0xE82A0D64F281FBFA,\n\n 
0x38A1BAC2C36823E1,\n\n 0x77D197C239FD737E,\n\n 0xFB07746B4E07DF26,\n\n 0xC8A2198E967672BD,\n\n 0x5F1A146D143FA05A,\n\n 0x26B877A1201AB7AC,\n\n 0x74E5B145214723F8,\n\n 0xE9CE10E3C70254BC,\n\n 0x299393A0C05B79E8,\n\n 0xFD2D2B9822A5E7E2,\n\n 0x85424FEA50C8E50A,\n\n 0xE6839E714B1FFFE5,\n\n 0x27971CCB46F9112A,\n\n 0xC98695A2E0715AA9,\n", "file_path": "src/t1ha1.rs", "rank": 50, "score": 29650.11097891588 }, { "content": " 0x2025E73769720D5A,\n\n 0xAD6120B2B8A152E1,\n\n 0x2A71D9F13959F2B7,\n\n 0x8A20849A27C32548,\n\n 0x0BCBC9FE3B57884E,\n\n 0x0E028D255667AEAD,\n\n 0xBE66DAD3043AB694,\n\n 0xB00E4C1238F9E2D4,\n\n 0x5C54BDE5AE280E82,\n\n 0x0E22B86754BC3BC4,\n\n 0x016707EBF858B84D,\n\n 0x990015FBC9E095EE,\n\n 0x8B9AF0A3E71F042F,\n\n 0x6AA56E88BD380564,\n\n 0xAACE57113E681A0F,\n\n 0x19F81514AFA9A22D,\n\n 0x80DABA3D62BEAC79,\n\n 0x715210412CABBF46,\n\n 0xD8FA0B9E9D6AA93F,\n\n 0x6C2FC5A4109FD3A2,\n", "file_path": "src/t1ha1.rs", "rank": 51, "score": 29650.11097891588 }, { "content": "#[inline(always)]\n\nfn final128(state: &mut State) -> u128 {\n\n mixup64(\n\n &mut state.a,\n\n &mut state.b,\n\n rot64(state.c, 41) ^ state.d,\n\n PRIME_0,\n\n );\n\n mixup64(\n\n &mut state.b,\n\n &mut state.c,\n\n rot64(state.d, 23) ^ state.a,\n\n PRIME_6,\n\n );\n\n mixup64(\n\n &mut state.c,\n\n &mut state.d,\n\n rot64(state.a, 19) ^ state.b,\n\n PRIME_5,\n\n );\n\n mixup64(\n", "file_path": "src/t1ha2.rs", "rank": 52, "score": 29435.61352949677 }, { "content": "#[inline(always)]\n\npub fn rot32(v: u32, n: u32) -> u32 {\n\n v.rotate_right(n)\n\n}\n\n\n", "file_path": "src/bits.rs", "rank": 53, "score": 28763.001858918018 }, { "content": " 0xA494842ACF4B802C,\n\n 0xFC6D9CDDE2C34A3F,\n\n 0x4ED6863CE455F7A7,\n\n 0x630914D0DB7AAE98,\n\n ];\n\n\n\n #[cfg(any(target_feature = \"aes\", target_feature = \"avx\"))]\n\n #[test]\n\n fn test_ia32aes_avx() {\n\n selfcheck(t1ha0_ia32aes, &T1HA_REFVAL_IA32AES_A[..])\n\n }\n\n\n\n #[cfg(target_feature = \"avx2\")]\n\n #[test]\n\n fn test_ia32aes_avx2() {\n\n selfcheck(t1ha0_ia32aes_avx2, &T1HA_REFVAL_IA32AES_B[..])\n\n }\n\n}\n", "file_path": "src/t1ha0_aes.rs", "rank": 54, "score": 27562.594061829615 }, { "content": " let lo =\n\n _mm_cvtsi128_si32(x) as u32 | u64::from(_mm_cvtsi128_si32(_mm_shuffle_epi32(x, 1))) << 32;\n\n let x = _mm_unpackhi_epi64(x, x);\n\n let hi =\n\n _mm_cvtsi128_si32(x) as u32 | u64::from(_mm_cvtsi128_si32(_mm_shuffle_epi32(x, 1))) << 32;\n\n\n\n (lo, hi)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::selfcheck::selfcheck;\n\n\n\n const T1HA_REFVAL_IA32AES_A: [u64; 81] = [\n\n 0,\n\n 0x772C7311BE32FF42,\n\n 0xB231AC660E5B23B5,\n\n 0x71F6DF5DA3B4F532,\n\n 0x555859635365F660,\n", "file_path": "src/t1ha0_aes.rs", "rank": 55, "score": 27559.691644909934 }, { "content": " _ => unreachable!(),\n\n }\n\n }\n\n}\n\n\n\n#[cfg(any(target_feature = \"avx\", target_feature = \"avx2\"))]\n\nunsafe fn mm_empty() {\n\n _mm256_zeroupper();\n\n}\n\n\n\n#[cfg(all(\n\n not(any(target_feature = \"avx\", target_feature = \"avx2\")),\n\n target_feature = \"mmx\"\n\n))]\n\nunsafe fn mm_empty() {\n\n _mm_empty();\n\n}\n\n\n\n#[cfg(not(any(\n\n target_feature = \"avx\",\n", "file_path": "src/t1ha0_aes.rs", "rank": 56, "score": 27558.905826626247 }, { "content": "#![allow(clippy::cast_ptr_alignment, clippy::many_single_char_names)]\n\n\n\n#[cfg(target_arch = \"x86\")]\n\nuse core::arch::x86::*;\n\n\n\n#[cfg(target_arch = \"x86_64\")]\n\nuse core::arch::x86_64::*;\n\n\n\nuse crate::{bits::*, nightly::*};\n\n\n", "file_path": "src/t1ha0_aes.rs", "rank": 57, 
"score": 27558.089310658474 }, { "content": " _mm_cvtsi128_si64(_mm_unpackhi_epi64(x, x)),\n\n )\n\n}\n\n\n\n#[cfg(all(\n\n target_arch = \"x86\",\n\n any(target_feature = \"sse4.1\", target_feature = \"avx\")\n\n))]\n\nunsafe fn extract_i64(x: __m128i) -> (i64, i64) {\n\n (\n\n (u64::from(_mm_extract_epi32(x, 0) as u32) | (_mm_extract_epi32(x, 1) as u64) << 32) as i64,\n\n (u64::from(_mm_extract_epi32(x, 2) as u32) | (_mm_extract_epi32(x, 3) as u64) << 32) as i64,\n\n )\n\n}\n\n\n\n#[cfg(all(\n\n target_arch = \"x86\",\n\n not(any(target_feature = \"sse4.1\", target_feature = \"avx\"))\n\n))]\n\nunsafe fn extract_i64(x: __m128i) -> (i64, i64) {\n", "file_path": "src/t1ha0_aes.rs", "rank": 58, "score": 27558.06562593116 }, { "content": " target_feature = \"avx2\"\n\n ))]\n\n const T1HA_REFVAL_IA32AES_B: [u64; 81] = [\n\n 0,\n\n 0x772C7311BE32FF42,\n\n 0x4398F62A8CB6F72A,\n\n 0x71F6DF5DA3B4F532,\n\n 0x555859635365F660,\n\n 0xE98808F1CD39C626,\n\n 0x2EB18FAF2163BB09,\n\n 0x7B9DD892C8019C87,\n\n 0xE2B1431C4DA4D15A,\n\n 0x1984E718A5477F70,\n\n 0x08DD17B266484F79,\n\n 0x4C83A05D766AD550,\n\n 0x92DCEBB131D1907D,\n\n 0xD67BC6FC881B8549,\n\n 0xF6A9886555FBF66B,\n\n 0x6E31616D7F33E25E,\n\n 0x36E31B7426E3049D,\n", "file_path": "src/t1ha0_aes.rs", "rank": 59, "score": 27557.22290735624 }, { "content": " PRIME_1,\n\n );\n\n final64(a, b)\n\n }\n\n 8 | 7 | 6 | 5 | 4 | 3 | 2 | 1 => {\n\n mixup64(\n\n &mut b,\n\n &mut a,\n\n LittenEndianUnaligned::<u64>::tail(v, len as isize),\n\n PRIME_1,\n\n );\n\n final64(a, b)\n\n }\n\n 0 => final64(a, b),\n\n _ => unreachable!(),\n\n }\n\n }\n\n}\n\n\n\n#[cfg(all(\n\n any(target_arch = \"x86\", target_arch = \"x86_64\"),\n\n target_feature = \"avx2\"\n\n))]\n", "file_path": "src/t1ha0_aes.rs", "rank": 60, "score": 27557.068885041834 }, { "content": " target_feature = \"avx2\",\n\n target_feature = \"mmx\"\n\n)))]\n\nunsafe fn mm_empty() {}\n\n\n\n#[cfg(all(\n\n target_arch = \"x86_64\",\n\n any(target_feature = \"sse4.1\", target_feature = \"avx\")\n\n))]\n\nunsafe fn extract_i64(x: __m128i) -> (i64, i64) {\n\n (_mm_extract_epi64(x, 0), _mm_extract_epi64(x, 1))\n\n}\n\n\n\n#[cfg(all(\n\n target_arch = \"x86_64\",\n\n not(any(target_feature = \"sse4.1\", target_feature = \"avx\"))\n\n))]\n\nunsafe fn extract_i64(x: __m128i) -> (i64, i64) {\n\n (\n\n _mm_cvtsi128_si64(x),\n", "file_path": "src/t1ha0_aes.rs", "rank": 61, "score": 27556.799073661667 }, { "content": "\n\n mm_empty();\n\n }\n\n\n\n let v = p as *const u64;\n\n\n\n match len {\n\n 32 | 31 | 30 | 29 | 28 | 27 | 26 | 25 => {\n\n mixup64(\n\n &mut a,\n\n &mut b,\n\n LittenEndianUnaligned::<u64>::fetch(v.offset(0)),\n\n PRIME_4,\n\n );\n\n mixup64(\n\n &mut b,\n\n &mut a,\n\n LittenEndianUnaligned::<u64>::fetch(v.offset(1)),\n\n PRIME_3,\n\n );\n", "file_path": "src/t1ha0_aes.rs", "rank": 62, "score": 27555.758544918277 }, { "content": " y = _mm_add_epi64(x, y);\n\n x = _mm_aesdec_si128(x, _mm_loadu_si128(v));\n\n v = v.add(1)\n\n }\n\n\n\n x = _mm_add_epi64(_mm_aesdec_si128(x, _mm_aesenc_si128(y, x)), y);\n\n\n\n let (lo, hi) = extract_i64(x);\n\n\n\n a = lo as u64;\n\n b = hi as u64;\n\n\n\n mm_empty();\n\n\n\n p = v as *const _;\n\n len &= 15;\n\n }\n\n\n\n let v = p as *const u64;\n\n\n", "file_path": "src/t1ha0_aes.rs", "rank": 63, "score": 27555.62068173434 }, { "content": " match len {\n\n 32 | 31 | 30 | 29 | 28 | 27 | 26 | 25 => {\n\n mixup64(\n\n &mut a,\n\n &mut b,\n\n LittenEndianUnaligned::<u64>::fetch(v.offset(0)),\n\n PRIME_4,\n\n );\n\n mixup64(\n\n &mut b,\n\n &mut a,\n\n 
LittenEndianUnaligned::<u64>::fetch(v.offset(1)),\n\n PRIME_3,\n\n );\n\n mixup64(\n\n &mut a,\n\n &mut b,\n\n LittenEndianUnaligned::<u64>::fetch(v.offset(2)),\n\n PRIME_2,\n\n );\n", "file_path": "src/t1ha0_aes.rs", "rank": 64, "score": 27555.598701311003 }, { "content": " );\n\n mixup64(\n\n &mut a,\n\n &mut b,\n\n LittenEndianUnaligned::<u64>::fetch(v.offset(1)),\n\n PRIME_2,\n\n );\n\n mixup64(\n\n &mut b,\n\n &mut a,\n\n LittenEndianUnaligned::<u64>::tail(v.offset(2), len as isize),\n\n PRIME_1,\n\n );\n\n final64(a, b)\n\n }\n\n 16 | 15 | 14 | 13 | 12 | 11 | 10 | 9 => {\n\n mixup64(\n\n &mut a,\n\n &mut b,\n\n LittenEndianUnaligned::<u64>::fetch(v.offset(0)),\n", "file_path": "src/t1ha0_aes.rs", "rank": 65, "score": 27555.598701311003 }, { "content": " mixup64(\n\n &mut b,\n\n &mut a,\n\n LittenEndianUnaligned::<u64>::tail(v.offset(3), len as isize),\n\n PRIME_1,\n\n );\n\n final64(a, b)\n\n }\n\n 24 | 23 | 22 | 21 | 20 | 19 | 18 | 17 => {\n\n mixup64(\n\n &mut b,\n\n &mut a,\n\n LittenEndianUnaligned::<u64>::fetch(v.offset(0)),\n\n PRIME_3,\n\n );\n\n mixup64(\n\n &mut a,\n\n &mut b,\n\n LittenEndianUnaligned::<u64>::fetch(v.offset(1)),\n\n PRIME_2,\n", "file_path": "src/t1ha0_aes.rs", "rank": 66, "score": 27555.576991647667 }, { "content": " mixup64(\n\n &mut a,\n\n &mut b,\n\n LittenEndianUnaligned::<u64>::fetch(v.offset(2)),\n\n PRIME_2,\n\n );\n\n mixup64(\n\n &mut b,\n\n &mut a,\n\n LittenEndianUnaligned::<u64>::tail(v.offset(3), len as isize),\n\n PRIME_1,\n\n );\n\n final64(a, b)\n\n }\n\n 24 | 23 | 22 | 21 | 20 | 19 | 18 | 17 => {\n\n mixup64(\n\n &mut b,\n\n &mut a,\n\n LittenEndianUnaligned::<u64>::fetch(v.offset(0)),\n\n PRIME_3,\n", "file_path": "src/t1ha0_aes.rs", "rank": 67, "score": 27555.576991647667 }, { "content": " );\n\n mixup64(\n\n &mut b,\n\n &mut a,\n\n LittenEndianUnaligned::<u64>::tail(v.offset(2), len as isize),\n\n PRIME_1,\n\n );\n\n final64(a, b)\n\n }\n\n 16 | 15 | 14 | 13 | 12 | 11 | 10 | 9 => {\n\n mixup64(\n\n &mut a,\n\n &mut b,\n\n LittenEndianUnaligned::<u64>::fetch(v.offset(0)),\n\n PRIME_2,\n\n );\n\n mixup64(\n\n &mut b,\n\n &mut a,\n\n LittenEndianUnaligned::<u64>::tail(v.offset(1), len as isize),\n", "file_path": "src/t1ha0_aes.rs", "rank": 68, "score": 27555.555547772026 }, { "content": " PRIME_2,\n\n );\n\n mixup64(\n\n &mut b,\n\n &mut a,\n\n LittenEndianUnaligned::<u64>::tail(v.offset(1), len as isize),\n\n PRIME_1,\n\n );\n\n final64(a, b)\n\n }\n\n 8 | 7 | 6 | 5 | 4 | 3 | 2 | 1 => {\n\n mixup64(\n\n &mut b,\n\n &mut a,\n\n LittenEndianUnaligned::<u64>::tail(v, len as isize),\n\n PRIME_1,\n\n );\n\n final64(a, b)\n\n }\n\n 0 => final64(a, b),\n", "file_path": "src/t1ha0_aes.rs", "rank": 69, "score": 27555.310283708077 }, { "content": " x = _mm_add_epi64(y, x);\n\n y = t;\n\n }\n\n\n\n while v < detent {\n\n let v0y = _mm_add_epi64(y, _mm_loadu_si128(v.offset(0)));\n\n let v1x = _mm_sub_epi64(x, _mm_loadu_si128(v.offset(1)));\n\n\n\n v = v.add(2);\n\n\n\n x = _mm_aesdec_si128(x, v0y);\n\n y = _mm_aesdec_si128(y, v1x);\n\n }\n\n\n\n x = _mm_add_epi64(_mm_aesdec_si128(x, _mm_aesenc_si128(y, x)), y);\n\n\n\n let (lo, hi) = extract_i64(x);\n\n\n\n a = lo as u64;\n\n b = hi as u64;\n", "file_path": "src/t1ha0_aes.rs", "rank": 70, "score": 27554.85867534521 }, { "content": " 0x9CABA22D10A2F690,\n\n 0x0D10032511F58111,\n\n 0xE9A36EF5EEA3CD58,\n\n 0xC79242DE194D9D7C,\n\n 0xC3871AA0435EE5C8,\n\n 0x52890BED43CCF4CD,\n\n 0x07A1D0861ACCD373,\n\n 0x227B816FF0FEE9ED,\n\n 0x59FFBF73AACFC0C4,\n\n 0x09AB564F2BEDAD0C,\n\n 0xC05F744F2EE38318,\n\n 
0x7B50B621D547C661,\n\n 0x0C1F71CB4E68E5D1,\n\n 0x0E33A47881D4DBAA,\n\n 0xF5C3BF198E9A7C2E,\n\n 0x16328FD8C0F68A91,\n\n 0xA3E399C9AB3E9A59,\n\n 0x163AE71CBCBB18B8,\n\n 0x18F17E4A8C79F7AB,\n\n 0x9250E2EA37014B45,\n", "file_path": "src/t1ha0_aes.rs", "rank": 71, "score": 27552.07390009077 }, { "content": " 0xED512FE96A4FAE81,\n\n 0x9E1099B8140D7BA3,\n\n 0xDFD5A5BE1E6FE9A6,\n\n 0x1D82600E23B66DD4,\n\n 0x3FA3C3B7EE7B52CE,\n\n 0xEE84F7D2A655EF4C,\n\n 0x2A4361EC769E3BEB,\n\n 0x22E4B38916636702,\n\n 0x0063096F5D39A115,\n\n 0x6C51B24DAAFA5434,\n\n 0xBAFB1DB1B411E344,\n\n 0xFF529F161AE0C4B0,\n\n 0x1290EAE3AC0A686F,\n\n 0xA7B0D4585447D1BE,\n\n 0xAED3D18CB6CCAD53,\n\n 0xFC73D46F8B41BEC6,\n\n ];\n\n\n\n #[cfg(all(\n\n any(target_arch = \"x86\", target_arch = \"x86_64\"),\n", "file_path": "src/t1ha0_aes.rs", "rank": 72, "score": 27552.07390009077 }, { "content": " 0x7BBBB111D60B03E4,\n\n 0x3DAA4A3071A0BD88,\n\n 0xA28828D790A2D6DC,\n\n 0xBC70FC88F64BE3F1,\n\n 0xA3E48008BA4333C7,\n\n 0x739E435ACAFC79F7,\n\n 0x42BBB360BE007CC6,\n\n 0x4FFB6FD2AF74EC92,\n\n 0x2A799A2994673146,\n\n 0xBE0A045B69D48E9F,\n\n 0x549432F54FC6A278,\n\n 0x371D3C60369FC702,\n\n 0xDB4557D415B08CA7,\n\n 0xE8692F0A83850B37,\n\n 0x022E46AEB36E9AAB,\n\n 0x117AC9B814E4652D,\n\n 0xA361041267AE9048,\n\n 0x277CB51C961C3DDA,\n\n 0xAFFC96F377CB8A8D,\n\n 0x83CC79FA01DD1BA7,\n", "file_path": "src/t1ha0_aes.rs", "rank": 73, "score": 27552.07390009077 }, { "content": " 0xE98808F1CD39C626,\n\n 0x2EB18FAF2163BB09,\n\n 0x7B9DD892C8019C87,\n\n 0xE2B1431C4DA4D15A,\n\n 0x1984E718A5477F70,\n\n 0x08DD17B266484F79,\n\n 0x4C83A05D766AD550,\n\n 0x92DCEBB131D1907D,\n\n 0xD67BC6FC881B8549,\n\n 0xF6A9886555FBF66B,\n\n 0x6E31616D7F33E25E,\n\n 0x36E31B7426E3049D,\n\n 0x4F8E4FAF46A13F5F,\n\n 0x03EB0CB3253F819F,\n\n 0x636A7769905770D2,\n\n 0x3ADF3781D16D1148,\n\n 0x92D19CB1818BC9C2,\n\n 0x283E68F4D459C533,\n\n 0xFA83A8A88DECAA04,\n\n 0x8C6F00368EAC538C,\n", "file_path": "src/t1ha0_aes.rs", "rank": 74, "score": 27552.07390009077 }, { "content": "\n\n len &= 15;\n\n\n\n let mut salt = y;\n\n\n\n while v.offset(7) < detent {\n\n let mut t = _mm_aesenc_si128(_mm_loadu_si128(v), salt);\n\n t = _mm_aesdec_si128(t, _mm_loadu_si128(v.offset(1)));\n\n t = _mm_aesdec_si128(t, _mm_loadu_si128(v.offset(2)));\n\n t = _mm_aesdec_si128(t, _mm_loadu_si128(v.offset(3)));\n\n t = _mm_aesdec_si128(t, _mm_loadu_si128(v.offset(4)));\n\n t = _mm_aesdec_si128(t, _mm_loadu_si128(v.offset(5)));\n\n t = _mm_aesdec_si128(t, _mm_loadu_si128(v.offset(6)));\n\n t = _mm_aesdec_si128(t, _mm_loadu_si128(v.offset(7)));\n\n\n\n v = v.add(8);\n\n prefetch(v);\n\n\n\n salt = _mm_add_epi64(salt, _mm_set_epi64x(PRIME_5 as i64, PRIME_6 as i64));\n\n t = _mm_aesenc_si128(x, t);\n", "file_path": "src/t1ha0_aes.rs", "rank": 75, "score": 27552.07390009077 }, { "content": " 0x7B66B0CF3797B322,\n\n 0x5131E122FDABA3FF,\n\n 0x6E59FF515C08C7A9,\n\n 0xBA2C5269B2C377B0,\n\n 0xA9D24FD368FE8A2B,\n\n 0x22DB13D32E33E891,\n\n 0x7B97DFC804B876E5,\n\n 0xC598BDFCD0E834F9,\n\n 0xB256163D3687F5A7,\n\n 0x66D7A73C6AEF50B3,\n\n 0xBB34C6A4396695D2,\n\n 0x7F46E1981C3256AD,\n\n 0x4B25A9B217A6C5B4,\n\n 0x7A0A6BCDD2321DA9,\n\n 0x0A1F55E690A7B44E,\n\n 0x8F451A91D7F05244,\n\n 0x624D5D3C9B9800A7,\n\n 0x09DDC2B6409DDC25,\n\n 0x3E155765865622B6,\n\n 0x96519FAC9511B381,\n", "file_path": "src/t1ha0_aes.rs", "rank": 76, "score": 27552.07390009077 }, { "content": " 0x512E58482FE4FBF0,\n\n 0x1AB260EA7D54AE1C,\n\n 0x67976F12CC28BBBD,\n\n 0x0607B5B2E6250156,\n\n 0x7E700BEA717AD36E,\n\n 0x06A058D9D61CABB3,\n\n 0x57DA5324A824972F,\n\n 
0x1193BA74DBEBF7E7,\n\n 0xC18DC3140E7002D4,\n\n 0x9F7CCC11DFA0EF17,\n\n 0xC487D6C20666A13A,\n\n 0xB67190E4B50EF0C8,\n\n 0xA53DAA608DF0B9A5,\n\n 0x7E13101DE87F9ED3,\n\n 0x7F8955AE2F05088B,\n\n 0x2DF7E5A097AD383F,\n\n 0xF027683A21EA14B5,\n\n 0x9BB8AEC3E3360942,\n\n 0x92BE39B54967E7FE,\n\n 0x978C6D332E7AFD27,\n", "file_path": "src/t1ha0_aes.rs", "rank": 77, "score": 27552.07390009077 }, { "content": " y = _mm_aesdec_si128(y, v1x);\n\n\n\n let v2y = _mm_add_epi64(y, _mm_loadu_si128(v.offset(2)));\n\n let v3x = _mm_sub_epi64(x, _mm_loadu_si128(v.offset(3)));\n\n x = _mm_aesdec_si128(x, v2y);\n\n y = _mm_aesdec_si128(y, v3x);\n\n\n\n v = v.add(4)\n\n }\n\n\n\n if (len & 32) != 0 {\n\n let v0y = _mm_add_epi64(y, _mm_loadu_si128(v.offset(0)));\n\n let v1x = _mm_sub_epi64(x, _mm_loadu_si128(v.offset(1)));\n\n x = _mm_aesdec_si128(x, v0y);\n\n y = _mm_aesdec_si128(y, v1x);\n\n\n\n v = v.add(2)\n\n }\n\n\n\n if (len & 16) != 0 {\n", "file_path": "src/t1ha0_aes.rs", "rank": 78, "score": 27552.07390009077 }, { "content": " let v6 = _mm_loadu_si128(v.offset(6));\n\n let v7 = _mm_loadu_si128(v.offset(7));\n\n v = v.add(8);\n\n prefetch(v);\n\n\n\n let v0y = _mm_aesenc_si128(v0, y);\n\n let v2x6 = _mm_aesenc_si128(v2, _mm_xor_si128(x, v6));\n\n let v45_67 = _mm_xor_si128(_mm_aesenc_si128(v4, v5), _mm_add_epi64(v6, v7));\n\n\n\n let v0y7_1 = _mm_aesdec_si128(_mm_sub_epi64(v7, v0y), v1);\n\n let v2x6_3 = _mm_aesenc_si128(v2x6, v3);\n\n\n\n x = _mm_aesenc_si128(v45_67, _mm_add_epi64(x, y));\n\n y = _mm_aesenc_si128(v2x6_3, _mm_xor_si128(v0y7_1, v5));\n\n }\n\n\n\n if (len & 64) != 0 {\n\n let v0y = _mm_add_epi64(y, _mm_loadu_si128(v.offset(0)));\n\n let v1x = _mm_sub_epi64(x, _mm_loadu_si128(v.offset(1)));\n\n x = _mm_aesdec_si128(x, v0y);\n", "file_path": "src/t1ha0_aes.rs", "rank": 79, "score": 27552.07390009077 }, { "content": " 0x4F8E4FAF46A13F5F,\n\n 0x03EB0CB3253F819F,\n\n 0x636A7769905770D2,\n\n 0x3ADF3781D16D1148,\n\n 0x92D19CB1818BC9C2,\n\n 0x283E68F4D459C533,\n\n 0xFA83A8A88DECAA04,\n\n 0x8C6F00368EAC538C,\n\n 0x7B66B0CF3797B322,\n\n 0x5131E122FDABA3FF,\n\n 0x6E59FF515C08C7A9,\n\n 0xBA2C5269B2C377B0,\n\n 0xA9D24FD368FE8A2B,\n\n 0x22DB13D32E33E891,\n\n 0x7B97DFC804B876E5,\n\n 0xC598BDFCD0E834F9,\n\n 0xB256163D3687F5A7,\n\n 0x66D7A73C6AEF50B3,\n\n 0xE810F88E85CEA11A,\n\n 0x4814F8F3B83E4394,\n", "file_path": "src/t1ha0_aes.rs", "rank": 80, "score": 27552.07390009077 }, { "content": "\n\n lazy_static! 
{\n\n pub static ref HASH: fn(data: &[u8], seed: u64) -> u64 = t1ha0_resolve();\n\n }\n\n\n\n #[cfg(not(target_feature = \"aes\"))]\n\n pub use self::t1ha0_fallback as t1ha0_ia32aes_noavx;\n\n\n\n #[cfg(not(target_feature = \"avx\"))]\n\n pub use self::t1ha0_fallback as t1ha0_ia32aes_avx;\n\n\n\n #[cfg(not(target_feature = \"avx2\"))]\n\n pub use self::t1ha0_fallback as t1ha0_ia32aes_avx2;\n\n\n\n fn t1ha0_resolve() -> fn(&[u8], u64) -> u64 {\n\n if is_x86_feature_detected!(\"avx2\") {\n\n t1ha0_ia32aes_avx2\n\n } else if is_x86_feature_detected!(\"avx\") {\n\n t1ha0_ia32aes_avx\n\n } else if is_x86_feature_detected!(\"aes\") {\n", "file_path": "src/lib.rs", "rank": 82, "score": 24.938564202517 }, { "content": "#[macro_use]\n\nextern crate lazy_static;\n\n#[macro_use]\n\nextern crate criterion;\n\n\n\nuse std::mem;\n\nuse std::slice;\n\n\n\nuse criterion::{black_box, Criterion, ParameterizedBenchmark, Throughput};\n\n\n\nuse t1ha::{t1ha0_32, t1ha1, t1ha2_atonce, t1ha2_atonce128, T1ha2Hasher};\n\n\n\n#[cfg(target_feature = \"avx2\")]\n\nuse t1ha::t1ha0_ia32aes_avx2;\n\n#[cfg(not(target_feature = \"avx2\"))]\n", "file_path": "benches/t1ha.rs", "rank": 83, "score": 22.298703324676254 }, { "content": " b.iter(|| t1ha2_atonce(&DATA[..size], SEED));\n\n },\n\n &PARAMS,\n\n )\n\n .with_function(\"t1ha2_atonce128\", move |b, &&size| {\n\n b.iter(|| t1ha2_atonce128(&DATA[..size], SEED));\n\n })\n\n .with_function(\"t1ha2_stream\", move |b, &&size| {\n\n b.iter(|| {\n\n let mut h = T1ha2Hasher::with_seeds(SEED, SEED);\n\n h.update(&DATA[..size]);\n\n h.finish()\n\n });\n\n })\n\n .with_function(\"t1ha2_stream128\", move |b, &&size| {\n\n b.iter(|| {\n\n let mut h = T1ha2Hasher::with_seeds(SEED, SEED);\n\n h.update(&DATA[..size]);\n\n h.finish128() as u64\n\n });\n\n })\n\n .throughput(|&&size| Throughput::Bytes(size as u32)),\n\n );\n\n}\n\n\n\ncriterion_group!(benches, bench_t1ha);\n\ncriterion_main!(benches);\n", "file_path": "benches/t1ha.rs", "rank": 87, "score": 15.688487817224862 }, { "content": " \"t1ha0_32\",\n\n move |b, &&size| {\n\n b.iter(|| t1ha0_32(&DATA[..size], SEED));\n\n },\n\n &PARAMS,\n\n );\n\n\n\n if cfg!(target_feature = \"avx\") {\n\n bench = bench.with_function(\"t1ha0_ia32aes_avx\", move |b, &&size| {\n\n b.iter(|| t1ha0_ia32aes_avx(&DATA[..size], SEED));\n\n });\n\n }\n\n if cfg!(target_feature = \"avx2\") {\n\n bench = bench.with_function(\"t1ha0_ia32aes_avx2\", move |b, &&size| {\n\n b.iter(|| t1ha0_ia32aes_avx2(&DATA[..size], SEED));\n\n });\n\n }\n\n\n\n c.bench(\n\n \"t1ha0\",\n", "file_path": "benches/t1ha.rs", "rank": 88, "score": 15.047919457237503 }, { "content": " pub use t1ha0::t1ha0_32be as t1ha0_32;\n\n pub use t1ha1::t1ha1_be as t1ha1;\n\n }\n\n}\n\n\n\ncfg_if! {\n\n if #[cfg(any(target_arch = \"x86\", target_arch = \"x86_64\"))] {\n\n cfg_if! {\n\n if #[cfg(any(target_feature = \"aes\", target_feature = \"avx\", target_feature = \"avx2\"))] {\n\n mod t1ha0_aes;\n\n }\n\n }\n\n cfg_if! {\n\n if #[cfg(target_feature = \"aes\")] {\n\n pub use t1ha0_aes::t1ha0_ia32aes as t1ha0_ia32aes_noavx;\n\n }\n\n }\n\n cfg_if! {\n\n if #[cfg(target_feature = \"avx\")] {\n\n pub use t1ha0_aes::t1ha0_ia32aes as t1ha0_ia32aes_avx;\n", "file_path": "src/lib.rs", "rank": 89, "score": 14.985577044291587 }, { "content": " }\n\n }\n\n cfg_if! 
{\n\n if #[cfg(target_feature = \"avx2\")] {\n\n pub use t1ha0_aes::t1ha0_ia32aes_avx2;\n\n }\n\n }\n\n }\n\n}\n\n\n\n/// An implementation of the `t1ha` hash function.\n\n///\n\n/// See the [crate documentation](index.html) for more details.\n\n#[derive(Clone, Copy, Debug, Default)]\n\npub struct T1haHasher(u64);\n\n\n\nimpl T1haHasher {\n\n /// Create a `t1ha` hasher starting with a state corresponding to the hash `key`.\n\n #[inline]\n\n pub fn with_seed(seed: u64) -> Self {\n", "file_path": "src/lib.rs", "rank": 92, "score": 14.253264206586511 }, { "content": " bench.throughput(|&&size| Throughput::Bytes(size as u32)),\n\n );\n\n\n\n c.bench(\n\n \"t1ha1\",\n\n ParameterizedBenchmark::new(\n\n \"t1ha1\",\n\n move |b, &&size| {\n\n b.iter(|| t1ha1(&DATA[..size], SEED));\n\n },\n\n &PARAMS,\n\n )\n\n .throughput(|&&size| Throughput::Bytes(size as u32)),\n\n );\n\n\n\n c.bench(\n\n \"t1ha2\",\n\n ParameterizedBenchmark::new(\n\n \"t1ha2_atonce\",\n\n move |b, &&size| {\n", "file_path": "benches/t1ha.rs", "rank": 93, "score": 14.12143702152304 }, { "content": "use core::marker::PhantomData;\n\nuse core::mem;\n\nuse core::ptr;\n\n\n\nuse num_traits::{PrimInt, WrappingShr, Zero};\n\n\n\nuse crate::nightly::likely;\n\n\n\nconst PAGESIZE: usize = 4096;\n\n\n", "file_path": "src/bits.rs", "rank": 94, "score": 11.808102733230097 }, { "content": " while likely((v as usize) < detent) {\n\n let d = v;\n\n v = v.add(4);\n\n prefetch(p);\n\n\n\n t1ha2_update::<T>(state, d)\n\n }\n\n\n\n &data[(v as usize) - (p as usize)..]\n\n}\n\n\n\n#[inline(always)]\n\nunsafe fn t1ha2_tail_ab<T: MemoryModel<Item = u64>>(state: &mut State, data: &[u8]) -> u64 {\n\n let v = data.as_ptr() as *const u64;\n\n let len = data.len();\n\n\n\n match len {\n\n 32 | 31 | 30 | 29 | 28 | 27 | 26 | 25 => {\n\n mixup64(&mut state.a, &mut state.b, T::fetch(v.offset(0)), PRIME_4);\n\n mixup64(&mut state.b, &mut state.a, T::fetch(v.offset(1)), PRIME_3);\n", "file_path": "src/t1ha2.rs", "rank": 95, "score": 11.568506134964387 }, { "content": "#[macro_use]\n\nextern crate cfg_if;\n\n\n\nuse core::hash::{BuildHasherDefault, Hasher};\n\n\n\nmod bits;\n\nmod nightly;\n\nmod t1ha0;\n\nmod t1ha1;\n\nmod t1ha2;\n\n\n\npub use t1ha0::{t1ha0_32be, t1ha0_32le};\n\npub use t1ha1::{t1ha1_be, t1ha1_le};\n\npub use t1ha2::{t1ha2_atonce, t1ha2_atonce128, T1ha2Hasher};\n\n\n\ncfg_if! {\n\n if #[cfg(target_endian = \"little\")] {\n\n pub use t1ha0::t1ha0_32le as t1ha0_32;\n\n pub use t1ha1::t1ha1_le as t1ha1;\n\n } else {\n", "file_path": "src/lib.rs", "rank": 96, "score": 11.379133193292896 }, { "content": " |data, seed| {\n\n let mut h = T1ha2Hasher::with_seeds(seed, seed);\n\n h.update(data);\n\n h.finish128() as u64\n\n },\n\n &T1HA_REFVAL_2STREAM128[..],\n\n )\n\n }\n\n}\n", "file_path": "src/t1ha2.rs", "rank": 97, "score": 10.657356864672359 }, { "content": "#![allow(clippy::cast_ptr_alignment)]\n\n\n\nuse crate::{bits::*, nightly::*};\n\n\n\n/// An implementation of `t1ha2` stream hasher.\n\n#[derive(Debug, Default)]\n\npub struct T1ha2Hasher {\n\n state: State,\n\n buffer: [u8; 32],\n\n partial: usize,\n\n total: usize,\n\n}\n\n\n\n#[derive(Debug, Default)]\n", "file_path": "src/t1ha2.rs", "rank": 98, "score": 10.446981177371725 } ]
Rust
src/style/builder.rs
vinaychandra/embedded-text
71c5e8abbb940deff1fcbab0c06c2c2fced5de10
use crate::{ alignment::{HorizontalTextAlignment, LeftAligned, TopAligned, VerticalTextAlignment}, style::{ height_mode::{Exact, HeightMode}, vertical_overdraw::FullRowsOnly, TabSize, TextBoxStyle, }, }; use embedded_graphics::{ prelude::*, style::{TextStyle, TextStyleBuilder}, }; pub struct TextBoxStyleBuilder<C, F, A, V, H> where C: PixelColor, F: Font + Copy, A: HorizontalTextAlignment, V: VerticalTextAlignment, H: HeightMode, { text_style_builder: TextStyleBuilder<C, F>, alignment: A, vertical_alignment: V, height_mode: H, line_spacing: i32, tab_size: TabSize<F>, underlined: bool, strikethrough: bool, } impl<C, F> TextBoxStyleBuilder<C, F, LeftAligned, TopAligned, Exact<FullRowsOnly>> where C: PixelColor, F: Font + Copy, { #[inline] #[must_use] pub fn new(font: F) -> Self { Self { text_style_builder: TextStyleBuilder::new(font), alignment: LeftAligned, vertical_alignment: TopAligned, height_mode: Exact(FullRowsOnly), line_spacing: 0, tab_size: TabSize::default(), underlined: false, strikethrough: false, } } #[inline] #[must_use] pub fn from_text_style(text_style: TextStyle<C, F>) -> Self { let mut text_style_builder = TextStyleBuilder::new(text_style.font); if let Some(color) = text_style.background_color { text_style_builder = text_style_builder.background_color(color); } if let Some(color) = text_style.text_color { text_style_builder = text_style_builder.text_color(color); } Self { text_style_builder, ..Self::new(text_style.font) } } } impl<C, F, A, V, H> TextBoxStyleBuilder<C, F, A, V, H> where C: PixelColor, F: Font + Copy, A: HorizontalTextAlignment, V: VerticalTextAlignment, H: HeightMode, { #[inline] #[must_use] pub fn text_color(self, text_color: C) -> Self { Self { text_style_builder: self.text_style_builder.text_color(text_color), ..self } } #[inline] #[must_use] pub fn line_spacing(self, line_spacing: i32) -> Self { Self { line_spacing, ..self } } #[inline] #[must_use] pub fn background_color(self, background_color: C) -> Self { Self { text_style_builder: self.text_style_builder.background_color(background_color), ..self } } #[inline] #[must_use] #[deprecated] pub fn text_style(self, text_style: TextStyle<C, F>) -> Self { let mut text_style_builder = self.text_style_builder; if let Some(color) = text_style.background_color { text_style_builder = text_style_builder.background_color(color); } if let Some(color) = text_style.text_color { text_style_builder = text_style_builder.text_color(color); } Self { text_style_builder, ..self } } #[inline] #[must_use] pub fn alignment<TA: HorizontalTextAlignment>( self, alignment: TA, ) -> TextBoxStyleBuilder<C, F, TA, V, H> { TextBoxStyleBuilder { text_style_builder: self.text_style_builder, alignment, line_spacing: self.line_spacing, vertical_alignment: self.vertical_alignment, height_mode: self.height_mode, tab_size: self.tab_size, underlined: self.underlined, strikethrough: self.strikethrough, } } #[inline] #[must_use] pub fn vertical_alignment<VA: VerticalTextAlignment>( self, vertical_alignment: VA, ) -> TextBoxStyleBuilder<C, F, A, VA, H> { TextBoxStyleBuilder { text_style_builder: self.text_style_builder, alignment: self.alignment, line_spacing: self.line_spacing, vertical_alignment, height_mode: self.height_mode, tab_size: self.tab_size, underlined: self.underlined, strikethrough: self.strikethrough, } } #[inline] #[must_use] pub fn height_mode<HM: HeightMode>( self, height_mode: HM, ) -> TextBoxStyleBuilder<C, F, A, V, HM> { TextBoxStyleBuilder { text_style_builder: self.text_style_builder, alignment: self.alignment, 
line_spacing: self.line_spacing, vertical_alignment: self.vertical_alignment, height_mode, tab_size: self.tab_size, underlined: self.underlined, strikethrough: self.strikethrough, } } #[inline] #[must_use] pub fn tab_size(self, tab_size: TabSize<F>) -> Self { Self { tab_size, ..self } } #[inline] #[must_use] pub fn underlined(self, underlined: bool) -> Self { Self { underlined, ..self } } #[inline] #[must_use] pub fn strikethrough(self, strikethrough: bool) -> Self { Self { strikethrough, ..self } } #[inline] #[must_use] pub fn build(self) -> TextBoxStyle<C, F, A, V, H> { TextBoxStyle { text_style: self.text_style_builder.build(), alignment: self.alignment, line_spacing: self.line_spacing, vertical_alignment: self.vertical_alignment, height_mode: self.height_mode, tab_size: self.tab_size, underlined: self.underlined, strikethrough: self.strikethrough, } } } #[cfg(test)] mod test { use super::TextBoxStyleBuilder; use embedded_graphics::{ fonts::Font6x8, pixelcolor::BinaryColor, style::{TextStyle, TextStyleBuilder}, }; #[test] #[allow(deprecated)] fn test_text_style_copy() { let text_styles: [TextStyle<_, _>; 2] = [ TextStyleBuilder::new(Font6x8) .text_color(BinaryColor::On) .build(), TextStyleBuilder::new(Font6x8) .background_color(BinaryColor::On) .build(), ]; for &text_style in text_styles.iter() { let style = TextBoxStyleBuilder::new(Font6x8) .text_style(text_style) .build(); assert_eq!(style.text_style, text_style); } } #[test] fn test_text_style_copy_ctr() { let text_styles: [TextStyle<_, _>; 2] = [ TextStyleBuilder::new(Font6x8) .text_color(BinaryColor::On) .build(), TextStyleBuilder::new(Font6x8) .background_color(BinaryColor::On) .build(), ]; for &text_style in text_styles.iter() { let style = TextBoxStyleBuilder::from_text_style(text_style).build(); assert_eq!(style.text_style, text_style); } } }
use crate::{ alignment::{HorizontalTextAlignment, LeftAligned, TopAligned, VerticalTextAlignment}, style::{ height_mode::{Exact, HeightMode}, vertical_overdraw::FullRowsOnly, TabSize, TextBoxStyle, }, }; use embedded_graphics::{ prelude::*, style::{TextStyle, TextStyleBuilder}, }; pub struct TextBoxStyleBuilder<C, F, A, V, H> where C: PixelColor, F: Font + Copy, A: HorizontalTextAlignment, V: VerticalTextAlignment, H: HeightMode, { text_style_builder: TextStyleBuilder<C, F>, alignment: A, vertical_alignment: V, height_mode: H, line_spacing: i32, tab_size: TabSize<F>, underlined: bool, strikethrough: bool, } impl<C, F> TextBoxStyleBuilder<C, F, LeftAligned, TopAligned, Exact<FullRowsOnly>> where C: PixelColor, F: Font + Copy, { #[inline] #[must_use] pub fn new(font: F) -> Self { Self { text_style_builder: TextStyleBuilder::new(font), alignment: LeftAligned, vertical_alignment: TopAligned, height_mode: Exact(FullRowsOnly), line_spacing: 0, tab_size: TabSize::default(), underlined: false, strikethrough: false, } } #[inline] #[must_use] pub fn from_text_style(text_style: TextStyle<C, F>) -> Self { let mut text_style_builder = TextStyleBuilder::new(text_style.font); if let Some(color) = text_style.background_color { text_style_builder = text_style_builder.background_color(color); } if let Some(color) = text_style.text_color { text_style_builder = text_style_builder.text_color(color); } Self { text_style_builder, ..Self::new(text_style.font) } } } impl<C, F, A, V, H> TextBoxStyleBuilder<C, F, A, V, H> where C: PixelColor, F: Font + Copy, A: HorizontalTextAlignment, V: VerticalTextAlignment, H: HeightMode, { #[inline] #[must_use] pub fn text_color(self, text_color: C) -> Self { Self { text_style_builder: self.text_style_builder.text_color(text_color), ..self } } #[inline] #[must_use] pub fn line_spacing(self, line_spacing: i32) -> Self { Self { line_spacing, ..self } } #[inline] #[must_use] pub fn background_color(self, background_color: C) -> Self { Self { text_style_builder: self.text_style_builder.background_color(background_color), ..self } } #[inline] #[must_use] #[deprecated] pub fn text_style(self, text_style: TextStyle<C, F>) -> Self { let mut text_style_builder = self.text_style_builder; if let Some(color) = text_style.background_color { text_style_builder = text_style_builder.background_color(color); }
Self { text_style_builder, ..self } } #[inline] #[must_use] pub fn alignment<TA: HorizontalTextAlignment>( self, alignment: TA, ) -> TextBoxStyleBuilder<C, F, TA, V, H> { TextBoxStyleBuilder { text_style_builder: self.text_style_builder, alignment, line_spacing: self.line_spacing, vertical_alignment: self.vertical_alignment, height_mode: self.height_mode, tab_size: self.tab_size, underlined: self.underlined, strikethrough: self.strikethrough, } } #[inline] #[must_use] pub fn vertical_alignment<VA: VerticalTextAlignment>( self, vertical_alignment: VA, ) -> TextBoxStyleBuilder<C, F, A, VA, H> { TextBoxStyleBuilder { text_style_builder: self.text_style_builder, alignment: self.alignment, line_spacing: self.line_spacing, vertical_alignment, height_mode: self.height_mode, tab_size: self.tab_size, underlined: self.underlined, strikethrough: self.strikethrough, } } #[inline] #[must_use] pub fn height_mode<HM: HeightMode>( self, height_mode: HM, ) -> TextBoxStyleBuilder<C, F, A, V, HM> { TextBoxStyleBuilder { text_style_builder: self.text_style_builder, alignment: self.alignment, line_spacing: self.line_spacing, vertical_alignment: self.vertical_alignment, height_mode, tab_size: self.tab_size, underlined: self.underlined, strikethrough: self.strikethrough, } } #[inline] #[must_use] pub fn tab_size(self, tab_size: TabSize<F>) -> Self { Self { tab_size, ..self } } #[inline] #[must_use] pub fn underlined(self, underlined: bool) -> Self { Self { underlined, ..self } } #[inline] #[must_use] pub fn strikethrough(self, strikethrough: bool) -> Self { Self { strikethrough, ..self } } #[inline] #[must_use] pub fn build(self) -> TextBoxStyle<C, F, A, V, H> { TextBoxStyle { text_style: self.text_style_builder.build(), alignment: self.alignment, line_spacing: self.line_spacing, vertical_alignment: self.vertical_alignment, height_mode: self.height_mode, tab_size: self.tab_size, underlined: self.underlined, strikethrough: self.strikethrough, } } } #[cfg(test)] mod test { use super::TextBoxStyleBuilder; use embedded_graphics::{ fonts::Font6x8, pixelcolor::BinaryColor, style::{TextStyle, TextStyleBuilder}, }; #[test] #[allow(deprecated)] fn test_text_style_copy() { let text_styles: [TextStyle<_, _>; 2] = [ TextStyleBuilder::new(Font6x8) .text_color(BinaryColor::On) .build(), TextStyleBuilder::new(Font6x8) .background_color(BinaryColor::On) .build(), ]; for &text_style in text_styles.iter() { let style = TextBoxStyleBuilder::new(Font6x8) .text_style(text_style) .build(); assert_eq!(style.text_style, text_style); } } #[test] fn test_text_style_copy_ctr() { let text_styles: [TextStyle<_, _>; 2] = [ TextStyleBuilder::new(Font6x8) .text_color(BinaryColor::On) .build(), TextStyleBuilder::new(Font6x8) .background_color(BinaryColor::On) .build(), ]; for &text_style in text_styles.iter() { let style = TextBoxStyleBuilder::from_text_style(text_style).build(); assert_eq!(style.text_style, text_style); } } }
if let Some(color) = text_style.text_color { text_style_builder = text_style_builder.text_color(color); }
if_condition
[ { "content": "fn demo_loop<V>(window: &mut Window, bounds: &mut Rectangle, alignment: V) -> bool\n\nwhere\n\n V: VerticalTextAlignment + std::fmt::Debug,\n\n for<'a> &'a StyledTextBox<'a, BinaryColor, Font6x8, LeftAligned, TopAligned, Exact<FullRowsOnly>>:\n\n Drawable<BinaryColor>,\n\n{\n\n let text = \"Hello, World!\\nLorem Ipsum is simply dummy text of the printing and typesetting \\\n\n industry. Lorem Ipsum has been the industry's standard dummy text ever since the 1500s, when \\\n\n an unknown printer took a galley of type and scrambled it to make a type specimen book.\";\n\n loop {\n\n let mut display: SimulatorDisplay<BinaryColor> = SimulatorDisplay::new(Size::new(255, 255));\n\n\n\n let textbox_style = TextBoxStyleBuilder::new(Font6x8)\n\n .vertical_alignment(alignment)\n\n .text_color(BinaryColor::On)\n\n .build();\n\n\n\n let tb = TextBox::new(text, *bounds).into_styled(textbox_style);\n\n tb.draw(&mut display).unwrap();\n\n\n", "file_path": "examples/interactive_vertical.rs", "rank": 0, "score": 194030.33036462235 }, { "content": "fn demo_loop<H>(window: &mut Window, bounds: &mut Rectangle, height_mode: H) -> bool\n\nwhere\n\n H: HeightMode + std::fmt::Debug,\n\n for<'a> &'a StyledTextBox<'a, BinaryColor, Font6x8, LeftAligned, TopAligned, Exact<FullRowsOnly>>:\n\n Drawable<BinaryColor>,\n\n{\n\n let text = \"Hello, World!\\nLorem Ipsum is simply dummy text of the printing and typesetting \\\n\n industry. Lorem Ipsum has been the industry's standard dummy text ever since the 1500s, when \\\n\n an unknown printer took a galley of type and scrambled it to make a type specimen book.\";\n\n loop {\n\n let mut display: SimulatorDisplay<BinaryColor> = SimulatorDisplay::new(Size::new(255, 255));\n\n\n\n let textbox_style = TextBoxStyleBuilder::new(Font6x8)\n\n .text_color(BinaryColor::On)\n\n .height_mode(height_mode)\n\n .build();\n\n\n\n let tb = TextBox::new(text, *bounds).into_styled(textbox_style);\n\n tb.draw(&mut display).unwrap();\n\n\n", "file_path": "examples/interactive_fit.rs", "rank": 1, "score": 170098.24671369573 }, { "content": "type LineIteratorSource<'a, C, F, A, V, H, SP> =\n\n fn(\n\n TextBoxStyle<C, F, A, V, H>,\n\n Option<Token<'a>>,\n\n Cursor<F>,\n\n Parser<'a>,\n\n ) -> StyledLinePixelIterator<'a, C, F, SP, A, V, H>;\n\n\n\n/// Pixel iterator for styled text.\n\npub struct StyledTextBoxIterator<'a, C, F, A, V, H, SP>\n\nwhere\n\n C: PixelColor,\n\n F: Font + Copy,\n\n A: HorizontalTextAlignment,\n\n V: VerticalTextAlignment,\n\n H: HeightMode,\n\n SP: SpaceConfig<Font = F>,\n\n{\n\n style: TextBoxStyle<C, F, A, V, H>,\n\n state: State<'a, C, F, SP, A, V, H>,\n", "file_path": "src/rendering/mod.rs", "rank": 2, "score": 164287.04748116457 }, { "content": "fn str_width<F: Font>(s: &str, ignore_cr: bool) -> u32 {\n\n let mut width = 0;\n\n let mut current_width = 0;\n\n for c in s.chars() {\n\n if !ignore_cr && c == '\\r' {\n\n width = current_width.max(width);\n\n current_width = 0;\n\n } else {\n\n current_width += F::total_char_width(c);\n\n }\n\n }\n\n\n\n current_width.max(width)\n\n}\n\n\n", "file_path": "src/utils/font_ext.rs", "rank": 3, "score": 163945.63140984275 }, { "content": "fn demo_loop<A>(window: &mut Window, bounds: &mut Rectangle, alignment: A) -> bool\n\nwhere\n\n A: HorizontalTextAlignment + core::fmt::Debug,\n\n for<'a> &'a StyledTextBox<'a, BinaryColor, Font6x8, A, TopAligned, Exact<FullRowsOnly>>:\n\n Drawable<BinaryColor>,\n\n{\n\n let text = \"Hello, World!\\nLorem Ipsum is simply dummy text of the printing and typesetting \\\n\n 
industry. Lorem Ipsum has been the industry's standard dummy text ever since the 1500s, when \\\n\n an unknown printer took a galley of type and scrambled it to make a type specimen book. \\n\\\n\n super\\u{AD}­cali\\u{AD}­fragi\\u{AD}­listic\\u{AD}­espeali\\u{AD}­docious\";\n\n loop {\n\n let mut display: SimulatorDisplay<BinaryColor> = SimulatorDisplay::new(Size::new(255, 255));\n\n\n\n let textbox_style = TextBoxStyleBuilder::new(Font6x8)\n\n .alignment(alignment)\n\n .text_color(BinaryColor::On)\n\n .build();\n\n\n\n TextBox::new(text, *bounds)\n\n .into_styled(textbox_style)\n", "file_path": "examples/interactive.rs", "rank": 4, "score": 150335.6718630278 }, { "content": "fn max_str_width<F: Font>(s: &str, max_width: u32, ignore_cr: bool) -> (u32, &str) {\n\n let mut width = 0;\n\n let mut current_width = 0;\n\n for (idx, c) in s.char_indices() {\n\n if !ignore_cr && c == '\\r' {\n\n width = current_width.max(width);\n\n current_width = 0;\n\n } else {\n\n let new_width = current_width + F::total_char_width(c);\n\n if new_width > max_width {\n\n width = current_width.max(width);\n\n return (width, unsafe { s.get_unchecked(0..idx) });\n\n } else {\n\n current_width = new_width;\n\n }\n\n }\n\n }\n\n width = current_width.max(width);\n\n (width, s)\n\n}\n", "file_path": "src/utils/font_ext.rs", "rank": 5, "score": 142855.59645752015 }, { "content": "fn benchmark_render_textbox_both_aligned(c: &mut Criterion) {\n\n let style = TextBoxStyleBuilder::new(Font6x8)\n\n .alignment(CenterAligned)\n\n .vertical_alignment(CenterAligned)\n\n .text_color(BinaryColor::On)\n\n .build();\n\n\n\n c.bench_function(\"TextBox, H/V CenterAligned\", |b| {\n\n b.iter(|| {\n\n let obj = TextBox::new(\n\n black_box(TEXT),\n\n Rectangle::new(Point::zero(), Point::new(6 * 15 - 1, 7)),\n\n )\n\n .into_styled(style);\n\n let object = obj.create_renderer();\n\n object.collect::<Vec<Pixel<BinaryColor>>>()\n\n })\n\n });\n\n}\n\n\n\ncriterion_group!(\n\n render,\n\n benchmark_render_text,\n\n benchmark_render_textbox,\n\n benchmark_render_textbox_aligned,\n\n benchmark_render_textbox_vertical_aligned,\n\n benchmark_render_textbox_both_aligned,\n\n);\n\ncriterion_main!(render);\n", "file_path": "benches/render.rs", "rank": 6, "score": 136024.5885387392 }, { "content": "fn benchmark_render_textbox_aligned(c: &mut Criterion) {\n\n let style = TextBoxStyleBuilder::new(Font6x8)\n\n .alignment(RightAligned)\n\n .text_color(BinaryColor::On)\n\n .build();\n\n\n\n c.bench_function(\"TextBox, RightAligned\", |b| {\n\n b.iter(|| {\n\n let obj = TextBox::new(\n\n black_box(TEXT),\n\n Rectangle::new(Point::zero(), Point::new(6 * 15 - 1, 7)),\n\n )\n\n .into_styled(style);\n\n let object = obj.create_renderer();\n\n object.collect::<Vec<Pixel<BinaryColor>>>()\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/render.rs", "rank": 7, "score": 136024.5885387392 }, { "content": "fn benchmark_render_textbox_vertical_aligned(c: &mut Criterion) {\n\n let style = TextBoxStyleBuilder::new(Font6x8)\n\n .vertical_alignment(BottomAligned)\n\n .text_color(BinaryColor::On)\n\n .build();\n\n\n\n c.bench_function(\"TextBox, BottomAligned\", |b| {\n\n b.iter(|| {\n\n let obj = TextBox::new(\n\n black_box(TEXT),\n\n Rectangle::new(Point::zero(), Point::new(6 * 15 - 1, 7)),\n\n )\n\n .into_styled(style);\n\n let object = obj.create_renderer();\n\n object.collect::<Vec<Pixel<BinaryColor>>>()\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/render.rs", "rank": 8, "score": 132980.87721537636 }, { "content": "/// Horizontal text alignment base 
trait.\n\n///\n\n/// Use implementors to parametrize [`TextBoxStyle`] and [`TextBoxStyleBuilder`].\n\n///\n\n/// [`TextBoxStyle`]: ../style/struct.TextBoxStyle.html\n\n/// [`TextBoxStyleBuilder`]: ../style/builder/struct.TextBoxStyleBuilder.html\n\npub trait HorizontalTextAlignment: Copy {\n\n /// Whether or not render spaces in the start of the line.\n\n const STARTING_SPACES: bool;\n\n\n\n /// Whether or not render spaces in the end of the line.\n\n const ENDING_SPACES: bool;\n\n}\n\n\n", "file_path": "src/alignment/mod.rs", "rank": 9, "score": 114581.74401527063 }, { "content": "/// Vertical text alignment base trait.\n\n///\n\n/// Use implementors to parametrize [`TextBoxStyle`] and [`TextBoxStyleBuilder`].\n\n///\n\n/// [`TextBoxStyle`]: ../style/struct.TextBoxStyle.html\n\n/// [`TextBoxStyleBuilder`]: ../style/builder/struct.TextBoxStyleBuilder.html\n\npub trait VerticalTextAlignment: Copy {\n\n /// Set the cursor's initial vertical position\n\n fn apply_vertical_alignment<'a, C, F, A, H>(\n\n cursor: &mut Cursor<F>,\n\n styled_text_box: &'a StyledTextBox<'a, C, F, A, Self, H>,\n\n ) where\n\n C: PixelColor,\n\n F: Font + Copy,\n\n A: HorizontalTextAlignment,\n\n H: HeightMode;\n\n}\n\n\n\npub use bottom::BottomAligned;\n\npub use center::CenterAligned;\n\npub use justified::Justified;\n\npub use left::LeftAligned;\n\npub use right::RightAligned;\n\npub use top::TopAligned;\n", "file_path": "src/alignment/mod.rs", "rank": 10, "score": 114581.74401527063 }, { "content": "fn benchmark_current(c: &mut Criterion) {\n\n c.bench_function(\"Current parser\", |b| {\n\n b.iter(|| Parser::parse(black_box(TEXT)).collect::<Vec<Token<'_>>>())\n\n });\n\n}\n\n\n\ncriterion_group!(parse, benchmark_original, benchmark_current);\n\ncriterion_main!(parse);\n", "file_path": "benches/parse.rs", "rank": 11, "score": 113192.6783006243 }, { "content": "fn benchmark_original(c: &mut Criterion) {\n\n c.bench_function(\"Original parser\", |b| {\n\n b.iter(|| OriginalParser::parse(black_box(TEXT)).collect::<Vec<Token<'_>>>())\n\n });\n\n}\n\n\n", "file_path": "benches/parse.rs", "rank": 12, "score": 113192.6783006243 }, { "content": "fn is_space_char(c: char) -> bool {\n\n // zero-width space breaks whitespace sequences - this works as long as\n\n // space handling is symmetrical (i.e. starting == ending behaviour)\n\n c.is_whitespace() && !['\\n', '\\r', '\\t', SPEC_CHAR_NBSP].contains(&c) || c == SPEC_CHAR_ZWSP\n\n}\n\n\n\nimpl<'a> Parser<'a> {\n\n /// Create a new parser object to process the given piece of text.\n\n #[inline]\n\n #[must_use]\n\n pub fn parse(text: &'a str) -> Self {\n\n Self {\n\n inner: text.chars(),\n\n }\n\n }\n\n\n\n /// Returns true if there are no tokens to process.\n\n #[inline]\n\n #[must_use]\n\n pub fn is_empty(&self) -> bool {\n", "file_path": "src/parser/mod.rs", "rank": 13, "score": 111225.28059933345 }, { "content": "fn is_word_char(c: char) -> bool {\n\n // Word tokens are terminated when a whitespace, zwsp or shy character is found. 
An exception\n\n // to this rule is the nbsp, which is whitespace but is included in the word.\n\n (!c.is_whitespace() || c == SPEC_CHAR_NBSP)\n\n && ![SPEC_CHAR_ZWSP, SPEC_CHAR_SHY, SPEC_CHAR_ESCAPE].contains(&c)\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 14, "score": 111225.28059933345 }, { "content": "fn benchmark_render_textbox(c: &mut Criterion) {\n\n let style = TextBoxStyleBuilder::new(Font6x8)\n\n .text_color(BinaryColor::On)\n\n .build();\n\n\n\n c.bench_function(\"TextBox\", |b| {\n\n b.iter(|| {\n\n let obj = TextBox::new(\n\n black_box(TEXT),\n\n Rectangle::new(Point::zero(), Point::new(6 * 15 - 1, 7)),\n\n )\n\n .into_styled(style);\n\n let object = obj.create_renderer();\n\n object.collect::<Vec<Pixel<BinaryColor>>>()\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/render.rs", "rank": 15, "score": 110801.48099376206 }, { "content": "fn benchmark_render_text(c: &mut Criterion) {\n\n let style = TextStyleBuilder::new(Font6x8)\n\n .text_color(BinaryColor::On)\n\n .build();\n\n\n\n c.bench_function(\"Text\", |b| {\n\n b.iter(|| {\n\n let object = Text::new(black_box(TEXT), Point::zero()).into_styled(style);\n\n object.into_iter().collect::<Vec<Pixel<BinaryColor>>>()\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/render.rs", "rank": 16, "score": 110801.48099376206 }, { "content": "/// This trait is used to associate a renderer type to a horizontal alignment option.\n\n///\n\n/// Implementing this trait is only necessary when creating new alignment algorithms.\n\npub trait RendererFactory<'a, C: PixelColor> {\n\n /// The type of the pixel iterator.\n\n type Renderer: Iterator<Item = Pixel<C>>;\n\n\n\n /// Creates a new renderer object.\n\n fn create_renderer(&self) -> Self::Renderer;\n\n}\n\n\n", "file_path": "src/rendering/mod.rs", "rank": 17, "score": 109197.31885845985 }, { "content": "/// Specifies how the [`TextBox`]'s height is adjusted when it is turned into a [`StyledTextBox`].\n\n///\n\n/// [`TextBox`]: ../../struct.TextBox.html\n\npub trait HeightMode: Copy {\n\n /// Apply the height mode to the textbox\n\n ///\n\n /// *Note:* This function is used by [`TextBox::into_styled`] and normally does not need to be\n\n /// called manually.\n\n ///\n\n /// [`TextBox::into_styled`]: ../../struct.TextBox.html#method.into_styled\n\n fn apply<C, F, A, V, H>(text_box: &mut StyledTextBox<'_, C, F, A, V, H>)\n\n where\n\n C: PixelColor,\n\n F: Font + Copy,\n\n A: HorizontalTextAlignment,\n\n V: VerticalTextAlignment,\n\n H: HeightMode;\n\n\n\n /// Calculate the range of rows of the current line that can be drawn.\n\n ///\n\n /// If a line does not fully fit in the bounding box, some `HeightMode` options allow drawing\n\n /// partial lines. 
For a partial line, this function calculates, which rows of each character\n\n /// should be displayed.\n", "file_path": "src/style/height_mode.rs", "rank": 18, "score": 105125.63695534389 }, { "content": "/// Implementors of this trait specify how drawing vertically outside the bounding box is handled.\n\npub trait VerticalOverdraw: Copy {\n\n /// Calculate the range of rows of the current line that can be drawn.\n\n fn calculate_displayed_row_range<F: Font>(cursor: &Cursor<F>) -> Range<i32>;\n\n}\n\n\n\n/// Only render full rows of text.\n\n#[derive(Copy, Clone, Debug)]\n\npub struct FullRowsOnly;\n\nimpl VerticalOverdraw for FullRowsOnly {\n\n #[inline]\n\n fn calculate_displayed_row_range<F: Font>(cursor: &Cursor<F>) -> Range<i32> {\n\n if cursor.in_display_area() {\n\n 0..F::CHARACTER_SIZE.height as i32\n\n } else {\n\n 0..0\n\n }\n\n }\n\n}\n\n\n\n/// Render partially visible rows, but only inside the bounding box.\n", "file_path": "src/style/vertical_overdraw.rs", "rank": 19, "score": 105118.64085851988 }, { "content": "#[derive(Debug)]\n\nenum State<C, F>\n\nwhere\n\n C: PixelColor,\n\n F: Font + Copy,\n\n{\n\n /// Fetch next render element.\n\n FetchNext,\n\n\n\n /// Render a character.\n\n Char(CharacterIterator<C, F>),\n\n\n\n /// Render a block of whitespace.\n\n Space(EmptySpaceIterator<C, F>),\n\n\n\n /// Render a block of whitespace with underlined or strikethrough effect.\n\n ModifiedSpace(ModifiedEmptySpaceIterator<C, F>),\n\n}\n\n\n\n/// Pixel iterator to render a single line of styled text.\n\n#[derive(Debug)]\n", "file_path": "src/rendering/line.rs", "rank": 20, "score": 102840.29004385584 }, { "content": "#[inline]\n\npub fn try_parse_sgr(v: &[u8]) -> Option<Sgr> {\n\n let code = *v.get(0)?;\n\n match code {\n\n 0 => Some(Sgr::Reset),\n\n 4 => Some(Sgr::Underline),\n\n 9 => Some(Sgr::CrossedOut),\n\n 24 => Some(Sgr::UnderlineOff),\n\n 29 => Some(Sgr::NotCrossedOut),\n\n 39 => Some(Sgr::DefaultTextColor),\n\n 49 => Some(Sgr::DefaultBackgroundColor),\n\n 30..=37 => Some(Sgr::ChangeTextColor(standard_to_rgb(code - 30))),\n\n 38 => {\n\n let color = try_parse_color(&v[1..])?;\n\n Some(Sgr::ChangeTextColor(color))\n\n }\n\n 90..=97 => Some(Sgr::ChangeTextColor(standard_to_rgb(code - 82))),\n\n 40..=47 => Some(Sgr::ChangeBackgroundColor(standard_to_rgb(code - 40))),\n\n 48 => {\n\n let color = try_parse_color(&v[1..])?;\n\n Some(Sgr::ChangeBackgroundColor(color))\n\n }\n\n 100..=107 => Some(Sgr::ChangeBackgroundColor(standard_to_rgb(code - 92))),\n\n _ => None,\n\n }\n\n}\n", "file_path": "src/rendering/ansi.rs", "rank": 21, "score": 99411.98647347259 }, { "content": "/// `Font` extensions\n\npub trait FontExt {\n\n /// Returns the total width of the character plus the character spacing.\n\n fn total_char_width(c: char) -> u32;\n\n\n\n /// Measure text width\n\n fn str_width(s: &str) -> u32;\n\n\n\n /// This function is identical to [`str_width`] except it does **not** handle carriage\n\n /// return characters.\n\n ///\n\n /// [`str_width`]: #method.str_width\n\n fn str_width_nocr(s: &str) -> u32;\n\n\n\n /// Measures a sequence of characters in a line with a determinate maximum width.\n\n ///\n\n /// Returns the width of the characters that fit into the given space and the processed string.\n\n fn max_str_width(s: &str, max_width: u32) -> (u32, &str);\n\n\n\n /// This function is identical to [`max_str_width`] except it does **not** handle carriage\n\n /// return characters.\n", "file_path": "src/utils/font_ext.rs", "rank": 22, "score": 91082.34757807343 }, { "content": 
"/// Retrieves size of space characters.\n\npub trait SpaceConfig: Copy + Default {\n\n /// The font for which this space config belongs.\n\n type Font: Font;\n\n\n\n /// Look at the size of next n spaces, without advancing.\n\n fn peek_next_width(&self, n: u32) -> u32;\n\n\n\n /// Advance the internal state\n\n fn consume(&mut self, n: u32) -> u32;\n\n}\n\n\n\n/// Contains the fixed width of a space character.\n\n#[derive(Copy, Clone, Debug)]\n\npub struct UniformSpaceConfig<F: Font + Copy> {\n\n _font: PhantomData<F>,\n\n\n\n /// Space width.\n\n pub space_width: u32,\n\n}\n\n\n", "file_path": "src/rendering/space_config.rs", "rank": 23, "score": 74102.64912003535 }, { "content": "fn main() -> Result<(), core::convert::Infallible> {\n\n let text = \"Lorem Ipsum is simply dummy text of the printing and typesetting industry.\";\n\n\n\n let underlined_style = TextBoxStyleBuilder::new(Font6x8)\n\n .text_color(BinaryColor::On)\n\n .height_mode(FitToText)\n\n .underlined(true)\n\n .line_spacing(2)\n\n .build();\n\n let strikethrough_style = TextBoxStyleBuilder::new(Font6x8)\n\n .text_color(BinaryColor::On)\n\n .height_mode(FitToText)\n\n .strikethrough(true)\n\n .line_spacing(2)\n\n .build();\n\n\n\n let text_box = TextBox::new(text, Rectangle::new(Point::zero(), Point::new(96, 0)))\n\n .into_styled(underlined_style);\n\n\n\n let text_box2 = TextBox::new(text, Rectangle::new(Point::new(96, 0), Point::new(192, 0)))\n", "file_path": "examples/extra_styles.rs", "rank": 24, "score": 69254.71889214947 }, { "content": "fn try_parse_8b_color(v: &[u8]) -> Option<Rgb> {\n\n let color = *v.get(0)?;\n\n match color {\n\n // 0- 7: standard colors (as in ESC [ 30–37 m)\n\n // 8- 15: high intensity colors (as in ESC [ 90–97 m)\n\n 0..=15 => Some(standard_to_rgb(color)),\n\n\n\n // 16-231: 6 × 6 × 6 cube (216 colors): 16 + 36 × r + 6 × g + b (0 ≤ r, g, b ≤ 5)\n\n 16..=231 => {\n\n let color = color - 16;\n\n let extend_6 = |c| c * 51;\n\n\n\n let b = extend_6(color % 6);\n\n let color = color / 6;\n\n\n\n let g = extend_6(color % 6);\n\n let color = color / 6;\n\n\n\n let r = extend_6(color % 6);\n\n\n", "file_path": "src/rendering/ansi.rs", "rank": 25, "score": 58833.0142343379 }, { "content": "fn try_parse_rgb(v: &[u8]) -> Option<Rgb> {\n\n let r = *v.get(0)?;\n\n let g = *v.get(1)?;\n\n let b = *v.get(2)?;\n\n\n\n Some(Rgb::new(r, g, b))\n\n}\n\n\n", "file_path": "src/rendering/ansi.rs", "rank": 26, "score": 58833.0142343379 }, { "content": "fn try_parse_color(v: &[u8]) -> Option<Rgb> {\n\n let color_type = *v.get(0)?;\n\n\n\n match color_type {\n\n 2 => try_parse_rgb(&v[1..]),\n\n 5 => try_parse_8b_color(&v[1..]),\n\n\n\n _ => None,\n\n }\n\n}\n\n\n\n/// Parse a set of SGR parameter numbers into a more convenient type\n", "file_path": "src/rendering/ansi.rs", "rank": 27, "score": 58833.0142343379 }, { "content": "/// [`Rectangle`] extensions\n\npub trait RectExt {\n\n /// Returns the (correct) size of a [`Rectangle`].\n\n fn size(self) -> Size;\n\n\n\n /// Sorts the coordinates of a [`Rectangle`] so that `top` < `bottom` and `left` < `right`.\n\n fn into_well_formed(self) -> Rectangle;\n\n}\n\n\n\nimpl RectExt for Rectangle {\n\n #[inline]\n\n #[must_use]\n\n fn size(self) -> Size {\n\n // TODO: remove if fixed in embedded-graphics\n\n let width = (self.bottom_right.x - self.top_left.x) as u32 + 1;\n\n let height = (self.bottom_right.y - self.top_left.y) as u32 + 1;\n\n\n\n Size::new(width, height)\n\n }\n\n\n\n #[inline]\n", "file_path": "src/utils/rect_ext.rs", "rank": 28, "score": 51576.15377786021 }, 
{ "content": "fn main() -> Result<(), core::convert::Infallible> {\n\n let text = \"Hello, World!\\nembedded-text supports\\rcarriage return.\\nNon-breaking \\\n\n spaces\\u{A0}are also supported.\\nAlso\\u{200B}Supports\\u{200B}Zero\\u{200B}Width\\u{200B}Space\\u{200B}Characters\";\n\n\n\n let textbox_style = TextBoxStyleBuilder::new(Font6x8)\n\n .alignment(Justified)\n\n .text_color(BinaryColor::On)\n\n .background_color(BinaryColor::Off)\n\n .height_mode(FitToText)\n\n .build();\n\n\n\n let text_box = TextBox::new(text, Rectangle::new(Point::zero(), Point::new(128, 0)))\n\n .into_styled(textbox_style);\n\n\n\n // Create a window just tall enough to fit the text.\n\n let mut display: SimulatorDisplay<BinaryColor> = SimulatorDisplay::new(text_box.size());\n\n text_box.draw(&mut display).unwrap();\n\n\n\n let output_settings = OutputSettingsBuilder::new()\n\n .theme(BinaryColorTheme::OledBlue)\n\n .build();\n\n Window::new(\"Special character handling example\", &output_settings).show_static(&display);\n\n Ok(())\n\n}\n", "file_path": "examples/specials.rs", "rank": 29, "score": 42747.260701452535 }, { "content": "fn main() -> Result<(), core::convert::Infallible> {\n\n let text = \"Hello, World!\\nLorem Ipsum is simply dummy text of the printing and typesetting \\\n\n industry. Lorem Ipsum has been the industry's standard dummy text ever since the 1500s, when \\\n\n an unknown printer took a galley of type and scrambled it to make a type specimen book.\";\n\n\n\n let textbox_style = TextBoxStyleBuilder::new(Font6x8)\n\n .alignment(RightAligned)\n\n .text_color(BinaryColor::On)\n\n .height_mode(FitToText)\n\n .build();\n\n\n\n let text_box = TextBox::new(text, Rectangle::new(Point::zero(), Point::new(128, 0)))\n\n .into_styled(textbox_style);\n\n\n\n // Create a window just tall enough to fit the text.\n\n let mut display: SimulatorDisplay<BinaryColor> = SimulatorDisplay::new(text_box.size());\n\n text_box.draw(&mut display).unwrap();\n\n\n\n let output_settings = OutputSettingsBuilder::new()\n\n .theme(BinaryColorTheme::OledBlue)\n\n .build();\n\n Window::new(\"Hello right aligned TextBox\", &output_settings).show_static(&display);\n\n Ok(())\n\n}\n", "file_path": "examples/right.rs", "rank": 30, "score": 42747.260701452535 }, { "content": "fn main() -> Result<(), core::convert::Infallible> {\n\n let text = \"Hello, World!\\nLorem Ipsum is simply dummy text of the printing and typesetting \\\n\n industry. 
Lorem Ipsum has been the industry's standard dummy text ever since the 1500s, when \\\n\n an unknown printer took a galley of type and scrambled it to make a type specimen book.\";\n\n\n\n let textbox_style = TextBoxStyleBuilder::new(Font6x8)\n\n .text_color(BinaryColor::On)\n\n .height_mode(FitToText)\n\n .build();\n\n\n\n let text_box = TextBox::new(text, Rectangle::new(Point::zero(), Point::new(128, 0)))\n\n .into_styled(textbox_style);\n\n\n\n // Create a window just tall enough to fit the text.\n\n let mut display: SimulatorDisplay<BinaryColor> = SimulatorDisplay::new(text_box.size());\n\n text_box.draw(&mut display).unwrap();\n\n\n\n let output_settings = OutputSettingsBuilder::new()\n\n .theme(BinaryColorTheme::OledBlue)\n\n .build();\n\n Window::new(\"Hello TextBox\", &output_settings).show_static(&display);\n\n Ok(())\n\n}\n", "file_path": "examples/left.rs", "rank": 31, "score": 42747.260701452535 }, { "content": "fn main() -> Result<(), core::convert::Infallible> {\n\n let text = \"Hello, World!\\nLorem Ipsum is simply dummy text of the printing and typesetting \\\n\n industry. Lorem Ipsum has been the industry's standard dummy text ever since the 1500s, when \\\n\n an unknown printer took a galley of type and scrambled it to make a type specimen book.\";\n\n\n\n let textbox_style = TextBoxStyleBuilder::new(Font6x8)\n\n .alignment(Justified)\n\n .height_mode(FitToText)\n\n .text_color(BinaryColor::On)\n\n .build();\n\n\n\n let text_box = TextBox::new(text, Rectangle::new(Point::zero(), Point::new(128, 0)))\n\n .into_styled(textbox_style);\n\n\n\n // Create a window just tall enough to fit the text.\n\n let mut display: SimulatorDisplay<BinaryColor> = SimulatorDisplay::new(text_box.size());\n\n text_box.draw(&mut display).unwrap();\n\n\n\n let output_settings = OutputSettingsBuilder::new()\n\n .theme(BinaryColorTheme::OledBlue)\n\n .build();\n\n Window::new(\"Hello fully justified TextBox\", &output_settings).show_static(&display);\n\n Ok(())\n\n}\n", "file_path": "examples/justified.rs", "rank": 32, "score": 42747.260701452535 }, { "content": "fn main() -> Result<(), core::convert::Infallible> {\n\n let output_settings = OutputSettingsBuilder::new()\n\n .theme(BinaryColorTheme::OledBlue)\n\n .build();\n\n let mut window = Window::new(\"TextBox input demonstration\", &output_settings);\n\n let bounds = Rectangle::new(Point::new(0, 0), Point::new(128, 640));\n\n\n\n let inputs: HashMap<Keycode, (&str, &str, &str, &str)> = [\n\n // (Keycode, (NO, SHIFT, CAPS, ALT_GR))\n\n (Keycode::A, (\"a\", \"A\", \"A\", \"ä\")),\n\n (Keycode::B, (\"b\", \"B\", \"B\", \"{\")),\n\n (Keycode::C, (\"c\", \"C\", \"C\", \"&\")),\n\n (Keycode::D, (\"d\", \"D\", \"D\", \"Đ\")),\n\n (Keycode::E, (\"e\", \"E\", \"E\", \"Ä\")),\n\n (Keycode::F, (\"f\", \"F\", \"F\", \"[\")),\n\n (Keycode::G, (\"g\", \"G\", \"G\", \"]\")),\n\n (Keycode::H, (\"h\", \"H\", \"H\", \"\")),\n\n (Keycode::I, (\"i\", \"I\", \"I\", \"Í\")),\n\n (Keycode::J, (\"j\", \"J\", \"J\", \"í\")),\n\n (Keycode::K, (\"k\", \"K\", \"K\", \"ł\")),\n", "file_path": "examples/editor.rs", "rank": 33, "score": 42747.260701452535 }, { "content": "fn main() -> Result<(), core::convert::Infallible> {\n\n let text = \"Hello, World!\\nLorem Ipsum is simply dummy text of the printing and typesetting \\\n\n industry. 
Lorem Ipsum has been the industry's standard dummy text ever since the 1500s, when \\\n\n an unknown printer took a galley of type and scrambled it to make a type specimen book.\";\n\n\n\n let textbox_style = TextBoxStyleBuilder::new(Font6x8)\n\n .alignment(CenterAligned)\n\n .height_mode(FitToText)\n\n .text_color(BinaryColor::On)\n\n .build();\n\n\n\n let text_box = TextBox::new(text, Rectangle::new(Point::zero(), Point::new(128, 0)))\n\n .into_styled(textbox_style);\n\n\n\n // Create a window just tall enough to fit the text.\n\n let mut display: SimulatorDisplay<BinaryColor> = SimulatorDisplay::new(text_box.size());\n\n text_box.draw(&mut display).unwrap();\n\n\n\n let output_settings = OutputSettingsBuilder::new()\n\n .theme(BinaryColorTheme::OledBlue)\n\n .build();\n\n Window::new(\"Hello center aligned TextBox\", &output_settings).show_static(&display);\n\n Ok(())\n\n}\n", "file_path": "examples/center.rs", "rank": 34, "score": 42747.260701452535 }, { "content": "fn main() -> Result<(), core::convert::Infallible> {\n\n let text = \"Hello, World!\\nLorem Ipsum is simply dummy text of the printing and typesetting \\\n\n industry. Lorem Ipsum has been the industry's standard dummy text ever since the 1500s, when \\\n\n an unknown printer took a galley of type and scrambled it to make a type specimen book.\";\n\n\n\n let textbox_style = TextBoxStyleBuilder::new(Font6x8)\n\n .alignment(Justified)\n\n .text_color(Rgb565::RED)\n\n .background_color(Rgb565::GREEN)\n\n .build();\n\n\n\n let mut display: SimulatorDisplay<Rgb565> = SimulatorDisplay::new(Size::new(129, 129));\n\n\n\n TextBox::new(text, Rectangle::new(Point::zero(), Point::new(128, 128)))\n\n .into_styled(textbox_style)\n\n .draw(&mut display)\n\n .unwrap();\n\n\n\n let output_settings = OutputSettingsBuilder::new().build();\n\n Window::new(\"Hello TextBox with text background color\", &output_settings).show_static(&display);\n\n Ok(())\n\n}\n", "file_path": "examples/color.rs", "rank": 35, "score": 42747.260701452535 }, { "content": "fn main() -> Result<(), core::convert::Infallible> {\n\n let output_settings = OutputSettingsBuilder::new()\n\n .theme(BinaryColorTheme::OledBlue)\n\n .build();\n\n let mut window = Window::new(\"TextBox demonstration\", &output_settings);\n\n\n\n let mut bounds = Rectangle::new(Point::new(0, 8), Point::new(128, 200));\n\n\n\n 'running: loop {\n\n if !demo_loop(&mut window, &mut bounds, Justified) {\n\n break 'running;\n\n }\n\n if !demo_loop(&mut window, &mut bounds, LeftAligned) {\n\n break 'running;\n\n }\n\n if !demo_loop(&mut window, &mut bounds, CenterAligned) {\n\n break 'running;\n\n }\n\n if !demo_loop(&mut window, &mut bounds, RightAligned) {\n\n break 'running;\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/interactive.rs", "rank": 36, "score": 42747.260701452535 }, { "content": "fn main() -> Result<(), core::convert::Infallible> {\n\n let mut display: SimulatorDisplay<BinaryColor> = SimulatorDisplay::new(Size::new(192, 129));\n\n\n\n let text = \"The quick brown fox jumped over the lazy dog.\";\n\n\n\n let textbox_style_top = TextBoxStyleBuilder::new(Font6x8)\n\n .text_color(BinaryColor::On)\n\n .vertical_alignment(TopAligned)\n\n .build();\n\n\n\n let textbox_style_center = TextBoxStyleBuilder::new(Font6x8)\n\n .text_color(BinaryColor::On)\n\n .vertical_alignment(CenterAligned)\n\n .build();\n\n\n\n let textbox_style_bottom = TextBoxStyleBuilder::new(Font6x8)\n\n .text_color(BinaryColor::On)\n\n .vertical_alignment(BottomAligned)\n\n .build();\n\n\n", "file_path": 
"examples/vertical.rs", "rank": 37, "score": 42747.260701452535 }, { "content": "fn standard_to_rgb(idx: u8) -> Rgb {\n\n // These colors are used in PowerShell 6 in Windows 10\n\n match idx {\n\n 0 => Rgb::new(12, 12, 12),\n\n 1 => Rgb::new(197, 15, 31),\n\n 2 => Rgb::new(19, 161, 14),\n\n 3 => Rgb::new(193, 156, 0),\n\n 4 => Rgb::new(0, 55, 218),\n\n 5 => Rgb::new(136, 23, 152),\n\n 6 => Rgb::new(58, 150, 221),\n\n 7 => Rgb::new(204, 204, 204),\n\n\n\n 8 => Rgb::new(118, 118, 118),\n\n 9 => Rgb::new(231, 72, 86),\n\n 10 => Rgb::new(22, 198, 12),\n\n 11 => Rgb::new(249, 241, 165),\n\n 12 => Rgb::new(59, 120, 255),\n\n 13 => Rgb::new(180, 0, 158),\n\n 14 => Rgb::new(97, 214, 214),\n\n _ => Rgb::new(242, 242, 242),\n\n }\n\n}\n\n\n", "file_path": "src/rendering/ansi.rs", "rank": 38, "score": 42531.7352025124 }, { "content": "fn main() -> Result<(), core::convert::Infallible> {\n\n let output_settings = OutputSettingsBuilder::new()\n\n .theme(BinaryColorTheme::OledBlue)\n\n .build();\n\n let mut window = Window::new(\"TextBox demonstration\", &output_settings);\n\n\n\n let mut bounds = Rectangle::new(Point::new(0, 8), Point::new(128, 200));\n\n\n\n 'running: loop {\n\n if !demo_loop(&mut window, &mut bounds, Exact(FullRowsOnly)) {\n\n break 'running;\n\n }\n\n if !demo_loop(&mut window, &mut bounds, FitToText) {\n\n break 'running;\n\n }\n\n if !demo_loop(&mut window, &mut bounds, ShrinkToText(FullRowsOnly)) {\n\n break 'running;\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/interactive_fit.rs", "rank": 39, "score": 41647.23709794856 }, { "content": "fn main() -> Result<(), core::convert::Infallible> {\n\n let text = \"Hello, World!\\nLorem Ipsum is simply dummy text of the printing and typesetting \\\n\n industry. Lorem Ipsum has been the industry's standard dummy text ever since the 1500s, when \\\n\n an unknown printer took a galley of type and scrambled it to make a type specimen book.\";\n\n\n\n let textbox_style = TextBoxStyleBuilder::new(Font6x6)\n\n .text_color(BinaryColor::On)\n\n .height_mode(FitToText)\n\n .build();\n\n\n\n let text_box = TextBox::new(text, Rectangle::new(Point::zero(), Point::new(128, 0)))\n\n .into_styled(textbox_style);\n\n\n\n // Create a window just tall enough to fit the text.\n\n let mut display: SimulatorDisplay<BinaryColor> = SimulatorDisplay::new(text_box.size());\n\n text_box.draw(&mut display).unwrap();\n\n\n\n let output_settings = OutputSettingsBuilder::new()\n\n .theme(BinaryColorTheme::OledBlue)\n\n .build();\n\n Window::new(\"Hello TextBox\", &output_settings).show_static(&display);\n\n Ok(())\n\n}\n", "file_path": "examples/variable_width.rs", "rank": 40, "score": 41647.23709794856 }, { "content": "fn main() -> Result<(), core::convert::Infallible> {\n\n let output_settings = OutputSettingsBuilder::new()\n\n .theme(BinaryColorTheme::OledBlue)\n\n .build();\n\n let mut window = Window::new(\"TextBox demonstration\", &output_settings);\n\n\n\n let mut bounds = Rectangle::new(Point::new(0, 8), Point::new(128, 200));\n\n\n\n 'running: loop {\n\n if !demo_loop(&mut window, &mut bounds, TopAligned) {\n\n break 'running;\n\n }\n\n if !demo_loop(&mut window, &mut bounds, CenterAligned) {\n\n break 'running;\n\n }\n\n if !demo_loop(&mut window, &mut bounds, BottomAligned) {\n\n break 'running;\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/interactive_vertical.rs", "rank": 41, "score": 41647.23709794856 }, { "content": "fn main() -> Result<(), core::convert::Infallible> {\n\n let text = format!(\n\n \"{comment}/// Comment\\n\\\n\n 
{base_text}#[{attribute}derive{base_text}(Debug)]\\n\\\n\n {keyword}enum {type_name}{underlined}Foo{underlined_off}{base_text}<{lifetime}'a{base_text}> {{\\n\\\n\n {comment}\\t/// Decide what {strikethrough}not{strikethrough_off} to do next.\\n\\\n\n {highlighted_background}\\t{enum_variant}Bar{base_text}({type_name}{underlined}Token{underlined_off}{base_text}<{lifetime}'a{base_text}>),{end_of_line}\\n\\\n\n {line_background}{base_text}}}\",\n\n // colors\n\n line_background = \"\\x1b[48;5;16m\",\n\n highlighted_background = \"\\x1b[48;5;235m\",\n\n enum_variant = \"\\x1b[38;2;36;144;241m\",\n\n keyword = \"\\x1b[38;2;84;128;166m\",\n\n comment = \"\\x1b[38;2;94;153;73m\",\n\n base_text = \"\\x1b[97m\",\n\n attribute =\"\\x1b[38;2;220;220;157m\",\n\n type_name = \"\\x1b[38;2;78;201;176m\",\n\n lifetime = \"\\x1b[38;2;84;128;166m\",\n\n end_of_line = \"\\x1b[40C\",\n\n underlined = \"\\x1b[4m\",\n", "file_path": "examples/colored_text.rs", "rank": 42, "score": 41647.23709794856 }, { "content": " /// Desired space between lines, in pixels\n\n pub line_spacing: i32,\n\n\n\n /// Desired column width for tabs\n\n pub tab_size: TabSize<F>,\n\n\n\n /// If true, the text will be underlined\n\n pub underlined: bool,\n\n\n\n /// If true, the text will be crossed out\n\n pub strikethrough: bool,\n\n}\n\n\n\nimpl<C, F, A, V, H> TextBoxStyle<C, F, A, V, H>\n\nwhere\n\n C: PixelColor,\n\n F: Font + Copy,\n\n A: HorizontalTextAlignment,\n\n V: VerticalTextAlignment,\n\n H: HeightMode,\n", "file_path": "src/style/mod.rs", "rank": 43, "score": 34023.04321516445 }, { "content": "{\n\n /// Creates a `TextBoxStyle` object with transparent background.\n\n #[inline]\n\n pub fn new(\n\n font: F,\n\n text_color: C,\n\n alignment: A,\n\n vertical_alignment: V,\n\n height_mode: H,\n\n ) -> Self {\n\n Self {\n\n text_style: TextStyle::new(font, text_color),\n\n alignment,\n\n vertical_alignment,\n\n height_mode,\n\n line_spacing: 0,\n\n tab_size: TabSize::default(),\n\n underlined: false,\n\n strikethrough: false,\n\n }\n", "file_path": "src/style/mod.rs", "rank": 45, "score": 34017.2721362304 }, { "content": " }\n\n\n\n /// Creates a `TextBoxStyle` object from the given text style and alignment.\n\n #[inline]\n\n pub fn from_text_style(\n\n text_style: TextStyle<C, F>,\n\n alignment: A,\n\n vertical_alignment: V,\n\n height_mode: H,\n\n ) -> Self {\n\n Self {\n\n text_style,\n\n alignment,\n\n vertical_alignment,\n\n height_mode,\n\n line_spacing: 0,\n\n tab_size: TabSize::default(),\n\n underlined: false,\n\n strikethrough: false,\n\n }\n", "file_path": "src/style/mod.rs", "rank": 46, "score": 34014.867704479344 }, { "content": "pub struct TextBoxStyle<C, F, A, V, H>\n\nwhere\n\n C: PixelColor,\n\n F: Font + Copy,\n\n A: HorizontalTextAlignment,\n\n V: VerticalTextAlignment,\n\n H: HeightMode,\n\n{\n\n /// Style properties for text.\n\n pub text_style: TextStyle<C, F>,\n\n\n\n /// Horizontal text alignment.\n\n pub alignment: A,\n\n\n\n /// Vertical text alignment.\n\n pub vertical_alignment: V,\n\n\n\n /// The height behaviour\n\n pub height_mode: H,\n\n\n", "file_path": "src/style/mod.rs", "rank": 47, "score": 34014.57750719508 }, { "content": "///\n\n/// This type makes it more obvious what unit is used to define the width of tabs.\n\n/// The default tab size is 4 spaces.\n\n#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]\n\npub struct TabSize<F: Font> {\n\n pub(crate) width: i32,\n\n _font: PhantomData<F>,\n\n}\n\n\n\nimpl<F: Font> Default for TabSize<F> {\n\n #[inline]\n\n fn default() -> 
Self {\n\n Self::spaces(4)\n\n }\n\n}\n\n\n\nimpl<F: Font> TabSize<F> {\n\n /// Calculate tab size from a number of spaces in the current font.\n\n #[inline]\n\n pub fn spaces(n: u32) -> Self {\n", "file_path": "src/style/mod.rs", "rank": 57, "score": 34009.64065391105 }, { "content": "//! Colors.\n\nuse embedded_graphics::pixelcolor::{BinaryColor, Rgb555, Rgb565, Rgb888};\n\n\n\n/// 24bit RGB color\n\n#[derive(Debug, PartialEq, Eq, Clone, Copy)]\n\npub struct Rgb {\n\n pub(crate) r: u8,\n\n pub(crate) g: u8,\n\n pub(crate) b: u8,\n\n}\n\n\n\nimpl Rgb {\n\n /// Creates a new color value.\n\n #[inline]\n\n pub fn new(r: u8, g: u8, b: u8) -> Self {\n\n Self { r, g, b }\n\n }\n\n}\n\n\n\nimpl From<Rgb> for BinaryColor {\n", "file_path": "src/style/color.rs", "rank": 61, "score": 34003.130648042876 }, { "content": "pub mod vertical_overdraw;\n\n\n\nuse crate::{\n\n alignment::{HorizontalTextAlignment, VerticalTextAlignment},\n\n parser::{Parser, Token},\n\n rendering::{\n\n ansi::Sgr,\n\n cursor::Cursor,\n\n line_iter::{LineElementIterator, RenderElement},\n\n space_config::UniformSpaceConfig,\n\n },\n\n style::height_mode::HeightMode,\n\n utils::font_ext::FontExt,\n\n};\n\nuse core::marker::PhantomData;\n\nuse embedded_graphics::{prelude::*, primitives::Rectangle, style::TextStyle};\n\n\n\npub use builder::TextBoxStyleBuilder;\n\n\n\n/// Tab size helper\n", "file_path": "src/style/mod.rs", "rank": 62, "score": 34002.65121974901 }, { "content": " let space = F::total_char_width(' ') as i32;\n\n // make sure n is at least 1, and the multiplication doesn't overflow\n\n let size = (n.max(1) as i32).checked_mul(space).unwrap_or(4 * space);\n\n\n\n Self::pixels(size)\n\n }\n\n\n\n /// Define the tab size in pixels.\n\n #[inline]\n\n pub fn pixels(px: i32) -> Self {\n\n Self {\n\n width: px,\n\n _font: PhantomData,\n\n }\n\n }\n\n\n\n /// Calculate the rendered with of the next tab\n\n #[inline]\n\n pub fn next_width(self, pos: i32) -> u32 {\n\n let next_tab_pos = (pos / self.width + 1) * self.width;\n", "file_path": "src/style/mod.rs", "rank": 65, "score": 34000.530361794576 }, { "content": " }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use crate::{alignment::*, parser::Parser, style::builder::TextBoxStyleBuilder};\n\n use embedded_graphics::{\n\n fonts::{Font, Font6x8},\n\n pixelcolor::BinaryColor,\n\n };\n\n\n\n #[test]\n\n fn no_infinite_loop() {\n\n let _ = TextBoxStyleBuilder::new(Font6x8)\n\n .text_color(BinaryColor::On)\n\n .build()\n\n .measure_text_height(\"a\", 5);\n\n }\n\n\n\n #[test]\n", "file_path": "src/style/mod.rs", "rank": 66, "score": 33999.46185860926 }, { "content": " /// // |Lorem Ipsum |\n\n /// // |is simply |\n\n /// // |dummy text |\n\n /// // |of the |\n\n /// // |printing and|\n\n /// // |typesetting |\n\n /// // |industry. 
|\n\n ///\n\n /// assert_eq!(7 * 8, height);\n\n /// ```\n\n #[inline]\n\n #[must_use]\n\n pub fn measure_text_height(&self, text: &str, max_width: u32) -> u32 {\n\n let mut n_lines = 0_i32;\n\n let mut parser = Parser::parse(text);\n\n let mut carry = None;\n\n\n\n loop {\n\n let (w, _, t, underlined) = self.measure_line(&mut parser, carry.clone(), max_width);\n\n\n", "file_path": "src/style/mod.rs", "rank": 67, "score": 33998.04921785985 }, { "content": " Point::new(\n\n max_line_width.saturating_sub(1) as i32,\n\n F::CHARACTER_SIZE.height.saturating_sub(1) as i32,\n\n ),\n\n ),\n\n self.line_spacing,\n\n );\n\n let mut iter: LineElementIterator<'_, F, _, A> = LineElementIterator::new(\n\n parser.clone(),\n\n cursor,\n\n UniformSpaceConfig::default(),\n\n carried_token.clone(),\n\n self.tab_size,\n\n );\n\n\n\n let mut current_width = 0;\n\n let mut last_spaces = 0;\n\n let mut total_spaces = 0;\n\n let mut underlined = self.underlined;\n\n while let Some(token) = iter.next() {\n", "file_path": "src/style/mod.rs", "rank": 70, "score": 33995.03627540606 }, { "content": "use embedded_graphics::{fonts::Font6x8, pixelcolor::BinaryColor, prelude::*};\n\nuse embedded_graphics_simulator::{\n\n BinaryColorTheme, OutputSettingsBuilder, SimulatorDisplay, Window,\n\n};\n\nuse embedded_text::prelude::*;\n\n\n", "file_path": "examples/extra_styles.rs", "rank": 71, "score": 33994.92128018741 }, { "content": " (next_tab_pos - pos) as u32\n\n }\n\n}\n\n\n\n/// Styling options of a [`TextBox`].\n\n///\n\n/// `TextBoxStyle` contains the `Font`, foreground and background `PixelColor`, line spacing,\n\n/// [`HeightMode`], [`HorizontalTextAlignment`] and [`VerticalTextAlignment`] information necessary\n\n/// to draw a [`TextBox`].\n\n///\n\n/// To construct a new `TextBoxStyle` object, use the [`new`] or [`from_text_style`] methods or\n\n/// the [`TextBoxStyleBuilder`] object.\n\n///\n\n/// [`TextBox`]: ../struct.TextBox.html\n\n/// [`HorizontalTextAlignment`]: ../alignment/trait.HorizontalTextAlignment.html\n\n/// [`VerticalTextAlignment`]: ../alignment/trait.VerticalTextAlignment.html\n\n/// [`TextBoxStyleBuilder`]: builder/struct.TextBoxStyleBuilder.html\n\n/// [`new`]: #method.new\n\n/// [`from_text_style`]: #method.from_text_style\n\n#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Default)]\n", "file_path": "src/style/mod.rs", "rank": 72, "score": 33994.050174855925 }, { "content": " }\n\n\n\n /// Measure the width and count spaces in a single line of text.\n\n ///\n\n /// Returns (width, rendered space count, carried token)\n\n ///\n\n /// Instead of peeking ahead when processing tokens, this function advances the parser before\n\n /// processing a token. If a token opens a new line, it will be returned as the carried token.\n\n /// If the carried token is `None`, the parser has finished processing the text.\n\n #[inline]\n\n #[must_use]\n\n pub fn measure_line<'a>(\n\n &self,\n\n parser: &mut Parser<'a>,\n\n carried_token: Option<Token<'a>>,\n\n max_line_width: u32,\n\n ) -> (u32, u32, Option<Token<'a>>, bool) {\n\n let cursor: Cursor<F> = Cursor::new(\n\n Rectangle::new(\n\n Point::zero(),\n", "file_path": "src/style/mod.rs", "rank": 73, "score": 33993.65519082708 }, { "content": "//! * `\\x1b[4m`: Underlined text\n\n//! * `\\x1b[24m`: Turn off text underline\n\n//! * `\\x1b[9m`: Crossed out/strikethrough text\n\n//! * `\\x1b[29m`: Turn off strikethrough\n\n//! * `\\x1b[39m`: Reset text color\n\n//! * `\\x1b[49m`: Reset background color\n\n//!\n\n//! 
Reset style options to default\n\n//! ------------------------------\n\n//!\n\n//! `embedded-text` supports the `Reset all` (`\\x1b[0m`), `Default text color` (`\\x1b[39m`) and\n\n//! `Default background color` (`\\x1b[49m`) codes. These codes can be used to reset colors to\n\n//! *transparent* (i.e. no pixels drawn for text or background).\n\n//!\n\n//! In addition, `Reset all` turns off the underlined and crossed out styles.\n\n//!\n\n//! Other supported ANSI escape codes\n\n//! ---------------------------------\n\n//!\n\n//! Besides changing text style, you can also move the cursor using ANSI escape codes!\n", "file_path": "src/style/mod.rs", "rank": 74, "score": 33993.57921821503 }, { "content": " /// Measures text height when rendered using a given width.\n\n ///\n\n /// # Example: measure height of text when rendered using a 6x8 font and 72px width.\n\n ///\n\n /// ```rust\n\n /// # use embedded_text::style::builder::TextBoxStyleBuilder;\n\n /// # use embedded_graphics::fonts::Font6x8;\n\n /// # use embedded_graphics::pixelcolor::BinaryColor;\n\n /// #\n\n /// let style = TextBoxStyleBuilder::new(Font6x8)\n\n /// .text_color(BinaryColor::On)\n\n /// .build();\n\n ///\n\n /// let height = style.measure_text_height(\n\n /// \"Lorem Ipsum is simply dummy text of the printing and typesetting industry.\",\n\n /// 72,\n\n /// );\n\n ///\n\n /// // Expect 7 lines of text, wrapped in something like the following:\n\n ///\n", "file_path": "src/style/mod.rs", "rank": 75, "score": 33993.49386793617 }, { "content": " if (w != 0 || t.is_some()) && carry != Some(Token::CarriageReturn) {\n\n // something was in this line, increment height\n\n // if last carried token was a carriage return, we already counted the height\n\n n_lines += 1;\n\n }\n\n\n\n if t.is_none() {\n\n let mut height = (n_lines * F::CHARACTER_SIZE.height as i32\n\n + n_lines.saturating_sub(1) * self.line_spacing)\n\n as u32;\n\n\n\n if underlined {\n\n height += 1;\n\n }\n\n\n\n return height;\n\n }\n\n\n\n carry = t;\n\n }\n", "file_path": "src/style/mod.rs", "rank": 76, "score": 33991.969251252944 }, { "content": " #[test]\n\n fn test_measure_line() {\n\n let textbox_style = TextBoxStyleBuilder::new(Font6x8)\n\n .alignment(CenterAligned)\n\n .text_color(BinaryColor::On)\n\n .build();\n\n\n\n let mut text = Parser::parse(\"123 45 67\");\n\n\n\n let (w, s, _, _) =\n\n textbox_style.measure_line(&mut text, None, 6 * Font6x8::CHARACTER_SIZE.width);\n\n assert_eq!(w, 6 * Font6x8::CHARACTER_SIZE.width);\n\n assert_eq!(s, 1);\n\n }\n\n\n\n #[test]\n\n fn test_measure_line_counts_nbsp() {\n\n let textbox_style = TextBoxStyleBuilder::new(Font6x8)\n\n .alignment(CenterAligned)\n\n .text_color(BinaryColor::On)\n", "file_path": "src/style/mod.rs", "rank": 77, "score": 33991.12017166515 }, { "content": "//! You have the following options:\n\n//!\n\n//! - Move the cursor forward `<n>` characters: `\\x1b[<n>C`. This command will stop at the end of\n\n//! line, so you can use it to simulate a highlighted line, for example.\n\n//! *Note:* Moving the cursor *forward* fills the line with the background color. If you want to\n\n//! avoid this, make sure to reset the background color before moving the cursor!\n\n//! - Move the cursor backward `<n>` characters: `\\x1b[<n>D`. This command will stop at the start\n\n//! of line.\n\n//!\n\n//! [`Sgr`]: ../rendering/ansi/enum.Sgr.html\n\n//! [`Rgb`]: ./color/struct.Rgb.html\n\n//! [`TextBox`]: ../struct.TextBox.html\n\n//! [`TextBoxStyle`]: struct.TextBoxStyle.html\n\n//! 
[`TextBoxStyleBuilder`]: builder/struct.TextBoxStyleBuilder.html\n\n//! [`TextBoxStyleBuilder::new`]: builder/struct.TextBoxStyleBuilder.html#method.new\n\n//! [`TextBox::into_styled`]: ../struct.TextBox.html#method.into_styled\n\n\n\npub mod builder;\n\npub mod color;\n\npub mod height_mode;\n", "file_path": "src/style/mod.rs", "rank": 78, "score": 33990.9137339452 }, { "content": "//! `TextBox` styling.\n\n//!\n\n//! Style objects and why you need them\n\n//! ===================================\n\n//!\n\n//! By itself, a [`TextBox`] does not contain the information necessary to draw it on a display.\n\n//! This information is called \"style\" and it is contained in [`TextBoxStyle`] objects.\n\n//!\n\n//! The recommended (and most flexible) way of constructing a style object is using the\n\n//! [`TextBoxStyleBuilder`] builder object. The least amount of information necessary to create a\n\n//! text style is the `Font` used to render the text, so you'll need to specify this when you call\n\n//! [`TextBoxStyleBuilder::new`].\n\n//! You can then chain together various builder methods to customize font rendering.\n\n//!\n\n//! See the [`TextBoxStyleBuilder`] for more information on what styling options you have.\n\n//!\n\n//! To apply a style, call [`TextBox::into_styled`].\n\n//!\n\n//! In-band text styling using ANSI escape codes\n\n//! ============================================\n", "file_path": "src/style/mod.rs", "rank": 79, "score": 33990.86718294115 }, { "content": " .build();\n\n\n\n let mut text = Parser::parse(\"123\\u{A0}45\");\n\n\n\n let (w, s, _, _) =\n\n textbox_style.measure_line(&mut text, None, 5 * Font6x8::CHARACTER_SIZE.width);\n\n assert_eq!(w, 5 * Font6x8::CHARACTER_SIZE.width);\n\n assert_eq!(s, 1);\n\n }\n\n\n\n #[test]\n\n fn test_measure_height_nbsp() {\n\n let textbox_style = TextBoxStyleBuilder::new(Font6x8)\n\n .alignment(CenterAligned)\n\n .text_color(BinaryColor::On)\n\n .build();\n\n\n\n let text = \"123\\u{A0}45 123\";\n\n\n\n let height = textbox_style.measure_text_height(text, 5 * Font6x8::CHARACTER_SIZE.width);\n", "file_path": "src/style/mod.rs", "rank": 80, "score": 33990.33514456152 }, { "content": " #[inline]\n\n fn from(rgb: Rgb) -> Self {\n\n if rgb.r > 127 && rgb.g > 127 && rgb.b > 127 {\n\n Self::On\n\n } else {\n\n Self::Off\n\n }\n\n }\n\n}\n\n\n\nimpl From<Rgb> for Rgb888 {\n\n #[inline]\n\n fn from(rgb: Rgb) -> Self {\n\n Self::new(rgb.r, rgb.g, rgb.b)\n\n }\n\n}\n\n\n\nimpl From<Rgb> for Rgb555 {\n\n #[inline]\n\n fn from(rgb: Rgb) -> Self {\n", "file_path": "src/style/color.rs", "rank": 81, "score": 33989.47983233779 }, { "content": " assert_eq!(height, 16);\n\n\n\n // bug discovered while using the interactive example\n\n let textbox_style = TextBoxStyleBuilder::new(Font6x8)\n\n .alignment(LeftAligned)\n\n .text_color(BinaryColor::On)\n\n .build();\n\n\n\n let text = \"embedded-text also\\u{A0}supports non-breaking spaces.\";\n\n\n\n let height = textbox_style.measure_text_height(text, 79);\n\n assert_eq!(height, 4 * Font6x8::CHARACTER_SIZE.height);\n\n }\n\n\n\n #[test]\n\n fn height_with_line_spacing() {\n\n let style = TextBoxStyleBuilder::new(Font6x8)\n\n .text_color(BinaryColor::On)\n\n .line_spacing(2)\n\n .build();\n", "file_path": "src/style/mod.rs", "rank": 82, "score": 33989.35465833088 }, { "content": " .into_styled(strikethrough_style);\n\n\n\n // Create a window just tall enough to fit the text.\n\n let mut display: SimulatorDisplay<BinaryColor> = SimulatorDisplay::new(Size::new(\n\n text_box.size().width + 
text_box2.size().width,\n\n text_box.size().height.max(text_box2.size().height),\n\n ));\n\n text_box.draw(&mut display).unwrap();\n\n text_box2.draw(&mut display).unwrap();\n\n\n\n let output_settings = OutputSettingsBuilder::new()\n\n .theme(BinaryColorTheme::OledBlue)\n\n .build();\n\n Window::new(\"Hello TextBox\", &output_settings).show_static(&display);\n\n Ok(())\n\n}\n", "file_path": "examples/extra_styles.rs", "rank": 83, "score": 33988.719992585626 }, { "content": " Self::new(rgb.r >> 3, rgb.g >> 3, rgb.b >> 3)\n\n }\n\n}\n\n\n\nimpl From<Rgb> for Rgb565 {\n\n #[inline]\n\n fn from(rgb: Rgb) -> Self {\n\n Self::new(rgb.r >> 3, rgb.g >> 2, rgb.b >> 3)\n\n }\n\n}\n", "file_path": "src/style/color.rs", "rank": 84, "score": 33988.282557289254 }, { "content": "//!\n\n//! Sometimes you need more flexibility than what a single style object can provide, like changing\n\n//! font color for a specific word in the text. `embedded-text` supports this use case by using a\n\n//! subset of the standard [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code).\n\n//! These are special character sequences you can use *in the text* to change the font stlye of the\n\n//! text itself. This documentation does not aim to provide a full specification of all the ANSI\n\n//! escape codes, only describes the supported subset.\n\n//!\n\n//! > *Note:* if `embedded-text` fails to parse an escape sequence, it will ignore the `\\x1b` character\n\n//! and display the rest as normal text.\n\n//!\n\n//! All escape sequences start with the `\\x1b[` sequence, where `\\x1b` is the ASCII `escape`\n\n//! character. `embedded-text` supports a subset of the `SGR` parameters, which are numeric codes\n\n//! with specific functions, followed by a number of parameters and end with the `m` character.\n\n//!\n\n//! Currently, `embedded-text` supports changing the text and background colors. To do this, you\n\n//! have the following options:\n\n//!\n\n//! Standard color codes\n\n//! --------------------\n", "file_path": "src/style/mod.rs", "rank": 85, "score": 33987.527962140535 }, { "content": " (\" \", 6, 0),\n\n (\"\\n \", 6, 8),\n\n (\"word\\n\", 2 * 6, 16),\n\n (\"word\\n \\nnext\", 50, 24),\n\n (\" Word \", 36, 8),\n\n ];\n\n let textbox_style = TextBoxStyleBuilder::new(Font6x8)\n\n .alignment(CenterAligned)\n\n .text_color(BinaryColor::On)\n\n .build();\n\n for (i, (text, width, expected_height)) in data.iter().enumerate() {\n\n let height = textbox_style.measure_text_height(text, *width);\n\n assert_eq!(\n\n height, *expected_height,\n\n r#\"#{}: Height of \"{}\" is {} but is expected to be {}\"#,\n\n i, text, height, expected_height\n\n );\n\n }\n\n }\n\n\n", "file_path": "src/style/mod.rs", "rank": 86, "score": 33987.1810430537 }, { "content": "//! to the above types. The resulting color will be the closest match to what you specify.\n\n//!\n\n//! If you wish to use a different color type, you'll need to implement `From<Rgb>` for your color\n\n//! type and write the conversion yourself.\n\n//!\n\n//! Color values on monochrome displays\n\n//! -----------------------------------\n\n//!\n\n//! Monochrome displays use the `BinaryColor` color which can have two values: `On` or `Off`.\n\n//! You can still use the ANSI colors with the following considerations:\n\n//!\n\n//! * If the value of all three color channels are greater than `127`, the resulting color in `On`\n\n//! * Otherwise, the color is converted to `Off`.\n\n//!\n\n//! Other text styling options\n\n//! --------------------------\n\n//!\n\n//! 
The following [`Sgr`] sequences are supported:\n\n//!\n\n//! * `\\x1b[0m`: Reset everything\n", "file_path": "src/style/mod.rs", "rank": 88, "score": 33985.983522095616 }, { "content": "//!\n\n//! <style>\n\n//! .ansi_color {\n\n//! display: block;\n\n//! text-align: center;\n\n//! color: white;\n\n//! }\n\n//! </style>\n\n//!\n\n//! The standard color codes option is the simplest, and least flexible way to set color.\n\n//!\n\n//! | Color name | Text color | Background color | RGB888 |\n\n//! |---------------------|------------|------------------|-------------------------------------------------------------------------------------------------|\n\n//! | Black | `\\x1b[30m` | `\\x1b[40m` | <span class=\"ansi_color\" style=\"background: rgb(12,12,12);\"> 12,12,12 </span> |\n\n//! | Red | `\\x1b[31m` | `\\x1b[41m` | <span class=\"ansi_color\" style=\"background: rgb(197,15,31);\"> 197,15,31 </span> |\n\n//! | Green | `\\x1b[32m` | `\\x1b[42m` | <span class=\"ansi_color\" style=\"background: rgb(19,161,14);\"> 19,161,14 </span> |\n\n//! | Yellow | `\\x1b[33m` | `\\x1b[43m` | <span class=\"ansi_color\" style=\"background: rgb(193,156,0);\"> 193,156,0 </span> |\n\n//! | Blue | `\\x1b[34m` | `\\x1b[44m` | <span class=\"ansi_color\" style=\"background: rgb(0,55,218);\"> 0,55,218 </span> |\n\n//! | Magenta | `\\x1b[35m` | `\\x1b[45m` | <span class=\"ansi_color\" style=\"background: rgb(136,23,152);\"> 136,23,152 </span> |\n\n//! | Cyan | `\\x1b[36m` | `\\x1b[46m` | <span class=\"ansi_color\" style=\"background: rgb(58,150,221);\"> 58,150,221 </span> |\n", "file_path": "src/style/mod.rs", "rank": 89, "score": 33985.93087379572 }, { "content": " total_spaces += 1;\n\n } else if !A::ENDING_SPACES {\n\n // if ENDING_SPACES is true, spaces have already been counted and\n\n // last_spaces is 0\n\n total_spaces = last_spaces;\n\n }\n\n }\n\n\n\n RenderElement::Sgr(Sgr::Underline) => underlined = true,\n\n\n\n // Ignore color changes\n\n _ => {}\n\n }\n\n }\n\n\n\n let carried = iter.remaining_token();\n\n *parser = iter.parser;\n\n (current_width as u32, total_spaces, carried, underlined)\n\n }\n\n\n", "file_path": "src/style/mod.rs", "rank": 90, "score": 33984.33515708633 }, { "content": "//! | White | `\\x1b[37m` | `\\x1b[47m` | <span class=\"ansi_color\" style=\"background: rgb(204,204,204); color: black;\"> 204,204,204 </span> |\n\n//! | Gray (Bright Black) | `\\x1b[90m` | `\\x1b[100m` | <span class=\"ansi_color\" style=\"background: rgb(118,118,118); color: black;\"> 118,118,118 </span> |\n\n//! | Bright Red | `\\x1b[91m` | `\\x1b[101m` | <span class=\"ansi_color\" style=\"background: rgb(231,72,86);\"> 231,72,86 </span> |\n\n//! | Bright Green | `\\x1b[92m` | `\\x1b[102m` | <span class=\"ansi_color\" style=\"background: rgb(22,198,12); color: black;\"> 22,198,12 </span> |\n\n//! | Bright Yellow | `\\x1b[93m` | `\\x1b[103m` | <span class=\"ansi_color\" style=\"background: rgb(249,241,165); color: black;\"> 249,241,165 </span> |\n\n//! | Bright Blue | `\\x1b[94m` | `\\x1b[104m` | <span class=\"ansi_color\" style=\"background: rgb(59,120,255);\"> 59,120,255 </span> |\n\n//! | Bright Magenta | `\\x1b[95m` | `\\x1b[105m` | <span class=\"ansi_color\" style=\"background: rgb(180,0,158);\"> 180,0,158 </span> |\n\n//! | Bright Cyan | `\\x1b[96m` | `\\x1b[106m` | <span class=\"ansi_color\" style=\"background: rgb(97,214,214); color: black;\"> 97,214,214 </span> |\n\n//! 
| Bright White | `\\x1b[97m` | `\\x1b[107m` | <span class=\"ansi_color\" style=\"background: rgb(242,242,242); color: black;\"> 242,242,242 </span> |\n\n//!\n\n//! 8 bit colors\n\n//! ------------\n\n//!\n\n//! 8 bit colors are in the form of either `\\x1b[38;5;<n>m` (text color) or `\\x1b[48;5;<n>m`\n\n//! (background color) sequece. Here, `<n>` marks a parameter that determines the color. `<n>` can\n\n//! have the following values:\n\n//!\n\n//! * 0-15: standard colors in the order of the above table.\n\n//! For example, `\\x1b[38;5;12m` is the `Bright Blue` color.\n\n//! * 16-231: 6 × 6 × 6 cube (216 colors): `16 + 36 × r + 6 × g + b (0 ≤ r, g, b ≤ 5)`\n", "file_path": "src/style/mod.rs", "rank": 91, "score": 33983.81149778949 }, { "content": " (\"Longer\\rnowrap\", 36, 8),\n\n ];\n\n let textbox_style = TextBoxStyleBuilder::new(Font6x8)\n\n .text_color(BinaryColor::On)\n\n .build();\n\n for (i, (text, width, expected_height)) in data.iter().enumerate() {\n\n let height = textbox_style.measure_text_height(text, *width);\n\n assert_eq!(\n\n height, *expected_height,\n\n r#\"#{}: Height of \"{}\" is {} but is expected to be {}\"#,\n\n i, text, height, expected_height\n\n );\n\n }\n\n }\n\n\n\n #[test]\n\n fn test_measure_height_ignored_spaces() {\n\n let data = [\n\n (\"\", 0, 0),\n\n (\" \", 0, 0),\n", "file_path": "src/style/mod.rs", "rank": 92, "score": 33983.60700169519 }, { "content": "\n\n let height = style.measure_text_height(\n\n \"Lorem Ipsum is simply dummy text of the printing and typesetting industry.\",\n\n 72,\n\n );\n\n\n\n assert_eq!(height, 7 * 8 + 6 * 2);\n\n }\n\n}\n", "file_path": "src/style/mod.rs", "rank": 93, "score": 33983.138303746746 }, { "content": "//! * 232-255: grayscale from black to white\n\n//!\n\n//! 24 bit colors\n\n//! -------------\n\n//!\n\n//! 8 bit colors are in the form of either `\\x1b[38;2;<r>;<g>;<b>m` (text color) or\n\n//! `\\x1b[48;2;<r>;<g>;<b>m` (background color) sequece. Here, `<r>`, `<g>` and `<b>` can take any\n\n//! value between `0` and `255`.\n\n//!\n\n//! Color values on color spaces other than `Rgb888`\n\n//! ------------------------------------------------\n\n//!\n\n//! By default, `embedded-text` uses the following color types provided by `embedded-graphics`:\n\n//!\n\n//! * `Rgb888`\n\n//! * `Rgb565`\n\n//! * `Rgb555`\n\n//! * `BinaryColor`\n\n//!\n\n//! Internally, all ANSI color sequences are turned into the [`Rgb`] type, which can be converted\n", "file_path": "src/style/mod.rs", "rank": 94, "score": 33982.59202780732 }, { "content": " match token {\n\n RenderElement::Space(_, count) => {\n\n if A::ENDING_SPACES {\n\n // only track width if spaces are rendered at the end of a line\n\n current_width = iter.cursor.position.x;\n\n\n\n // in this case, count all spaces\n\n total_spaces += count;\n\n } else {\n\n // ... 
otherwise save the number of spaces and it will be tracked with\n\n // the next printed character, or it will be discarded\n\n last_spaces = total_spaces + count;\n\n }\n\n }\n\n\n\n RenderElement::PrintedCharacter(c) => {\n\n // the current width is always the position where the cursor is (left is 0)\n\n current_width = iter.cursor.position.x;\n\n\n\n if c == '\\u{A0}' {\n", "file_path": "src/style/mod.rs", "rank": 95, "score": 33979.959102059525 }, { "content": " fn test_measure_height() {\n\n let data = [\n\n (\"\", 0, 0),\n\n (\" \", 0, 8),\n\n (\" \", 5, 8),\n\n (\" \", 6, 8),\n\n (\"\\n\", 6, 8),\n\n (\"\\n \", 6, 16),\n\n (\"word\", 4 * 6, 8), // exact fit into 1 line\n\n (\"word\", 4 * 6 - 1, 16),\n\n (\"word\", 2 * 6, 16), // exact fit into 2 lines\n\n (\"word word\", 4 * 6, 16), // exact fit into 2 lines\n\n (\"word\\n\", 2 * 6, 16),\n\n (\"word\\nnext\", 50, 16),\n\n (\"word\\n\\nnext\", 50, 24),\n\n (\"word\\n \\nnext\", 50, 24),\n\n (\"verylongword\", 50, 16),\n\n (\"some verylongword\", 50, 24),\n\n (\"1 23456 12345 61234 561\", 36, 40),\n\n (\" Word \", 36, 24),\n", "file_path": "src/style/mod.rs", "rank": 96, "score": 33979.959102059525 }, { "content": "//! Top vertical text alignment.\n\nuse crate::{\n\n alignment::{HorizontalTextAlignment, VerticalTextAlignment},\n\n rendering::cursor::Cursor,\n\n style::height_mode::HeightMode,\n\n StyledTextBox,\n\n};\n\nuse embedded_graphics::prelude::*;\n\n\n\n/// Align text to the top of the TextBox.\n\n#[derive(Copy, Clone, Debug)]\n\npub struct TopAligned;\n\n\n\nimpl VerticalTextAlignment for TopAligned {\n\n #[inline]\n\n fn apply_vertical_alignment<'a, C, F, A, H>(\n\n _cursor: &mut Cursor<F>,\n\n _styled_text_box: &'a StyledTextBox<'a, C, F, A, Self, H>,\n\n ) where\n\n C: PixelColor,\n", "file_path": "src/alignment/top.rs", "rank": 97, "score": 33806.148930090756 }, { "content": "//! Bottom vertical text alignment.\n\nuse crate::{\n\n alignment::{HorizontalTextAlignment, VerticalTextAlignment},\n\n rendering::cursor::Cursor,\n\n style::height_mode::HeightMode,\n\n StyledTextBox,\n\n};\n\nuse embedded_graphics::prelude::*;\n\n\n\n/// Align text to the bottom of the TextBox.\n\n#[derive(Copy, Clone, Debug)]\n\npub struct BottomAligned;\n\n\n\nimpl VerticalTextAlignment for BottomAligned {\n\n #[inline]\n\n fn apply_vertical_alignment<'a, C, F, A, H>(\n\n cursor: &mut Cursor<F>,\n\n styled_text_box: &'a StyledTextBox<'a, C, F, A, Self, H>,\n\n ) where\n\n C: PixelColor,\n", "file_path": "src/alignment/bottom.rs", "rank": 98, "score": 33806.148930090756 }, { "content": "}\n\n\n\nimpl<'a, C, F, V, H> RendererFactory<'a, C> for StyledTextBox<'a, C, F, CenterAligned, V, H>\n\nwhere\n\n C: PixelColor + From<Rgb>,\n\n F: Font + Copy,\n\n V: VerticalTextAlignment,\n\n H: HeightMode,\n\n{\n\n type Renderer = StyledTextBoxIterator<'a, C, F, CenterAligned, V, H, UniformSpaceConfig<F>>;\n\n\n\n #[inline]\n\n #[must_use]\n\n fn create_renderer(&self) -> Self::Renderer {\n\n StyledTextBoxIterator::new(self, |style, carried, mut cursor, parser| {\n\n let max_line_width = cursor.line_width();\n\n let (width, _, _, _) =\n\n style.measure_line(&mut parser.clone(), carried.clone(), max_line_width);\n\n cursor.advance_unchecked((max_line_width - width + 1) / 2);\n\n\n", "file_path": "src/alignment/center.rs", "rank": 99, "score": 33804.16666167424 } ]
Rust
crates/apps/plugin-host/plugin-host-lib/src/commands/options/mod.rs
yamadapc/augmented-audio
2f662cd8aa1a0ba46445f8f41c8483ae2dc552d3
use clap::{App, ArgMatches}; #[derive(Clone)] pub struct RunOptions { plugin_path: String, input_audio: Option<String>, output_audio: Option<String>, open_editor: bool, watch: bool, audio_host_id: Option<String>, output_device_id: Option<String>, buffer_size: Option<usize>, sample_rate: Option<usize>, input_device_id: Option<String>, use_default_input_device: bool, use_mono_input: Option<usize>, } impl RunOptions { pub fn plugin_path(&self) -> &str { &self.plugin_path } pub fn input_audio(&self) -> &Option<String> { &self.input_audio } pub fn output_audio(&self) -> &Option<String> { &self.output_audio } pub fn open_editor(&self) -> bool { self.open_editor } pub fn watch(&self) -> bool { self.watch } pub fn audio_host_id(&self) -> &Option<String> { &self.audio_host_id } pub fn output_device_id(&self) -> &Option<String> { &self.output_device_id } pub fn buffer_size(&self) -> Option<usize> { self.buffer_size } pub fn sample_rate(&self) -> Option<usize> { self.sample_rate } pub fn input_device_id(&self) -> &Option<String> { &self.input_device_id } pub fn use_default_input_device(&self) -> bool { self.use_default_input_device } pub fn use_mono_input(&self) -> Option<usize> { self.use_mono_input } } pub fn build_run_command<'a, 'b>() -> App<'a, 'b> { clap::App::new("run") .about("Process audio") .arg(clap::Arg::from_usage( "-p, --plugin=<PLUGIN_PATH> 'An audio-plugin to load'", )) .arg(clap::Arg::from_usage( "-i, --input=[INPUT_PATH] 'An audio file to process'", )) .arg(clap::Arg::from_usage( "-o, --output=[OUTPUT_PATH] 'If specified, will render offline into file'", )) .arg(clap::Arg::from_usage( "-e, --editor 'Open the editor window'", )) .arg(clap::Arg::from_usage( "-w, --watch 'Watch and reload the VST when it changes'", )) .arg(clap::Arg::from_usage( "--host-id=[HOST_ID] 'Audio host name'", )) .arg(clap::Arg::from_usage( "--output-device-id=[OUTPUT_DEVICE_ID] 'Output device id'", )) .arg(clap::Arg::from_usage( "--buffer-size=[BUFFER_SIZE] 'Buffer size'", )) .arg(clap::Arg::from_usage( "--sample-rate=[SAMPLE_RATE] 'Sample rate'", )) .arg(clap::Arg::from_usage( "--input-device-id=[INPUT_DEVICE_ID] 'Open audio input with Input device id'", )) .arg(clap::Arg::from_usage( "--use-default-input-device 'Open audio input with the default device'", )) .arg(clap::Arg::from_usage( "--use-mono-input=[CHANNEL_NUMBER] 'If specified, the input stream will be mono-ed selecting the desired channel'", )) } pub fn parse_run_options(matches: ArgMatches) -> Option<RunOptions> { let matches = matches.subcommand_matches("run")?; let plugin_path = matches.value_of("plugin")?.to_string(); let input_audio = matches.value_of("input").map(|i| i.to_string()); let output_audio = matches.value_of("output").map(|value| value.to_string()); let open_editor = matches.is_present("editor"); let watch = matches.is_present("watch"); let audio_host_id = matches.value_of("host-id").map(|value| value.to_string()); let output_device_id = matches .value_of("output-device-id") .map(|value| value.to_string()); let buffer_size = matches .value_of("buffer-size") .map(|value| value.parse().expect("Invalid buffer size")); let sample_rate = matches .value_of("sample-rate") .map(|value| value.parse().expect("Invalid sample rate")); let input_device_id = matches .value_of("input-device-id") .map(|value| value.to_string()); let use_default_input_device = matches.is_present("use-default-input-device"); let use_mono_input = matches .value_of("use-mono-input") .map(|s| s.parse().expect("Invalid channel number")); Some(RunOptions { plugin_path, 
input_audio, output_audio, open_editor, watch, audio_host_id, output_device_id, buffer_size, sample_rate, input_device_id, use_default_input_device, use_mono_input, }) }
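For context, the options module quoted above exposes two entry points: build_run_command, which declares the "run" subcommand's flags through clap v2 from_usage strings, and parse_run_options, which turns matched arguments into a RunOptions value. The sketch below shows one way they could be wired into a binary's entry point; it is illustrative only, not code from this repository: the binary name "plugin-host", the main wiring, and the println output are assumptions, and it presumes both functions are in scope (for example via a use of this module).

use clap::App;

// Hypothetical entry point wiring the quoted options module into a clap v2 app.
// `build_run_command` / `parse_run_options` are assumed to be imported from the
// module shown above; the app name "plugin-host" is made up for this sketch.
fn main() {
    // Register the "run" subcommand defined by the options module.
    let app = App::new("plugin-host").subcommand(build_run_command());
    let matches = app.get_matches();

    // `parse_run_options` returns None unless the "run" subcommand was used,
    // since it internally calls `subcommand_matches("run")`.
    if let Some(options) = parse_run_options(matches) {
        println!("plugin: {}", options.plugin_path());
        println!("watch mode: {}", options.watch());
        println!("buffer size: {:?}", options.buffer_size());
    }
}

One design note on the module itself: each flag is declared with a single from_usage string (flag name, value placeholder, and help text in one literal), which keeps build_run_command compact, while parse_run_options converts the untyped --buffer-size / --sample-rate strings with parse().expect(...), so malformed numeric flags panic at startup instead of being reported as a recoverable error.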
use clap::{App, ArgMatches}; #[derive(Clone)] pub struct RunOptions { plugin_path: String, input_audio: Option<String>, output_audio: Option<String>, open_editor: bool, watch: bool, audio_host_id: Option<String>, output_device_id: Option<String>, buffer_size: Option<usize>, sample_rate: Option<usize>, input_device_id: Option<String>, use_default_input_device: bool, use_mono_input: Option<usize>, } impl RunOptions { pub fn plugin_path(&self) -> &str { &self.plugin_path } pub fn input_audio(&self) -> &Option<String> { &self.input_audio } pub fn output_audio(&self) -> &Option<String> { &self.output_audio } pub fn open_editor(&self) -> bool { self.open_editor } pub fn watch(&self) -> bool { self.watch } pub fn audio_host_id(&self) -> &Option<String> { &self.audio_host_id } pub fn output_device_id(&self) -> &Option<String> { &self.output_device_id } pub fn buffer_size(&self) -> Option<usize> { self.buffer_size } pub fn sample_rate(&self) -> Option<usi
") .map(|value| value.parse().expect("Invalid buffer size")); let sample_rate = matches .value_of("sample-rate") .map(|value| value.parse().expect("Invalid sample rate")); let input_device_id = matches .value_of("input-device-id") .map(|value| value.to_string()); let use_default_input_device = matches.is_present("use-default-input-device"); let use_mono_input = matches .value_of("use-mono-input") .map(|s| s.parse().expect("Invalid channel number")); Some(RunOptions { plugin_path, input_audio, output_audio, open_editor, watch, audio_host_id, output_device_id, buffer_size, sample_rate, input_device_id, use_default_input_device, use_mono_input, }) }
ze> { self.sample_rate } pub fn input_device_id(&self) -> &Option<String> { &self.input_device_id } pub fn use_default_input_device(&self) -> bool { self.use_default_input_device } pub fn use_mono_input(&self) -> Option<usize> { self.use_mono_input } } pub fn build_run_command<'a, 'b>() -> App<'a, 'b> { clap::App::new("run") .about("Process audio") .arg(clap::Arg::from_usage( "-p, --plugin=<PLUGIN_PATH> 'An audio-plugin to load'", )) .arg(clap::Arg::from_usage( "-i, --input=[INPUT_PATH] 'An audio file to process'", )) .arg(clap::Arg::from_usage( "-o, --output=[OUTPUT_PATH] 'If specified, will render offline into file'", )) .arg(clap::Arg::from_usage( "-e, --editor 'Open the editor window'", )) .arg(clap::Arg::from_usage( "-w, --watch 'Watch and reload the VST when it changes'", )) .arg(clap::Arg::from_usage( "--host-id=[HOST_ID] 'Audio host name'", )) .arg(clap::Arg::from_usage( "--output-device-id=[OUTPUT_DEVICE_ID] 'Output device id'", )) .arg(clap::Arg::from_usage( "--buffer-size=[BUFFER_SIZE] 'Buffer size'", )) .arg(clap::Arg::from_usage( "--sample-rate=[SAMPLE_RATE] 'Sample rate'", )) .arg(clap::Arg::from_usage( "--input-device-id=[INPUT_DEVICE_ID] 'Open audio input with Input device id'", )) .arg(clap::Arg::from_usage( "--use-default-input-device 'Open audio input with the default device'", )) .arg(clap::Arg::from_usage( "--use-mono-input=[CHANNEL_NUMBER] 'If specified, the input stream will be mono-ed selecting the desired channel'", )) } pub fn parse_run_options(matches: ArgMatches) -> Option<RunOptions> { let matches = matches.subcommand_matches("run")?; let plugin_path = matches.value_of("plugin")?.to_string(); let input_audio = matches.value_of("input").map(|i| i.to_string()); let output_audio = matches.value_of("output").map(|value| value.to_string()); let open_editor = matches.is_present("editor"); let watch = matches.is_present("watch"); let audio_host_id = matches.value_of("host-id").map(|value| value.to_string()); let output_device_id = matches .value_of("output-device-id") .map(|value| value.to_string()); let buffer_size = matches .value_of("buffer-size
random
[ { "content": "/// Check if there's a non-null CFBundle with this identifier.\n\npub fn has_bundle(bundle_identifier: &str) -> bool {\n\n unsafe {\n\n let bundle_identifier = make_cfstring(bundle_identifier);\n\n if let Some(bundle_identifier) = bundle_identifier {\n\n let bundle = CFBundleGetBundleWithIdentifier(bundle_identifier);\n\n !bundle.is_null()\n\n } else {\n\n false\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/augmented/gui/macos-bundle-resources/src/macos/mod.rs", "rank": 0, "score": 271229.23878746026 }, { "content": "pub fn time<T>(label: &str, body: impl FnOnce() -> T) -> T {\n\n let start = Instant::now();\n\n let result = body();\n\n log::info!(\"{} duration={}ms\", label, start.elapsed().as_millis());\n\n result\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_time() {\n\n let result = time(\"test_time\", || 10_i32.pow(2));\n\n assert_eq!(result, 100);\n\n }\n\n}\n", "file_path": "crates/augmented/ops/augmented-metrics/src/lib.rs", "rank": 1, "score": 248583.92387874937 }, { "content": "pub fn audio_thread_set_options(input_device_id: String, output_device_id: String) -> Result<i32> {\n\n let actor_system_thread = ActorSystemThread::current();\n\n actor_system_thread.spawn_result(async move {\n\n let audio_thread = AudioThread::from_registry();\n\n audio_thread\n\n .send(AudioThreadMessage::SetOptions {\n\n host_id: AudioHostId::Default,\n\n input_device_id: if input_device_id == \"default\" {\n\n Some(AudioDeviceId::Default)\n\n } else {\n\n Some(AudioDeviceId::Id(input_device_id))\n\n },\n\n output_device_id: if output_device_id == \"default\" {\n\n AudioDeviceId::Default\n\n } else {\n\n AudioDeviceId::Id(output_device_id)\n\n },\n\n })\n\n .await\n\n .unwrap().unwrap();\n\n });\n\n Ok(0)\n\n}\n\n\n", "file_path": "crates/apps/daw/src/api.rs", "rank": 2, "score": 245956.6646416511 }, { "content": "pub fn run_snapshot_tests(_path: &str, manifest: CargoToml, update_snapshots: bool) {\n\n let crate_name = manifest.package.name;\n\n log::info!(\"Running snapshot tests for {}\", crate_name);\n\n let metadata = manifest.package.metadata.unwrap();\n\n let examples = metadata.augmented.unwrap().processor_examples.unwrap();\n\n\n\n for example in examples {\n\n run_example_snapshot_tests(&crate_name, &example, update_snapshots);\n\n }\n\n}\n\n\n", "file_path": "crates/augmented/development/augmented-dev-cli/src/services/snapshot_tests_service/mod.rs", "rank": 3, "score": 228276.84163242037 }, { "content": "/// Check if there's a non-null main CFBundle.\n\npub fn has_main_bundle() -> bool {\n\n unsafe {\n\n let main_bundle = CFBundleGetMainBundle();\n\n !main_bundle.is_null()\n\n }\n\n}\n\n\n", "file_path": "crates/augmented/gui/macos-bundle-resources/src/macos/mod.rs", "rank": 4, "score": 221395.79323642707 }, { "content": "pub fn get_version() -> String {\n\n format!(\n\n \"{}-{}-{}\",\n\n env!(\"PROFILE\"),\n\n env!(\"CARGO_PKG_VERSION\"),\n\n env!(\"GIT_REV_SHORT\")\n\n )\n\n}\n\n\n", "file_path": "crates/apps/plugin-host/plugin-host-gui2/src/utils.rs", "rank": 5, "score": 221266.02822837693 }, { "content": "pub fn parse_header_body<StringRepr: Borrow<str>, Buffer: Borrow<[u8]>>(\n\n input: Input,\n\n) -> Result<MIDIFileChunk<StringRepr, Buffer>> {\n\n let (input, format) = be_u16(input)?;\n\n let format = match format {\n\n 0 => Ok(MIDIFileFormat::Single),\n\n 1 => Ok(MIDIFileFormat::Simultaneous),\n\n 2 => Ok(MIDIFileFormat::Sequential),\n\n _ => Ok(MIDIFileFormat::Unknown),\n\n }?;\n\n let (input, num_tracks) = be_u16(input)?;\n\n let 
(input, division_word) = be_u16(input)?;\n\n\n\n let division_type = division_word >> 15;\n\n let (input, division) = match division_type {\n\n 0 => {\n\n let ticks_per_quarter_note = (division_word << 1) >> 1;\n\n Ok((\n\n input,\n\n MIDIFileDivision::TicksPerQuarterNote {\n", "file_path": "crates/augmented/data/augmented-midi/src/lib.rs", "rank": 6, "score": 220559.33361077736 }, { "content": "pub fn init(name: &str) {\n\n if let Err(err) = logging::configure_logging(&get_configuration_root_path(), name) {\n\n eprintln!(\"{}: Failed to initialize logging {:?}\", name, err);\n\n }\n\n}\n", "file_path": "crates/augmented/ops/audio-plugin-logger/src/lib.rs", "rank": 7, "score": 215924.6122488607 }, { "content": "pub fn audio_io_get_input_devices() -> Result<String> {\n\n let devices_list = AudioIOService::devices_list(None)?;\n\n let result = serde_json::to_string(&devices_list)?;\n\n Ok(result)\n\n}\n\n\n", "file_path": "crates/apps/daw/src/api.rs", "rank": 8, "score": 213496.6111977687 }, { "content": "pub fn set_is_playing(value: bool) -> Result<i32> {\n\n with_state0(|state| {\n\n state\n\n .processor_handle\n\n .is_playing\n\n .store(value, Ordering::Relaxed);\n\n })\n\n}\n\n\n", "file_path": "crates/apps/metronome/src/api.rs", "rank": 9, "score": 203840.57515972102 }, { "content": "pub fn parse_options(supports_midi: bool) -> Options {\n\n parse_options_from(supports_midi, &mut std::env::args_os())\n\n}\n\n\n", "file_path": "crates/augmented/application/audio-processor-standalone/src/options.rs", "rank": 10, "score": 202420.79242020243 }, { "content": "fn run_example_snapshot_tests(crate_name: &str, example: &str, update_snapshots: bool) {\n\n spawn!(cargo build --package ${crate_name} --release --example ${example})\n\n .unwrap()\n\n .wait()\n\n .expect(\"Failed to build example\");\n\n\n\n run_cmd!(mkdir -p test/snapshots/${crate_name}/).unwrap();\n\n spawn!(cargo run --package ${crate_name} --release --example ${example} -- --input-file ./input-files/C3-loop.mp3 --output-file test/snapshots/${crate_name}/${example}.tmp.wav)\n\n .unwrap()\n\n .wait()\n\n .expect(\"Failed to run example\");\n\n\n\n let md5_commit =\n\n run_fun!(md5 -q test/snapshots/${crate_name}/${example}.wav).unwrap_or(\"\".into());\n\n let md5_test =\n\n run_fun!(md5 -q test/snapshots/${crate_name}/${example}.tmp.wav).unwrap_or(\"\".into());\n\n\n\n if update_snapshots {\n\n if md5_test != md5_commit {\n\n log::warn!(\"Updating snapshot {}/{}\", crate_name, example,);\n", "file_path": "crates/augmented/development/augmented-dev-cli/src/services/snapshot_tests_service/mod.rs", "rank": 11, "score": 200134.8280576482 }, { "content": "fn get_upload_path(release_key: &str) -> String {\n\n let bucket = std::env::var(\"AWS_S3_BUCKET\").unwrap();\n\n let bucket_path = format!(\"{}{}\", bucket, release_key);\n\n bucket_path\n\n}\n\n\n", "file_path": "crates/augmented/development/augmented-dev-cli/src/services/build_command_service/release_service.rs", "rank": 12, "score": 199961.72989897334 }, { "content": "pub fn run_file_watch_loop(\n\n rx: Receiver<DebouncedEvent>,\n\n run_options: RunOptions,\n\n host: Addr<TestPluginHost>,\n\n) -> ! 
{\n\n let inner = || -> Result<(), std::io::Error> {\n\n let mut current_hash = get_file_hash(run_options.plugin_path().as_ref())?;\n\n loop {\n\n match rx.recv() {\n\n Ok(_) => {\n\n let new_hash = get_file_hash(run_options.plugin_path().as_ref())?;\n\n if new_hash == current_hash {\n\n log::warn!(\"Ignoring event due to same plugin hash\");\n\n continue;\n\n } else {\n\n log::info!(\n\n \"Received file change event. Plug-in will be reloaded content_hash={}\",\n\n new_hash\n\n );\n\n current_hash = new_hash;\n", "file_path": "crates/apps/plugin-host/plugin-host-lib/src/commands/main/file_watch.rs", "rank": 13, "score": 199026.20564515132 }, { "content": "pub fn set_vst_file_path(path: String) -> Result<i32> {\n\n send_host_message(LoadPluginMessage {\n\n plugin_path: PathBuf::from(path),\n\n });\n\n Ok(0)\n\n}\n\n\n", "file_path": "crates/apps/daw/src/api.rs", "rank": 14, "score": 198939.02657982605 }, { "content": "pub fn set_input_file_path(path: String) -> Result<i32> {\n\n send_host_message(SetAudioFilePathMessage(PathBuf::from(path)));\n\n Ok(0)\n\n}\n\n\n", "file_path": "crates/apps/daw/src/api.rs", "rank": 15, "score": 198939.02657982605 }, { "content": "pub fn audio_node_create(audio_processor_name: String) -> Result<u32> {\n\n let processor: Result<NodeType<f32>> = match audio_processor_name.as_str() {\n\n \"delay\" => Ok(Box::new(audio_processor_time::MonoDelayProcessor::default())),\n\n \"filter\" => Ok(Box::new(augmented_dsp_filters::rbj::FilterProcessor::new(\n\n augmented_dsp_filters::rbj::FilterType::LowPass,\n\n ))),\n\n \"gain\" => Ok(Box::new(\n\n audio_processor_utility::gain::GainProcessor::default(),\n\n )),\n\n \"pan\" => Ok(Box::new(\n\n audio_processor_utility::pan::PanProcessor::default(),\n\n )),\n\n _ => Err(anyhow::Error::msg(\"Failed to create processor\")),\n\n };\n\n let processor = processor?;\n\n\n\n let index = crate::graph::audio_node_create_raw(processor);\n\n\n\n Ok(index as u32)\n\n}\n\n\n", "file_path": "crates/apps/daw/src/api.rs", "rank": 16, "score": 196673.03495937018 }, { "content": "pub fn draw_vec_chart(filename: &str, plot_name: &str, vec: Vec<f32>) {\n\n let filename = Path::new(filename);\n\n let chart_filename = filename.with_file_name(format!(\n\n \"{}--{}.png\",\n\n filename.file_name().unwrap().to_str().unwrap(),\n\n plot_name\n\n ));\n\n\n\n let backend = BitMapBackend::new(&chart_filename, (1000, 200));\n\n let drawing_area = backend.into_drawing_area();\n\n drawing_area.fill(&WHITE).unwrap();\n\n\n\n let x_range = (0, vec.len());\n\n let y_range = (\n\n vec.iter().cloned().fold(-1. / 0., f32::max) as f64,\n\n vec.iter().cloned().fold(1. 
/ 0., f32::min) as f64,\n\n );\n\n let values: Vec<(usize, f64)> = vec\n\n .iter()\n\n .enumerate()\n", "file_path": "crates/augmented/testing/audio-processor-testing-helpers/src/charts.rs", "rank": 17, "score": 195639.4897957226 }, { "content": "pub fn get_events_sink(sink: StreamSink<String>) -> Result<i32> {\n\n std::thread::spawn(move || loop {\n\n sink.add(\"MESSAGE\".to_string());\n\n std::thread::sleep(Duration::from_millis(1000));\n\n });\n\n Ok(0)\n\n}\n\n\n", "file_path": "crates/apps/daw/src/api.rs", "rank": 18, "score": 191791.03901044774 }, { "content": "fn crate_has_changes(path: &str, manifest: &CargoToml) -> bool {\n\n let tag = format!(\"{}@{}\", &*manifest.package.name, &*manifest.package.version);\n\n let result = cmd_lib::run_fun!(PAGER= git diff $tag $path);\n\n log::info!(\"git diff {} {}\\n ==> {:?}\", tag, path, result);\n\n\n\n match result {\n\n Ok(s) if s.is_empty() => {\n\n log::warn!(\"SKIPPING - NO changes in {}\", path);\n\n false\n\n }\n\n _ => {\n\n log::warn!(\"BUMPING - Found changes in {}\", path);\n\n true\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/augmented/development/augmented-dev-cli/src/services/release_service/mod.rs", "rank": 19, "score": 189629.45845187677 }, { "content": "fn find_target(config_path: &str, cargo_package: &CargoToml) -> Option<String> {\n\n let config_path = std::fs::canonicalize(Path::new(config_path)).ok()?;\n\n let mut config_dir = config_path.parent()?;\n\n loop {\n\n log::info!(\"Searching for target in {:?}\", config_dir);\n\n let mut read_dir = config_dir.read_dir().ok()?;\n\n let target_dir =\n\n read_dir.find(|item| item.is_ok() && item.as_ref().unwrap().file_name() == \"target\");\n\n if let Some(Ok(target_dir)) = target_dir {\n\n return Some(String::from(\n\n target_dir\n\n .path()\n\n .join(format!(\"release/lib{}.dylib\", cargo_package.lib.name))\n\n .to_str()?,\n\n ));\n\n } else {\n\n config_dir = config_dir.parent()?;\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/augmented/development/bundler/src/main.rs", "rank": 20, "score": 188602.54495789474 }, { "content": "pub fn render_to_xml<C: Component>(mut root: C) -> String {\n\n use xml::writer::{EmitterConfig, EventWriter, XmlEvent};\n\n\n\n let bytes = Vec::new();\n\n let buf_sink = BufWriter::new(bytes);\n\n let mut writer = EmitterConfig::new()\n\n .perform_indent(true)\n\n .create_writer(buf_sink);\n\n\n\n let root_tag = root.tag();\n\n let root_tag_str = root_tag.to_string();\n\n\n\n let event: XmlEvent = XmlEvent::start_element(&*root_tag_str).into();\n\n writer.write(event);\n\n log::info!(\"Rendering {:?}\", root_tag_str);\n\n\n\n {\n\n let empty_props: Box<dyn Props + 'static> = Box::new(());\n\n let mut ctx = RenderContext::new(&empty_props).into();\n\n let node = root.render(ctx);\n", "file_path": "crates/spikes/augmented-ui/src/component.rs", "rank": 21, "score": 187519.64638156773 }, { "content": "pub fn configure_logging(root_config_path: &Path, name: &str) -> Result<()> {\n\n let log_dir = ensure_logging_directory(root_config_path)?;\n\n let log_path = log_dir.join(name);\n\n let logfile = RollingFileAppender::builder()\n\n .encoder(Box::new(PatternEncoder::new(\n\n \"{d} [{l}] {M}:{L} - {m} - tid:{T}:{t} pid:{P}\\n\",\n\n )))\n\n .build(\n\n log_path,\n\n Box::new(CompoundPolicy::new(\n\n Box::new(SizeTrigger::new(1024 * 1024 * 10)),\n\n Box::new(DeleteRoller::new()),\n\n )),\n\n )\n\n .map_err(LoggingSetupError::FileAppender)?;\n\n\n\n let config = Config::builder()\n\n .appender(Appender::builder().build(\"logfile\", Box::new(logfile)))\n\n 
.appender(Appender::builder().build(\"stdout\", Box::new(ConsoleAppender::builder().build())))\n\n .build(\n", "file_path": "crates/augmented/ops/audio-plugin-logger/src/logging.rs", "rank": 22, "score": 185448.10262199407 }, { "content": "pub fn shallow_render_to_xml<C: Component>(mut root: C) -> String {\n\n use xml::writer::{EmitterConfig, EventWriter, XmlEvent};\n\n\n\n let bytes = Vec::new();\n\n let buf_sink = BufWriter::new(bytes);\n\n let mut writer = EmitterConfig::new()\n\n .perform_indent(true)\n\n .create_writer(buf_sink);\n\n\n\n let root_tag = root.tag();\n\n let root_tag_str = root_tag.to_string();\n\n\n\n let event: XmlEvent = XmlEvent::start_element(&*root_tag_str).into();\n\n writer.write(event);\n\n log::info!(\"Rendering {:?}\", root_tag_str);\n\n\n\n {\n\n let empty_props: Box<dyn Props + 'static> = Box::new(());\n\n let mut ctx = RenderContext::new(&empty_props).into();\n\n let node = root.render(ctx);\n", "file_path": "crates/spikes/augmented-ui/src/component.rs", "rank": 23, "score": 185333.8719816489 }, { "content": "pub fn run_all_snapshot_tests(list_crates_service: ListCratesService, update_snapshots: bool) {\n\n let augmented_crates = list_crates_service.find_augmented_crates();\n\n augmented_crates\n\n .into_iter()\n\n .filter(|(_, manifest)| manifest.has_snapshot_tests())\n\n .for_each(|(path, manifest)| run_snapshot_tests(&path, manifest, update_snapshots));\n\n}\n", "file_path": "crates/augmented/development/augmented-dev-cli/src/services/snapshot_tests_service/mod.rs", "rank": 24, "score": 177921.90514725586 }, { "content": "/// Build a CFStringRef out of a &str ref.\n\nfn make_cfstring(s: &str) -> Option<CFStringRef> {\n\n unsafe {\n\n let allocator = CFAllocatorGetDefault();\n\n let c_str = CString::new(s).ok()?;\n\n let cfstring_ref =\n\n CFStringCreateWithCString(allocator, c_str.as_ptr(), kCFStringEncodingUTF8);\n\n\n\n if cfstring_ref.is_null() {\n\n return None;\n\n }\n\n\n\n Some(cfstring_ref)\n\n }\n\n}\n\n\n", "file_path": "crates/augmented/gui/macos-bundle-resources/src/macos/mod.rs", "rank": 25, "score": 174056.3567478094 }, { "content": "fn section_heading<'a, T: Into<String>>(label: T) -> impl Into<Element<'a, Message>> {\n\n let text = Text::new(label);\n\n Column::with_children(vec![\n\n Container::new(text)\n\n .style(Container0::default())\n\n .padding(Spacing::base_spacing())\n\n .into(),\n\n horizontal_rule().into(),\n\n ])\n\n}\n\n\n", "file_path": "crates/augmented/gui/audio-settings-gui/src/lib.rs", "rank": 26, "score": 171417.81892269367 }, { "content": "/// Opens an audio file with default options & trying to guess the format\n\npub fn default_read_audio_file(input_audio_path: &str) -> Result<ProbeResult, AudioFileError> {\n\n log::info!(\n\n \"Trying to open and probe audio file at {}\",\n\n input_audio_path\n\n );\n\n\n\n let mut hint = Hint::new();\n\n let media_source = {\n\n let audio_input_path = Path::new(input_audio_path);\n\n let _ = try_set_audio_file_hint(&mut hint, audio_input_path);\n\n File::open(audio_input_path)?\n\n };\n\n let audio_file = MediaSourceStream::new(Box::new(media_source), Default::default());\n\n let format_opts: FormatOptions = Default::default();\n\n let metadata_opts: MetadataOptions = Default::default();\n\n let audio_file = get_probe().format(&hint, audio_file, &format_opts, &metadata_opts)?;\n\n Ok(audio_file)\n\n}\n\n\n", "file_path": "crates/augmented/audio/audio-processor-file/src/audio_file_processor/file_io.rs", "rank": 27, "score": 169826.56918292536 }, { "content": "fn 
prerelease_crate(path: &str, manifest: &CargoToml, all_crates: &Vec<(String, CargoToml)>) {\n\n log::info!(\"Running pre-release proc for {}\", path);\n\n\n\n let new_version = bump_own_version_prerelease(&manifest.package.name, path);\n\n\n\n log::info!(\n\n \" => New version is {}, will now bump it throughout the repo\",\n\n new_version.to_string()\n\n );\n\n\n\n for (other_crate_path, _) in all_crates {\n\n let manifest_path = format!(\"{}/Cargo.toml\", other_crate_path);\n\n let cargo_manifest_str = std::fs::read_to_string(&manifest_path).unwrap();\n\n let mut cargo_manifest = cargo_manifest_str.parse::<toml_edit::Document>().unwrap();\n\n\n\n if cargo_manifest.get(\"dependencies\").is_none()\n\n && cargo_manifest.get(\"dev-dependencies\").is_none()\n\n {\n\n continue;\n\n }\n", "file_path": "crates/augmented/development/augmented-dev-cli/src/services/release_service/mod.rs", "rank": 28, "score": 169176.7137827504 }, { "content": "fn section_heading<'a, T: Into<String>>(label: T) -> impl Into<Element<'a, Message>> {\n\n let text = Text::new(label);\n\n Column::with_children(vec![\n\n Container::new(text)\n\n .style(Container0::default())\n\n .padding(Spacing::base_spacing())\n\n .into(),\n\n horizontal_rule().into(),\n\n ])\n\n}\n\n\n", "file_path": "crates/apps/plugin-host/plugin-host-gui2/src/ui/audio_io_settings/view.rs", "rank": 29, "score": 161853.83309669822 }, { "content": "pub fn initialize_logger() {\n\n let _ = wisual_logger::try_init_from_env();\n\n}\n\n\n\npub struct AudioGuiInitialModel {\n\n host_ids: Vec<String>,\n\n input_ids: Vec<String>,\n\n output_ids: Vec<String>,\n\n}\n\n\n", "file_path": "crates/spikes/AugmentedNative/src/lib.rs", "rank": 30, "score": 158891.43153835915 }, { "content": "pub fn initialize_logger() {\n\n let _ = wisual_logger::try_init_from_env();\n\n}\n\n\n\nuniffi_macros::include_scaffolding!(\"augmented\");\n", "file_path": "crates/apps/recording_buddy/src/lib.rs", "rank": 31, "score": 158891.43153835915 }, { "content": "pub fn parse_chunk<\n\n 'a,\n\n StringRepr: Borrow<str> + From<&'a str>,\n\n Buffer: Borrow<[u8]> + From<Input<'a>>,\n\n>(\n\n input: Input<'a>,\n\n) -> Result<'a, MIDIFileChunk<StringRepr, Buffer>> {\n\n let (input, chunk_name) = take(4u32)(input)?;\n\n let chunk_name: &str = std::str::from_utf8(chunk_name)\n\n .map_err(|err| Err::Failure(Error::from_external_error(input, ErrorKind::Fail, err)))?;\n\n\n\n let (input, chunk_length) = parse_chunk_length(input)?;\n\n let (input, chunk_body) = take(chunk_length)(input)?;\n\n\n\n let (_, chunk) = match chunk_name {\n\n \"MThd\" => {\n\n assert_eq!(chunk_length, 6);\n\n parse_header_body(chunk_body)\n\n }\n\n \"MTrk\" => {\n", "file_path": "crates/augmented/data/augmented-midi/src/lib.rs", "rank": 32, "score": 157071.20553112536 }, { "content": "pub fn main() {\n\n let window = WindowDesc::new(|| make_ui()).title(\"External Event Demo\");\n\n\n\n let launcher = AppLauncher::with_window(window);\n\n let event_sink = launcher.get_external_handle();\n\n\n\n let garbage_collector = GarbageCollector::default();\n\n let processor = BufferAnalyserProcessor::new(garbage_collector.handle());\n\n let queue_handle = processor.queue();\n\n let _audio_streams = audio_processor_start(processor);\n\n thread::spawn(move || generate_audio_updates(event_sink, queue_handle));\n\n\n\n launcher\n\n .launch(AudioData(Vec::new()))\n\n .expect(\"launch failed\");\n\n}\n\n\n", "file_path": "crates/spikes/example-druid-audio-viz/src/main.rs", "rank": 33, "score": 157071.20553112536 }, { "content": "///! 
Try to set-up the logger and ignore errors\n\npub fn init_from_env() {\n\n let _ = try_init_from_env();\n\n}\n", "file_path": "crates/augmented/ops/wisual-logger/src/lib.rs", "rank": 34, "score": 157071.20553112536 }, { "content": "pub fn audio_node_set_parameter(\n\n _audio_node_id: i32,\n\n _parameter_name: String,\n\n _parameter_value: f32,\n\n) -> Result<i32> {\n\n todo!()\n\n}\n", "file_path": "crates/apps/daw/src/api.rs", "rank": 35, "score": 157071.20553112536 }, { "content": "pub fn parse_midi_file<\n\n 'a,\n\n StringRepr: Borrow<str> + From<&'a str>,\n\n Buffer: Borrow<[u8]> + From<&'a [u8]>,\n\n>(\n\n input: Input<'a>,\n\n) -> Result<'a, MIDIFile<StringRepr, Buffer>> {\n\n let (input, chunks) = many0(parse_chunk)(input)?;\n\n Ok((input, MIDIFile { chunks }))\n\n}\n\n\n", "file_path": "crates/augmented/data/augmented-midi/src/lib.rs", "rank": 36, "score": 155321.67287023435 }, { "content": "fn get_cli_version() -> String {\n\n format!(\n\n \"{}-{}-{}\",\n\n env!(\"PROFILE\"),\n\n env!(\"CARGO_PKG_VERSION\"),\n\n env!(\"GIT_REV_SHORT\")\n\n )\n\n}\n\n\n", "file_path": "crates/augmented/development/augmented-dev-cli/src/main.rs", "rank": 37, "score": 155184.90899193235 }, { "content": "/// Get the path to a resource\n\npub fn get_path(\n\n bundle_identifier: &str,\n\n resource_name: &str,\n\n resource_type: Option<&str>,\n\n sub_dir_name: Option<&str>,\n\n) -> Option<PathBuf> {\n\n let resource_name = make_cfstring(resource_name)?;\n\n let resource_type = resource_type\n\n .map(|resource_type| make_cfstring(resource_type))\n\n .flatten()\n\n .unwrap_or(std::ptr::null());\n\n let sub_dir_name = sub_dir_name\n\n .map(|sub_dir_name| make_cfstring(sub_dir_name))\n\n .flatten()\n\n .unwrap_or(std::ptr::null());\n\n\n\n unsafe {\n\n log::debug!(\"Getting bundle {}\", bundle_identifier);\n\n let bundle_identifier = make_cfstring(bundle_identifier)?;\n\n let main_bundle = CFBundleGetBundleWithIdentifier(bundle_identifier);\n", "file_path": "crates/augmented/gui/macos-bundle-resources/src/macos/mod.rs", "rank": 38, "score": 153638.79365797428 }, { "content": "/// Create a sine wave buffer with this duration\n\npub fn oscillator_buffer(\n\n sample_rate: f32,\n\n frequency: f32,\n\n length: Duration,\n\n generator_fn: fn(f32) -> f32,\n\n) -> Vec<f32> {\n\n let mut source = Oscillator::new(generator_fn);\n\n source.set_sample_rate(sample_rate);\n\n source.set_frequency(frequency);\n\n let mut output = Vec::new();\n\n let length_samples = (length.as_secs_f32() * sample_rate).ceil();\n\n output.resize(length_samples as usize, 0.0);\n\n for sample in &mut output {\n\n *sample = source.next_sample();\n\n }\n\n output\n\n}\n", "file_path": "crates/augmented/testing/audio-processor-testing-helpers/src/generators.rs", "rank": 39, "score": 153638.79365797428 }, { "content": "pub fn convert_wav_file_to_mp3(\n\n wav_file_path: &str,\n\n mp3_file_path: &str,\n\n) -> std::io::Result<ExitStatus> {\n\n let mut result = Command::new(\"lame\")\n\n .arg(wav_file_path)\n\n .arg(mp3_file_path)\n\n .spawn()?;\n\n result.wait()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::convert_wav_file_to_mp3;\n\n\n\n #[test]\n\n fn it_can_encode_mp3() {\n\n let crate_dir = env!(\"CARGO_MANIFEST_DIR\");\n\n let input_path = format!(\"{}/test-inputs/synth.wav\", crate_dir);\n\n let output_path = format!(\"{}/test-inputs/synth.mp3\", crate_dir);\n\n let exit_code = convert_wav_file_to_mp3(&input_path, &output_path).unwrap();\n\n assert!(exit_code.success());\n\n }\n\n}\n", "file_path": 
"crates/augmented/audio/lame-mp3/src/lib.rs", "rank": 40, "score": 153638.79365797428 }, { "content": "fn get_file_hash(path: &Path) -> Result<String, std::io::Error> {\n\n let file_contents = std::fs::read(path)?;\n\n let digest = md5::compute(file_contents);\n\n Ok(format!(\"{:x}\", digest))\n\n}\n\n\n", "file_path": "crates/apps/plugin-host/plugin-host-gui2/src/services/plugin_file_watch.rs", "rank": 41, "score": 152565.84997937965 }, { "content": "fn get_file_hash(path: &Path) -> Result<String, std::io::Error> {\n\n let file_contents = std::fs::read(path)?;\n\n let digest = md5::compute(file_contents);\n\n Ok(format!(\"{:x}\", digest))\n\n}\n", "file_path": "crates/apps/plugin-host/plugin-host-lib/src/commands/main/file_watch.rs", "rank": 42, "score": 152565.84997937965 }, { "content": "fn build_plist_and_bundle(config_path: &str, output_path: &str) -> PathBuf {\n\n log::info!(\n\n \"Reading package toml file config_path={} output_path={}\",\n\n config_path,\n\n output_path\n\n );\n\n let input_cargo_file = read_to_string(config_path).expect(\"Failed to read toml file\");\n\n let toml_file: CargoToml =\n\n toml::from_str(&input_cargo_file).expect(\"Failed to parse toml file\");\n\n let name = toml_file\n\n .package\n\n .metadata\n\n .bundle\n\n .name\n\n .clone()\n\n .unwrap_or_else(|| toml_file.package.name.to_string());\n\n\n\n log::info!(\"Building package plist\");\n\n let plist_file = build_plist(&name, &toml_file);\n\n\n", "file_path": "crates/augmented/development/bundler/src/main.rs", "rank": 43, "score": 152214.24553341215 }, { "content": "/// Start a processor using CPAL.\n\npub fn standalone_start(\n\n mut app: impl StandaloneProcessor,\n\n handle: Option<&Handle>,\n\n) -> StandaloneHandles {\n\n let _ = wisual_logger::try_init_from_env();\n\n\n\n let (midi_host, mut midi_context) = initialize_midi_host(&mut app, handle);\n\n\n\n // Audio set-up\n\n let host = cpal::default_host();\n\n log::info!(\"Using host: {}\", host.id().name());\n\n let buffer_size = 512;\n\n let sample_rate = 44100;\n\n let accepts_input = app.options().accepts_input;\n\n let input_tuple = if accepts_input {\n\n Some(configure_input_device(&host, buffer_size, sample_rate))\n\n } else {\n\n None\n\n };\n\n let (output_device, output_config) = configure_output_device(host, buffer_size, sample_rate);\n", "file_path": "crates/augmented/application/audio-processor-standalone/src/standalone_cpal/mod.rs", "rank": 44, "score": 152024.0672477629 }, { "content": "/// Test two buffers have equivalent RMS levels\n\npub fn test_level_equivalence(\n\n input_buffer: &[f32],\n\n output_buffer: &[f32],\n\n input_window_size: usize,\n\n output_window_size: usize,\n\n threshold: f32,\n\n) {\n\n let input_chunks = input_buffer.chunks(input_window_size);\n\n let output_chunks = output_buffer.chunks(output_window_size);\n\n assert!(!input_buffer.is_empty());\n\n assert!(!output_buffer.is_empty());\n\n // assert!((input_chunks.len() as i32 - output_chunks.len() as i32).abs() < 2);\n\n for (input_chunk, output_chunk) in input_chunks.zip(output_chunks) {\n\n let input_level = util::rms_level(input_chunk);\n\n let output_level = util::rms_level(output_chunk);\n\n let diff = (input_level - output_level).abs();\n\n\n\n assert!(diff < threshold);\n\n }\n\n}\n", "file_path": "crates/augmented/testing/audio-processor-testing-helpers/src/lib.rs", "rank": 45, "score": 152018.83006545246 }, { "content": "/// A default main function for an [`AudioProcessor`] and [`MidiEventHandler`].\n\n///\n\n/// Run an [`AudioProcessor`] / 
[`MidiEventHandler`] as a stand-alone cpal app and forward MIDI\n\n/// messages received on all inputs to it. Same as `audio_processor_main`, but requires\n\n/// [`MidiEventHandler`] to support MIDI.\n\n///\n\n/// Will internally create [`cpal::Stream`], [`audio_processor_standalone_midi::MidiHost`] and park the current thread. If the thread\n\n/// is unparked the function will exit and the audio/MIDI threads will stop once these structures\n\n/// are dropped.\n\npub fn audio_processor_main_with_midi<\n\n Processor: AudioProcessor<SampleType = f32> + MidiEventHandler + Send + 'static,\n\n>(\n\n audio_processor: Processor,\n\n handle: &Handle,\n\n) {\n\n let app = StandaloneProcessorImpl::new(audio_processor);\n\n standalone_main(app, Some(handle));\n\n}\n\n\n", "file_path": "crates/augmented/application/audio-processor-standalone/src/lib.rs", "rank": 46, "score": 152018.83006545246 }, { "content": "fn build_frontend(frontend_path: &str) {\n\n log::info!(\"Building front-end\");\n\n let path = Path::new(frontend_path);\n\n run_cmd!(cd ${path}; yarn run build).unwrap();\n\n log::info!(\"Finished building front-end\");\n\n}\n\n\n", "file_path": "crates/augmented/development/bundler/src/main.rs", "rank": 47, "score": 150658.50596461585 }, { "content": "/// Build a `String` from a `CFStringRef`.\n\nfn string_from_cfstring(url_cfstring: CFStringRef) -> Option<String> {\n\n unsafe {\n\n let length = CFStringGetLength(url_cfstring) + 1;\n\n let mut output_str = String::with_capacity(length as usize);\n\n for _ in 0..length {\n\n output_str.push(' ');\n\n }\n\n let output_str = CString::new(output_str).ok()?;\n\n let output_str = output_str.into_raw();\n\n let result = CFStringGetCString(url_cfstring, output_str, length, kCFStringEncodingUTF8);\n\n if result == 0 {\n\n return None;\n\n }\n\n let output_str = CString::from_raw(output_str);\n\n let output_str = output_str.to_str().ok()?;\n\n Some(output_str.to_string())\n\n }\n\n}\n\n\n", "file_path": "crates/augmented/gui/macos-bundle-resources/src/macos/mod.rs", "rank": 48, "score": 149848.11866858782 }, { "content": "/// Modify version field in a certain manifest to be bumped to the next pre-release major version\n\nfn bump_own_version_prerelease(name: &str, path: &str) -> Version {\n\n let manifest_path = format!(\"{}/Cargo.toml\", path);\n\n let cargo_manifest_str = std::fs::read_to_string(&manifest_path).unwrap();\n\n let mut cargo_manifest = cargo_manifest_str.parse::<toml_edit::Document>().unwrap();\n\n let version = cargo_manifest[\"package\"][\"version\"].as_str().unwrap();\n\n log::info!(\" => Found name={} version={}\", name, version);\n\n\n\n let sem_version = Version::parse(version).unwrap();\n\n let next_version = prerelease_bump(sem_version);\n\n\n\n cargo_manifest[\"package\"][\"version\"] = value_from_version(&next_version);\n\n let cargo_manifest_str = cargo_manifest.to_string();\n\n std::fs::write(&manifest_path, cargo_manifest_str).unwrap();\n\n\n\n next_version\n\n}\n\n\n", "file_path": "crates/augmented/development/augmented-dev-cli/src/services/release_service/mod.rs", "rank": 49, "score": 149703.3641730593 }, { "content": "fn create_project(project_path: &str) {\n\n let project_index_path = format!(\"{}/index.flexbuf\", project_path);\n\n log::info!(\"Writing project to {}\", project_index_path);\n\n let project = Project {\n\n id: uuid::Uuid::new_v4().to_string(),\n\n title: \"New project\".to_string(),\n\n };\n\n let project = flexbuffers::to_vec(project).unwrap();\n\n std::fs::write(project_index_path, 
project).unwrap();\n\n}\n\n\n", "file_path": "crates/apps/daw/examples/create_example_project.rs", "rank": 50, "score": 148968.82188062172 }, { "content": "pub fn initialize_midi_host(\n\n app: &mut impl StandaloneProcessor,\n\n handle: Option<&Handle>,\n\n) -> (Option<MidiHost>, Option<MidiContext>) {\n\n let midi_host = app.midi().and(handle).map(|handle| {\n\n // MIDI set-up\n\n let mut midi_host = MidiHost::default_with_handle(handle);\n\n midi_host.start_midi().expect(\"Failed to start MIDI host\");\n\n midi_host\n\n });\n\n let midi_context = midi_host.as_ref().map(|midi_host| {\n\n let midi_message_queue = midi_host.messages().clone();\n\n let midi_audio_thread_handler = MidiAudioThreadHandler::default();\n\n MidiContext {\n\n midi_audio_thread_handler,\n\n midi_message_queue,\n\n }\n\n });\n\n (midi_host, midi_context)\n\n}\n\n\n", "file_path": "crates/augmented/application/audio-processor-standalone/src/standalone_cpal/midi/general.rs", "rank": 51, "score": 148954.0454770797 }, { "content": "pub fn run_list_devices() {\n\n let hosts = cpal::available_hosts();\n\n hosts.iter().for_each(|host_id| {\n\n if print_host_devices(host_id).is_err() {\n\n log::error!(\"Error listing devices for host {}\", host_id.name());\n\n }\n\n });\n\n}\n\n\n", "file_path": "crates/apps/plugin-host/plugin-host-lib/src/commands/list_devices.rs", "rank": 52, "score": 148954.0454770797 }, { "content": "/// Start an [`AudioProcessor`] / [`MidiEventHandler`] as a stand-alone cpal app and forward MIDI\n\n/// messages received on all inputs to it.\n\n///\n\n/// Returns the [`cpal::Stream`]s and [`MidiHost`]. The audio-thread will keep running until these are\n\n/// dropped.\n\npub fn audio_processor_start_with_midi<\n\n Processor: AudioProcessor<SampleType = f32> + MidiEventHandler + Send + 'static,\n\n>(\n\n audio_processor: Processor,\n\n handle: &Handle,\n\n) -> StandaloneHandles {\n\n let app = StandaloneProcessorImpl::new(audio_processor);\n\n standalone_start(app, Some(handle))\n\n}\n\n\n", "file_path": "crates/augmented/application/audio-processor-standalone/src/standalone_cpal/mod.rs", "rank": 53, "score": 148954.0454770797 }, { "content": "pub fn initialize_midi_host(\n\n _app: &mut impl StandaloneProcessor,\n\n _handle: Option<&Handle>,\n\n) -> (Option<MidiHost>, Option<MidiContext>) {\n\n (None, None)\n\n}\n\n\n", "file_path": "crates/augmented/application/audio-processor-standalone/src/standalone_cpal/midi/ios.rs", "rank": 54, "score": 148954.0454770797 }, { "content": "pub fn flush_midi_events(\n\n _midi_context: Option<&mut MidiContext>,\n\n _processor: &impl StandaloneProcessor,\n\n) {\n\n}\n", "file_path": "crates/augmented/application/audio-processor-standalone/src/standalone_cpal/midi/ios.rs", "rank": 55, "score": 148954.0454770797 }, { "content": "pub fn flush_midi_events(\n\n midi_context: Option<&mut MidiContext>,\n\n processor: &mut impl StandaloneProcessor,\n\n) {\n\n if let Some(MidiContext {\n\n midi_audio_thread_handler,\n\n midi_message_queue,\n\n }) = midi_context\n\n {\n\n if let Some(midi_handler) = processor.midi() {\n\n midi_audio_thread_handler.collect_midi_messages(midi_message_queue);\n\n midi_handler.process_midi_events(midi_audio_thread_handler.buffer());\n\n midi_audio_thread_handler.clear();\n\n }\n\n }\n\n}\n", "file_path": "crates/augmented/application/audio-processor-standalone/src/standalone_cpal/midi/general.rs", "rank": 56, "score": 148954.0454770797 }, { "content": "pub fn initialize() -> Result<i32> {\n\n let mut handles = STATE.lock().unwrap();\n\n *handles = 
Some(State::new());\n\n Ok(0)\n\n}\n\n\n", "file_path": "crates/apps/metronome/src/api.rs", "rank": 57, "score": 147720.0348400332 }, { "content": "pub fn deinitialize() -> Result<i32> {\n\n let mut handles = STATE.lock().unwrap();\n\n *handles = None;\n\n Ok(0)\n\n}\n\n\n", "file_path": "crates/apps/metronome/src/api.rs", "rank": 58, "score": 147720.0348400332 }, { "content": "pub fn read_file_contents(\n\n audio_file: &mut ProbeResult,\n\n) -> Result<SymphoniaAudioBuffer<f32>, AudioFileError> {\n\n let audio_file_stream = audio_file\n\n .format\n\n .default_track()\n\n .ok_or(AudioFileError::OpenStreamError)?;\n\n let mut decoder = symphonia::default::get_codecs()\n\n .make(&audio_file_stream.codec_params, &Default::default())?;\n\n let audio_file_stream_id = audio_file_stream.id;\n\n\n\n let mut audio_buffer: Vec<SymphoniaAudioBuffer<f32>> = Vec::new();\n\n metrics::time(\"AudioFileProcessor - Reading file packages\", || loop {\n\n match audio_file.format.next_packet().ok() {\n\n None => break,\n\n Some(packet) => {\n\n if packet.track_id() != audio_file_stream_id {\n\n break;\n\n }\n\n\n", "file_path": "crates/augmented/audio/audio-processor-file/src/audio_file_processor/file_io.rs", "rank": 59, "score": 147503.02434695832 }, { "content": "fn with_state0(f: impl FnOnce(&State) -> ()) -> Result<i32> {\n\n with_state(|state| {\n\n f(state);\n\n Ok(0)\n\n })\n\n}\n\n\n", "file_path": "crates/apps/metronome/src/api.rs", "rank": 60, "score": 146110.36544346804 }, { "content": "pub fn black() -> Color {\n\n rgb(19, 19, 19)\n\n}\n\n\n", "file_path": "crates/augmented/gui/audio-processor-iced-design-system/src/colors.rs", "rank": 61, "score": 145924.91259678864 }, { "content": "pub fn green() -> Color {\n\n rgb(73, 190, 84)\n\n}\n\n\n", "file_path": "crates/augmented/gui/audio-processor-iced-design-system/src/colors.rs", "rank": 62, "score": 145924.91259678864 }, { "content": "pub fn yellow() -> Color {\n\n rgb(240, 187, 104)\n\n}\n\n\n\npub struct Colors;\n\n\n\nimpl Colors {\n\n pub fn text() -> Color {\n\n white()\n\n }\n\n\n\n pub fn success() -> Color {\n\n green()\n\n }\n\n\n\n pub fn error() -> Color {\n\n red()\n\n }\n\n\n\n pub fn warning() -> Color {\n", "file_path": "crates/augmented/gui/audio-processor-iced-design-system/src/colors.rs", "rank": 63, "score": 145924.91259678864 }, { "content": "pub fn red() -> Color {\n\n rgb(199, 84, 80)\n\n}\n\n\n", "file_path": "crates/augmented/gui/audio-processor-iced-design-system/src/colors.rs", "rank": 64, "score": 145924.91259678864 }, { "content": "pub fn start_playback() -> Result<i32> {\n\n if let Some(audio_file_processor) =\n\n ProcessorHandleRegistry::current().get::<Shared<AudioFileProcessorHandle>>(\"audio-file\")\n\n {\n\n audio_file_processor.play();\n\n }\n\n Ok(0)\n\n}\n\n\n", "file_path": "crates/apps/daw/src/api.rs", "rank": 65, "score": 145899.8088327994 }, { "content": "pub fn stop_playback() -> Result<i32> {\n\n if let Some(audio_file_processor) =\n\n ProcessorHandleRegistry::current().get::<Shared<AudioFileProcessorHandle>>(\"audio-file\")\n\n {\n\n audio_file_processor.stop();\n\n }\n\n Ok(0)\n\n}\n\n\n", "file_path": "crates/apps/daw/src/api.rs", "rank": 66, "score": 145899.8088327994 }, { "content": "pub fn initialize_audio() -> Result<i32> {\n\n send_host_message(StartMessage);\n\n Ok(0)\n\n}\n\n\n", "file_path": "crates/apps/daw/src/api.rs", "rank": 67, "score": 145899.8088327994 }, { "content": "pub fn initialize_logger() -> Result<i32> {\n\n let _ = wisual_logger::try_init_from_env();\n\n log::info!(\"Rust logger 
initialized\");\n\n Ok(0)\n\n}\n\n\n", "file_path": "crates/apps/daw/src/api.rs", "rank": 68, "score": 145899.8088327994 }, { "content": "pub fn convert_audio_file_sample_rate(\n\n audio_file_contents: &SymphoniaAudioBuffer<f32>,\n\n output_rate: f32,\n\n channel_number: usize,\n\n) -> Vec<f32> {\n\n let audio_file_channel = audio_file_contents.chan(channel_number);\n\n let input_rate = audio_file_contents.spec().rate as f32;\n\n let audio_file_duration = audio_file_channel.len() as f32 / input_rate;\n\n\n\n let output_size = (audio_file_duration * output_rate).ceil() as usize;\n\n let mut channel = Vec::new();\n\n channel.resize(output_size, 0.0);\n\n let audio_file_channel = audio_file_contents.chan(channel_number);\n\n\n\n // Convert sample rate from audio file to in-memory\n\n log::info!(\n\n \"Converting sample_rate channel={} input_rate={} output_rate={}\",\n\n channel_number,\n\n input_rate,\n\n output_rate\n", "file_path": "crates/augmented/audio/audio-processor-file/src/audio_file_processor/file_io.rs", "rank": 69, "score": 144749.8139553435 }, { "content": "pub fn create_transport_runtime() -> Runtime {\n\n log::info!(\"Creating tokio event-loop\");\n\n let runtime = tokio::runtime::Builder::new_multi_thread()\n\n .thread_name(\"ws-transport-tokio\")\n\n .worker_threads(1)\n\n .enable_all()\n\n .build()\n\n .unwrap();\n\n runtime\n\n}\n\n\n\nasync fn run_websockets_accept_loop(\n\n listener: TcpListener,\n\n input_sender: Sender<Message>,\n\n current_id: AtomicCell<u32>,\n\n connections: ConnectionMap,\n\n) {\n\n log::info!(\"Waiting for ws connections\");\n\n while let Ok((stream, _)) = listener.accept().await {\n\n let peer = stream\n", "file_path": "crates/augmented/gui/webview-transport/src/websockets/tokio_websockets.rs", "rank": 70, "score": 144364.40114935924 }, { "content": "pub fn get_audio_info() -> AudioGuiInitialModel {\n\n log::info!(\"get_audio_info called\");\n\n let host_list = AudioIOService::hosts();\n\n let input_list = AudioIOService::input_devices(None).unwrap();\n\n let output_list = AudioIOService::output_devices(None).unwrap();\n\n\n\n AudioGuiInitialModel {\n\n host_ids: host_list,\n\n input_ids: input_list.into_iter().map(|device| device.name).collect(),\n\n output_ids: output_list.into_iter().map(|device| device.name).collect(),\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct AudioGuiModel {\n\n host_id: Option<String>,\n\n input_id: Option<String>,\n\n output_id: Option<String>,\n\n}\n\n\n", "file_path": "crates/spikes/AugmentedNative/src/lib.rs", "rank": 71, "score": 144364.40114935924 }, { "content": "pub fn light_gray() -> Color {\n\n rgb(60, 60, 60)\n\n}\n\n\n", "file_path": "crates/augmented/gui/audio-processor-iced-design-system/src/colors.rs", "rank": 72, "score": 144364.40114935924 }, { "content": "pub fn get_configuration_root_path() -> PathBuf {\n\n let home_path = dirs::home_dir().unwrap_or_else(|| PathBuf::from(\"\"));\n\n home_path.join(\".ruas\")\n\n}\n", "file_path": "crates/apps/tremolo-plugin/src/config/mod.rs", "rank": 73, "score": 144364.40114935924 }, { "content": "pub fn audio_graph_setup() -> Result<i32> {\n\n log::info!(\"Starting audio-graph-manager\");\n\n let actor_system_thread = ActorSystemThread::current();\n\n actor_system_thread.spawn_result(async move {\n\n let manager = AudioGraphManager::from_registry();\n\n manager.send(audio_graph::SetupGraphMessage).await.unwrap();\n\n let audio_thread = AudioThread::from_registry();\n\n audio_thread.send(AudioThreadMessage::Start).await.unwrap().unwrap();\n\n });\n\n 
Ok(0)\n\n}\n\n\n", "file_path": "crates/apps/daw/src/api.rs", "rank": 74, "score": 144150.2761719084 }, { "content": "pub fn get_input_config(\n\n options: &AudioThreadOptions,\n\n input_device: &cpal::Device,\n\n) -> Result<cpal::StreamConfig, AudioThreadError> {\n\n let input_config = input_device.default_input_config()?;\n\n let sample_format = input_config.sample_format();\n\n let mut input_config: cpal::StreamConfig = input_config.into();\n\n input_config.buffer_size = options.buffer_size.clone().into();\n\n input_config.channels = options.num_channels as u16;\n\n input_config.sample_rate = options.sample_rate;\n\n\n\n if sample_format != cpal::SampleFormat::F32 {\n\n return Err(AudioThreadError::UnsupportedSampleFormat);\n\n }\n\n Ok(input_config)\n\n}\n", "file_path": "crates/apps/plugin-host/plugin-host-lib/src/audio_io/audio_thread/cpal_option_handling.rs", "rank": 75, "score": 143442.62163827493 }, { "content": "pub fn get_output_config(\n\n options: &AudioThreadOptions,\n\n output_device: &cpal::Device,\n\n) -> Result<cpal::StreamConfig, AudioThreadError> {\n\n let output_config = output_device.default_output_config()?;\n\n let sample_format = output_config.sample_format();\n\n let mut output_config: cpal::StreamConfig = output_config.into();\n\n output_config.buffer_size = options.buffer_size.clone().into();\n\n output_config.channels = options.num_channels as u16;\n\n output_config.sample_rate = options.sample_rate;\n\n\n\n if sample_format != cpal::SampleFormat::F32 {\n\n return Err(AudioThreadError::UnsupportedSampleFormat);\n\n }\n\n Ok(output_config)\n\n}\n\n\n", "file_path": "crates/apps/plugin-host/plugin-host-lib/src/audio_io/audio_thread/cpal_option_handling.rs", "rank": 76, "score": 143442.62163827493 }, { "content": "/// Generates a frequency response plot for a given audio processor\n\npub fn generate_frequency_response_plot<Processor>(\n\n filename: &str,\n\n plot_name: &str,\n\n audio_processor: &mut Processor,\n\n) where\n\n Processor: AudioProcessor<SampleType = f32>,\n\n{\n\n let mut settings = AudioProcessorSettings::default();\n\n settings.sample_rate = 22050.0;\n\n settings.input_channels = 1;\n\n settings.output_channels = 1;\n\n audio_processor.prepare(settings);\n\n let sample_rate = settings.sample_rate;\n\n\n\n let frequencies = get_test_frequencies();\n\n let responses = frequencies\n\n .iter()\n\n .map(|frequency| test_frequency_response(sample_rate, *frequency, audio_processor))\n\n .collect();\n\n let chart_model = build_frequency_response_chart_model(responses);\n", "file_path": "crates/augmented/testing/audio-processor-testing-helpers/src/charts.rs", "rank": 77, "score": 142860.12800841586 }, { "content": "pub fn get_configuration_root_path() -> PathBuf {\n\n let home_path = dirs::home_dir().unwrap_or_else(|| PathBuf::from(\"\"));\n\n home_path.join(\".ruas\")\n\n}\n\n\n", "file_path": "crates/augmented/ops/audio-plugin-logger/src/lib.rs", "rank": 78, "score": 142860.12800841586 }, { "content": "/// Return a handle to a global GC instance\n\npub fn handle() -> &'static Handle {\n\n GARBAGE_COLLECTOR.handle()\n\n}\n\n\n", "file_path": "crates/augmented/audio/audio-garbage-collector/src/lib.rs", "rank": 79, "score": 142467.3969596483 }, { "content": "pub fn get_cpal_input_device(\n\n host: &cpal::Host,\n\n input_device_id: &AudioDeviceId,\n\n) -> Result<cpal::Device, AudioThreadError> {\n\n let maybe_device = match &input_device_id {\n\n AudioDeviceId::Default => host.default_input_device(),\n\n AudioDeviceId::Id(id) => 
find_cpal_input_device_by_name(host, id),\n\n };\n\n\n\n match maybe_device {\n\n Some(device) => Ok(device),\n\n None => Err(AudioThreadError::OutputDeviceNotFoundError),\n\n }\n\n}\n\n\n", "file_path": "crates/apps/plugin-host/plugin-host-lib/src/audio_io/audio_thread/cpal_option_handling.rs", "rank": 80, "score": 142178.6448702169 }, { "content": "pub fn get_cpal_output_device(\n\n host: &cpal::Host,\n\n output_device_id: &AudioDeviceId,\n\n) -> Result<cpal::Device, AudioThreadError> {\n\n let maybe_device = match &output_device_id {\n\n AudioDeviceId::Default => host.default_output_device(),\n\n AudioDeviceId::Id(id) => find_cpal_output_device_by_name(host, id),\n\n };\n\n\n\n match maybe_device {\n\n Some(device) => Ok(device),\n\n None => Err(AudioThreadError::OutputDeviceNotFoundError),\n\n }\n\n}\n\n\n", "file_path": "crates/apps/plugin-host/plugin-host-lib/src/audio_io/audio_thread/cpal_option_handling.rs", "rank": 81, "score": 142178.6448702169 }, { "content": "#[cfg(not(target_os = \"macos\"))]\n\npub fn close_window(\n\n raw_window_handle: raw_window_handle::RawWindowHandle,\n\n) -> Option<iced::Rectangle> {\n\n todo!(\"Not implemented\")\n\n}\n\n\n", "file_path": "crates/apps/plugin-host/plugin-host-gui2/src/ui/main_content_view/plugin_editor_window/view/mod.rs", "rank": 82, "score": 142178.6448702169 }, { "content": "#[cfg(not(target_os = \"macos\"))]\n\npub fn open_plugin_window(\n\n editor: &mut Box<dyn Editor>,\n\n size: (i32, i32),\n\n position: Option<iced::Point>,\n\n) -> PluginWindowHandle {\n\n todo!(\"Not implemented\")\n\n}\n\n\n", "file_path": "crates/apps/plugin-host/plugin-host-gui2/src/ui/main_content_view/plugin_editor_window/view/mod.rs", "rank": 83, "score": 140955.7754518006 }, { "content": "/// Return a reference to a global GC instance\n\npub fn current() -> &'static GarbageCollector {\n\n &GARBAGE_COLLECTOR\n\n}\n\n\n", "file_path": "crates/augmented/audio/audio-garbage-collector/src/lib.rs", "rank": 84, "score": 140847.4333671265 }, { "content": "fn make_ui() -> impl Widget<AudioData> {\n\n AudioWave {}.expand().padding(10.0).center()\n\n}\n", "file_path": "crates/spikes/example-druid-audio-viz/src/main.rs", "rank": 85, "score": 140749.62727471287 }, { "content": "pub fn open_plugin_window(\n\n editor: &mut Box<dyn Editor>,\n\n size: (i32, i32),\n\n position: Option<Point>,\n\n) -> PluginWindowHandle {\n\n let _pool = unsafe { NSAutoreleasePool::new(nil) };\n\n let (width, height) = size;\n\n let rect = NSRect::new(\n\n NSPoint::new(0.0, 0.0),\n\n NSSize::new(width as f64, height as f64),\n\n );\n\n let ns_window = unsafe {\n\n let ns_window = NSWindow::alloc(nil).initWithContentRect_styleMask_backing_defer_(\n\n rect,\n\n NSWindowStyleMask::NSTitledWindowMask,\n\n NSBackingStoreBuffered,\n\n NO,\n\n );\n\n // .autorelease();\n\n ns_window.center();\n", "file_path": "crates/apps/plugin-host/plugin-host-gui2/src/ui/main_content_view/plugin_editor_window/view/macos/mod.rs", "rank": 86, "score": 139772.04011621408 }, { "content": "pub fn set_audio_info(model: AudioGuiModel) {\n\n log::info!(\"set_audio_info called with {:?}\", model);\n\n}\n\n\n\nuniffi_macros::include_scaffolding!(\"augmented\");\n", "file_path": "crates/spikes/AugmentedNative/src/lib.rs", "rank": 87, "score": 139286.92191969708 }, { "content": "///! 
Try to set-up the logger and return a result\n\npub fn try_init_from_env() -> Result<(), SetLoggerError> {\n\n env_logger::Builder::from_env(env_logger::Env::default().default_filter_or(\"info\"))\n\n .format(LogFormatter::format)\n\n .try_init()\n\n}\n\n\n", "file_path": "crates/augmented/ops/wisual-logger/src/lib.rs", "rank": 88, "score": 137782.64877875373 }, { "content": "pub fn saw_generator(phase: f32) -> f32 {\n\n (1.0 - (phase % 1.0)) * 2.0 - 1.0\n\n}\n", "file_path": "crates/augmented/audio/oscillator/src/generators/mod.rs", "rank": 89, "score": 136551.66229782221 }, { "content": "pub fn square_generator(phase: f32) -> f32 {\n\n static LIMIT: f32 = 0.5;\n\n\n\n if (phase % 1.0) < LIMIT {\n\n -1.0\n\n } else {\n\n 1.0\n\n }\n\n}\n\n\n", "file_path": "crates/augmented/audio/oscillator/src/generators/mod.rs", "rank": 90, "score": 136551.66229782221 }, { "content": "pub fn sine_generator(phase: f32) -> f32 {\n\n (phase * TWO_PI).sin()\n\n}\n\n\n", "file_path": "crates/augmented/audio/oscillator/src/generators/mod.rs", "rank": 91, "score": 136551.66229782221 }, { "content": "pub fn audio_graph_get_system_indexes() -> Result<Vec<u32>> {\n\n ActorSystemThread::current().spawn_result(async move {\n\n let manager = AudioGraphManager::from_registry();\n\n let (input_index, output_index) = manager\n\n .send(audio_graph::GetSystemIndexesMessage)\n\n .await\n\n .unwrap()\n\n .unwrap();\n\n Ok(vec![\n\n input_index.index() as u32,\n\n output_index.index() as u32,\n\n ])\n\n })\n\n}\n\n\n", "file_path": "crates/apps/daw/src/api.rs", "rank": 92, "score": 134991.15085039282 }, { "content": "fn move_frontend_to_output(frontend_path: &str, output_path: &Path) {\n\n let frontend_build_path = Path::new(frontend_path).join(\"build\");\n\n log::info!(\n\n \"Copying front-end into bundle path frontend_build_path={} output_path={}\",\n\n frontend_build_path.to_str().unwrap(),\n\n output_path.to_str().unwrap()\n\n );\n\n let resources_directory = output_path.join(\"Contents/Resources\");\n\n create_dir_all(resources_directory.clone()).expect(\"Failed to create resources dir\");\n\n run_cmd!(cp -r ${frontend_build_path} ${resources_directory}/frontend)\n\n .expect(\"Failed to copy front-end assets\");\n\n}\n\n\n", "file_path": "crates/augmented/development/bundler/src/main.rs", "rank": 93, "score": 134726.60550521087 }, { "content": "pub fn set_tempo(value: f32) -> Result<i32> {\n\n with_state0(|state| {\n\n state.processor_handle.tempo.set(value);\n\n })\n\n}\n\n\n", "file_path": "crates/apps/metronome/src/api.rs", "rank": 94, "score": 134489.94308333733 }, { "content": "pub fn set_volume(value: f32) -> Result<i32> {\n\n with_state0(|state| {\n\n state.processor_handle.volume.set(value);\n\n })\n\n}\n\n\n", "file_path": "crates/apps/metronome/src/api.rs", "rank": 95, "score": 134489.94308333733 }, { "content": "fn sdk_path() -> Result<String, std::io::Error> {\n\n use std::process::Command;\n\n\n\n let output = Command::new(\"xcrun\")\n\n .args(&[\"--show-sdk-path\"])\n\n .output()?\n\n .stdout;\n\n let prefix_str = std::str::from_utf8(&output).expect(\"invalid output from `xcrun`\");\n\n Ok(prefix_str.trim_end().to_string())\n\n}\n\n\n", "file_path": "crates/augmented/audio/avfaudio-sys/build.rs", "rank": 96, "score": 134067.69253265747 }, { "content": "fn with_state<T>(f: impl FnOnce(&State) -> Result<T>) -> Result<T> {\n\n let handles = STATE.lock().unwrap();\n\n if let Some(state) = &*handles {\n\n f(state)\n\n } else {\n\n Err(anyhow::Error::msg(\n\n \"Failed to lock state. 
`initialize` needs to be called.\",\n\n ))\n\n }\n\n}\n\n\n", "file_path": "crates/apps/metronome/src/api.rs", "rank": 97, "score": 134065.3159676859 }, { "content": "/// Entry-point for the run plug-in command. Mostly kicks-off other work:\n\n///\n\n/// * Parses options\n\n/// * Creates the host, audio and other threads\n\n/// * Loads the audio-file (blocking before starting the plug-in)\n\n/// * Loads the audio-plugin\n\n/// * Creates a window for the plug-in & blocks on it (if specified)\n\n/// * Otherwise parks the current thread forever\n\npub fn run_test(run_options: RunOptions) {\n\n if run_options.output_audio().is_some() {\n\n run_offline_rendering(run_options);\n\n return;\n\n }\n\n\n\n let actor_system_thread = ActorSystemThread::current();\n\n\n\n let (audio_settings, audio_thread_options) = get_audio_options(&run_options);\n\n let mut host = actor_system_thread.spawn_result(async move {\n\n TestPluginHost::new(audio_settings, audio_thread_options, false)\n\n });\n\n host.set_mono_input(run_options.use_mono_input());\n\n run_load_audio_file(&run_options, &mut host);\n\n run_initialize_plugin(&run_options, &mut host);\n\n\n\n let instance = host.plugin_instance();\n\n let host = actor_system_thread.spawn_result(async move { host.start() });\n\n // This needs to be kept around otherwise the watcher will stop when dropped\n\n let _maybe_watcher = run_initialize_file_watch_thread(&run_options, host.clone());\n", "file_path": "crates/apps/plugin-host/plugin-host-lib/src/commands/main/mod.rs", "rank": 98, "score": 133578.4172570175 }, { "content": "pub fn setup_low_pass<Sample: Float + FloatConst>(\n\n coefficients: &mut BiquadCoefficients<Sample>,\n\n sample_rate: Sample,\n\n cutoff_frequency: Sample,\n\n q: Sample,\n\n) {\n\n let one = Sample::from(1.0).unwrap();\n\n let two = Sample::from(2.0).unwrap();\n\n\n\n let w0: Sample = two * Sample::PI() * cutoff_frequency / sample_rate;\n\n let cs: Sample = w0.cos();\n\n let sn: Sample = w0.sin();\n\n let al: Sample = sn / (two * q);\n\n let b0: Sample = (one - cs) / two;\n\n let b1: Sample = one - cs;\n\n let b2: Sample = (one - cs) / two;\n\n let a0: Sample = one + al;\n\n let a1: Sample = -two * cs;\n\n let a2: Sample = one - al;\n\n coefficients.set_coefficients(a0, a1, a2, b0, b1, b2);\n\n}\n\n\n", "file_path": "crates/augmented/dsp/dsp-filters/src/rbj.rs", "rank": 99, "score": 133486.87770944947 } ]
Rust
kaylee/src/instructions/math.rs
electricjones/kaylee
6cdc7e67ae8a3d9a989d8d18def496c9ceecab40
use std::fmt::Error; use kaylee_derive::Instruction; use crate::instructions; use crate::instructions::{display_instruction_with_values, Executable, Instruction, InstructionDocumentation, InstructionSignature, OperandType, OperandValues}; use crate::vm::{ExecutionResult, Kaylee, RegisterValue}; #[derive(Instruction)] #[opcode = 70] #[signature = "ADD $D $L $R"] pub struct Add { operand_values: OperandValues, } impl Executable for Add { fn execute(&self, vm: &mut Kaylee) -> Result<ExecutionResult, Error> { let callback = |left: RegisterValue, right: RegisterValue| { (left + right) as RegisterValue }; let result = instructions::basic_register_execution(self, vm, callback); Ok(ExecutionResult::Value(result)) } } #[derive(Instruction)] #[opcode = 71] #[signature = "SUB $D $L $R"] pub struct Subtract { operand_values: OperandValues, } impl Executable for Subtract { fn execute(&self, vm: &mut Kaylee) -> Result<ExecutionResult, Error> { let callback = |left: RegisterValue, right: RegisterValue| { (left - right) as RegisterValue }; let result = instructions::basic_register_execution(self, vm, callback); Ok(ExecutionResult::Value(result)) } } #[derive(Instruction)] #[opcode = 72] #[signature = "MUL $D $L $R"] pub struct Multiply { operand_values: OperandValues, } impl Executable for Multiply { fn execute(&self, vm: &mut Kaylee) -> Result<ExecutionResult, Error> { let callback = |left: RegisterValue, right: RegisterValue| { (left * right) as RegisterValue }; let result = instructions::basic_register_execution(self, vm, callback); Ok(ExecutionResult::Value(result)) } } #[derive(Instruction)] #[opcode = 73] #[signature = "DIV $D $L $R"] pub struct Divide { operand_values: OperandValues, } impl Executable for Divide { fn execute(&self, vm: &mut Kaylee) -> Result<ExecutionResult, Error> { let destination = self.operand_values[0].as_register_id(); let left = self.get_register_value_for_operand(1, vm).unwrap(); let right = self.get_register_value_for_operand(2, vm).unwrap(); let value = left / right; let remainder = (left % right) as u32; vm.set_register(destination, value).unwrap(); vm.set_remainder(remainder); Ok(ExecutionResult::Value(value)) } } #[cfg(test)] mod tests { use crate::instructions::math::{Add, Divide, Multiply, Subtract}; use crate::program::Program; use crate::vm::Kaylee; #[test] fn test_add() { let program = Program::from(vec![ Add::OPCODE, 29, 0, 2, Add::OPCODE, 30, 1, 3, Add::OPCODE, 31, 29, 30, ]); let mut vm = Kaylee::new(); vm.set_register(0, 12).unwrap(); vm.set_register(1, 10).unwrap(); vm.set_register(2, 500).unwrap(); vm.set_register(3, 7).unwrap(); vm.run(program); assert_eq!(512, vm.register(29).unwrap()); assert_eq!(17, vm.register(30).unwrap()); assert_eq!(529, vm.register(31).unwrap()); } #[test] fn test_subtract() { let program = Program::from(vec![ Subtract::OPCODE, 29, 0, 2, Subtract::OPCODE, 30, 1, 3, Subtract::OPCODE, 31, 29, 30, ]); let mut vm = Kaylee::new(); vm.set_register(0, 222).unwrap(); vm.set_register(1, 14).unwrap(); vm.set_register(2, 22).unwrap(); vm.set_register(3, 3).unwrap(); vm.run(program); assert_eq!(200, vm.register(29).unwrap()); assert_eq!(11, vm.register(30).unwrap()); assert_eq!(189, vm.register(31).unwrap()); } #[test] fn test_multiply() { let program = Program::from(vec![ Multiply::OPCODE, 29, 0, 2, Multiply::OPCODE, 30, 1, 3, Multiply::OPCODE, 31, 29, 30, ]); let mut vm = Kaylee::new(); vm.set_register(0, 2).unwrap(); vm.set_register(1, 4).unwrap(); vm.set_register(2, 6).unwrap(); vm.set_register(3, 8).unwrap(); vm.run(program); 
assert_eq!(12, vm.register(29).unwrap()); assert_eq!(32, vm.register(30).unwrap()); assert_eq!(384, vm.register(31).unwrap()); } #[test] fn test_divide_no_remainder() { let program = Program::from(vec![ Divide::OPCODE, 31, 0, 1, ]); let mut vm = Kaylee::new(); vm.set_register(0, 16).unwrap(); vm.set_register(1, 2).unwrap(); vm.run(program); assert_eq!(8, vm.register(31).unwrap()); assert_eq!(0, vm.remainder()); } #[test] fn test_divide_with_remainder() { let program = Program::from(vec![ Divide::OPCODE, 31, 0, 1, ]); let mut vm = Kaylee::new(); vm.set_register(0, 13).unwrap(); vm.set_register(1, 5).unwrap(); vm.run(program); assert_eq!(2, vm.register(31).unwrap()); assert_eq!(3, vm.remainder()); } #[test] fn test_math() { let program = Program::from(vec![ Add::OPCODE, 29, 0, 2, Add::OPCODE, 30, 29, 2, Subtract::OPCODE, 30, 29, 1, Add::OPCODE, 28, 3, 4, Multiply::OPCODE, 31, 3, 2, Divide::OPCODE, 3, 29, 30, Subtract::OPCODE, 4, 2, 30, Add::OPCODE, 0, 3, 28, Multiply::OPCODE, 1, 3, 4, Divide::OPCODE, 31, 28, 30, ]); let mut vm = Kaylee::new(); vm.set_register(0, 2).unwrap(); vm.set_register(1, 4).unwrap(); vm.set_register(2, 6).unwrap(); vm.set_register(3, 8).unwrap(); vm.set_register(4, 9).unwrap(); vm.run(program); assert_eq!(19, vm.register(0).unwrap()); assert_eq!(4, vm.register(1).unwrap()); assert_eq!(6, vm.register(2).unwrap()); assert_eq!(2, vm.register(3).unwrap()); assert_eq!(2, vm.register(4).unwrap()); assert_eq!(17, vm.register(28).unwrap()); assert_eq!(8, vm.register(29).unwrap()); assert_eq!(4, vm.register(30).unwrap()); assert_eq!(4, vm.register(31).unwrap()); assert_eq!(1, vm.remainder()); } }
use std::fmt::Error; use kaylee_derive::Instruction; use crate::instructions; use crate::instructions::{display_instruction_with_values, Executable, Instruction, InstructionDocumentation, InstructionSignature, OperandType, OperandValues}; use crate::vm::{ExecutionResult, Kaylee, RegisterValue}; #[derive(Instruction)] #[opcode = 70] #[signature = "ADD $D $L $R"] pub struct Add { operand_values: OperandValues, } impl Executable for Add { fn execute(&self, vm: &mut Kaylee) -> Result<ExecutionResult, Error> { let callback = |left: RegisterValue, right: RegisterValue| { (left + right) as RegisterValue }; let result = instructions::basic_register_execution(self, vm, callback); Ok(ExecutionResult::Value(result)) } } #[derive(Instruction)] #[opcode = 71] #[signature = "SUB $D $L $R"] pub struct Subtract { operand_values: OperandValues, } impl Executable for Subtract { fn execute(&self, vm: &mut Kaylee) -> Result<ExecutionResult, Error> { let callback = |left: RegisterValue, right: RegisterValue| { (left - right) as RegisterValue }; let result = instructions::basic_register_execution(self, vm, callback); Ok(ExecutionResult::Value(result)) } } #[derive(Instruction)] #[opcode = 72] #[signature = "MUL $D $L $R"] pub struct Multiply { operand_values: OperandValues, } impl Executable for Multiply { fn execute(&self, vm: &mut Kaylee) -> Result<ExecutionResult, Error> { let callback = |left: RegisterValue, right: RegisterValue| { (left * right) as RegisterValue }; let result = instructions::basic_register_execution(self, vm, callback); Ok(ExecutionResult::Value(result)) } } #[derive(Instruction)] #[opcode = 73] #[signature = "DIV $D $L $R"] pub struct Divide { operand_values: OperandValues, } impl Executable for Divide { fn execute(&self, vm: &mut Kaylee) -> Result<ExecutionResult, Error> { let destination = self.operand_values[0].as_register_id(); let left = self.get_register_value_for_operand(1, vm).unwrap(); let right = self.get_register_value_for_operand(2, vm).unwrap(); let value = left / right; let remainder = (left % right) as u32; vm.set_register(destination, value).unwrap(); vm.set_remainder(remainder); Ok(ExecutionResult::Value(value)) } } #[cfg(test)] mod tests { use crate::instructions::math::{Add, Divide, Multiply, Subtract}; use crate::program::Program; use crate::vm::Kaylee; #[test] fn test_add() { let program = Program::from(vec![ Add::OPCODE, 29, 0, 2, Add::OPCODE, 30, 1, 3, Add::OPCODE, 31, 29, 30, ]); let mut vm = Kaylee::new(); vm.set_register(0, 12).unwrap(); vm.set_register(1, 10).unwrap(); vm.set_register(2, 500).unwrap(); vm.set_register(3, 7).unwrap(); vm.run(program); assert_eq!(512, vm.register(29).unwrap()); assert_eq!(17, vm.register(30).unwrap()); assert_eq!(529, vm.register(31).unwrap()); } #[test] fn test_subtract() { let program = Program::from(vec![ Subtract::OPCODE, 29, 0, 2, Subtract::OPCODE, 30, 1, 3, Subtract::OPCODE, 31, 29, 30, ]); let mut vm = Kaylee::new(); vm.set_register(0, 222).unwrap(); vm.set_register(1, 14).unwrap(); vm.set_register(2, 22).unwrap(); vm.set_register(3, 3).unwrap(); vm.run(program); assert_eq!(200, vm.register(29).unwrap()); assert_eq!(11, vm.register(30).unwrap()); assert_eq!(189, vm.register(31).unwrap()); } #[test] fn test_multiply() { let program = Program::from(vec![ Multiply::OPCODE, 29, 0, 2, Multiply::OPCODE, 30, 1, 3, Multiply::OPCODE, 31, 29, 30, ]); let mut vm = Kaylee::new(); vm.set_register(0, 2).unwrap(); vm.set_register(1, 4).unwrap(); vm.set_register(2, 6).unwrap(); vm.set_register(3, 8).unwrap(); vm.run(program); 
assert_eq!(12, vm.register(29).unwrap()); assert_eq!(32, vm.register(30).unwrap()); assert_eq!(384, vm.register(31).unwrap()); } #[test] fn test_divide_no_remainder() { let program = Program::from(vec![ Divide::OPCODE, 31, 0, 1, ]); let mut vm = Kaylee::new(); vm.set_register(0, 16).unwrap(); vm.set_register(1, 2).unwrap(); vm.run(program); assert_eq!(8, vm.register(31).unwrap()); assert_eq!(0, vm.remainder()); } #[test] fn test_divide_with_remainder() { let program = Program::from(vec![ Divide::OPCODE, 31, 0, 1, ]); let mut vm = Kaylee::new(); vm.set_register(0, 13).unwrap(); vm.set_register(1, 5).unwrap(); vm.run(program); assert_eq!(2, vm.register(31).unwrap()); assert_eq!(3, vm.remainder()); } #[test] fn test_math() { let program = Program::from(ve
}
c![ Add::OPCODE, 29, 0, 2, Add::OPCODE, 30, 29, 2, Subtract::OPCODE, 30, 29, 1, Add::OPCODE, 28, 3, 4, Multiply::OPCODE, 31, 3, 2, Divide::OPCODE, 3, 29, 30, Subtract::OPCODE, 4, 2, 30, Add::OPCODE, 0, 3, 28, Multiply::OPCODE, 1, 3, 4, Divide::OPCODE, 31, 28, 30, ]); let mut vm = Kaylee::new(); vm.set_register(0, 2).unwrap(); vm.set_register(1, 4).unwrap(); vm.set_register(2, 6).unwrap(); vm.set_register(3, 8).unwrap(); vm.set_register(4, 9).unwrap(); vm.run(program); assert_eq!(19, vm.register(0).unwrap()); assert_eq!(4, vm.register(1).unwrap()); assert_eq!(6, vm.register(2).unwrap()); assert_eq!(2, vm.register(3).unwrap()); assert_eq!(2, vm.register(4).unwrap()); assert_eq!(17, vm.register(28).unwrap()); assert_eq!(8, vm.register(29).unwrap()); assert_eq!(4, vm.register(30).unwrap()); assert_eq!(4, vm.register(31).unwrap()); assert_eq!(1, vm.remainder()); }
function_block-function_prefixed
[ { "content": "/// Decodes the operand values from the Instruction Stream\n\npub fn consume_and_parse_values(signature: InstructionSignature, instructions: &Program, program_counter: &mut usize) -> Result<OperandValues, InstructionDecodeError> {\n\n let mut operand_values: OperandValues = [OperandValue::None, OperandValue::None, OperandValue::None];\n\n\n\n let original_pc = *program_counter;\n\n for (index, bytes) in signature.operands.iter().enumerate() {\n\n match bytes {\n\n OperandType::None => {\n\n operand_values[index] = OperandValue::None;\n\n }\n\n OperandType::RegisterId | OperandType::ConstantByte => {\n\n operand_values[index] = OperandValue::Byte(instructions[*program_counter]);\n\n *program_counter += 1;\n\n }\n\n OperandType::ConstantHalfWord => {\n\n operand_values[index] = OperandValue::HalfWord(((instructions[*program_counter] as HalfWord) << 8) | instructions[*program_counter + 1] as u16);\n\n *program_counter += 2;\n\n }\n\n OperandType::ConstantWord => {\n\n // @todo: This should really be u24\n\n let a = (instructions[*program_counter] as Word) << 16;\n", "file_path": "kaylee/src/instructions.rs", "rank": 0, "score": 229354.2923247929 }, { "content": "/// Helper for a common instruction execution. Executes callback with the values from two operands, setting a destination register\n\nfn basic_register_execution<I: Instruction, F: Fn(RegisterValue, RegisterValue) -> RegisterValue>(instruction: &I, vm: &mut Kaylee, callback: F) -> RegisterValue {\n\n let destination = instruction.operand_values()[0].as_register_id();\n\n\n\n let left = instruction.get_register_value_for_operand(1, vm).unwrap();\n\n let right = instruction.get_register_value_for_operand(2, vm).unwrap();\n\n\n\n let result = callback(left, right);\n\n\n\n vm.set_register(destination, result as RegisterValue).unwrap();\n\n result\n\n}\n\n\n\n/// Potential types of Operands\n\npub enum OperandType {\n\n None,\n\n RegisterId,\n\n ConstantByte,\n\n ConstantHalfWord,\n\n ConstantWord,\n\n}\n\n\n", "file_path": "kaylee/src/instructions.rs", "rank": 1, "score": 181678.84145716747 }, { "content": "/// Decode the next instruction in the Program stream\n\npub fn decode_next_instruction(instructions: &Program, program_counter: &mut usize) -> Option<Result<Box<dyn Instruction>, InstructionDecodeError>> {\n\n // @todo: I am not super happy with this decoding scheme. 
It should probably grab the entire slice (4 bytes) and handle them together\n\n if *program_counter >= instructions.len() {\n\n return None;\n\n }\n\n\n\n let opcode: Byte = instructions[*program_counter];\n\n *program_counter += 1;\n\n\n\n Some(match opcode {\n\n Halt::OPCODE => build::<Halt>(instructions, program_counter),\n\n Load::OPCODE => build::<Load>(instructions, program_counter),\n\n\n\n Add::OPCODE => build::<Add>(instructions, program_counter),\n\n Subtract::OPCODE => build::<Subtract>(instructions, program_counter),\n\n Multiply::OPCODE => build::<Multiply>(instructions, program_counter),\n\n Divide::OPCODE => build::<Divide>(instructions, program_counter),\n\n\n\n Jump::OPCODE => build::<Jump>(instructions, program_counter),\n\n JumpForward::OPCODE => build::<JumpForward>(instructions, program_counter),\n", "file_path": "kaylee/src/instructions.rs", "rank": 2, "score": 181649.60724646287 }, { "content": "/// Build the Instruction TraitObject from the program stream\n\npub fn build<T: 'static + Instruction>(instructions: &Program, program_counter: &mut usize) -> Result<Box<dyn Instruction>, InstructionDecodeError> {\n\n Ok(\n\n Box::new(\n\n T::new(\n\n consume_and_parse_values(\n\n T::signature(),\n\n instructions,\n\n program_counter,\n\n )?\n\n )\n\n )\n\n )\n\n}\n\n\n", "file_path": "kaylee/src/instructions.rs", "rank": 3, "score": 178520.44871765937 }, { "content": "/// Prints an instruction in an Instruction Stream in a human readable format\n\npub fn display_instruction_with_values<T: 'static + Instruction>(instruction: &T) -> String {\n\n let mut output = String::new();\n\n output.push_str(T::signature().identifier.as_str());\n\n\n\n for (index, operand_type) in T::signature().operands.iter().enumerate() {\n\n match operand_type {\n\n OperandType::None => {}\n\n OperandType::RegisterId => {\n\n let value = instruction.operand_value(index).unwrap().as_constant_value();\n\n output.push_str(format!(\" ${value}\").as_str())\n\n }\n\n _ => {\n\n let value = instruction.operand_value(index).unwrap().as_constant_value();\n\n output.push_str(format!(\" #{value}\").as_str())\n\n }\n\n }\n\n }\n\n\n\n output\n\n}\n\n\n", "file_path": "kaylee/src/instructions.rs", "rank": 4, "score": 110821.61275452662 }, { "content": "pub fn parse_hex(hex: &str) -> Result<Vec<u8>, ParseIntError> {\n\n let split = hex.split(\" \").collect::<Vec<&str>>();\n\n let mut results: Vec<u8> = vec![];\n\n for hex_string in split {\n\n let byte = u8::from_str_radix(&hex_string, 16);\n\n match byte {\n\n Ok(result) => { results.push(result) }\n\n Err(e) => { return Err(e) }\n\n }\n\n }\n\n\n\n Ok(results)\n\n}", "file_path": "kaylee/src/shared.rs", "rank": 5, "score": 110369.56948931821 }, { "content": "/// Parse any source string into a Parsed vector of strings\n\n/// Does not actually parse to token enumerations. 
It simply splits a source into substrings.\n\n/// The assembler takes these split strings and assembles them into true bytecode\n\npub fn parse_asm(s: &str) -> IResult<&str, Parsed, (&str, ErrorKind)> {\n\n // separated_list0(many0(newline), line)(s)\n\n separated_list0(newline, line)(s)\n\n}\n\n\n", "file_path": "kaylee/src/asm/parser.rs", "rank": 6, "score": 98020.86935465212 }, { "content": "/// Parse a single line into a vector of tokens\n\npub fn line(s: &str) -> IResult<&str, Vec<&str>, (&str, ErrorKind)> {\n\n delimited(multispace0, instruction_parser, space0)(s)\n\n}\n\n\n", "file_path": "kaylee/src/asm/parser.rs", "rank": 7, "score": 97269.14614025429 }, { "content": "/// Defines the Instruction itself\n\n/// This is built automatically with the derive(Instruction) macro\n\npub trait Instruction: Executable {\n\n // Also requires a `pub const OPCODE: u8`\n\n\n\n /// Create a new instruction with Concrete Values\n\n fn new(operand_values: OperandValues) -> Self where Self: Sized;\n\n\n\n /// Return the Instruction Signature\n\n fn signature() -> InstructionSignature where Self: Sized;\n\n\n\n /// Return the Instruction Documentation\n\n fn documentation() -> InstructionDocumentation where Self: Sized;\n\n\n\n /// Return a human-readable form of the instruction\n\n fn display(&self) -> String;\n\n\n\n /// Return the concrete OperandValues\n\n fn operand_values(&self) -> &OperandValues;\n\n\n\n /// Return a specific, concrete OperandValue\n\n fn operand_value(&self, index: usize) -> Result<&OperandValue, String> {\n", "file_path": "kaylee/src/instructions.rs", "rank": 8, "score": 94553.75575940356 }, { "content": "/// Allows an Instruction to be executable\n\npub trait Executable {\n\n // @todo: The only thing (other than the OPCODE constant) that is actually required w/o macro\n\n fn execute(&self, vm: &mut Kaylee) -> Result<ExecutionResult, Error>;\n\n}\n\n\n", "file_path": "kaylee/src/instructions.rs", "rank": 9, "score": 91856.91661469336 }, { "content": "/// Parse a single instruction into an operation and operands\n\nfn instruction_parser(s: &str) -> IResult<&str, Vec<&str>, (&str, ErrorKind)> {\n\n separated_list1(space1, alt((operation_keyword, operand_parser)))(s)\n\n}\n\n\n", "file_path": "kaylee/src/asm/parser.rs", "rank": 10, "score": 85907.16643691741 }, { "content": "#[proc_macro_derive(Instruction, attributes(opcode, signature))]\n\npub fn derive_instruction(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as DeriveInput);\n\n let struct_name = input.ident;\n\n\n\n let mut opcode: u8 = 0;\n\n let mut help = String::new();\n\n let mut identifier = String::new();\n\n let mut operands = [\n\n quote! { OperandType::None },\n\n quote! { OperandType::None },\n\n quote! 
{ OperandType::None },\n\n ];\n\n\n\n let attributes = input.attrs;\n\n for attribute in attributes {\n\n let meta = attribute.parse_meta().unwrap();\n\n if let Meta::NameValue(value) = meta {\n\n if let Some(ident) = value.path.get_ident() {\n\n match ident.to_string().as_str() {\n\n \"opcode\" => {\n", "file_path": "kaylee_derive/src/lib.rs", "rank": 11, "score": 81470.42044090791 }, { "content": "#[proc_macro_attribute]\n\npub fn values(_args: TokenStream, input: TokenStream) -> TokenStream {\n\n // We need to add the operand_values field.\n\n // @todo: This is probably not the best place for this, but it has to be in an attribute, not the derive()\n\n let mut ast = parse_macro_input!(input as DeriveInput);\n\n match &mut ast.data {\n\n syn::Data::Struct(ref mut struct_data) => {\n\n match &mut struct_data.fields {\n\n syn::Fields::Named(fields) => {\n\n fields\n\n .named\n\n .push(syn::Field::parse_named.parse2(quote! {\n\n operand_values: OperandValues\n\n }).unwrap());\n\n }\n\n _ => {\n\n ()\n\n }\n\n }\n\n\n\n return quote! {\n\n #ast\n\n }.into();\n\n }\n\n _ => panic!(\"`add_field` has to be used with structs \"),\n\n }\n\n}\n", "file_path": "kaylee_derive/src/lib.rs", "rank": 12, "score": 79289.98299889735 }, { "content": "type OperandValues = [OperandValue; 3];\n\n\n\n/// Value for an operand in the Instruction Stream\n\n#[derive(PartialOrd, PartialEq, Debug)]\n\npub enum OperandValue {\n\n Byte(Byte),\n\n HalfWord(HalfWord),\n\n Word(Word),\n\n None,\n\n}\n\n\n\n/// Decode a single operand from a byte in the Instruction Stream\n\nimpl TryFrom<Byte> for OperandValue {\n\n type Error = ();\n\n\n\n fn try_from(value: Byte) -> Result<Self, Self::Error> {\n\n Ok(OperandValue::Byte(value))\n\n }\n\n}\n\n\n", "file_path": "kaylee/src/instructions.rs", "rank": 13, "score": 71687.67371507669 }, { "content": "/// Parse an operand\n\nfn operand_parser(s: &str) -> IResult<&str, &str, (&str, ErrorKind)> {\n\n preceded(alt((tag(\"$\"), tag(\"#\"))), digit1)(s)\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use nom::Err::Error;\n\n use nom::error::ErrorKind;\n\n\n\n use crate::asm::parser::{instruction_parser, is_valid_keyword_character, operand_parser, operation_keyword, parse_asm};\n\n\n\n #[test]\n\n pub fn test_is_valid_keyword_character() {\n\n assert!(is_valid_keyword_character('A'));\n\n assert!(is_valid_keyword_character('a'));\n\n assert!(is_valid_keyword_character('.'));\n\n assert!(is_valid_keyword_character('_'));\n\n\n\n assert_eq!(false, is_valid_keyword_character('&'));\n", "file_path": "kaylee/src/asm/parser.rs", "rank": 14, "score": 70697.77945065006 }, { "content": "/// Parse a single keyword into a keyword token\n\nfn operation_keyword(s: &str) -> IResult<&str, &str, (&str, ErrorKind)> {\n\n preceded(space0, take_while1(is_valid_keyword_character))(s)\n\n}\n\n\n", "file_path": "kaylee/src/asm/parser.rs", "rank": 15, "score": 70697.77945065006 }, { "content": "///\n\n/// Errors/ Panics\n\n/// - `AssemblerError`: If any RegisterIds are out of bounds\n\n/// - `RuntimeError`: If the target ProgramIndex is out of bounds\n\n///\n\n/// Examples\n\n/// ```asm\n\n/// JUMPE $0 $1 $2 // `34 00 01 02` - Jumps to the value of R1 if R2 and R3 are equal\n\n/// ```\n\n#[derive(Instruction)]\n\n#[opcode = 53]\n\n#[signature = \"JUMPE $D $L $R\"]\n\npub struct JumpEqual {\n\n operand_values: OperandValues,\n\n}\n\n\n\nimpl Executable for JumpEqual {\n\n fn execute(&self, vm: &mut Kaylee) -> Result<ExecutionResult, Error> {\n\n let destination = self.get_register_value_for_operand(0, 
vm).unwrap();\n\n let left = self.get_register_value_for_operand(1, vm).unwrap();\n", "file_path": "kaylee/src/instructions/program.rs", "rank": 16, "score": 51845.036199628164 }, { "content": "/// ```\n\n#[derive(Instruction)]\n\n#[opcode = 50]\n\n#[signature = \"JUMP #3\"]\n\npub struct Jump {\n\n operand_values: OperandValues,\n\n}\n\n\n\nimpl Executable for Jump {\n\n fn execute(&self, vm: &mut Kaylee) -> Result<ExecutionResult, Error> {\n\n let destination = self.operand_values[0].as_program_index();\n\n\n\n vm.set_program_counter(destination);\n\n Ok(ExecutionResult::Jumped(destination))\n\n }\n\n}\n\n\n\n/// JumpForward: Moves the program forward a certain number of instructions\n\n/// Operands:\n\n/// - 0: `#NUM_OF_INSTRUCTIONS` | 3 Bytes | ConstantValue | Number of instructions to move forward\n", "file_path": "kaylee/src/instructions/program.rs", "rank": 17, "score": 51843.489753748625 }, { "content": " let right = self.get_register_value_for_operand(2, vm).unwrap();\n\n\n\n if left == right {\n\n vm.set_program_counter(destination as RegisterId);\n\n return Ok(ExecutionResult::Jumped(vm.program_counter()));\n\n }\n\n\n\n Ok(ExecutionResult::NoAction)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::instructions::data::Load;\n\n use crate::instructions::machine::Halt;\n\n use crate::instructions::program::{Jump, JumpBackward, JumpEqual, JumpForward};\n\n use crate::program::Program;\n\n use crate::vm::Kaylee;\n\n\n\n #[test]\n", "file_path": "kaylee/src/instructions/program.rs", "rank": 18, "score": 51841.889200507445 }, { "content": "#[signature = \"JUMPB #3\"]\n\npub struct JumpBackward {\n\n operand_values: OperandValues,\n\n}\n\n\n\nimpl Executable for JumpBackward {\n\n fn execute(&self, vm: &mut Kaylee) -> Result<ExecutionResult, Error> {\n\n let backward = self.operand_values[0].as_constant_value();\n\n let steps = ((backward + 1) * 4) as usize;\n\n\n\n vm.set_program_counter(vm.program_counter() - steps);\n\n Ok(ExecutionResult::Jumped(vm.program_counter()))\n\n }\n\n}\n\n\n\n/// JumpEqual: Moves the program counter to the value of a register if the value of two registers is equal\n\n/// Operands:\n\n/// - 0: `$D` | 1 Byte | RegisterId | RegisterId that holds the target ProgramIndex\n\n/// - 1: `$L` | 1 Byte | RegisterId | RegisterId of the left term\n\n/// - 2: `$R` | 1 Byte | RegisterId | RegisterId of the right term\n", "file_path": "kaylee/src/instructions/program.rs", "rank": 19, "score": 51839.98084855011 }, { "content": "///\n\n/// Errors/ Panics\n\n/// - `AssemblerError`: If the ConstantValue is a value larger than 3 bytes\n\n/// - `RuntimeError`: If the target ProgramIndex is out of bounds\n\n///\n\n/// Examples\n\n/// ```asm\n\n/// JUMPF #4 // `33 00 01 FF` - Jumps forward 4 instructions (16 bytes)\n\n/// ```\n\n#[derive(Instruction)]\n\n#[opcode = 51]\n\n#[signature = \"JUMPF #3\"]\n\npub struct JumpForward {\n\n operand_values: OperandValues,\n\n}\n\n\n\nimpl Executable for JumpForward {\n\n fn execute(&self, vm: &mut Kaylee) -> Result<ExecutionResult, Error> {\n\n let forward = self.operand_values[0].as_constant_value();\n\n let steps = (forward * 4) as usize;\n", "file_path": "kaylee/src/instructions/program.rs", "rank": 20, "score": 51838.9461525095 }, { "content": "//! Instructions for navigating and manipulating the program\n\n//! 
Opcodes reserved: 50 - 69\n\nuse std::fmt::Error;\n\n\n\nuse kaylee_derive::Instruction;\n\n\n\nuse crate::instructions::{display_instruction_with_values, Executable, Instruction, InstructionDocumentation, InstructionSignature, OperandType, OperandValues};\n\nuse crate::vm::{ExecutionResult, Kaylee, RegisterId};\n\n\n\n/// Jump: Resets the program counter to a constant value\n\n/// Operands:\n\n/// - 0: `#ADDRESS` | 3 Bytes | ProgramIndex | ProgramIndex to jump to\n\n///\n\n/// Errors/ Panics\n\n/// - `AssemblerError`: If the ProgramIndex is a value larger than 3 bytes\n\n/// - `RuntimeError`: If the target ProgramIndex is out of bounds\n\n///\n\n/// Examples\n\n/// ```asm\n\n/// JUMP #500 // `32 00 01 FF` - Jumps to program index 500\n", "file_path": "kaylee/src/instructions/program.rs", "rank": 21, "score": 51835.8368113755 }, { "content": "\n\n vm.set_program_counter(vm.program_counter() + steps);\n\n Ok(ExecutionResult::Jumped(vm.program_counter()))\n\n }\n\n}\n\n\n\n/// JumpBackward: Moves the program backward a certain number of instructions\n\n/// Operands:\n\n/// - 0: `#NUM_OF_INSTRUCTIONS` | 3 Bytes | ConstantValue | Number of instructions to move forward\n\n///\n\n/// Errors/ Panics\n\n/// - `AssemblerError`: If the ConstantValue is a value larger than 3 bytes\n\n/// - `RuntimeError`: If the target ProgramIndex is out of bounds\n\n///\n\n/// Examples\n\n/// ```asm\n\n/// JUMPF #4 // `34 00 01 FF` - Jumps backward 4 instructions (16 bytes)\n\n/// ```\n\n#[derive(Instruction)]\n\n#[opcode = 52]\n", "file_path": "kaylee/src/instructions/program.rs", "rank": 22, "score": 51831.79499674127 }, { "content": " fn test_jump() {\n\n let program = Program::from(vec![\n\n // A bunch of random load instructions\n\n Load::OPCODE, 0, 0, 100,\n\n Load::OPCODE, 1, 0, 100,\n\n Jump::OPCODE, 0, 0, 24,\n\n Load::OPCODE, 2, 0, 100,\n\n Load::OPCODE, 3, 0, 100,\n\n Load::OPCODE, 4, 0, 100,\n\n Load::OPCODE, 5, 0, 100,\n\n ]);\n\n\n\n let mut vm = Kaylee::new();\n\n vm.run(program);\n\n\n\n // Should set these\n\n assert_eq!(100, vm.register(0).unwrap());\n\n assert_eq!(100, vm.register(1).unwrap());\n\n\n\n // Should skip these\n", "file_path": "kaylee/src/instructions/program.rs", "rank": 23, "score": 51827.693055297015 }, { "content": " assert_eq!(28, vm.program_counter());\n\n }\n\n\n\n #[test]\n\n fn test_jump_backward() {\n\n let program = Program::from(vec![\n\n Load::OPCODE, 0, 0, 100, // Jump to here, execute\n\n\n\n Halt::OPCODE, 0, 0, 0, // Stop\n\n\n\n Load::OPCODE, 1, 0, 100,\n\n Load::OPCODE, 2, 0, 100, // Start here (12)\n\n Load::OPCODE, 3, 0, 100, // Execute\n\n\n\n JumpBackward::OPCODE, 0, 0, 5, // Jump\n\n\n\n Load::OPCODE, 4, 0, 100,\n\n Load::OPCODE, 5, 0, 100,\n\n ]);\n\n\n", "file_path": "kaylee/src/instructions/program.rs", "rank": 24, "score": 51826.765285194146 }, { "content": " let mut vm = Kaylee::new();\n\n vm.set_program_counter(12);\n\n vm.run(program);\n\n\n\n assert_eq!(100, vm.register(0).unwrap());\n\n assert_eq!(0, vm.register(1).unwrap());\n\n assert_eq!(100, vm.register(2).unwrap());\n\n assert_eq!(100, vm.register(3).unwrap());\n\n assert_eq!(0, vm.register(4).unwrap());\n\n assert_eq!(0, vm.register(5).unwrap());\n\n\n\n // And check on the counter itself\n\n assert_eq!(8, vm.program_counter());\n\n }\n\n\n\n #[test]\n\n fn test_jump_if_equal() {\n\n let program = Program::from(vec![\n\n // A bunch of random load instructions\n\n Load::OPCODE, 0, 0, 100,\n", "file_path": "kaylee/src/instructions/program.rs", "rank": 25, "score": 51826.102969229156 }, { "content": 
" assert_eq!(0, vm.register(2).unwrap());\n\n assert_eq!(0, vm.register(3).unwrap());\n\n assert_eq!(0, vm.register(4).unwrap());\n\n\n\n // And hit this one at the end\n\n assert_eq!(100, vm.register(5).unwrap());\n\n\n\n // And check on the counter itself\n\n assert_eq!(28, vm.program_counter());\n\n }\n\n\n\n #[test]\n\n fn test_jump_forward() {\n\n let program = Program::from(vec![\n\n // A bunch of random load instructions\n\n Load::OPCODE, 0, 0, 100,\n\n Load::OPCODE, 1, 0, 100,\n\n JumpForward::OPCODE, 0, 0, 3,\n\n Load::OPCODE, 2, 0, 100,\n\n Load::OPCODE, 3, 0, 100,\n", "file_path": "kaylee/src/instructions/program.rs", "rank": 26, "score": 51825.78144492201 }, { "content": " assert_eq!(0, vm.register(2).unwrap());\n\n assert_eq!(0, vm.register(3).unwrap());\n\n assert_eq!(0, vm.register(4).unwrap());\n\n\n\n // And hit this one at the end\n\n assert_eq!(100, vm.register(5).unwrap());\n\n\n\n // And check on the counter itself\n\n assert_eq!(28, vm.program_counter());\n\n }\n\n\n\n #[test]\n\n fn test_dont_jump_if_not_equal() {\n\n let program = Program::from(vec![\n\n // A bunch of random load instructions\n\n Load::OPCODE, 0, 0, 100,\n\n Load::OPCODE, 1, 0, 100,\n\n JumpEqual::OPCODE, 30, 29, 28,\n\n Load::OPCODE, 2, 0, 100,\n\n Load::OPCODE, 3, 0, 100,\n", "file_path": "kaylee/src/instructions/program.rs", "rank": 27, "score": 51825.71371676264 }, { "content": " Load::OPCODE, 1, 0, 100,\n\n JumpEqual::OPCODE, 30, 29, 28,\n\n Load::OPCODE, 2, 0, 100,\n\n Load::OPCODE, 3, 0, 100,\n\n Load::OPCODE, 4, 0, 100,\n\n Load::OPCODE, 5, 0, 100,\n\n ]);\n\n\n\n let mut vm = Kaylee::new();\n\n vm.set_register(30, 24).unwrap();\n\n vm.set_register(29, 200).unwrap();\n\n vm.set_register(28, 200).unwrap();\n\n\n\n vm.run(program);\n\n\n\n // Should set these\n\n assert_eq!(100, vm.register(0).unwrap());\n\n assert_eq!(100, vm.register(1).unwrap());\n\n\n\n // Should skip these\n", "file_path": "kaylee/src/instructions/program.rs", "rank": 28, "score": 51822.774836836965 }, { "content": " Load::OPCODE, 4, 0, 100,\n\n Load::OPCODE, 5, 0, 100,\n\n ]);\n\n\n\n let mut vm = Kaylee::new();\n\n vm.set_register(30, 24).unwrap();\n\n vm.set_register(29, 300).unwrap();\n\n vm.set_register(28, 200).unwrap();\n\n\n\n vm.run(program);\n\n\n\n // Should set these\n\n assert_eq!(100, vm.register(0).unwrap());\n\n assert_eq!(100, vm.register(1).unwrap());\n\n assert_eq!(100, vm.register(2).unwrap());\n\n assert_eq!(100, vm.register(3).unwrap());\n\n assert_eq!(100, vm.register(4).unwrap());\n\n assert_eq!(100, vm.register(5).unwrap());\n\n\n\n // And check on the counter itself\n\n assert_eq!(28, vm.program_counter());\n\n }\n\n}", "file_path": "kaylee/src/instructions/program.rs", "rank": 29, "score": 51821.91296563602 }, { "content": " Load::OPCODE, 4, 0, 100,\n\n Load::OPCODE, 5, 0, 100,\n\n ]);\n\n\n\n let mut vm = Kaylee::new();\n\n vm.run(program);\n\n\n\n // Should set these\n\n assert_eq!(100, vm.register(0).unwrap());\n\n assert_eq!(100, vm.register(1).unwrap());\n\n\n\n // Should skip these\n\n assert_eq!(0, vm.register(2).unwrap());\n\n assert_eq!(0, vm.register(3).unwrap());\n\n assert_eq!(0, vm.register(4).unwrap());\n\n\n\n // And hit this one at the end\n\n assert_eq!(100, vm.register(5).unwrap());\n\n\n\n // And check on the counter itself\n", "file_path": "kaylee/src/instructions/program.rs", "rank": 30, "score": 51821.75890109476 }, { "content": "fn main() {\n\n let mut repl = Repl::new();\n\n repl.run();\n\n}\n", "file_path": "kaylee/src/main.rs", "rank": 31, "score": 39747.42800966529 }, 
{ "content": "/// Determine if a keyword is a valid operation\n\nfn is_valid_keyword_character(c: char) -> bool {\n\n is_alphabetic(c as u8) || c == '.' || c == '_'\n\n}\n\n\n", "file_path": "kaylee/src/asm/parser.rs", "rank": 32, "score": 32914.276469052056 }, { "content": "use crate::instructions::{decode_next_instruction, Instruction};\n\nuse crate::program::{Program, ProgramIndex};\n\n\n\n// The id used for each register, key in the vector\n\npub type RegisterId = usize;\n\n\n\n// The value the VM uses\n\npub type RegisterValue = i32;\n\n\n\n// A single Opcode or register contents, or whatever\n\npub type Byte = u8;\n\n\n\n// For when a register value is 2 slots\n\npub type HalfWord = u16;\n\npub type Word = u32;\n\npub type DoubleWord = u32;\n\n\n\npub enum ExecutionResult {\n\n Halted,\n\n NoAction,\n", "file_path": "kaylee/src/vm.rs", "rank": 33, "score": 29727.750162221335 }, { "content": " // @todo: graceful shutdown of the machine/process\n\n }\n\n }\n\n }\n\n\n\n pub fn run_next(&mut self, program: &Program) {\n\n match decode_next_instruction(program, &mut self.program_counter) {\n\n Some(Ok(instruction)) => self.execute_instruction(instruction),\n\n None => println!(\"Execution Finished\"),\n\n Some(Err(_error)) => panic!(\"received an error\"),\n\n };\n\n }\n\n\n\n fn execute_instruction(&mut self, instruction: Box<dyn Instruction>) {\n\n instruction.execute(self).unwrap();\n\n\n\n // I should probably do something with these results, or pass them back\n\n // match instruction.execute(self) {\n\n // Ok(ExecutionResult::Value(value)) => println!(\"{value}\"),\n\n // Ok(ExecutionResult::Halted) => println!(\"Halting\"),\n", "file_path": "kaylee/src/vm.rs", "rank": 34, "score": 29725.092861677167 }, { "content": " Kaylee {\n\n registers: [0; Kaylee::REGISTER_COUNT],\n\n remainder: 0,\n\n program_counter: 0,\n\n halted: false,\n\n }\n\n }\n\n\n\n /// This will run until one of the following conditions is met\n\n /// 1. The Program reaches completes its final instruction\n\n /// 2. 
The VM `halt` flag is set, which will complete the current instruction and then halt\n\n pub fn run(&mut self, program: Program) {\n\n while let Some(result) = decode_next_instruction(&program, &mut self.program_counter) {\n\n match result {\n\n Ok(instruction) => { self.execute_instruction(instruction) }\n\n Err(_error) => { panic!(\"Error decoding instruction\") }\n\n }\n\n\n\n if self.halted {\n\n break;\n", "file_path": "kaylee/src/vm.rs", "rank": 35, "score": 29723.770086586323 }, { "content": " Value(RegisterValue),\n\n Jumped(ProgramIndex),\n\n Equality(bool),\n\n}\n\n\n\npub enum ExecutionError {\n\n Unknown(String),\n\n}\n\n\n\npub struct Kaylee {\n\n registers: [RegisterValue; Kaylee::REGISTER_COUNT],\n\n program_counter: RegisterId,\n\n remainder: u32,\n\n halted: bool,\n\n}\n\n\n\nimpl Kaylee {\n\n pub const REGISTER_COUNT: usize = 32;\n\n\n\n pub fn new() -> Self {\n", "file_path": "kaylee/src/vm.rs", "rank": 36, "score": 29722.781901519626 }, { "content": " if register > Kaylee::REGISTER_COUNT - 1 {\n\n return Err(());\n\n }\n\n\n\n self.registers[register] = value;\n\n Ok(())\n\n }\n\n\n\n pub(crate) fn halt(&mut self) {\n\n self.halted = true;\n\n }\n\n\n\n pub(crate) fn remainder(&self) -> u32 {\n\n self.remainder\n\n }\n\n\n\n pub(crate) fn set_remainder(&mut self, remainder: u32) {\n\n self.remainder = remainder\n\n }\n\n\n", "file_path": "kaylee/src/vm.rs", "rank": 37, "score": 29718.67553692366 }, { "content": " // Ok(ExecutionResult::Jumped(index)) => println!(\"Jumped to {index}\"),\n\n // Ok(ExecutionResult::Equality(flag)) => println!(\"Jumped to {flag}\"),\n\n // Ok(ExecutionResult::NoAction) => println!(\"No Action\"),\n\n // Err(_) => panic!(\"Error\")\n\n // }\n\n }\n\n\n\n pub(crate) fn register(&self, register: RegisterId) -> Result<RegisterValue, ()> {\n\n if register > Kaylee::REGISTER_COUNT - 1 {\n\n return Err(());\n\n }\n\n\n\n Ok(*&self.registers[register].clone())\n\n }\n\n\n\n pub(crate) fn all_registers(&self) -> [RegisterValue; Kaylee::REGISTER_COUNT] {\n\n *&self.registers.clone()\n\n }\n\n\n\n pub(crate) fn set_register(&mut self, register: RegisterId, value: RegisterValue) -> Result<(), ()> {\n", "file_path": "kaylee/src/vm.rs", "rank": 38, "score": 29717.929917583893 }, { "content": " pub(crate) fn program_counter(&self) -> ProgramIndex {\n\n self.program_counter\n\n }\n\n\n\n pub(crate) fn set_program_counter(&mut self, index: ProgramIndex) {\n\n self.program_counter = index\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n}\n", "file_path": "kaylee/src/vm.rs", "rank": 39, "score": 29717.685425560776 }, { "content": "impl<'a> TryFrom<Parsed<'a>> for Program {\n\n type Error = AssemblerError;\n\n\n\n fn try_from(parsed: Parsed) -> Result<Self, Self::Error> {\n\n let assembler = Assembler::new();\n\n assembler.assemble_parsed_asm(parsed)\n\n }\n\n}\n\n\n\nimpl TryFrom<Source> for Program {\n\n type Error = AssemblerError;\n\n\n\n fn try_from(source: Source) -> Result<Self, Self::Error> {\n\n // let parsed = Parsed::try_from(source);\n\n let parsed = parse_asm(source.body.as_str());\n\n match parsed {\n\n Ok(success) => {\n\n success.1.try_into()\n\n }\n\n Err(_) => Err(AssemblerError::Other(String::from(\"Parsing error\")))\n\n }\n\n }\n\n}\n\n\n\n// impl IndexMut<ProgramIndex> for Program {\n\n// fn index_mut(&mut self, index: ProgramIndex) -> &mut Self::Output {\n\n// todo!()\n\n// }\n\n// }", "file_path": "kaylee/src/program.rs", "rank": 40, "score": 28977.479757117813 }, { "content": "use std::ops::Index;\n\nuse 
std::vec::IntoIter;\n\n\n\nuse crate::asm::{Parsed, Source};\n\nuse crate::asm::assembler::{Assembler, AssemblerError};\n\nuse crate::asm::parser::parse_asm;\n\nuse crate::vm::Byte;\n\n\n\npub type ProgramIndex = usize;\n\n\n\n#[derive(PartialEq, Debug)]\n\npub struct Program {\n\n bytes: Vec<Byte>,\n\n}\n\n\n\nimpl Index<ProgramIndex> for Program {\n\n type Output = Byte;\n\n\n\n fn index(&self, index: ProgramIndex) -> &Self::Output {\n\n &self.bytes[index]\n", "file_path": "kaylee/src/program.rs", "rank": 41, "score": 28977.24809342025 }, { "content": " self.bytes.extend(iter)\n\n }\n\n}\n\n\n\nimpl Program {\n\n pub fn new() -> Self {\n\n Program {\n\n bytes: Vec::new()\n\n }\n\n }\n\n\n\n pub fn len(&self) -> usize {\n\n self.bytes.len()\n\n }\n\n\n\n pub fn bytes(&self) -> &Vec<u8> {\n\n &self.bytes\n\n }\n\n}\n\n\n", "file_path": "kaylee/src/program.rs", "rank": 42, "score": 28971.827557617013 }, { "content": " }\n\n}\n\n\n\nimpl IntoIterator for Program {\n\n type Item = u8;\n\n type IntoIter = IntoIter<Byte>;\n\n\n\n fn into_iter(self) -> Self::IntoIter {\n\n self.bytes.into_iter()\n\n }\n\n}\n\n\n\nimpl From<Vec<Byte>> for Program {\n\n fn from(bytes: Vec<Byte>) -> Self {\n\n Program { bytes }\n\n }\n\n}\n\n\n\nimpl Extend<Byte> for Program {\n\n fn extend<T: IntoIterator<Item=Byte>>(&mut self, iter: T) {\n", "file_path": "kaylee/src/program.rs", "rank": 43, "score": 28971.494795976378 }, { "content": "enum SignatureState {\n\n Identifier,\n\n Operands,\n\n}\n\n\n", "file_path": "kaylee_derive/src/lib.rs", "rank": 44, "score": 26799.939399940904 }, { "content": "use std::fmt::Error;\n\n\n\nuse linkme::distributed_slice;\n\n\n\nuse crate::instructions::compare::{Equal, GreaterThan, GreaterThanOrEqual, LessThan, LessThanOrEqual, NotEqual};\n\nuse crate::instructions::data::Load;\n\nuse crate::instructions::machine::Halt;\n\nuse crate::instructions::math::{Add, Divide, Multiply, Subtract};\n\nuse crate::instructions::program::{Jump, JumpBackward, JumpEqual, JumpForward};\n\nuse crate::program::{Program, ProgramIndex};\n\nuse crate::vm::{Byte, ExecutionResult, HalfWord, Kaylee, RegisterId, RegisterValue, Word};\n\n\n\nmod machine;\n\nmod data;\n\nmod math;\n\nmod program;\n\nmod compare;\n\nmod logical;\n\nmod system;\n\nmod library;\n", "file_path": "kaylee/src/instructions.rs", "rank": 45, "score": 24938.04888079622 }, { "content": " item = Some(registered_instruction);\n\n break;\n\n }\n\n }\n\n\n\n item\n\n }\n\n}\n\n\n\n/// Errors concerning decoding instruction bytecode\n\n#[derive(Debug)]\n\npub enum InstructionDecodeError {\n\n InvalidValueSize,\n\n IllegalOpcode,\n\n}\n\n\n\n/// Decode the next instruction in the Program stream\n", "file_path": "kaylee/src/instructions.rs", "rank": 46, "score": 24927.222301969836 }, { "content": " OperandValue::Byte(value) => value.to_string(),\n\n OperandValue::HalfWord(value) => value.to_string(),\n\n OperandValue::Word(value) => value.to_string(),\n\n OperandValue::None => panic!(\"Did not receive a destination register\")\n\n }\n\n }\n\n}\n\n\n\n/// Defines an Instruction's Signature\n\npub struct InstructionSignature {\n\n pub identifier: String,\n\n pub operands: [OperandType; 3],\n\n}\n\n\n\n/// Defines an Instruction's documentation\n\npub struct InstructionDocumentation {\n\n pub name: String,\n\n pub help: String,\n\n}\n\n\n", "file_path": "kaylee/src/instructions.rs", "rank": 47, "score": 24926.674663050027 }, { "content": "mod misc;\n\n\n\n/// Type for the three operand slots allowed for each instruction\n\npub type 
RegisteredInstruction = (&'static str, u8, [OperandType; 3]);\n\n\n\n/// Data Repository for Registered Instructions. \n\n/// Not intended to be directly accessed. Use `InstructionRegistry` instead.\n\n#[distributed_slice]\n\npub static _INSTRUCTION_REGISTRY: [RegisteredInstruction] = [..];\n\n\n\n/// Link-time built registry of all allowed instructions, including signatures.\n\n/// Useful for parsing, listing, and examining instructions\n\npub struct InstructionRegistry {}\n\n\n\nimpl InstructionRegistry {\n\n /// Get a RegisteredInstruction from the InstructionRegistry if it exists\n\n pub fn get(operation: &str) -> Option<&RegisteredInstruction> {\n\n let mut item: Option<&RegisteredInstruction> = None;\n\n for registered_instruction in _INSTRUCTION_REGISTRY {\n\n if registered_instruction.0 == operation {\n", "file_path": "kaylee/src/instructions.rs", "rank": 48, "score": 24926.648697143177 }, { "content": "/// Decode a single operand from a Halfword in the Instruction Stream\n\nimpl TryFrom<HalfWord> for OperandValue {\n\n type Error = ();\n\n\n\n fn try_from(value: HalfWord) -> Result<Self, Self::Error> {\n\n Ok(OperandValue::HalfWord(value))\n\n }\n\n}\n\n\n\nimpl OperandValue {\n\n /// Get the OperandValue as a RegisterId\n\n // @todo: I tried to do these conversions using TryFrom and a generic `into<T>(&self) -> T` function, but neither worked.\n\n // @todo: There is certainly a more idiomatic way\n\n fn as_register_id(&self) -> RegisterId {\n\n match self {\n\n OperandValue::Byte(value) => *value as usize,\n\n OperandValue::HalfWord(value) => *value as usize,\n\n OperandValue::Word(value) => *value as usize,\n\n OperandValue::None => panic!(\"Did not receive a destination register\")\n\n }\n", "file_path": "kaylee/src/instructions.rs", "rank": 49, "score": 24925.759471512123 }, { "content": " let b = (instructions[*program_counter + 1] as Word) << 8;\n\n let c = instructions[*program_counter + 2] as Word;\n\n\n\n let value = (a | b | c) as u32;\n\n\n\n operand_values[index] = OperandValue::Word(value);\n\n\n\n *program_counter += 3;\n\n }\n\n };\n\n }\n\n\n\n if (original_pc + 3) != *program_counter {\n\n *program_counter = original_pc + 3;\n\n }\n\n Ok(operand_values)\n\n}\n\n\n", "file_path": "kaylee/src/instructions.rs", "rank": 50, "score": 24923.840015432455 }, { "content": " JumpBackward::OPCODE => build::<JumpBackward>(instructions, program_counter),\n\n JumpEqual::OPCODE => build::<JumpEqual>(instructions, program_counter),\n\n\n\n Equal::OPCODE => build::<Equal>(instructions, program_counter),\n\n NotEqual::OPCODE => build::<NotEqual>(instructions, program_counter),\n\n GreaterThan::OPCODE => build::<GreaterThan>(instructions, program_counter),\n\n LessThan::OPCODE => build::<LessThan>(instructions, program_counter),\n\n GreaterThanOrEqual::OPCODE => build::<GreaterThanOrEqual>(instructions, program_counter),\n\n LessThanOrEqual::OPCODE => build::<LessThanOrEqual>(instructions, program_counter),\n\n\n\n _ => {\n\n Err(InstructionDecodeError::IllegalOpcode)\n\n }\n\n })\n\n}\n\n\n", "file_path": "kaylee/src/instructions.rs", "rank": 51, "score": 24923.414280800494 }, { "content": " if index > 2 {\n\n return Err(\"Index Out Of Bounds\".to_string());\n\n }\n\n\n\n Ok(&self.operand_values()[index])\n\n }\n\n\n\n /// Get a concrete value from a register by looking at the target in an OperandValue\n\n fn get_register_value_for_operand(&self, operand_value_index: usize, vm: &mut Kaylee) -> Result<RegisterValue, ()> {\n\n let register = 
self.operand_values()[operand_value_index].as_register_id();\n\n vm.register(register)\n\n }\n\n}", "file_path": "kaylee/src/instructions.rs", "rank": 52, "score": 24921.902492542507 }, { "content": " }\n\n\n\n /// Get the OperandValue as a Program Index Target\n\n fn as_program_index(&self) -> ProgramIndex {\n\n self.as_register_id() as ProgramIndex\n\n }\n\n\n\n /// Get the OperandValue as a constant literal value (integer)\n\n fn as_constant_value(&self) -> RegisterValue {\n\n match self {\n\n OperandValue::Byte(value) => *value as RegisterValue,\n\n OperandValue::HalfWord(value) => *value as RegisterValue,\n\n OperandValue::Word(value) => *value as RegisterValue,\n\n OperandValue::None => panic!(\"Did not receive a destination register\")\n\n }\n\n }\n\n\n\n /// Get the OperandValue as a string\n\n pub(crate) fn as_string(&self) -> String {\n\n match self {\n", "file_path": "kaylee/src/instructions.rs", "rank": 53, "score": 24921.772353825694 }, { "content": "#[derive(Instruction)]\n\n#[opcode = 114]\n\n#[signature = \"GTE $D $L $R\"]\n\npub struct GreaterThanOrEqual {\n\n operand_values: OperandValues,\n\n}\n\n\n\nimpl Executable for GreaterThanOrEqual {\n\n fn execute(&self, vm: &mut Kaylee) -> Result<ExecutionResult, Error> {\n\n let callback = |left: RegisterValue, right: RegisterValue| { (left >= right) as RegisterValue };\n\n\n\n let result = instructions::basic_register_execution(self, vm, callback);\n\n match result {\n\n 1 => Ok(ExecutionResult::Equality(true)),\n\n 0 => Ok(ExecutionResult::Equality(false)),\n\n _ => panic!(\"Equality returned something other than a 0 or 1\")\n\n }\n\n }\n\n}\n\n\n", "file_path": "kaylee/src/instructions/compare.rs", "rank": 58, "score": 23994.69830935697 }, { "content": "\n\nimpl Executable for LessThanOrEqual {\n\n fn execute(&self, vm: &mut Kaylee) -> Result<ExecutionResult, Error> {\n\n let callback = |left: RegisterValue, right: RegisterValue| { (left <= right) as RegisterValue };\n\n\n\n let result = instructions::basic_register_execution(self, vm, callback);\n\n match result {\n\n 1 => Ok(ExecutionResult::Equality(true)),\n\n 0 => Ok(ExecutionResult::Equality(false)),\n\n _ => panic!(\"Equality returned something other than a 0 or 1\")\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::instructions::compare::{Equal, GreaterThan, GreaterThanOrEqual, LessThan, LessThanOrEqual, NotEqual};\n\n use crate::program::Program;\n\n use crate::vm::Kaylee;\n\n\n", "file_path": "kaylee/src/instructions/compare.rs", "rank": 59, "score": 23993.818896850855 }, { "content": "///\n\n/// Examples\n\n/// ```asm\n\n/// LT $01 $10 $30 // `6E 01 0A 1E` - Loads true/false into register 1 based on comparison from values in registers 10 and 30\n\n/// LT $40 $01 $10 // `6E 28 01 0A` - AssemblerError because 40 is not a valid register\n\n/// ```\n\n#[derive(Instruction)]\n\n#[opcode = 113]\n\n#[signature = \"LT $D $L $R\"]\n\npub struct LessThan {\n\n operand_values: OperandValues,\n\n}\n\n\n\nimpl Executable for LessThan {\n\n fn execute(&self, vm: &mut Kaylee) -> Result<ExecutionResult, Error> {\n\n let callback = |left: RegisterValue, right: RegisterValue| { (left < right) as RegisterValue };\n\n\n\n let result = instructions::basic_register_execution(self, vm, callback);\n\n match result {\n\n 1 => Ok(ExecutionResult::Equality(true)),\n", "file_path": "kaylee/src/instructions/compare.rs", "rank": 60, "score": 23993.625583791134 }, { "content": "#[signature = \"NEQ $D $L $R\"]\n\npub struct NotEqual {\n\n operand_values: 
OperandValues,\n\n}\n\n\n\nimpl Executable for NotEqual {\n\n fn execute(&self, vm: &mut Kaylee) -> Result<ExecutionResult, Error> {\n\n let callback = |left: RegisterValue, right: RegisterValue| { (left != right) as RegisterValue };\n\n\n\n let result = instructions::basic_register_execution(self, vm, callback);\n\n match result {\n\n 1 => Ok(ExecutionResult::Equality(true)),\n\n 0 => Ok(ExecutionResult::Equality(false)),\n\n _ => panic!(\"Equality returned something other than a 0 or 1\")\n\n }\n\n }\n\n}\n\n\n\n/// GreaterThan: Stores a boolean in a destination with the comparison result from two register values\n\n/// Operands:\n", "file_path": "kaylee/src/instructions/compare.rs", "rank": 61, "score": 23993.33128645453 }, { "content": "/// ```asm\n\n/// EQ $01 $10 $30 // `6E 01 0A 1E` - Loads true/false into register 1 based on comparison from values in registers 10 and 30\n\n/// EQ $40 $01 $10 // `6E 28 01 0A` - AssemblerError because 40 is not a valid register\n\n/// ```\n\n#[derive(Instruction)]\n\n#[opcode = 110]\n\n#[signature = \"EQ $D $L $R\"]\n\npub struct Equal {\n\n operand_values: OperandValues,\n\n}\n\n\n\nimpl Executable for Equal {\n\n fn execute(&self, vm: &mut Kaylee) -> Result<ExecutionResult, Error> {\n\n let callback = |left: RegisterValue, right: RegisterValue| { (left == right) as RegisterValue };\n\n\n\n let result = instructions::basic_register_execution(self, vm, callback);\n\n match result {\n\n 1 => Ok(ExecutionResult::Equality(true)),\n\n 0 => Ok(ExecutionResult::Equality(false)),\n\n _ => panic!(\"Equality returned something other than a 0 or 1\")\n", "file_path": "kaylee/src/instructions/compare.rs", "rank": 62, "score": 23992.42912359814 }, { "content": " fn execute(&self, vm: &mut Kaylee) -> Result<ExecutionResult, Error> {\n\n let callback = |left: RegisterValue, right: RegisterValue| { (left > right) as RegisterValue };\n\n\n\n let result = instructions::basic_register_execution(self, vm, callback);\n\n match result {\n\n 1 => Ok(ExecutionResult::Equality(true)),\n\n 0 => Ok(ExecutionResult::Equality(false)),\n\n _ => panic!(\"Equality returned something other than a 0 or 1\")\n\n }\n\n }\n\n}\n\n\n\n/// LessThan: Stores a boolean in a destination with the comparison result from two register values\n\n/// Operands:\n\n/// - 0: `$D` | 1 Byte | RegisterId | RegisterId of the destination register (0-31)\n\n/// - 1: `$L` | 1 Byte | RegisterId | RegisterId of the left term\n\n/// - 2: `$R` | 1 Byte | RegisterId | RegisterId of the right term\n\n///\n\n/// Errors/ Panics\n\n/// - `AssemblerError` or `ProgramPanic`: If any register is out of bounds\n", "file_path": "kaylee/src/instructions/compare.rs", "rank": 66, "score": 23989.26635803724 }, { "content": "//! Instructions for comparisons\n\n//! 
Opcodes reserved: 100 - 119\n\nuse std::fmt::Error;\n\n\n\nuse kaylee_derive::Instruction;\n\n\n\nuse crate::instructions;\n\nuse crate::instructions::{display_instruction_with_values, Executable, Instruction, InstructionDocumentation, InstructionSignature, OperandType, OperandValues};\n\nuse crate::vm::{ExecutionResult, Kaylee, RegisterValue};\n\n\n\n/// Equal: Stores a boolean in a destination with the comparison result from two register values\n\n/// Operands:\n\n/// - 0: `$D` | 1 Byte | RegisterId | RegisterId of the destination register (0-31)\n\n/// - 1: `$L` | 1 Byte | RegisterId | RegisterId of the left term\n\n/// - 2: `$R` | 1 Byte | RegisterId | RegisterId of the right term\n\n///\n\n/// Errors/ Panics\n\n/// - `AssemblerError` or `ProgramPanic`: If any register is out of bounds\n\n///\n\n/// Examples\n", "file_path": "kaylee/src/instructions/compare.rs", "rank": 67, "score": 23988.485523083953 }, { "content": "#[derive(Instruction)]\n\n#[opcode = 1]\n\n#[signature = \"HALT\"]\n\npub struct Halt {\n\n operand_values: OperandValues,\n\n}\n\n\n\nimpl Executable for Halt {\n\n fn execute(&self, vm: &mut Kaylee) -> Result<ExecutionResult, Error> {\n\n vm.halt();\n\n Ok(ExecutionResult::Halted)\n\n }\n\n}", "file_path": "kaylee/src/instructions/machine.rs", "rank": 69, "score": 23987.202134983287 }, { "content": "/// - 0: `$D` | 1 Byte | RegisterId | RegisterId of the destination register (0-31)\n\n/// - 1: `$L` | 1 Byte | RegisterId | RegisterId of the left term\n\n/// - 2: `$R` | 1 Byte | RegisterId | RegisterId of the right term\n\n///\n\n/// Errors/ Panics\n\n/// - `AssemblerError` or `ProgramPanic`: If any register is out of bounds\n\n///\n\n/// Examples\n\n/// ```asm\n\n/// GT $01 $10 $30 // `6E 01 0A 1E` - Loads true/false into register 1 based on comparison from values in registers 10 and 30\n\n/// GT $40 $01 $10 // `6E 28 01 0A` - AssemblerError because 40 is not a valid register\n\n/// ```\n\n#[derive(Instruction)]\n\n#[opcode = 112]\n\n#[signature = \"GT $D $L $R\"]\n\npub struct GreaterThan {\n\n operand_values: OperandValues,\n\n}\n\n\n\nimpl Executable for GreaterThan {\n", "file_path": "kaylee/src/instructions/compare.rs", "rank": 70, "score": 23984.72116327103 }, { "content": "/// ```asm\n\n/// LOAD $1 #500 // `1E 01 01 FF` - Loads 500 into Register 1\n\n/// LOAD $31 #01 // `1E 1F 00 01` - Loads 1 into Register 31\n\n/// LOAD $40 #10 // `1E 28 00 0A` - Assembler Error because 40 is not a valid register\n\n/// LOAD $15 #1,000,000 // `1E 0F 00 0A` - Assembler Error because constant value is out of bounds\n\n/// ```\n\n#[derive(Instruction)]\n\n#[opcode = 30]\n\n#[signature = \"LOAD $D #2\"]\n\npub struct Load {\n\n operand_values: OperandValues,\n\n}\n\n\n\nimpl Executable for Load {\n\n fn execute(&self, vm: &mut Kaylee) -> Result<ExecutionResult, Error> {\n\n let destination = self.operand_value(0).unwrap().as_register_id();\n\n let value = self.operand_value(1).unwrap().as_constant_value();\n\n\n\n vm.set_register(destination, value).unwrap();\n\n Ok(ExecutionResult::Value(value))\n", "file_path": "kaylee/src/instructions/data.rs", "rank": 71, "score": 23984.271867618245 }, { "content": "/// LessThanOrEqual: Stores a boolean in a destination with the comparison result from two register values\n\n/// Operands:\n\n/// - 0: `$D` | 1 Byte | RegisterId | RegisterId of the destination register (0-31)\n\n/// - 1: `$L` | 1 Byte | RegisterId | RegisterId of the left term\n\n/// - 2: `$R` | 1 Byte | RegisterId | RegisterId of the right term\n\n///\n\n/// Errors/ Panics\n\n/// 
- `AssemblerError` or `ProgramPanic`: If any register is out of bounds\n\n///\n\n/// Examples\n\n/// ```asm\n\n/// LTE $01 $10 $30 // `6E 01 0A 1E` - Loads true/false into register 1 based on comparison from values in registers 10 and 30\n\n/// LTE $40 $01 $10 // `6E 28 01 0A` - AssemblerError because 40 is not a valid register\n\n/// ```\n\n#[derive(Instruction)]\n\n#[opcode = 115]\n\n#[signature = \"LTE $D $L $R\"]\n\npub struct LessThanOrEqual {\n\n operand_values: OperandValues,\n\n}\n", "file_path": "kaylee/src/instructions/compare.rs", "rank": 72, "score": 23982.66230684102 }, { "content": "//! Instructions for manipulating data (registers and memory)\n\n//! Opcodes reserved: 30 - 49\n\nuse std::fmt::Error;\n\n\n\nuse kaylee_derive::Instruction;\n\n\n\nuse crate::instructions::{display_instruction_with_values, Executable, Instruction, InstructionDocumentation, InstructionSignature, OperandType, OperandValues};\n\nuse crate::vm::{ExecutionResult, Kaylee};\n\n\n\n/// LOAD: Loads a value into a designated register\n\n/// Operands:\n\n/// - 0: `$D` | 1 Byte | RegisterId | RegisterId of the destination register (0-31)\n\n/// - 1: `#2` | 2 Bytes | HalfWord | Literal value to be loaded\n\n/// - 2: NOT USED, given to Operand 1\n\n///\n\n/// Errors/ Panics\n\n/// - `AssemblerError` or `ProgramPanic`: If register is out of bounds\n\n/// - `AssemblerError` or `ProgramPanic`: If Constant value is too large for 2 bytes\n\n///\n\n/// Examples\n", "file_path": "kaylee/src/instructions/data.rs", "rank": 73, "score": 23982.43992021733 }, { "content": "//! Instructions for controlling the Virtual Machine\n\n//! Opcodes reserved: 0 - 29\n\nuse std::fmt::Error;\n\n\n\nuse kaylee_derive::Instruction;\n\n\n\nuse crate::instructions::{display_instruction_with_values, Executable, Instruction, InstructionDocumentation, InstructionSignature, OperandType, OperandValues};\n\nuse crate::vm::{ExecutionResult, Kaylee};\n\n\n\n/// Halt: Gracefully ends the program and shuts down the process\n\n/// Operands:\n\n/// - None\n\n///\n\n/// Errors/ Panics\n\n/// - None\n\n///\n\n/// Examples\n\n/// ```asm\n\n/// HALT // `01 00 00 00`\n\n/// ```\n", "file_path": "kaylee/src/instructions/machine.rs", "rank": 75, "score": 23981.79578147334 }, { "content": " }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::instructions::data::Load;\n\n use crate::program::Program;\n\n use crate::vm::Kaylee;\n\n\n\n #[test]\n\n fn test_load() {\n\n let program = Program::from(vec![\n\n Load::OPCODE, 4, 1, 244, // LOAD $4 #500\n\n Load::OPCODE, 30, 0, 12, // LOAD $6 #12\n\n ]);\n\n\n\n let mut vm = Kaylee::new();\n\n vm.run(program);\n\n\n\n assert_eq!(500, vm.register(4).unwrap());\n\n assert_eq!(12, vm.register(30).unwrap());\n\n }\n\n}", "file_path": "kaylee/src/instructions/data.rs", "rank": 76, "score": 23979.707321430506 }, { "content": " }\n\n }\n\n}\n\n\n\n/// NotEqual: Stores a boolean in a destination with the comparison result from two register values\n\n/// Operands:\n\n/// - 0: `$D` | 1 Byte | RegisterId | RegisterId of the destination register (0-31)\n\n/// - 1: `$L` | 1 Byte | RegisterId | RegisterId of the left term\n\n/// - 2: `$R` | 1 Byte | RegisterId | RegisterId of the right term\n\n///\n\n/// Errors/ Panics\n\n/// - `AssemblerError` or `ProgramPanic`: If any register is out of bounds\n\n///\n\n/// Examples\n\n/// ```asm\n\n/// NEQ $01 $10 $30 // `6E 01 0A 1E` - Loads true/false into register 1 based on comparison from values in registers 10 and 30\n\n/// NEQ $40 $01 $10 // `6E 28 01 0A` - AssemblerError because 40 
is not a valid register\n\n/// ```\n\n#[derive(Instruction)]\n\n#[opcode = 111]\n", "file_path": "kaylee/src/instructions/compare.rs", "rank": 78, "score": 23979.02310104407 }, { "content": " 0 => Ok(ExecutionResult::Equality(false)),\n\n _ => panic!(\"Equality returned something other than a 0 or 1\")\n\n }\n\n }\n\n}\n\n\n\n/// GreaterThanOrEqual: Stores a boolean in a destination with the comparison result from two register values\n\n/// Operands:\n\n/// - 0: `$D` | 1 Byte | RegisterId | RegisterId of the destination register (0-31)\n\n/// - 1: `$L` | 1 Byte | RegisterId | RegisterId of the left term\n\n/// - 2: `$R` | 1 Byte | RegisterId | RegisterId of the right term\n\n///\n\n/// Errors/ Panics\n\n/// - `AssemblerError` or `ProgramPanic`: If any register is out of bounds\n\n///\n\n/// Examples\n\n/// ```asm\n\n/// GTE $01 $10 $30 // `6E 01 0A 1E` - Loads true/false into register 1 based on comparison from values in registers 10 and 30\n\n/// GTE $40 $01 $10 // `6E 28 01 0A` - AssemblerError because 40 is not a valid register\n\n/// ```\n", "file_path": "kaylee/src/instructions/compare.rs", "rank": 80, "score": 23977.39869584796 }, { "content": " #[test]\n\n fn test_equal() {\n\n let program = Program::from(vec![\n\n Equal::OPCODE, 30, 1, 2, // Pass\n\n Equal::OPCODE, 31, 3, 4, // Fail\n\n ]);\n\n\n\n let mut vm = Kaylee::new();\n\n vm.set_register(1, 100).unwrap();\n\n vm.set_register(2, 100).unwrap();\n\n vm.set_register(3, 200).unwrap();\n\n vm.set_register(4, 500).unwrap();\n\n\n\n vm.run(program);\n\n\n\n assert_eq!(1, vm.register(30).unwrap());\n\n assert_eq!(0, vm.register(31).unwrap());\n\n }\n\n\n\n #[test]\n", "file_path": "kaylee/src/instructions/compare.rs", "rank": 83, "score": 23972.176858279585 }, { "content": " fn test_not_equal() {\n\n let program = Program::from(vec![\n\n NotEqual::OPCODE, 30, 1, 2, // Pass\n\n NotEqual::OPCODE, 31, 3, 4, // Fail\n\n ]);\n\n\n\n let mut vm = Kaylee::new();\n\n vm.set_register(1, 100).unwrap();\n\n vm.set_register(2, 200).unwrap();\n\n vm.set_register(3, 300).unwrap();\n\n vm.set_register(4, 300).unwrap();\n\n\n\n vm.run(program);\n\n\n\n assert_eq!(1, vm.register(30).unwrap());\n\n assert_eq!(0, vm.register(31).unwrap());\n\n }\n\n\n\n #[test]\n\n fn test_greater_than() {\n", "file_path": "kaylee/src/instructions/compare.rs", "rank": 84, "score": 23972.103897461744 }, { "content": " let program = Program::from(vec![\n\n GreaterThan::OPCODE, 30, 1, 2, // Pass\n\n GreaterThan::OPCODE, 31, 3, 4, // Fail\n\n ]);\n\n\n\n let mut vm = Kaylee::new();\n\n vm.set_register(1, 300).unwrap();\n\n vm.set_register(2, 200).unwrap();\n\n vm.set_register(3, 200).unwrap();\n\n vm.set_register(4, 300).unwrap();\n\n\n\n vm.run(program);\n\n\n\n assert_eq!(1, vm.register(30).unwrap());\n\n assert_eq!(0, vm.register(31).unwrap());\n\n }\n\n\n\n #[test]\n\n fn test_less_than() {\n\n let program = Program::from(vec![\n", "file_path": "kaylee/src/instructions/compare.rs", "rank": 85, "score": 23971.99821691209 }, { "content": " LessThan::OPCODE, 30, 1, 2, // Pass\n\n LessThan::OPCODE, 31, 3, 4, // Fail\n\n ]);\n\n\n\n let mut vm = Kaylee::new();\n\n vm.set_register(1, 100).unwrap();\n\n vm.set_register(2, 200).unwrap();\n\n vm.set_register(3, 400).unwrap();\n\n vm.set_register(4, 300).unwrap();\n\n\n\n vm.run(program);\n\n\n\n assert_eq!(1, vm.register(30).unwrap());\n\n assert_eq!(0, vm.register(31).unwrap());\n\n }\n\n\n\n #[test]\n\n fn test_greater_than_or_equal() {\n\n let program = Program::from(vec![\n\n GreaterThanOrEqual::OPCODE, 28, 1, 2, // 
Pass\n", "file_path": "kaylee/src/instructions/compare.rs", "rank": 86, "score": 23971.857347372857 }, { "content": "\n\n #[test]\n\n fn test_less_than_or_equal() {\n\n let program = Program::from(vec![\n\n LessThanOrEqual::OPCODE, 28, 1, 2, // Pass\n\n LessThanOrEqual::OPCODE, 29, 3, 4, // Pass\n\n LessThanOrEqual::OPCODE, 30, 5, 6, // Fail\n\n ]);\n\n\n\n let mut vm = Kaylee::new();\n\n vm.set_register(1, 100).unwrap();\n\n vm.set_register(2, 200).unwrap();\n\n\n\n vm.set_register(3, 200).unwrap();\n\n vm.set_register(4, 200).unwrap();\n\n\n\n vm.set_register(5, 400).unwrap();\n\n vm.set_register(6, 300).unwrap();\n\n\n\n vm.run(program);\n\n\n\n assert_eq!(1, vm.register(28).unwrap());\n\n assert_eq!(1, vm.register(29).unwrap());\n\n assert_eq!(0, vm.register(30).unwrap());\n\n }\n\n}\n", "file_path": "kaylee/src/instructions/compare.rs", "rank": 87, "score": 23971.06173552637 }, { "content": " GreaterThanOrEqual::OPCODE, 29, 3, 4, // Pass\n\n GreaterThanOrEqual::OPCODE, 30, 5, 6, // Fail\n\n ]);\n\n\n\n let mut vm = Kaylee::new();\n\n vm.set_register(1, 200).unwrap();\n\n vm.set_register(2, 100).unwrap();\n\n\n\n vm.set_register(3, 200).unwrap();\n\n vm.set_register(4, 200).unwrap();\n\n\n\n vm.set_register(5, 200).unwrap();\n\n vm.set_register(6, 300).unwrap();\n\n\n\n vm.run(program);\n\n\n\n assert_eq!(1, vm.register(28).unwrap());\n\n assert_eq!(1, vm.register(29).unwrap());\n\n assert_eq!(0, vm.register(30).unwrap());\n\n }\n", "file_path": "kaylee/src/instructions/compare.rs", "rank": 89, "score": 23967.547559582843 }, { "content": "//! Instructions for Misc or overflow operations\n\n//! Opcodes reserved: 220 - 255", "file_path": "kaylee/src/instructions/misc.rs", "rank": 91, "score": 23963.758984053115 }, { "content": "//! Instructions for performing logical operations (shift, or, and, etc)\n\n//! Opcodes reserved: 120 - 129", "file_path": "kaylee/src/instructions/logical.rs", "rank": 92, "score": 23963.58962617454 }, { "content": "//! Instructions for the \"standard library\"\n\n//! @todo: This and system may be merged, there is a lot of overlap in my mind\n\n//! Opcodes reserved: 180 - 219", "file_path": "kaylee/src/instructions/library.rs", "rank": 93, "score": 23963.278489282642 }, { "content": "//! Instructions for interacting the the operating environment\n\n//! These will be things like file manipulation, environment variables, networking, etc\n\n//! Opcodes reserved: 130 - 179", "file_path": "kaylee/src/instructions/system.rs", "rank": 94, "score": 23963.06643616429 }, { "content": "use crate::instructions::{InstructionRegistry, OperandType};\n\nuse crate::program::Program;\n\n\n\n#[derive(Debug, PartialEq)]\n\npub enum AssemblerError {\n\n Other(String),\n\n}\n\n\n\npub struct Assembler {\n\n // There will be state needed eventually\n\n}\n\n\n\nimpl Assembler {\n\n pub fn new() -> Self {\n\n Assembler {}\n\n }\n\n\n\n /// @todo: This is awful. 
Absolutely no error checking\n\n pub fn assemble_parsed_asm(&self, parsed: Vec<Vec<&str>>) -> Result<Program, AssemblerError> {\n\n let mut bytes: Vec<u8> = Vec::new();\n", "file_path": "kaylee/src/asm/assembler.rs", "rank": 95, "score": 19.597033588841768 }, { "content": "// #[macro_use]\n\nextern crate nom;\n\n\n\npub mod shared;\n\npub mod vm;\n\npub mod repl;\n\npub mod instructions;\n\npub mod asm;\n\npub mod program;\n", "file_path": "kaylee/src/lib.rs", "rank": 96, "score": 17.73277170243752 }, { "content": "use std;\n\nuse std::io;\n\nuse std::io::Write;\n\n\n\nuse nom::IResult;\n\n\n\nuse crate::asm::assembler::Assembler;\n\nuse crate::asm::parser::{line, parse_asm};\n\nuse crate::instructions::decode_next_instruction;\n\nuse crate::program::Program;\n\nuse crate::shared::parse_hex;\n\nuse crate::vm::Kaylee;\n\n\n\n/// Core structure for the REPL for the Assembler\n\npub struct Repl {\n\n command_buffer: Vec<String>,\n\n vm: Kaylee,\n\n}\n\n\n\nimpl Repl {\n", "file_path": "kaylee/src/repl.rs", "rank": 97, "score": 16.509882909813683 }, { "content": " pub const OPCODE: u8 = #opcode;\n\n }\n\n\n\n impl Instruction for #struct_name {\n\n fn new(operand_values: OperandValues) -> Self {\n\n #struct_name { operand_values }\n\n }\n\n\n\n fn signature() -> InstructionSignature where Self: Sized {\n\n InstructionSignature {\n\n identifier: String::from(#identifier),\n\n operands: [#op1, #op2, #op3]\n\n }\n\n }\n\n\n\n fn documentation() -> InstructionDocumentation where Self: Sized {\n\n InstructionDocumentation {\n\n name: String::from(#name),\n\n help: String::from(#help),\n\n }\n", "file_path": "kaylee_derive/src/lib.rs", "rank": 98, "score": 16.473833366814908 }, { "content": "### Standard Library and `vm` object\n\nThere is no standard library that is truly accessible. There is a rust-written set of methods attached to types. That's it.\n\n\n\nTo interact with the outside world, the always-accessible `vm` object can be used to write to the console, get environment data, and various other things.\n\n\n\nNone of these are written in Kaylee. They are all directly written in the Virtual Machine in Rust\n\n\n\n## Error Handling\n\nErrors are always explicit and basically just panic or don't.\n\n\n\nAny value (literally every type) can either be its static type OR a special `error` value.\n\nThis and panic are the only two options. \n\nWhich means all `vm` functions that execute against the machine return something, even if just `null|error`.\n\nIf that value is captured, then the machine doesn't panic until that value is used.\n\nIf that value is NOT captured, it panics in place.\n\nAny `error` object attempting to be used results in a panic. But you can handle errors in a few ways.\n\n\n\n### 1. Let it panic (Simplest)\n\n```\n\n// We don't capture the output, so this will panic in place.\n\nvm.read_file(\"/does/not/exist\");\n\n\n\n// Here we capture the output, so it panics when we try to use the value\n\nlet contents: string = vm.read_file(\"/does/not/exist\"); // Does not panic here\n\n\n\n// Does some other stuff\n\n\n\nvm.print(contents); // Panics here\n\n```\n\n\n\n### 2. Explicitly check for an error (Recommended)\n\n```\n\nlet contents: string = vm.read_file(\"/does/not/exist\");\n\nif (contents is error) {\n\n // Do something\n\n error.message(); error.code(); etc\n\n}\n\n```\n\n\n", "file_path": "docs/index.md", "rank": 99, "score": 15.72924737760251 } ]
Rust
mgl_resource_derive/src/lib.rs
fcard/MGL
9d41d30ce58451b80aa6e0d255b0c398c679b86b
#![feature(box_patterns)] extern crate proc_macro; use proc_macro2::{TokenStream, Span}; use syn::*; use quote::*; macro_rules! ident { ($name: ident) => { Ident::new(stringify!($name), Span::call_site()) } } #[proc_macro_derive(Resource, attributes(array_field, sub_resource, ignore_field))] pub fn derive_resource(input: proc_macro::TokenStream) -> proc_macro::TokenStream { let input = parse_macro_input!(input as DeriveInput); let name = input.ident; let fields = named_fields(&input.data); let parse = impl_parse_key_values(&fields); proc_macro::TokenStream::from(quote! { impl<T: ResourceAst> Resource<T> for #name { #parse } }) } fn named_fields(data: &Data) -> FieldsNamed { match data { &Data::Struct(ref data) => { match data.fields { Fields::Named(ref fields) => { fields.clone() } _ => panic!("Only structs with named fields are allowed!") } } _ => panic!("Only structs are allowed!") } } fn impl_parse_key_values(fields: &FieldsNamed) -> TokenStream { let mut matches = Vec::new(); let source_ast = quote! { source_ast }; let key = quote! { key }; let value = quote! { value }; let array_index = quote! { array_index }; let sub_field_key = quote! { sub_field_key }; let module = quote! { crate::resources::resource_trait }; for field in &fields.named { let field_name = &field.ident.clone(); let field_str = field_str(&field); let field_attrs = field_attributes(&field); let field_sub = field_attrs.sub; let field_array = field_attrs.array; let field_set; let array_pre_code; let field_indexing; let no_field_assert; if field_array { array_pre_code = quote! { let #array_index = #module::KeyInspector::get_array_index(#field_str, #key)?; if #array_index >= self.#field_name.len() { self.#field_name.resize_with(#array_index + 1, Default::default); } }; field_indexing = quote! { [#array_index] }; } else { array_pre_code = quote! {}; field_indexing = quote! {}; } let full_field = quote! { #field_name#field_indexing }; if field_sub { field_set = quote! { let #sub_field_key = #module::KeyInspector::get_sub_field_key(#field_str, #key)?; self.#full_field.parse_key_value(#source_ast, &#sub_field_key, #value)?; } } else { field_set = quote! { self.#full_field = #module::parse_field_default(#value)?; } } if field_array || field_sub { no_field_assert = quote! {}; } else { no_field_assert = quote! { #module::KeyInspector::assert_field_has_no_index(#field_str, #key)?; }; } if !field_attrs.ignore { matches.push(quote! { #field_str => { #array_pre_code #no_field_assert #field_set }}); } } quote! { fn parse_key_value(&mut self, #source_ast: &T, #key: &Key, #value: &IExpr) -> #module::Result<()> { match #key.name_of().as_ref() { #(#matches),*, field => { return #module::MglError::invalid_field(field, #module::InvalidFieldKind::NotFound) } } Ok(()) } } } fn field_str(field: &Field) -> String { field.ident.as_ref().map(Ident::to_string).unwrap_or(String::new()) } struct FieldAttributes { sub: bool, array: bool, ignore: bool, } fn field_attributes(field: &Field) -> FieldAttributes { let mut attributes = FieldAttributes { sub: false, array: false, ignore: false }; for attr in field.attrs.clone() { if attr.path.is_ident(&ident!(sub_resource)) { attributes.sub = true; } else if attr.path.is_ident(&ident!(array_field)) { attributes.array = true; } else if attr.path.is_ident(&ident!(ignore_field)) { attributes.ignore = true; } } attributes }
#![feature(box_patterns)] extern crate proc_macro; use proc_macro2::{TokenStream, Span}; use syn::*; use quote::*; macro_rules! ident { ($name: ident) => { Ident::new(stringify!($name), Span::call_site()) } } #[proc_macro_derive(Resource, attributes(array_field, sub_resource, ignore_field))] pub fn derive_resource(input: proc_macro::TokenStream) -> proc_macro::TokenStream { let input = parse_macro_input!(input as DeriveInput); let name = input.ident; let fields = named_fields(&input.data); let parse = impl_parse_key_values(&fields); proc_macro::TokenStream::from(quote! { impl<T: ResourceAst> Resource<T> for #name { #parse } }) } fn named_fields(data: &Data) -> FieldsNamed { match data { &Data::Struct(ref data) => { match data.fields { Fields::Named(ref fields) => { fields.clone() } _ => panic!("Only structs with named fields are allowed!") } } _ => panic!("Only structs are allowed!") } } fn impl_parse_key_values(fields: &FieldsNamed) -> TokenStream { let mut matches = Vec::new(); let source_ast = quote! { source_ast }; let key = quote! { key }; let value = quote! { value }; let array_index = quote! { array_index }; let sub_field_key = quote! { sub_field_key }; let module = quote! { crate::resources::resource_trait }; for field in &fields.named { let field_name = &field.ident.clone(); let field_str = field_str(&field); let field_attrs = field_attributes(&field); let field_sub = field_attrs.sub; let field_array = field_attrs.array; let field_set; let array_pre_code; let field_indexing; let no_field_assert; if field_array { array_pre_code = quote! { let #array_index = #module::KeyInspector::get_array_index(#field_str, #key)?; if #array_index >= self.#field_name.len() { self.#field_name.resize_with(#array_index + 1, Default::default); } }; field_indexing = quote! { [#array_index] }; } else { array_pre_code = quote! {}; field_indexing = quote! {}; } let full_field = quote! { #field_name#field_indexing }; if field_sub { field_set = quote! { let #sub_field_key = #module::KeyInspector::get_sub_field_key(#field_str, #key)?; self.#full_field.parse_key_value(#source_ast, &#sub_field_key, #value)?; } } else { field_set = quote! { self.#full_field = #module::parse_field_default(#value)?; } } if field_array || field_sub { no_field_assert = quote! {}; } else { no_field_assert = quote! { #module::KeyInspector::assert_field_has_no_index(#field_str, #key)?; }; } if !field_attrs.ignore { matches.push(quote! { #field_str => { #array_pre_code #no_field_assert #field_set }}); } } quote! { fn parse_key_value(&mut self, #source_ast: &T, #key: &Key, #value: &IExpr) -> #module::Result<()> { match #key.name_of().as_ref() { #(#matches),*, field => { return #module::MglError::invalid_field(field, #module::InvalidFieldKind::NotFound) } } Ok(()) } } } fn field_str(field: &Field) -> String { field.ident.as_ref().map(Ident::to_string).unwrap_or(String::new()) } struct FieldAttributes { sub: bool, array: bool, ignore: bool, } fn field_attributes(field: &Field) -> FieldAttributes { let mut attributes = FieldAttributes { sub: false, array: false, ignore: false }; for attr in field.attrs.clone() { if attr.path.is_ident(&ident!(sub_resource)) { attributes.sub = true; }
else if attr.path.is_ident(&ident!(array_field)) { attributes.array = true; } else if attr.path.is_ident(&ident!(ignore_field)) { attributes.ignore = true; } } attributes }
function_block-function_prefix_line
[ { "content": "pub fn parse_key(mut tks: InnerTokens) -> Key {\n\n let name = tks.next().unwrap().as_str();\n\n let mut key = Key::name(name);\n\n\n\n while let Some(rule) = tks.peek().map(|p| p.as_rule()) {\n\n match rule {\n\n Rule::name => {\n\n let right = parse_key(tks);\n\n key = Key::dot(key, right);\n\n break;\n\n }\n\n\n\n Rule::key_indexing => {\n\n let index = parse_expression(tks.next().unwrap());\n\n key = Key::indexing(name, index);\n\n }\n\n\n\n _ => unreachable!()\n\n }\n\n }\n\n return key;\n\n}\n\n\n\n\n", "file_path": "src/parser/declarations.rs", "rank": 1, "score": 200269.35884344397 }, { "content": "pub fn module_add(module: &Module, addition: String) -> Module {\n\n let mut new_module = module.clone();\n\n new_module.push(addition);\n\n new_module\n\n}\n\n\n", "file_path": "src/resources/project.rs", "rank": 4, "score": 183009.87061059114 }, { "content": "pub fn full_name_for(kind_module: &str, name: &str, module: &Module) -> ResourceName {\n\n let mut names: Vec<_> = module.iter().map(String::as_ref).collect();\n\n names.push(name);\n\n names.insert(0, kind_module);\n\n ResourceName::new(&names)\n\n}\n\n\n", "file_path": "src/resources/project.rs", "rank": 6, "score": 178974.64043883662 }, { "content": "pub fn path_file_name(p: &Path) -> String {\n\n String::from(\n\n p.file_name()\n\n .unwrap_or(OsStr::new(\"\"))\n\n .to_str()\n\n .unwrap_or(\"\")\n\n )\n\n}\n", "file_path": "src/utility/files.rs", "rank": 7, "score": 178768.71448687528 }, { "content": "pub fn parse_key_value(tk: Tokens) -> KeyValue {\n\n let mut parts = tk.into_inner();\n\n let key = parse_key(parts.next().unwrap().into_inner());\n\n let value = parse_expression(parts.next().unwrap());\n\n KeyValue::new(key, value)\n\n}\n\n\n", "file_path": "src/parser/declarations.rs", "rank": 8, "score": 168868.1389743816 }, { "content": "fn build_resource_name(name: &ResourceName) -> String {\n\n match &name {\n\n &ResourceName::Name(name) => name.clone(),\n\n &ResourceName::InModule(m, n) => {\n\n format!(\"{}__{}\", m, build_resource_name(&*n))\n\n }\n\n }\n\n}\n\n\n\n\n", "file_path": "src/compiler/script.rs", "rank": 9, "score": 166799.23718534395 }, { "content": "pub fn path_string(p: &Path) -> String {\n\n String::from(p.to_str().unwrap_or(FILE_NAME_DEFAULT))\n\n}\n\n\n", "file_path": "src/utility/files.rs", "rank": 11, "score": 160792.60036156702 }, { "content": "fn parse_object_name(value: Key) -> Result<ResourceName> {\n\n match value.index_of().unwrap().as_ref() {\n\n Expression::Name(name) => {\n\n Ok(ResourceName::new(&[\"object\", &name]))\n\n }\n\n\n\n Expression::Resource(name) => {\n\n if name.top_module_is(\"object\") {\n\n Ok(name.clone())\n\n\n\n } else {\n\n Ok(ResourceName::InModule(String::from(\"object\"), box name.clone()))\n\n }\n\n }\n\n\n\n _ => Err(InvalidIndexType(String::from(\"resource name\")))\n\n }\n\n}\n\n\n", "file_path": "src/event/parse.rs", "rank": 12, "score": 160285.12246607992 }, { "content": "pub fn full_name(resource: &ResourceDeclaration, m: &Module) -> ResourceName {\n\n full_name_for(&resource.kind.module(), &resource.name, m)\n\n}\n", "file_path": "src/resources/project.rs", "rank": 13, "score": 151374.46032023738 }, { "content": "pub fn build_script(s: Script) -> String {\n\n let source = &s.source;\n\n\n\n let mut builder = StatementBuilder::new(true, 0);\n\n builder.argument_vars(&source.args);\n\n builder.build_statement(source.body.as_ref());\n\n\n\n format!(\"{}\", builder.result)\n\n}\n\n\n", "file_path": "src/compiler/script.rs", "rank": 14, "score": 
151106.2958577913 }, { "content": "fn interpret_pretty(matches: &ArgMatches) -> bool {\n\n match matches.value_of(\"pretty\").unwrap_or(\"yes\") {\n\n \"yes\" | \"true\" => true,\n\n \"no\" | \"false\" => false,\n\n _ => unreachable!()\n\n }\n\n}\n\n\n\n\n", "file_path": "src/command_line.rs", "rank": 15, "score": 139864.59659492158 }, { "content": "fn parse_key_code(value: Key) -> Result<KeyCode> {\n\n match value.index_of().unwrap().as_ref() {\n\n Expression::Str(name) => {\n\n if name.len() == 1 && name.chars().all(char::is_alphabetic) {\n\n Ok(KeyCode::Character(name.chars().next().unwrap()))\n\n\n\n } else {\n\n match name.as_ref() {\n\n \"no_key\" => Ok(KeyCode::NoKey),\n\n \"any_key\" => Ok(KeyCode::AnyKey),\n\n \"left\" => Ok(KeyCode::Left),\n\n \"right\" => Ok(KeyCode::Right),\n\n \"down\" => Ok(KeyCode::Down),\n\n \"up\" => Ok(KeyCode::Up),\n\n \"enter\" => Ok(KeyCode::Enter),\n\n \"escape\" => Ok(KeyCode::Escape),\n\n \"space\" => Ok(KeyCode::Space),\n\n \"shift\" => Ok(KeyCode::Shift),\n\n \"control\" => Ok(KeyCode::Control),\n\n \"alt\" => Ok(KeyCode::Alt),\n", "file_path": "src/event/parse.rs", "rank": 16, "score": 136676.15129131346 }, { "content": "struct ResourceKeyValues(Vec<KeyValue>);\n\n\n\nimpl ResourceAst for ResourceKeyValues {\n\n fn key_values(&self) -> &[KeyValue] {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl ResourceDefault<ResourceKeyValues> for ResourceTest {\n\n fn default(_: &ResourceKeyValues) -> Result<Self> {\n\n Ok(Default::default())\n\n }\n\n}\n\n\n", "file_path": "src/tests/resources/resource_trait.rs", "rank": 17, "score": 133458.30533874244 }, { "content": "fn name(s: &str) -> IExpr {\n\n IExpr::new(Expression::name(s))\n\n}\n\n\n\n\n", "file_path": "src/tests/parser/expressions.rs", "rank": 19, "score": 128041.26065809595 }, { "content": "fn parse_other_event(value: Key) -> Result<OtherEvent> {\n\n match value.index_of().unwrap().as_ref() {\n\n Expression::Str(name) => {\n\n match name.as_ref() {\n\n \"outside\" => Ok(OtherEvent::RoomOutside),\n\n \"boundary\" => Ok(OtherEvent::RoomBoundary),\n\n \"game_start\" => Ok(OtherEvent::GameStart),\n\n \"game_end\" => Ok(OtherEvent::GameEnd),\n\n \"room_start\" => Ok(OtherEvent::RoomStart),\n\n \"room_end\" => Ok(OtherEvent::RoomEnd),\n\n \"no_more_lives\" => Ok(OtherEvent::NoMoreLives),\n\n \"no_more_health\" => Ok(OtherEvent::NoMoreHealth),\n\n \"animation_end\" => Ok(OtherEvent::AnimationEnd),\n\n \"end_of_path\" => Ok(OtherEvent::EndOfPath),\n\n \"close_button\" => Ok(OtherEvent::CloseButton),\n\n \"user0\" => Ok(OtherEvent::User0),\n\n \"user1\" => Ok(OtherEvent::User1),\n\n \"user2\" => Ok(OtherEvent::User2),\n\n \"user3\" => Ok(OtherEvent::User3),\n\n \"user4\" => Ok(OtherEvent::User4),\n", "file_path": "src/event/parse.rs", "rank": 20, "score": 126678.06518369529 }, { "content": "pub fn parse_field_default<T: FromExpression>(expr: &IExpr) -> Result<T> {\n\n T::try_from(expr.clone())\n\n}\n\n\n\n\n\n// Helper traits and types\n\n\n", "file_path": "src/resources/resource_trait.rs", "rank": 21, "score": 126502.55032988732 }, { "content": "fn parse_alarm_kind(value: Key) -> Result<Alarm> {\n\n match value.index_of().unwrap().as_ref() {\n\n Expression::Num(n) => {\n\n match n.as_ref() {\n\n \"0\" => Ok(Alarm::Alarm0),\n\n \"1\" => Ok(Alarm::Alarm1),\n\n \"2\" => Ok(Alarm::Alarm2),\n\n \"3\" => Ok(Alarm::Alarm3),\n\n \"4\" => Ok(Alarm::Alarm4),\n\n \"5\" => Ok(Alarm::Alarm5),\n\n \"6\" => Ok(Alarm::Alarm6),\n\n \"7\" => Ok(Alarm::Alarm7),\n\n \"8\" => Ok(Alarm::Alarm8),\n\n \"9\" => Ok(Alarm::Alarm9),\n\n \"10\" 
=> Ok(Alarm::Alarm10),\n\n \"11\" => Ok(Alarm::Alarm11),\n\n _ => Err(UnknownAlarmKind)\n\n }\n\n }\n\n _ => Err(InvalidIndexType(String::from(\"number\")))\n\n }\n\n}\n\n\n", "file_path": "src/event/parse.rs", "rank": 22, "score": 124063.14414356192 }, { "content": "fn parse_event_value(expr: &IExpr) -> Result<ResourceName> {\n\n match expr.as_ref() {\n\n &Expression::Name(ref name) => {\n\n Ok(ResourceName::new(&[\"script\", &*name]))\n\n }\n\n\n\n &Expression::Resource(ref resource_name) => {\n\n if resource_name.top_module_is(\"script\") {\n\n Ok(resource_name.clone())\n\n } else {\n\n Ok(ResourceName::InModule(String::from(\"script\"), box resource_name.clone()))\n\n }\n\n }\n\n _ => {\n\n MglError::convert_expression(expr.clone(), \"ResourceName\")\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/resources/object.rs", "rank": 23, "score": 121647.28554762377 }, { "content": "fn parse_step_kind(value: Key) -> Result<StepKind> {\n\n match value.index_of().unwrap().as_ref() {\n\n Expression::Str(name) => {\n\n match name.as_ref() {\n\n \"normal\" => Ok(StepKind::Normal),\n\n \"begin\" => Ok(StepKind::Begin),\n\n \"end\" => Ok(StepKind::End),\n\n _ => Err(UnknownStepKind)\n\n }\n\n }\n\n _ => Err(InvalidIndexType(String::from(\"string\")))\n\n }\n\n}\n\n\n", "file_path": "src/event/parse.rs", "rank": 24, "score": 121597.98091845697 }, { "content": "fn parse_draw_kind(value: Key) -> Result<DrawKind> {\n\n match value.index_of().unwrap().as_ref() {\n\n Expression::Str(name) => {\n\n match name.as_ref() {\n\n \"begin\" => Ok(DrawKind::Begin),\n\n \"end\" => Ok(DrawKind::End),\n\n \"pre\" => Ok(DrawKind::Pre),\n\n \"post\" => Ok(DrawKind::Post),\n\n \"gui\" => Ok(DrawKind::Gui),\n\n \"gui_begin\" => Ok(DrawKind::GuiBegin),\n\n \"gui_end\" => Ok(DrawKind::GuiEnd),\n\n _ => Err(UnknownDrawKind)\n\n }\n\n }\n\n _ => Err(InvalidIndexType(String::from(\"string\")))\n\n }\n\n}\n\n\n", "file_path": "src/event/parse.rs", "rank": 25, "score": 121597.98091845697 }, { "content": "fn parse_mouse_action(value: Key) -> Result<MouseAction> {\n\n match value.index_of().unwrap().as_ref() {\n\n Expression::Str(name) => {\n\n match name.as_ref() {\n\n \"no_button\" => Ok(MouseAction::NoButton),\n\n \"left_button\" => Ok(MouseAction::LeftButton),\n\n \"right_button\" => Ok(MouseAction::RightButton),\n\n \"middle_button\" => Ok(MouseAction::MiddleButton),\n\n \"left_press\" => Ok(MouseAction::LeftPress),\n\n \"right_press\" => Ok(MouseAction::RightPress),\n\n \"middle_press\" => Ok(MouseAction::MiddlePress),\n\n \"left_release\" => Ok(MouseAction::LeftRelease),\n\n \"right_release\" => Ok(MouseAction::RightRelease),\n\n \"middle_release\" => Ok(MouseAction::MiddleRelease),\n\n \"mouse_enter\" => Ok(MouseAction::MouseEnter),\n\n \"mouse_leave\" => Ok(MouseAction::MouseLeave),\n\n \"mouse_wheel_up\" => Ok(MouseAction::MouseWheelUp),\n\n \"mouse_wheel_down\" => Ok(MouseAction::MouseWheelDown),\n\n \"global_left_button\" => Ok(MouseAction::GlobalLeftButton),\n\n \"global_right_button\" => Ok(MouseAction::GlobalRightButton),\n", "file_path": "src/event/parse.rs", "rank": 26, "score": 121597.98091845697 }, { "content": "pub fn parse_expression(top_tokens: Tokens) -> IExpr {\n\n let mut expression = None;\n\n\n\n for tk in top_tokens.clone().into_inner() {\n\n let tk_clone = tk.clone();\n\n\n\n expression = Some(IExpr::new(\n\n match tk.as_rule() {\n\n Rule::name => Expression::name(tk.as_str()),\n\n Rule::string => Expression::string(tk.as_str()),\n\n Rule::number => Expression::num(tk.as_str()),\n\n Rule::boolean => 
Expression::boolean(tk.as_str().parse().unwrap()),\n\n\n\n Rule::variable_name => parse_expression(tk).content(),\n\n Rule::parentheses => Expression::parentheses(parse_expression(tk)),\n\n\n\n Rule::resource => {\n\n let names: Vec<_> = tk.into_inner().map(|p| p.as_str()).collect();\n\n Expression::resource(&names)\n\n }\n", "file_path": "src/parser/expressions.rs", "rank": 27, "score": 116379.62003366035 }, { "content": "#[test]\n\nfn test_expression_bool_true() {\n\n assert_parse_expr(\"true\", Expression::boolean(true))\n\n}\n\n\n", "file_path": "src/tests/parser/expressions.rs", "rank": 28, "score": 112227.94785905059 }, { "content": "#[test]\n\nfn test_declaration_key_value() {\n\n assert_parse_declaration(\"x: 1\", KeyValue::new(key(\"x\"), expr(\"1\")));\n\n assert_parse_declaration(\"x.y: true\", KeyValue::new(key(\"x.y\"), expr(\"true\")));\n\n assert_parse_declaration(\"x[0]: \\\"k\\\"\", KeyValue::new(key(\"x[0]\"), expr(\"\\\"k\\\"\")));\n\n}\n\n\n\n\n", "file_path": "src/tests/parser/declarations.rs", "rank": 29, "score": 111882.25489803778 }, { "content": "#[test]\n\nfn test_declaration_key_name() {\n\n assert_parse_declaration(\"x\", Key::name(\"x\"))\n\n}\n\n\n", "file_path": "src/tests/parser/declarations.rs", "rank": 30, "score": 111813.04884138211 }, { "content": "pub fn fix_unary_precedence(ast: IExpr, op: UnaryOp, e: IExpr) -> IExpr {\n\n use Expression::*;\n\n\n\n match e.content_clone() {\n\n BinaryOp(bin_op, left, right) => {\n\n if bin_op.priority() <= op.priority() {\n\n ast.with_content(Expression::unary_op(op, e))\n\n\n\n } else {\n\n let new_left = ast.with_content(Expression::unary_op(op, left));\n\n e.with_content(Expression::binary_op(bin_op, new_left, right))\n\n }\n\n }\n\n\n\n TernaryOp(condition, left, right) => {\n\n let new_cond = ast.with_content(Expression::unary_op(op, condition));\n\n e.with_content(Expression::ternary_op(new_cond, left, right))\n\n }\n\n\n\n _ => {\n\n ast.with_content(Expression::unary_op(op, e))\n\n }\n\n }\n\n}\n\n\n\n\n", "file_path": "src/ast/precedence.rs", "rank": 31, "score": 111266.85930799597 }, { "content": "#[test]\n\nfn test_utility_files_path_string() {\n\n unsafe {\n\n let p1 = PathBuf::from(\"\");\n\n let p2 = PathBuf::from(\"a\");\n\n let p3 = PathBuf::from(from_utf8_unchecked(&[128u8]));\n\n\n\n assert_eq!(path_string(&p1), String::from(\"\"));\n\n assert_eq!(path_string(&p2), String::from(\"a\"));\n\n assert_eq!(path_string(&p3), String::from(FILE_NAME_DEFAULT));\n\n }\n\n}\n\n\n", "file_path": "src/tests/crate_utility/files.rs", "rank": 32, "score": 106617.97691747446 }, { "content": "fn interpret_input_argument(matches: &ArgMatches) -> Vec<PathBuf> {\n\n let mut input = Vec::new();\n\n\n\n if let Some(files) = matches.value_of(\"input\") {\n\n for file in files.split(\",\") {\n\n input.push(Path::new(file).to_path_buf());\n\n }\n\n }\n\n\n\n return input;\n\n}\n\n\n\n\n", "file_path": "src/command_line.rs", "rank": 33, "score": 106395.82884625823 }, { "content": "pub fn fix_binary_precedence(ast: IExpr, op: BinaryOp, left: IExpr, right: IExpr) -> IExpr {\n\n use Expression::*;\n\n\n\n match right.content_clone() {\n\n BinaryOp(right_op, right_left, right_right) => {\n\n if right_op.priority() < op.priority() {\n\n ast.with_content(Expression::binary_op(op, left, right))\n\n\n\n } else {\n\n let new_left = ast.with_content(Expression::binary_op(op, left, right_left));\n\n right.with_content(Expression::binary_op(right_op, new_left, right_right))\n\n }\n\n }\n\n\n\n TernaryOp(condition, right_left, right_right) 
=> {\n\n let new_cond = ast.with_content(Expression::binary_op(op, left, condition));\n\n right.with_content(Expression::ternary_op(new_cond, right_left, right_right))\n\n }\n\n\n\n _ => {\n\n ast.with_content(Expression::binary_op(op, left, right))\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/ast/precedence.rs", "rank": 34, "score": 106353.05854738856 }, { "content": "pub fn interpret_arguments() -> Command {\n\n let matches = generate_app().get_matches();\n\n\n\n Command {\n\n project_file: interpret_project_argument(&matches),\n\n files: interpret_input_argument(&matches),\n\n action: interpret_subcommand(&matches),\n\n }\n\n}\n\n\n\n\n", "file_path": "src/command_line.rs", "rank": 35, "score": 104530.70137083292 }, { "content": "fn build_indexing<T,U>(v: &T, op: Accessor, keys: &Vec<U>) -> String\n\n where T: AsRef<Expression>,\n\n U: AsRef<Expression> {\n\n\n\n let mut result = build_expression(v);\n\n\n\n result.push('[');\n\n result.push_str(op.as_str());\n\n result.push_str(&join_arguments(keys));\n\n result.push(']');\n\n\n\n result\n\n}\n\n\n\n\n", "file_path": "src/compiler/script.rs", "rank": 36, "score": 103563.39199107082 }, { "content": "fn script(source: &str) -> String {\n\n build_script(Script::global(function(source)))\n\n}\n\n\n", "file_path": "src/tests/compiler/script.rs", "rank": 37, "score": 100748.62923455605 }, { "content": "fn func(body: &str) -> String {\n\n format!(\"function f() {{{}\\n}}\", body)\n\n}\n\n\n", "file_path": "src/tests/compiler/script.rs", "rank": 38, "score": 100748.62923455605 }, { "content": "fn is_mgl_file(file: &PathBuf) -> bool {\n\n file.is_file() && file.extension().unwrap_or(OsStr::new(\"\")) == \"mgl\"\n\n}\n\n\n\nimpl AstFileTree {\n\n pub fn new_root() -> Self {\n\n AstFileTree::Root(\n\n box AstFileTree::Node(String::new(), Vec::new())\n\n )\n\n }\n\n\n\n pub fn from_project(project_file: Option<PathBuf>) -> TopResult<AstFileTree> {\n\n if let Some(project) = project_file {\n\n let mut source_directory = project.parent().unwrap().to_path_buf();\n\n source_directory.push(\"src\");\n\n Ok(AstFileTree::Root(box AstFileTree::from_directory(&source_directory)?))\n\n\n\n } else {\n\n Ok(AstFileTree::new_root())\n\n }\n", "file_path": "src/compiler/file_reader.rs", "rank": 39, "score": 97672.50715738551 }, { "content": "pub fn parse_while(tk: Tokens) -> Statement {\n\n let mut parts = tk.into_inner();\n\n let cond = parse_expression(parts.next().unwrap());\n\n let body = parse_statement(parts.next().unwrap());\n\n Statement::while_op(cond, body)\n\n}\n\n\n\n\n", "file_path": "src/parser/statements.rs", "rank": 40, "score": 96565.18852824293 }, { "content": "pub fn parse_if(tk: Tokens) -> Statement {\n\n let mut parts = tk.into_inner();\n\n let cond = parse_expression(parts.next().unwrap());\n\n let body = parse_statement(parts.next().unwrap());\n\n let or_else = parts.next().map(parse_statement);\n\n Statement::if_op(cond, body, or_else)\n\n}\n\n\n\n\n", "file_path": "src/parser/statements.rs", "rank": 41, "score": 96565.18852824293 }, { "content": "pub fn parse_for(tk: Tokens) -> Statement {\n\n let mut parts = tk.into_inner();\n\n let name = parts.next().unwrap().as_str();\n\n let range = parse_for_range(parts.next().unwrap());\n\n let body = parse_statement(parts.next().unwrap());\n\n Statement::for_op(name, range, body)\n\n}\n\n\n\n\n", "file_path": "src/parser/statements.rs", "rank": 42, "score": 96565.18852824293 }, { "content": "pub fn parse_with(tk: Tokens) -> Statement {\n\n let mut parts = tk.into_inner();\n\n let expr = 
parse_expression(parts.next().unwrap());\n\n let body = parse_statement(parts.next().unwrap());\n\n Statement::with(expr, body)\n\n}\n\n\n\n\n", "file_path": "src/parser/statements.rs", "rank": 43, "score": 96565.18852824293 }, { "content": "fn titlecase_rules(rules: &[Rule]) -> Vec<String> {\n\n rules.iter().map(|r| titlecase(&format!(\"{:?}\", r).replace(\"_\", \" \"))).collect()\n\n}\n\n\n", "file_path": "src/error/messages.rs", "rank": 44, "score": 96066.41216488642 }, { "content": "#[derive(Debug, PartialEq, Default, Resource)]\n\nstruct SubResourceTest {\n\n sub_field: u32,\n\n}\n\n\n", "file_path": "src/tests/resources/resource_trait.rs", "rank": 45, "score": 95658.45772232092 }, { "content": "pub fn parse_body(tk: Tokens) -> Statement {\n\n let mut body_statements = Vec::new();\n\n\n\n for statement_tk in tk.into_inner() {\n\n body_statements.push(parse_statement(statement_tk));\n\n }\n\n\n\n Statement::body(&body_statements)\n\n}\n\n\n\n\n", "file_path": "src/parser/statements.rs", "rank": 46, "score": 94754.05095791681 }, { "content": "pub fn parse_statement(tk: Tokens) -> IStat {\n\n let tk_clone = tk.clone();\n\n\n\n IStat::new(\n\n match tk.as_rule() {\n\n Rule::return_statement => {\n\n Statement::return_op(parse_expression(tk))\n\n }\n\n\n\n Rule::statement_call => {\n\n Statement::call(parse_expression(tk))\n\n }\n\n\n\n Rule::body => {\n\n parse_body(tk)\n\n }\n\n\n\n Rule::with_statement => {\n\n parse_with(tk)\n\n }\n", "file_path": "src/parser/statements.rs", "rank": 47, "score": 94754.05095791681 }, { "content": "pub fn parse_for_range(tk: Tokens) -> ForRange {\n\n let mut parts = tk.into_inner();\n\n let start = parse_expression(parts.next().unwrap());\n\n\n\n if let Some(end) = parts.next().map(parse_expression) {\n\n let by = parts.next().map(parse_expression);\n\n ForRange::integer(start, end, by)\n\n\n\n } else {\n\n ForRange::array(start)\n\n }\n\n}\n\n\n\n\n", "file_path": "src/parser/statements.rs", "rank": 48, "score": 94754.05095791681 }, { "content": "pub fn parse_assignment(tk: Tokens) -> Statement {\n\n let mut parts = tk.into_inner();\n\n let left = parse_expression(parts.next().unwrap());\n\n let right = parse_expression(parts.next().unwrap());\n\n Statement::assignment(left, right)\n\n}\n\n\n\n\n", "file_path": "src/parser/statements.rs", "rank": 49, "score": 94754.05095791681 }, { "content": "pub fn parse_var(tk: Tokens) -> Statement {\n\n let mut vars = Vec::new();\n\n\n\n for var in tk.into_inner() {\n\n match var.as_rule() {\n\n Rule::name => vars.push(VarDeclaration::name(var.as_str())),\n\n Rule::var_assignment => vars.push(parse_var_assignment(var)),\n\n _ => unreachable!()\n\n }\n\n }\n\n\n\n Statement::var(&vars)\n\n}\n\n\n\n\n", "file_path": "src/parser/statements.rs", "rank": 50, "score": 94754.05095791681 }, { "content": "fn build_for_range(var: &str, f: &ForRange) -> String {\n\n match f {\n\n ForRange::Integer(from, to, Some(by)) => {\n\n let from = build_expression(from);\n\n let to = build_expression(to);\n\n let by = build_expression(by);\n\n format!(\"var {v} = {}; {v} != {}; {v} += {}\", from, to, by, v=var)\n\n }\n\n _ => unreachable!()\n\n }\n\n}\n\n\n\n\n\nimpl StatementBuilder {\n\n fn new(root: bool, indentation: usize) -> Self {\n\n StatementBuilder {\n\n root,\n\n result: String::new(),\n\n indentation\n\n }\n", "file_path": "src/compiler/script.rs", "rank": 51, "score": 93583.65798322455 }, { "content": "pub fn parse_function(tk: Tokens) -> FunctionDeclaration {\n\n let mut parts = tk.into_inner();\n\n let name = 
parts.next().unwrap().as_str();\n\n let args = parts.next().unwrap().into_inner().map(|p| p.as_str()).collect::<Vec<_>>();\n\n let body = parts.next().unwrap();\n\n\n\n FunctionDeclaration::new(name, &args, parse_statement(body))\n\n}\n\n\n\n\n", "file_path": "src/parser/declarations.rs", "rank": 52, "score": 93049.69587035535 }, { "content": "pub fn parse_instance(tk: Tokens) -> InstanceDeclaration {\n\n let mut parts = tk.into_inner();\n\n let name = parts.next().unwrap().as_str();\n\n let object = parse_expression(parts.next().unwrap());\n\n let keyvals = parts.map(parse_key_value).collect::<Vec<_>>();\n\n\n\n InstanceDeclaration::new(object, name, &keyvals)\n\n}\n\n\n\n\n", "file_path": "src/parser/declarations.rs", "rank": 53, "score": 93049.69587035535 }, { "content": "pub fn parse_var_assignment(tk: Tokens) -> VarDeclaration {\n\n let mut parts = tk.into_inner();\n\n let left = parts.next().unwrap().as_str();\n\n let right = parse_expression(parts.next().unwrap());\n\n VarDeclaration::assignment(left, right)\n\n}\n\n\n\n\n", "file_path": "src/parser/statements.rs", "rank": 54, "score": 91442.95003753944 }, { "content": "pub fn parse_declaration(tk: Tokens) -> Option<Declaration> {\n\n match tk.as_rule() {\n\n Rule::function_declaration => {\n\n Some(Declaration::Function(parse_function(tk)))\n\n }\n\n\n\n Rule::instance_declaration => {\n\n Some(Declaration::Instance(parse_instance(tk)))\n\n }\n\n\n\n Rule::object_declaration => {\n\n Some(Declaration::Resource(parse_resource(tk, Object)))\n\n }\n\n\n\n Rule::wrapper_declaration => {\n\n Some(Declaration::Resource(parse_resource(tk, Wrapper)))\n\n }\n\n\n\n Rule::room_declaration => {\n\n Some(Declaration::Resource(parse_resource(tk, Room)))\n", "file_path": "src/parser/declarations.rs", "rank": 55, "score": 90625.41079356635 }, { "content": "fn item<T>(name: ResourceName,res: Result<T>) -> Item<T> {\n\n Item::File(name, res.unwrap())\n\n}\n\n\n", "file_path": "src/tests/resources/project.rs", "rank": 56, "score": 90431.46132971981 }, { "content": "pub fn code<'a>(c: &'a str) -> ParserContext<'a> {\n\n ParserContext {\n\n code: c,\n\n file: SourceFile::None,\n\n verbose_errors: true,\n\n }\n\n}\n\n\n\nimpl<'a> ParserContext<'a> {\n\n pub fn new(c: &'a str) -> Self {\n\n ParserContext {\n\n code: c,\n\n file: SourceFile::None,\n\n verbose_errors: true,\n\n }\n\n }\n\n\n\n pub fn with_file(self, file: SourceFile) -> Self {\n\n ParserContext {\n\n code: self.code,\n", "file_path": "src/parser/context.rs", "rank": 57, "score": 90136.19703324295 }, { "content": "pub trait ResourceAst {\n\n fn key_values(&self) -> &[KeyValue];\n\n}\n\n\n\nimpl ResourceAst for InstanceDeclaration {\n\n fn key_values(&self) -> &[KeyValue] {\n\n &self.key_value_pairs\n\n }\n\n}\n\n\n\nimpl ResourceAst for ResourceDeclaration {\n\n fn key_values(&self) -> &[KeyValue] {\n\n &self.key_value_pairs\n\n }\n\n}\n\n\n\nimpl ResourceAst for (ResourceDeclaration, InstanceItems) {\n\n fn key_values(&self) -> &[KeyValue] {\n\n &self.0.key_value_pairs\n\n }\n\n}\n\n\n\n\n", "file_path": "src/resources/resource_trait.rs", "rank": 58, "score": 89810.38888142147 }, { "content": "fn build_expression<T: AsRef<Expression>>(e: &T) -> String {\n\n use crate::ast::BinaryOp::Dot;\n\n use Expression::*;\n\n let ex = build_expression;\n\n\n\n match &e.as_ref() {\n\n &Str(string) => format!(\"\\\"{}\\\"\", string),\n\n &Num(number) => number.clone(),\n\n &Bool(true) => String::from(\"true\"),\n\n &Bool(false) => String::from(\"false\"),\n\n &Name(name) => name.clone(),\n\n 
&Resource(name) => build_resource_name(&name),\n\n &Parentheses(e) => format!(\"({})\", build_expression(&e)),\n\n &UnaryOp(op, e) => format!(\"{}{}\", op.as_str(), build_expression(&e)),\n\n &BinaryOp(Dot, a, b) => format!(\"{}.{}\", ex(&a), ex(&b)),\n\n &BinaryOp(op, a, b) => format!(\"{} {} {}\", ex(&a), op.as_str(), ex(&b)),\n\n &Call(f, ref args) => build_call(&f, args),\n\n &Indexing(v, a, k) => build_indexing(&v, *a, &k),\n\n &TernaryOp(_,_,_) => unreachable!()\n\n }\n\n}\n\n\n", "file_path": "src/compiler/script.rs", "rank": 59, "score": 89689.93787124727 }, { "content": "pub fn parser_error(c: ParserContext, e: Error) -> MglError {\n\n use Rule::*;\n\n use ParserErrorKind::*;\n\n\n\n if let ErrorVariant::ParsingError {positives, ..} = e.variant.clone() {\n\n if let LineColLocation::Pos((line, column)) = e.line_col.clone() {\n\n println!(\"{}\", line);\n\n let text = c.code.split(\"\\n\").nth(line-1).unwrap().clone();\n\n\n\n let err = |kind: ParserErrorKind| {\n\n MglError::Parser {\n\n error_kind: kind,\n\n verbose: c.verbose_errors,\n\n text: String::from(text),\n\n line,\n\n column,\n\n rules: positives.clone()\n\n }\n\n };\n\n\n", "file_path": "src/parser/error.rs", "rank": 60, "score": 86892.99164068271 }, { "content": "fn join_arguments<T: AsRef<Expression>>(v: &Vec<T>) -> String {\n\n v.iter().map(|e| build_expression(e.as_ref())).collect::<Vec<_>>().join(\", \")\n\n}\n\n\n", "file_path": "src/compiler/script.rs", "rank": 61, "score": 86147.12728453796 }, { "content": "pub fn read_file_as_ast(path: &PathBuf) -> Result<Top> {\n\n match read_to_string(path) {\n\n Ok(mgl) => {\n\n let source = SourceFile::new(path.clone());\n\n parse_code(ParserContext::new(&*mgl).with_file(source))\n\n }\n\n\n\n Err(e) => {\n\n eprintln!(\"An error has occured while trying to read '{}': {}\\n\", path_string(&path), e);\n\n parse_code(\"\")\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/compiler/file_reader.rs", "rank": 62, "score": 86066.27712435287 }, { "content": "fn interpret_subcommand(matches: &ArgMatches) -> Action {\n\n match matches.subcommand() {\n\n (\"compile\", _) => Action::Compile,\n\n (\"project\", m) => Action::Project(interpret_pretty(&m.unwrap())),\n\n (\"ast\", m) => Action::ShowAst(interpret_pretty(&m.unwrap())),\n\n (\"scripts\", _) => Action::Scripts,\n\n _ => unreachable!()\n\n }\n\n}\n\n\n", "file_path": "src/command_line.rs", "rank": 63, "score": 84057.20159076189 }, { "content": "pub fn parse_resource(tk: Tokens, kind: ResourceKind) -> ResourceDeclaration {\n\n let mut methods = Vec::new();\n\n let mut keyvalues = Vec::new();\n\n let mut instances = Vec::new();\n\n\n\n let mut parts = tk.into_inner();\n\n let name = parts.next().unwrap().as_str();\n\n\n\n for item in parts {\n\n match item.as_rule() {\n\n Rule::function_declaration => methods.push(parse_function(item)),\n\n Rule::instance_declaration => instances.push(parse_instance(item)),\n\n Rule::key_value => keyvalues.push(parse_key_value(item)),\n\n\n\n _ => unreachable!()\n\n }\n\n }\n\n\n\n ResourceDeclaration::new(kind, name, &keyvalues, &methods, &instances)\n\n}\n\n\n", "file_path": "src/parser/declarations.rs", "rank": 64, "score": 83940.60380428513 }, { "content": "#[test]\n\nfn test_statement_return() {\n\n assert_parse_statement!(\"return a + b\", Statement::return_op(expr(\"a + b\")))\n\n}\n\n\n", "file_path": "src/tests/parser/statements.rs", "rank": 65, "score": 82616.90555823703 }, { "content": "#[test]\n\nfn test_expression_string() {\n\n assert_parse_expr(\"\\\"hello world\\\"\", 
Expression::string(\"hello world\"))\n\n}\n\n\n", "file_path": "src/tests/parser/expressions.rs", "rank": 66, "score": 82403.42841240187 }, { "content": "#[test]\n\nfn test_expression_name_1() {\n\n assert_parse_expr(\"x\", Expression::name(\"x\"))\n\n}\n\n\n", "file_path": "src/tests/parser/expressions.rs", "rank": 67, "score": 82403.42841240187 }, { "content": "#[test]\n\nfn test_expression_name_2() {\n\n assert_parse_expr(\"foo_bar123\", Expression::name(\"foo_bar123\"))\n\n}\n\n\n", "file_path": "src/tests/parser/expressions.rs", "rank": 68, "score": 82403.42841240187 }, { "content": "pub fn parse_code<'a, C: Ctx<'a>>(c: C) -> Result<Top> {\n\n let top_expressions = parse_top(c)?;\n\n let mut declarations = Vec::new();\n\n\n\n for top_expression in top_expressions {\n\n if let Some(declaration) = parse_declaration(top_expression) {\n\n declarations.push(declaration);\n\n }\n\n }\n\n\n\n Ok(Top::new(&declarations))\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 69, "score": 81841.95965834231 }, { "content": "fn build_call<T,U>(caller: &T, args: &Vec<U>) -> String\n\n where T: AsRef<Expression>,\n\n U: AsRef<Expression> {\n\n\n\n let mut result = build_expression(caller);\n\n\n\n result.push('(');\n\n result.push_str(&join_arguments(args));\n\n result.push(')');\n\n\n\n result\n\n}\n\n\n", "file_path": "src/compiler/script.rs", "rank": 70, "score": 81459.90289238466 }, { "content": "pub fn valid_paths(entries: ReadDir) -> impl Iterator<Item = PathBuf> {\n\n entries.filter_map(Result::ok).map(|entry| entry.path())\n\n}\n\n\n", "file_path": "src/utility/files.rs", "rank": 71, "score": 80702.1581290309 }, { "content": "#[test]\n\nfn test_compiler_script_return() {\n\n assert_script_eq(\"function f() {return 0\\n}\", \"return 0;\");\n\n}\n\n\n", "file_path": "src/tests/compiler/script.rs", "rank": 72, "score": 80667.88126007849 }, { "content": "#[test]\n\nfn test_declaration_resource_module() {\n\n let resource_module = |code: &str| {\n\n let ResourceDeclaration { kind, .. 
} = code.parse::<ResourceDeclaration>().unwrap();\n\n kind.module()\n\n };\n\n\n\n assert_eq!(resource_module(\"object a {}\"), \"object\");\n\n assert_eq!(resource_module(\"wrapper a {}\"), \"wrapper\");\n\n assert_eq!(resource_module(\"sprite a {}\"), \"sprite\");\n\n assert_eq!(resource_module(\"sound a {}\"), \"sound\");\n\n assert_eq!(resource_module(\"room a {}\"), \"room\");\n\n}\n\n\n", "file_path": "src/tests/parser/declarations.rs", "rank": 73, "score": 80639.88529662053 }, { "content": "#[test]\n\nfn test_expression_bool_false() {\n\n assert_parse_expr(\"false\", Expression::boolean(false))\n\n}\n\n\n", "file_path": "src/tests/parser/expressions.rs", "rank": 74, "score": 80626.15810409213 }, { "content": "#[test]\n\nfn test_ast_convert_bool() {\n\n let ok = bool::try_from(expr(\"true\"));\n\n let err = bool::try_from(expr(\"0\"));\n\n\n\n assert_eq!(ok, Ok(true));\n\n assert_eq!(err, MglError::convert_expression(expr(\"0\"), \"bool\"));\n\n}\n\n\n", "file_path": "src/tests/ast/convert.rs", "rank": 75, "score": 80626.15810409213 }, { "content": "#[test]\n\nfn test_resources_object_fields() {\n\n let o = Object::new(resource(r#\"\n\n object o {\n\n sprite: s\n\n persistent: true\n\n }\n\n \"#)).unwrap();\n\n\n\n assert_eq!(o.sprite, Some(ResourceName::new(&[\"s\"])));\n\n assert_eq!(o.persistent, true);\n\n\n\n let e1 = Object::new(resource(\"object e { k: 1\\n }\"));\n\n let e2 = Object::new(resource(\"object e { events: 1\\n }\"));\n\n assert_eq!(e1, MglError::invalid_field(\"k\", InvalidFieldKind::NotFound));\n\n assert_eq!(e2, MglError::invalid_field(\"events\", InvalidFieldKind::NotFound));\n\n}\n\n\n\n\n", "file_path": "src/tests/resources/object.rs", "rank": 76, "score": 80598.81511774044 }, { "content": "#[test]\n\nfn test_ast_operator_names() {\n\n assert_operator_name!(BinaryOp, \".\");\n\n assert_operator_name!(BinaryOp, \"+\");\n\n assert_operator_name!(BinaryOp, \"-\");\n\n assert_operator_name!(BinaryOp, \"*\");\n\n assert_operator_name!(BinaryOp, \"/\");\n\n assert_operator_name!(BinaryOp, \"||\");\n\n assert_operator_name!(BinaryOp, \"&&\");\n\n assert_operator_name!(BinaryOp, \"<\");\n\n assert_operator_name!(BinaryOp, \">\");\n\n assert_operator_name!(BinaryOp, \">=\");\n\n assert_operator_name!(BinaryOp, \"<=\");\n\n assert_operator_name!(BinaryOp, \"==\");\n\n assert_operator_name!(BinaryOp, \"!=\");\n\n assert_operator_name!(UnaryOp, \"-\");\n\n assert_operator_name!(UnaryOp, \"!\");\n\n assert_operator_name!(Accessor, \"\");\n\n assert_operator_name!(Accessor, \"|\");\n\n assert_operator_name!(Accessor, \"?\");\n\n assert_operator_name!(Accessor, \"#\");\n\n assert_operator_name!(Accessor, \"@\");\n\n}\n", "file_path": "src/tests/ast/operators.rs", "rank": 77, "score": 80461.09157620832 }, { "content": "#[test]\n\nfn test_ast_convert_string() {\n\n let ok = String::try_from(expr(\"\\\"abc\\\"\"));\n\n let err = String::try_from(expr(\"0\"));\n\n\n\n assert_eq!(ok, Ok(String::from(\"abc\")));\n\n assert_eq!(err, MglError::convert_expression(expr(\"0\"), \"String\"));\n\n}\n\n\n", "file_path": "src/tests/ast/convert.rs", "rank": 78, "score": 80461.09157620832 }, { "content": "#[test]\n\nfn test_declaration_key_predicate() {\n\n assert!(key(\"x\").is_name());\n\n assert!(key(\"x.y\").is_dot());\n\n assert!(key(\"x[0]\").is_indexing());\n\n\n\n assert!(!key(\"x\").is_dot());\n\n assert!(!key(\"x\").is_indexing());\n\n\n\n assert!(!key(\"x.y\").is_name());\n\n assert!(!key(\"x.y\").is_indexing());\n\n\n\n assert!(!key(\"x[0]\").is_name());\n\n 
assert!(!key(\"x[0]\").is_dot());\n\n}\n\n\n", "file_path": "src/tests/parser/declarations.rs", "rank": 79, "score": 80433.17621102094 }, { "content": "#[test]\n\nfn test_declaration_key_methods() {\n\n assert_eq!(key(\"x\").name_of(), String::from(\"x\"));\n\n assert_eq!(key(\"x[0]\").name_of(), String::from(\"x\"));\n\n assert_eq!(key(\"x.y\").name_of(), String::from(\"x\"));\n\n\n\n assert_eq!(key(\"x\").index_of(), None);\n\n assert_eq!(key(\"x[0]\").index_of(), Some(&expr(\"0\")));\n\n\n\n assert_eq!(key(\"x\").leftmost_index_of(), None);\n\n assert_eq!(key(\"x.y\").leftmost_index_of(), None);\n\n assert_eq!(key(\"x[0]\").leftmost_index_of(), Some(&expr(\"0\")));\n\n assert_eq!(key(\"x[0].y\").leftmost_index_of(), Some(&expr(\"0\")));\n\n\n\n assert_eq!(key(\"x\").left_of(), None);\n\n assert_eq!(key(\"x.y\").left_of(), Some(&key(\"x\")));\n\n}\n\n\n", "file_path": "src/tests/parser/declarations.rs", "rank": 80, "score": 80433.17621102094 }, { "content": "#[test]\n\nfn test_declaration_key_dot() {\n\n let x = Key::name(\"x\");\n\n let y = Key::name(\"y\");\n\n let z = Key::name(\"z\");\n\n assert_parse_declaration(\"x.y\", Key::dot(x.clone(), y.clone()));\n\n assert_parse_declaration(\"x.y.z\", Key::dot(x.clone(), Key::dot(y.clone(), z.clone())));\n\n}\n\n\n", "file_path": "src/tests/parser/declarations.rs", "rank": 81, "score": 80433.17621102094 }, { "content": "#[test]\n\nfn test_declaration_key_mixed() {\n\n let x = Key::name(\"x\");\n\n let y = Key::name(\"y\");\n\n assert_parse_declaration(\"x[0].y\", Key::dot(Key::indexing(\"x\", expr(\"0\")), y.clone()));\n\n assert_parse_declaration(\"x.y[0]\", Key::dot(x.clone(), Key::indexing(\"y\", expr(\"0\"))));\n\n}\n\n\n", "file_path": "src/tests/parser/declarations.rs", "rank": 82, "score": 80433.17621102094 }, { "content": "#[test]\n\nfn test_declaration_key_index() {\n\n assert_parse_declaration(\"x[0]\", Key::indexing(\"x\", expr(\"0\")));\n\n}\n\n\n", "file_path": "src/tests/parser/declarations.rs", "rank": 83, "score": 80433.17621102094 }, { "content": "pub fn read_resource_tree(project_file: Option<PathBuf>) -> TopResult<Project> {\n\n let file_tree = AstFileTree::from_project(project_file)?;\n\n Project::from_ast_file_tree(file_tree, Module::new())\n\n}\n\n\n", "file_path": "src/compiler/resource_tree.rs", "rank": 84, "score": 80066.67637607513 }, { "content": "pub fn parse_unwrap<T: FromStr<Err=MglError>>(code: &str) -> T {\n\n code.parse().unwrap()\n\n}\n\n\n\nmacro parse_unwrap_aliases($($func: ident -> $T: ty;)*) {\n\n $(\n\n pub fn $func(code: &str) -> $T {\n\n parse_unwrap(code)\n\n }\n\n )*\n\n}\n\n\n\nparse_unwrap_aliases! 
{\n\n expr -> IExpr;\n\n statement -> IStat;\n\n declaration -> Declaration;\n\n key -> Key;\n\n function -> FunctionDeclaration;\n\n instance -> InstanceDeclaration;\n\n resource -> ResourceDeclaration;\n", "file_path": "src/tests/utility.rs", "rank": 85, "score": 79029.19283612676 }, { "content": "pub fn parse_top<'a, C: Ctx<'a>>(c: C) -> Result<InnerTokens<'a>> {\n\n Ok(parse_mgl(Rule::top, c.into())?.into_inner())\n\n}\n\n\n", "file_path": "src/parser/grammar.rs", "rank": 86, "score": 78973.78525822095 }, { "content": "#[test]\n\nfn test_ast_convert_resource_name() {\n\n let ok1 = ResourceName::try_from(expr(\"name\"));\n\n let ok2 = ResourceName::try_from(expr(\"name::sub\"));\n\n let err = ResourceName::try_from(expr(\"0\"));\n\n\n\n assert_eq!(ok1, Ok(ResourceName::new(&[\"name\"])));\n\n assert_eq!(ok2, Ok(ResourceName::new(&[\"name\", \"sub\"])));\n\n assert_eq!(err, MglError::convert_expression(expr(\"0\"), \"ResourceName\"));\n\n}\n\n\n", "file_path": "src/tests/ast/convert.rs", "rank": 87, "score": 78636.75103238002 }, { "content": "#[test]\n\nfn test_parser_error_missing_name() {\n\n test_parser_error(\"object {}\", MissingName);\n\n}\n\n\n", "file_path": "src/tests/parser/error.rs", "rank": 88, "score": 78636.75103238002 }, { "content": "fn interpret_project_argument(matches: &ArgMatches) -> Option<PathBuf> {\n\n if matches.is_present(\"no-project\") {\n\n return None\n\n }\n\n\n\n match matches.value_of(\"project-file\") {\n\n None => find_project_file(),\n\n\n\n Some(file) => {\n\n let path = Path::new(file).to_path_buf();\n\n\n\n if path.exists() {\n\n Some(path)\n\n\n\n } else {\n\n eprintln!(\"NOTE: The given project file was not found, looking for one...\\n\");\n\n find_project_file()\n\n }\n\n }\n\n }\n\n}\n\n\n\n\n", "file_path": "src/command_line.rs", "rank": 89, "score": 78326.41660585314 }, { "content": "#[test]\n\nfn test_utility_files_valid_paths() {\n\n let mut counter = 0;\n\n for _path in valid_paths(read_dir(\"examples/project\").unwrap()) {\n\n counter += 1;\n\n }\n\n assert_eq!(counter, 2);\n\n}\n", "file_path": "src/tests/crate_utility/files.rs", "rank": 90, "score": 77088.22366137269 }, { "content": "#[test]\n\nfn test_ast_convert_option_resource_name() {\n\n let ok = <Option<ResourceName>>::try_from(expr(\"name\"));\n\n let err = <Option<ResourceName>>::try_from(expr(\"0\"));\n\n\n\n assert_eq!(ok, Ok(Some(ResourceName::new(&[\"name\"]))));\n\n assert_eq!(err, MglError::convert_expression(expr(\"0\"), \"ResourceName\"));\n\n}\n\n\n", "file_path": "src/tests/ast/convert.rs", "rank": 91, "score": 76919.97140268108 }, { "content": "pub fn parse_mgl<'a, C: Ctx<'a>>(rule: Rule, c: C) -> Result<Tokens<'a>> {\n\n let c = c.into();\n\n match MglParser::parse(rule, &c.code.clone()) {\n\n Ok(pairs) => {\n\n for pair in pairs {\n\n if pair.as_rule() == rule {\n\n return Ok(Tokens::new(pair, c.file));\n\n }\n\n }\n\n unreachable!()\n\n }\n\n\n\n Err(err) => Err(parser_error(c, err))\n\n }\n\n}\n", "file_path": "src/parser/grammar.rs", "rank": 92, "score": 75201.40989479945 }, { "content": "pub trait Resource<T: ResourceAst>: Sized {\n\n fn parse_key_value(&mut self, source: &T, key: &Key, value: &IExpr) -> Result<()>;\n\n}\n\n\n", "file_path": "src/resources/resource_trait.rs", "rank": 93, "score": 74792.06860448045 }, { "content": "pub trait ResourceDefault<T: ResourceAst>: Sized {\n\n fn default(source: &T) -> Result<Self>;\n\n}\n\n\n\n// Helper functions\n\n\n", "file_path": "src/resources/resource_trait.rs", "rank": 94, "score": 73958.18983523695 }, { "content": 
"pub trait FromExpression = TryFrom<IExpr, Error=MglError>;\n\n\n", "file_path": "src/resources/resource_trait.rs", "rank": 95, "score": 70894.3716005548 }, { "content": "struct StatementBuilder {\n\n root: bool,\n\n result: String,\n\n indentation: usize,\n\n}\n\n\n", "file_path": "src/compiler/script.rs", "rank": 96, "score": 67328.73853023673 }, { "content": "pub trait ResourceCreate<T: ResourceAst>: Sized + ResourceDefault<T> {\n\n fn new(source: T) -> Result<Self>;\n\n}\n\n\n\nimpl<S: ResourceDefault<T> + Resource<T>, T: ResourceAst> ResourceCreate<T> for S {\n\n fn new(source: T) -> Result<Self> {\n\n let mut resource = Self::default(&source)?;\n\n for KeyValue { key, value } in source.key_values() {\n\n resource.parse_key_value(&source, key, value)?;\n\n }\n\n Ok(resource)\n\n }\n\n}\n\n\n\n\n\n\n\npub struct KeyInspector;\n\n\n\nimpl KeyInspector {\n\n pub fn assert_field_has_no_index(field: &str, key: &Key) -> Result<()> {\n", "file_path": "src/resources/resource_trait.rs", "rank": 97, "score": 66094.84769090245 }, { "content": "#[derive(Debug, PartialEq, Default, Resource)]\n\nstruct ResourceTest {\n\n normal_field_1: i64,\n\n normal_field_2: String,\n\n\n\n #[array_field]\n\n array_field: Vec<u64>,\n\n\n\n #[sub_resource]\n\n sub_resource: SubResourceTest,\n\n\n\n #[array_field]\n\n #[sub_resource]\n\n sub_array_field: Vec<SubResourceTest>,\n\n\n\n #[ignore_field]\n\n other: i32,\n\n}\n\n\n", "file_path": "src/tests/resources/resource_trait.rs", "rank": 98, "score": 65076.81354165047 }, { "content": "pub trait Operator {\n\n fn priority(self) -> i64;\n\n fn from_str(s: &str) -> Self;\n\n fn as_str(&self) -> &'static str;\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\npub enum BinaryOp {\n\n Dot,\n\n Add,\n\n Sub,\n\n Mul,\n\n Div,\n\n Or,\n\n And,\n\n Lt,\n\n Gt,\n\n Geq,\n\n Leq,\n\n Eq,\n", "file_path": "src/ast/operators.rs", "rank": 99, "score": 62371.01277153505 } ]
Rust
sdks/rust/src/sdk.rs
xxtanisxx/agones
3f00aa67518a8f3e43b02415c228808f1511d3cb
use std::{env, time::Duration};
use tonic::transport::Channel;

mod api {
    tonic::include_proto!("agones.dev.sdk");
}

use api::sdk_client::SdkClient;
pub use api::GameServer;

pub type WatchStream = tonic::Streaming<GameServer>;

use crate::{alpha::Alpha, errors::Result};

#[inline]
fn empty() -> api::Empty {
    api::Empty {}
}

#[derive(Clone)]
pub struct Sdk {
    client: SdkClient<Channel>,
    alpha: Alpha,
}

impl Sdk {
    pub async fn new(port: Option<u16>, keep_alive: Option<Duration>) -> Result<Self> {
        let addr: http::Uri = format!(
            "http://localhost:{}",
            port.unwrap_or_else(|| {
                env::var("AGONES_SDK_GRPC_PORT")
                    .ok()
                    .and_then(|s| s.parse().ok())
                    .unwrap_or(9357)
            })
        )
        .parse()?;

        let builder = tonic::transport::channel::Channel::builder(addr)
            .connect_timeout(Duration::from_secs(30))
            .keep_alive_timeout(keep_alive.unwrap_or_else(|| Duration::from_secs(30)));

        let channel = builder.connect_lazy()?;

        let mut client = SdkClient::new(channel.clone());
        let alpha = Alpha::new(channel);

        tokio::time::timeout(Duration::from_secs(30), async {
            let mut connect_interval = tokio::time::interval(Duration::from_millis(100));

            loop {
                connect_interval.tick().await;

                if client.get_game_server(empty()).await.is_ok() {
                    break;
                }
            }
        })
        .await?;

        Ok(Self { client, alpha })
    }

    #[inline]
    pub fn alpha(&self) -> &Alpha {
        &self.alpha
    }

    pub async fn ready(&mut self) -> Result<()> {
        Ok(self.client.ready(empty()).await.map(|_| ())?)
    }

    pub async fn allocate(&mut self) -> Result<()> {
        Ok(self.client.allocate(empty()).await.map(|_| ())?)
    }

    pub async fn shutdown(&mut self) -> Result<()> {
        Ok(self.client.shutdown(empty()).await.map(|_| ())?)
    }

    pub fn health_check(&self) -> tokio::sync::mpsc::Sender<()> {
        let mut health_client = self.clone();
        let (tx, mut rx) = tokio::sync::mpsc::channel(10);

        tokio::task::spawn(async move {
            let health_stream = async_stream::stream! {
                while rx.recv().await.is_some() {
                    yield empty();
                }
            };

            let _ = health_client.client.health(health_stream).await;
        });

        tx
    }

    pub async fn set_label(
        &mut self,
        key: impl Into<String>,
        value: impl Into<String>,
    ) -> Result<()> {
        Ok(self
            .client
            .set_label(api::KeyValue {
                key: key.into(),
                value: value.into(),
            })
            .await
            .map(|_| ())?)
    }

    pub async fn set_annotation(
        &mut self,
        key: impl Into<String>,
        value: impl Into<String>,
    ) -> Result<()> {
        Ok(self
            .client
            .set_annotation(api::KeyValue {
                key: key.into(),
                value: value.into(),
            })
            .await
            .map(|_| ())?)
    }

    pub async fn get_gameserver(&mut self) -> Result<GameServer> {
        Ok(self
            .client
            .get_game_server(empty())
            .await
            .map(|res| res.into_inner())?)
    }

    pub async fn reserve(&mut self, duration: Duration) -> Result<()> {
        Ok(self
            .client
            .reserve(api::Duration {
                seconds: std::cmp::max(duration.as_secs() as i64, 1),
            })
            .await
            .map(|_| ())?)
    }

    pub async fn watch_gameserver(&mut self) -> Result<WatchStream> {
        Ok(self
            .client
            .watch_game_server(empty())
            .await
            .map(|stream| stream.into_inner())?)
    }
}
use std::{env, time::Duration};
use tonic::transport::Channel;

mod api {
    tonic::include_proto!("agones.dev.sdk");
}

use api::sdk_client::SdkClient;
pub use api::GameServer;

pub type WatchStream = tonic::Streaming<GameServer>;

use crate::{alpha::Alpha, errors::Result};

#[inline]
fn empty() -> api::Empty {
    api::Empty {}
}

#[derive(Clone)]
pub struct Sdk {
    client: SdkClient<Channel>,
    alpha: Alpha,
}

impl Sdk {
    #[inline]
    pub fn alpha(&self) -> &Alpha {
        &self.alpha
    }

    pub async fn ready(&mut self) -> Result<()> {
        Ok(self.client.ready(empty()).await.map(|_| ())?)
    }

    pub async fn allocate(&mut self) -> Result<()> {
        Ok(self.client.allocate(empty()).await.map(|_| ())?)
    }

    pub async fn shutdown(&mut self) -> Result<()> {
        Ok(self.client.shutdown(empty()).await.map(|_| ())?)
    }

    pub fn health_check(&self) -> tokio::sync::mpsc::Sender<()> {
        let mut health_client = self.clone();
        let (tx, mut rx) = tokio::sync::mpsc::channel(10);

        tokio::task::spawn(async move {
            let health_stream = async_stream::stream! {
                while rx.recv().await.is_some() {
                    yield empty();
                }
            };

            let _ = health_client.client.health(health_stream).await;
        });

        tx
    }

    pub async fn set_label(
        &mut self,
        key: impl Into<String>,
        value: impl Into<String>,
    ) -> Result<()> {
        Ok(self
            .client
            .set_label(api::KeyValue {
                key: key.into(),
                value: value.into(),
            })
            .await
            .map(|_| ())?)
    }

    pub async fn set_annotation(
        &mut self,
        key: impl Into<String>,
        value: impl Into<String>,
    ) -> Result<()> {
        Ok(self
            .client
            .set_annotation(api::KeyValue {
                key: key.into(),
                value: value.into(),
            })
            .await
            .map(|_| ())?)
    }

    pub async fn get_gameserver(&mut self) -> Result<GameServer> {
        Ok(self
            .client
            .get_game_server(empty())
            .await
            .map(|res| res.into_inner())?)
    }

    pub async fn reserve(&mut self, duration: Duration) -> Result<()> {
        Ok(self
            .client
            .reserve(api::Duration {
                seconds: std::cmp::max(duration.as_secs() as i64, 1),
            })
            .await
            .map(|_| ())?)
    }

    pub async fn watch_gameserver(&mut self) -> Result<WatchStream> {
        Ok(self
            .client
            .watch_game_server(empty())
            .await
            .map(|stream| stream.into_inner())?)
    }
}
    pub async fn new(port: Option<u16>, keep_alive: Option<Duration>) -> Result<Self> {
        let addr: http::Uri = format!(
            "http://localhost:{}",
            port.unwrap_or_else(|| {
                env::var("AGONES_SDK_GRPC_PORT")
                    .ok()
                    .and_then(|s| s.parse().ok())
                    .unwrap_or(9357)
            })
        )
        .parse()?;

        let builder = tonic::transport::channel::Channel::builder(addr)
            .connect_timeout(Duration::from_secs(30))
            .keep_alive_timeout(keep_alive.unwrap_or_else(|| Duration::from_secs(30)));

        let channel = builder.connect_lazy()?;

        let mut client = SdkClient::new(channel.clone());
        let alpha = Alpha::new(channel);

        tokio::time::timeout(Duration::from_secs(30), async {
            let mut connect_interval = tokio::time::interval(Duration::from_millis(100));

            loop {
                connect_interval.tick().await;

                if client.get_game_server(empty()).await.is_ok() {
                    break;
                }
            }
        })
        .await?;

        Ok(Self { client, alpha })
    }
function_block-full_function
[ { "content": "struct SDK::SDKImpl {\n\n std::string host_;\n\n std::shared_ptr<grpc::Channel> channel_;\n\n std::unique_ptr<agones::dev::sdk::SDK::Stub> stub_;\n\n std::unique_ptr<grpc::ClientWriter<agones::dev::sdk::Empty>> health_;\n\n std::unique_ptr<grpc::ClientContext> health_context_;\n\n};\n\n\n\nSDK::SDK() : pimpl_{std::make_unique<SDKImpl>()} {\n\n const char* port = std::getenv(\"AGONES_SDK_GRPC_PORT\");\n\n pimpl_->host_ = std::string(\"localhost:\") + (port ? port : \"9357\");\n\n pimpl_->channel_ =\n\n grpc::CreateChannel(pimpl_->host_, grpc::InsecureChannelCredentials());\n\n}\n\n\n\nSDK::~SDK() {}\n\n\n\nbool SDK::Connect() {\n\n if (!pimpl_->channel_->WaitForConnected(\n\n gpr_time_add(gpr_now(GPR_CLOCK_REALTIME),\n", "file_path": "sdks/cpp/src/agones/sdk.cc", "rank": 1, "score": 216448.9990088184 }, { "content": "fn run_player_tracking_features(mut alpha: agones::alpha::Alpha) -> Result<(), String> {\n\n use tokio::runtime::Handle;\n\n\n\n println!(\"rust: Setting player capacity...\");\n\n Handle::current().block_on(async {\n\n alpha\n\n .set_player_capacity(10)\n\n .await\n\n .map_err(|e| format!(\"Could not run SetPlayerCapacity(): {:#?}. Exiting!\", e))\n\n })?;\n\n\n\n println!(\"rust: Getting player capacity...\");\n\n let capacity = Handle::current().block_on(async {\n\n alpha\n\n .get_player_capacity()\n\n .await\n\n .map_err(|e| format!(\"Could not run GetPlayerCapacity(): {}. Exiting!\", e))\n\n })?;\n\n println!(\"rust: Player capacity: {}\", capacity);\n\n\n", "file_path": "test/sdk/rust/src/main.rs", "rank": 2, "score": 184491.43936700013 }, { "content": "type SDKClient interface {\n\n\t// PlayerConnect increases the SDK’s stored player count by one, and appends this playerID to GameServer.Status.Players.IDs.\n\n\t//\n\n\t// GameServer.Status.Players.Count and GameServer.Status.Players.IDs are then set to update the player count and id list a second from now,\n\n\t// unless there is already an update pending, in which case the update joins that batch operation.\n\n\t//\n\n\t// PlayerConnect returns true and adds the playerID to the list of playerIDs if this playerID was not already in the\n\n\t// list of connected playerIDs.\n\n\t//\n\n\t// If the playerID exists within the list of connected playerIDs, PlayerConnect will return false, and the list of\n\n\t// connected playerIDs will be left unchanged.\n\n\t//\n\n\t// An error will be returned if the playerID was not already in the list of connected playerIDs but the player capacity for\n\n\t// the server has been reached. 
The playerID will not be added to the list of playerIDs.\n\n\t//\n\n\t// Warning: Do not use this method if you are manually managing GameServer.Status.Players.IDs and GameServer.Status.Players.Count\n\n\t// through the Kubernetes API, as indeterminate results will occur.\n\n\tPlayerConnect(ctx context.Context, in *PlayerID, opts ...grpc.CallOption) (*Bool, error)\n\n\t// Decreases the SDK’s stored player count by one, and removes the playerID from GameServer.Status.Players.IDs.\n\n\t//\n\n\t// GameServer.Status.Players.Count and GameServer.Status.Players.IDs are then set to update the player count and id list a second from now,\n\n\t// unless there is already an update pending, in which case the update joins that batch operation.\n\n\t//\n\n\t// PlayerDisconnect will return true and remove the supplied playerID from the list of connected playerIDs if the\n\n\t// playerID value exists within the list.\n\n\t//\n\n\t// If the playerID was not in the list of connected playerIDs, the call will return false, and the connected playerID list\n\n\t// will be left unchanged.\n\n\t//\n\n\t// Warning: Do not use this method if you are manually managing GameServer.status.players.IDs and GameServer.status.players.Count\n\n\t// through the Kubernetes API, as indeterminate results will occur.\n\n\tPlayerDisconnect(ctx context.Context, in *PlayerID, opts ...grpc.CallOption) (*Bool, error)\n\n\t// Update the GameServer.Status.Players.Capacity value with a new capacity.\n\n\tSetPlayerCapacity(ctx context.Context, in *Count, opts ...grpc.CallOption) (*Empty, error)\n\n\t// Retrieves the current player capacity. This is always accurate from what has been set through this SDK,\n\n\t// even if the value has yet to be updated on the GameServer status resource.\n\n\t//\n\n\t// If GameServer.Status.Players.Capacity is set manually through the Kubernetes API, use SDK.GameServer() or SDK.WatchGameServer() instead to view this value.\n\n\tGetPlayerCapacity(ctx context.Context, in *Empty, opts ...grpc.CallOption) (*Count, error)\n\n\t// Retrieves the current player count. This is always accurate from what has been set through this SDK,\n\n\t// even if the value has yet to be updated on the GameServer status resource.\n\n\t//\n\n\t// If GameServer.Status.Players.Count is set manually through the Kubernetes API, use SDK.GameServer() or SDK.WatchGameServer() instead to view this value.\n\n\tGetPlayerCount(ctx context.Context, in *Empty, opts ...grpc.CallOption) (*Count, error)\n\n\t// Returns if the playerID is currently connected to the GameServer. This is always accurate from what has been set through this SDK,\n\n\t// even if the value has yet to be updated on the GameServer status resource.\n\n\t//\n\n\t// If GameServer.Status.Players.IDs is set manually through the Kubernetes API, use SDK.GameServer() or SDK.WatchGameServer() instead to determine connected status.\n\n\tIsPlayerConnected(ctx context.Context, in *PlayerID, opts ...grpc.CallOption) (*Bool, error)\n\n\t// Returns the list of the currently connected player ids. 
This is always accurate from what has been set through this SDK,\n\n\t// even if the value has yet to be updated on the GameServer status resource.\n\n\t//\n\n\t// If GameServer.Status.Players.IDs is set manually through the Kubernetes API, use SDK.GameServer() or SDK.WatchGameServer() instead to view this value.\n\n\tGetConnectedPlayers(ctx context.Context, in *Empty, opts ...grpc.CallOption) (*PlayerIDList, error)\n", "file_path": "pkg/sdk/alpha/alpha.pb.go", "rank": 3, "score": 170201.5740486386 }, { "content": "func NewSDKClient(cc *grpc.ClientConn) SDKClient {\n\n\treturn &sDKClient{cc}\n", "file_path": "pkg/sdk/alpha/alpha.pb.go", "rank": 4, "score": 167541.63175614455 }, { "content": "func RegisterSDKHandlerClient(ctx context.Context, mux *runtime.ServeMux, client SDKClient) error {\n\n\n\n\tmux.Handle(\"POST\", pattern_SDK_PlayerConnect_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\n\t\tctx, cancel := context.WithCancel(req.Context())\n\n\t\tdefer cancel()\n\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\n\t\trctx, err := runtime.AnnotateContext(ctx, mux, req)\n\n\t\tif err != nil {\n\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\n\t\t\treturn\n\n\t\t}\n\n\t\tresp, md, err := request_SDK_PlayerConnect_0(rctx, inboundMarshaler, client, req, pathParams)\n\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\n\t\tif err != nil {\n\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\n\t\t\treturn\n\n\t\t}\n\n\n\n\t\tforward_SDK_PlayerConnect_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\n\n\t})\n\n\n\n\tmux.Handle(\"POST\", pattern_SDK_PlayerDisconnect_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\n\t\tctx, cancel := context.WithCancel(req.Context())\n\n\t\tdefer cancel()\n\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\n\t\trctx, err := runtime.AnnotateContext(ctx, mux, req)\n\n\t\tif err != nil {\n\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\n\t\t\treturn\n\n\t\t}\n\n\t\tresp, md, err := request_SDK_PlayerDisconnect_0(rctx, inboundMarshaler, client, req, pathParams)\n\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\n\t\tif err != nil {\n\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\n\t\t\treturn\n\n\t\t}\n\n\n\n\t\tforward_SDK_PlayerDisconnect_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\n\n\t})\n\n\n\n\tmux.Handle(\"PUT\", pattern_SDK_SetPlayerCapacity_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\n\t\tctx, cancel := context.WithCancel(req.Context())\n\n\t\tdefer cancel()\n\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\n\t\trctx, err := runtime.AnnotateContext(ctx, mux, req)\n\n\t\tif err != nil {\n\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\n\t\t\treturn\n\n\t\t}\n\n\t\tresp, md, err := request_SDK_SetPlayerCapacity_0(rctx, inboundMarshaler, client, req, pathParams)\n\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\n\t\tif err != nil {\n\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\n\t\t\treturn\n\n\t\t}\n\n\n\n\t\tforward_SDK_SetPlayerCapacity_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\n\n\t})\n\n\n\n\tmux.Handle(\"GET\", pattern_SDK_GetPlayerCapacity_0, func(w http.ResponseWriter, req 
*http.Request, pathParams map[string]string) {\n\n\t\tctx, cancel := context.WithCancel(req.Context())\n\n\t\tdefer cancel()\n\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\n\t\trctx, err := runtime.AnnotateContext(ctx, mux, req)\n\n\t\tif err != nil {\n\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\n\t\t\treturn\n\n\t\t}\n\n\t\tresp, md, err := request_SDK_GetPlayerCapacity_0(rctx, inboundMarshaler, client, req, pathParams)\n\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\n\t\tif err != nil {\n\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\n\t\t\treturn\n\n\t\t}\n\n\n\n\t\tforward_SDK_GetPlayerCapacity_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\n\n\t})\n\n\n\n\tmux.Handle(\"GET\", pattern_SDK_GetPlayerCount_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\n\t\tctx, cancel := context.WithCancel(req.Context())\n\n\t\tdefer cancel()\n\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\n\t\trctx, err := runtime.AnnotateContext(ctx, mux, req)\n\n\t\tif err != nil {\n\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\n\t\t\treturn\n\n\t\t}\n\n\t\tresp, md, err := request_SDK_GetPlayerCount_0(rctx, inboundMarshaler, client, req, pathParams)\n\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\n\t\tif err != nil {\n\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\n\t\t\treturn\n\n\t\t}\n\n\n\n\t\tforward_SDK_GetPlayerCount_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\n\n\t})\n\n\n\n\tmux.Handle(\"GET\", pattern_SDK_IsPlayerConnected_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\n\t\tctx, cancel := context.WithCancel(req.Context())\n\n\t\tdefer cancel()\n\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\n\t\trctx, err := runtime.AnnotateContext(ctx, mux, req)\n\n\t\tif err != nil {\n\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\n\t\t\treturn\n\n\t\t}\n\n\t\tresp, md, err := request_SDK_IsPlayerConnected_0(rctx, inboundMarshaler, client, req, pathParams)\n\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\n\t\tif err != nil {\n\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\n\t\t\treturn\n\n\t\t}\n\n\n\n\t\tforward_SDK_IsPlayerConnected_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\n\n\t})\n\n\n\n\tmux.Handle(\"GET\", pattern_SDK_GetConnectedPlayers_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\n\t\tctx, cancel := context.WithCancel(req.Context())\n\n\t\tdefer cancel()\n\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\n\t\trctx, err := runtime.AnnotateContext(ctx, mux, req)\n\n\t\tif err != nil {\n\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\n\t\t\treturn\n\n\t\t}\n\n\t\tresp, md, err := request_SDK_GetConnectedPlayers_0(rctx, inboundMarshaler, client, req, pathParams)\n\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\n\t\tif err != nil {\n\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\n\t\t\treturn\n\n\t\t}\n\n\n\n\t\tforward_SDK_GetConnectedPlayers_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\n\n\t})\n\n\n\n\treturn nil\n", "file_path": "pkg/sdk/alpha/alpha.pb.gw.go", "rank": 5, "score": 
162485.54535356865 }, { "content": "type Empty struct {\n\n\tXXX_NoUnkeyedLiteral struct{} `json:\"-\"`\n\n\tXXX_unrecognized []byte `json:\"-\"`\n\n\tXXX_sizecache int32 `json:\"-\"`\n", "file_path": "pkg/sdk/alpha/alpha.pb.go", "rank": 6, "score": 161145.98988516335 }, { "content": "func (*Empty) Descriptor() ([]byte, []int) {\n\n\treturn fileDescriptor_alpha_adf85771d71a9075, []int{0}\n", "file_path": "pkg/sdk/alpha/alpha.pb.go", "rank": 7, "score": 158174.589934107 }, { "content": "func (m *Empty) String() string { return proto.CompactTextString(m) }\n", "file_path": "pkg/sdk/alpha/alpha.pb.go", "rank": 8, "score": 158174.589934107 }, { "content": "func (m *Empty) Reset() { *m = Empty{} }\n", "file_path": "pkg/sdk/alpha/alpha.pb.go", "rank": 9, "score": 158174.589934107 }, { "content": "type sDKClient struct {\n\n\tcc *grpc.ClientConn\n", "file_path": "pkg/sdk/alpha/alpha.pb.go", "rank": 10, "score": 157330.23267787794 }, { "content": "func (*Empty) ProtoMessage() {}\n", "file_path": "pkg/sdk/alpha/alpha.pb.go", "rank": 11, "score": 155315.22054486553 }, { "content": "func (m *Empty) XXX_Size() int {\n\n\treturn xxx_messageInfo_Empty.Size(m)\n", "file_path": "pkg/sdk/alpha/alpha.pb.go", "rank": 12, "score": 155315.22054486553 }, { "content": "func (dst *Empty) XXX_Merge(src proto.Message) {\n\n\txxx_messageInfo_Empty.Merge(dst, src)\n", "file_path": "pkg/sdk/alpha/alpha.pb.go", "rank": 13, "score": 155315.22054486553 }, { "content": "func (m *Empty) XXX_Unmarshal(b []byte) error {\n\n\treturn xxx_messageInfo_Empty.Unmarshal(m, b)\n", "file_path": "pkg/sdk/alpha/alpha.pb.go", "rank": 14, "score": 155315.22054486553 }, { "content": "func (m *Empty) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {\n\n\treturn xxx_messageInfo_Empty.Marshal(b, m, deterministic)\n", "file_path": "pkg/sdk/alpha/alpha.pb.go", "rank": 15, "score": 155315.22054486553 }, { "content": "func (m *Empty) XXX_DiscardUnknown() {\n\n\txxx_messageInfo_Empty.DiscardUnknown(m)\n", "file_path": "pkg/sdk/alpha/alpha.pb.go", "rank": 16, "score": 152561.4653326737 }, { "content": "var xxx_messageInfo_Empty proto.InternalMessageInfo\n", "file_path": "pkg/sdk/alpha/alpha.pb.go", "rank": 17, "score": 152561.4653326737 }, { "content": "func (c *sDKClient) IsPlayerConnected(ctx context.Context, in *PlayerID, opts ...grpc.CallOption) (*Bool, error) {\n\n\tout := new(Bool)\n\n\terr := c.cc.Invoke(ctx, \"/agones.dev.sdk.alpha.SDK/IsPlayerConnected\", in, out, opts...)\n\n\tif err != nil {\n\n\t\treturn nil, err\n\n\t}\n\n\treturn out, nil\n", "file_path": "pkg/sdk/alpha/alpha.pb.go", "rank": 18, "score": 151751.8974111557 }, { "content": "func (c *sDKClient) PlayerDisconnect(ctx context.Context, in *PlayerID, opts ...grpc.CallOption) (*Bool, error) {\n\n\tout := new(Bool)\n\n\terr := c.cc.Invoke(ctx, \"/agones.dev.sdk.alpha.SDK/PlayerDisconnect\", in, out, opts...)\n\n\tif err != nil {\n\n\t\treturn nil, err\n\n\t}\n\n\treturn out, nil\n", "file_path": "pkg/sdk/alpha/alpha.pb.go", "rank": 19, "score": 151751.8974111557 }, { "content": "func (c *sDKClient) PlayerConnect(ctx context.Context, in *PlayerID, opts ...grpc.CallOption) (*Bool, error) {\n\n\tout := new(Bool)\n\n\terr := c.cc.Invoke(ctx, \"/agones.dev.sdk.alpha.SDK/PlayerConnect\", in, out, opts...)\n\n\tif err != nil {\n\n\t\treturn nil, err\n\n\t}\n\n\treturn out, nil\n", "file_path": "pkg/sdk/alpha/alpha.pb.go", "rank": 20, "score": 151751.89741115572 }, { "content": "using Microsoft.VisualStudio.TestTools.UnitTesting;\n\nusing Moq;\n\nusing 
System;\n\n\n\nnamespace Agones.Tests\n\n{\n\n [TestClass]\n\n public class AgonesAlphaSDKClientTests\n\n {\n\n [TestMethod]\n\n public async Task GetPlayerCapacity_Sends_OK()\n\n {\n\n var mockClient = new Mock<SDK.SDKClient>();\n\n var mockSdk = new AgonesSDK();\n\n var expected = new Count() { Count_ = 1 };\n\n var fakeCall = TestCalls.AsyncUnaryCall(Task.FromResult(expected), Task.FromResult(new Metadata()), () => Status.DefaultSuccess, () => new Metadata(), () => { });\n\n\n\n mockClient.Setup(m => m.GetPlayerCapacityAsync(It.IsAny<Empty>(), It.IsAny<Metadata>(), It.IsAny<DateTime?>(), It.IsAny<CancellationToken>())).Returns(fakeCall);\n\n mockSdk.alpha.client = mockClient.Object;\n\n\n", "file_path": "sdks/csharp/test/AgonesAlphaSDKClientTests.cs", "rank": 21, "score": 150514.09540685453 }, { "content": " var result = await mockSdk.Alpha().GetPlayerCapacityAsync();\n\n Assert.AreEqual(expected.Count_, result);\n\n }\n\n\n\n [TestMethod]\n\n public async Task SetPlayerCapacity_Sends_OK()\n\n {\n\n var mockClient = new Mock<SDK.SDKClient>();\n\n var mockSdk = new AgonesSDK();\n\n var expected = StatusCode.OK;\n\n var fakeCall = TestCalls.AsyncUnaryCall(Task.FromResult(new Empty()), Task.FromResult(new Metadata()), () => Status.DefaultSuccess, () => new Metadata(), () => { });\n\n\n\n mockClient.Setup(m => m.SetPlayerCapacityAsync(It.IsAny<Count>(), It.IsAny<Metadata>(), It.IsAny<DateTime?>(), It.IsAny<CancellationToken>())).Returns(fakeCall);\n\n mockSdk.alpha.client = mockClient.Object;\n\n\n\n var result = await mockSdk.Alpha().SetPlayerCapacityAsync(1);\n\n Assert.AreEqual(expected, result.StatusCode);\n\n }\n\n\n\n [TestMethod]\n", "file_path": "sdks/csharp/test/AgonesAlphaSDKClientTests.cs", "rank": 22, "score": 150509.23800603984 }, { "content": " public async Task GetConnectedPlayers_Sends_OK()\n\n {\n\n var mockClient = new Mock<SDK.SDKClient>();\n\n var mockSdk = new AgonesSDK();\n\n var expected = new List<string> { \"player1\", \"player2\" };\n\n var playerList = new PlayerIDList() { List = { expected } };\n\n var fakeCall = TestCalls.AsyncUnaryCall(Task.FromResult(playerList), Task.FromResult(new Metadata()), () => Status.DefaultSuccess, () => new Metadata(), () => { });\n\n\n\n mockClient.Setup(m => m.GetConnectedPlayersAsync(It.IsAny<Empty>(), It.IsAny<Metadata>(), It.IsAny<DateTime?>(), It.IsAny<CancellationToken>())).Returns(fakeCall);\n\n mockSdk.alpha.client = mockClient.Object;\n\n\n\n var result = await mockSdk.Alpha().GetConnectedPlayersAsync();\n\n CollectionAssert.AreEquivalent(expected, result);\n\n }\n\n }\n\n}\n", "file_path": "sdks/csharp/test/AgonesAlphaSDKClientTests.cs", "rank": 23, "score": 150508.77841434 }, { "content": " var fakeCall = TestCalls.AsyncUnaryCall(Task.FromResult(expected), Task.FromResult(new Metadata()), () => Status.DefaultSuccess, () => new Metadata(), () => { });\n\n\n\n mockClient.Setup(m => m.PlayerDisconnectAsync(It.IsAny<PlayerID>(), It.IsAny<Metadata>(), It.IsAny<DateTime?>(), It.IsAny<CancellationToken>())).Returns(fakeCall);\n\n mockSdk.alpha.client = mockClient.Object;\n\n\n\n var result = await mockSdk.Alpha().PlayerDisconnectAsync(\"test\");\n\n Assert.AreEqual(expected.Bool_, result);\n\n }\n\n\n\n [TestMethod]\n\n public async Task GetPlayerCount_Sends_OK()\n\n {\n\n var mockClient = new Mock<SDK.SDKClient>();\n\n var mockSdk = new AgonesSDK();\n\n var expected = new Count() { Count_ = 1 };\n\n var fakeCall = TestCalls.AsyncUnaryCall(Task.FromResult(expected), Task.FromResult(new Metadata()), () => Status.DefaultSuccess, 
() => new Metadata(), () => { });\n\n\n\n mockClient.Setup(m => m.GetPlayerCountAsync(It.IsAny<Empty>(), It.IsAny<Metadata>(), It.IsAny<DateTime?>(), It.IsAny<CancellationToken>())).Returns(fakeCall);\n\n mockSdk.alpha.client = mockClient.Object;\n\n\n", "file_path": "sdks/csharp/test/AgonesAlphaSDKClientTests.cs", "rank": 24, "score": 150508.73378126582 }, { "content": " var result = await mockSdk.Alpha().GetPlayerCountAsync();\n\n Assert.AreEqual(expected.Count_, result);\n\n }\n\n\n\n [TestMethod]\n\n public async Task IsPlayerConnected_Sends_OK()\n\n {\n\n var mockClient = new Mock<SDK.SDKClient>();\n\n var mockSdk = new AgonesSDK();\n\n var expected = new Bool() { Bool_ = true };\n\n var fakeCall = TestCalls.AsyncUnaryCall(Task.FromResult(expected), Task.FromResult(new Metadata()), () => Status.DefaultSuccess, () => new Metadata(), () => { });\n\n\n\n mockClient.Setup(m => m.IsPlayerConnectedAsync(It.IsAny<PlayerID>(), It.IsAny<Metadata>(), It.IsAny<DateTime?>(), It.IsAny<CancellationToken>())).Returns(fakeCall);\n\n mockSdk.alpha.client = mockClient.Object;\n\n\n\n var result = await mockSdk.Alpha().IsPlayerConnectedAsync(\"test\");\n\n Assert.AreEqual(expected.Bool_, result);\n\n }\n\n\n\n [TestMethod]\n", "file_path": "sdks/csharp/test/AgonesAlphaSDKClientTests.cs", "rank": 25, "score": 150506.51649498023 }, { "content": " public async Task PlayerConnect_Sends_OK()\n\n {\n\n var mockClient = new Mock<SDK.SDKClient>();\n\n var mockSdk = new AgonesSDK();\n\n var expected = new Bool() { Bool_ = true };\n\n var fakeCall = TestCalls.AsyncUnaryCall(Task.FromResult(expected), Task.FromResult(new Metadata()), () => Status.DefaultSuccess, () => new Metadata(), () => { });\n\n\n\n mockClient.Setup(m => m.PlayerConnectAsync(It.IsAny<PlayerID>(), It.IsAny<Metadata>(), It.IsAny<DateTime?>(), It.IsAny<CancellationToken>())).Returns(fakeCall);\n\n mockSdk.alpha.client = mockClient.Object;\n\n\n\n var result = await mockSdk.Alpha().PlayerConnectAsync(\"test\");\n\n Assert.AreEqual(expected.Bool_, result);\n\n }\n\n\n\n [TestMethod]\n\n public async Task PlayerDisconnect_Sends_OK()\n\n {\n\n var mockClient = new Mock<SDK.SDKClient>();\n\n var mockSdk = new AgonesSDK();\n\n var expected = new Bool() { Bool_ = true };\n", "file_path": "sdks/csharp/test/AgonesAlphaSDKClientTests.cs", "rank": 26, "score": 150506.3516004645 }, { "content": "// Copyright 2020 Google LLC All Rights Reserved.\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nusing Agones.Dev.Sdk.Alpha;\n\nusing Grpc.Core;\n\nusing Grpc.Core.Testing;\n\nusing System.Collections.Generic;\n\nusing System.Threading;\n\nusing System.Threading.Tasks;\n", "file_path": "sdks/csharp/test/AgonesAlphaSDKClientTests.cs", "rank": 27, "score": 150502.46624884015 }, { "content": "func (c *sDKClient) SetPlayerCapacity(ctx context.Context, in *Count, opts ...grpc.CallOption) (*Empty, error) {\n\n\tout := new(Empty)\n\n\terr := c.cc.Invoke(ctx, \"/agones.dev.sdk.alpha.SDK/SetPlayerCapacity\", in, out, 
opts...)\n\n\tif err != nil {\n\n\t\treturn nil, err\n\n\t}\n\n\treturn out, nil\n", "file_path": "pkg/sdk/alpha/alpha.pb.go", "rank": 28, "score": 149114.17324580654 }, { "content": "func (c *sDKClient) GetConnectedPlayers(ctx context.Context, in *Empty, opts ...grpc.CallOption) (*PlayerIDList, error) {\n\n\tout := new(PlayerIDList)\n\n\terr := c.cc.Invoke(ctx, \"/agones.dev.sdk.alpha.SDK/GetConnectedPlayers\", in, out, opts...)\n\n\tif err != nil {\n\n\t\treturn nil, err\n\n\t}\n\n\treturn out, nil\n", "file_path": "pkg/sdk/alpha/alpha.pb.go", "rank": 29, "score": 149114.17324580654 }, { "content": "func (c *sDKClient) GetPlayerCapacity(ctx context.Context, in *Empty, opts ...grpc.CallOption) (*Count, error) {\n\n\tout := new(Count)\n\n\terr := c.cc.Invoke(ctx, \"/agones.dev.sdk.alpha.SDK/GetPlayerCapacity\", in, out, opts...)\n\n\tif err != nil {\n\n\t\treturn nil, err\n\n\t}\n\n\treturn out, nil\n", "file_path": "pkg/sdk/alpha/alpha.pb.go", "rank": 30, "score": 149114.17324580654 }, { "content": "func (c *sDKClient) GetPlayerCount(ctx context.Context, in *Empty, opts ...grpc.CallOption) (*Count, error) {\n\n\tout := new(Count)\n\n\terr := c.cc.Invoke(ctx, \"/agones.dev.sdk.alpha.SDK/GetPlayerCount\", in, out, opts...)\n\n\tif err != nil {\n\n\t\treturn nil, err\n\n\t}\n\n\treturn out, nil\n", "file_path": "pkg/sdk/alpha/alpha.pb.go", "rank": 31, "score": 149114.17324580654 }, { "content": "class EmptyDefaultTypeInternal;\n\nAGONES_EXPORT extern EmptyDefaultTypeInternal _Empty_default_instance_;\n", "file_path": "sdks/cpp/include/agones/sdk.pb.h", "rank": 32, "score": 148049.2123489247 }, { "content": "const Alpha = require('../src/alpha');\n", "file_path": "sdks/nodejs/spec/alphaAgonesSDK.spec.js", "rank": 33, "score": 147620.16211260163 }, { "content": "\tclient rest.Interface\n", "file_path": "pkg/client/clientset/versioned/typed/agones/v1/gameserverset.go", "rank": 34, "score": 146724.56288357987 }, { "content": "\tclient rest.Interface\n", "file_path": "pkg/client/clientset/versioned/typed/allocation/v1/gameserverallocation.go", "rank": 35, "score": 146724.56288357987 }, { "content": "\tclient rest.Interface\n", "file_path": "pkg/client/clientset/versioned/typed/agones/v1/gameserver.go", "rank": 36, "score": 146724.56288357987 }, { "content": "\tclient rest.Interface\n", "file_path": "pkg/client/clientset/versioned/typed/multicluster/v1/gameserverallocationpolicy.go", "rank": 37, "score": 146724.56288357987 }, { "content": "\tclient rest.Interface\n", "file_path": "pkg/client/clientset/versioned/typed/autoscaling/v1/fleetautoscaler.go", "rank": 38, "score": 146724.56288357987 }, { "content": "\tclient rest.Interface\n", "file_path": "pkg/client/clientset/versioned/typed/agones/v1/fleet.go", "rank": 39, "score": 146724.56288357987 }, { "content": "class EmptyDefaultTypeInternal {\n\n public:\n\n ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed<Empty> _instance;\n\n} _Empty_default_instance_;\n", "file_path": "sdks/cpp/src/agones/sdk.pb.cc", "rank": 40, "score": 144868.17375709183 }, { "content": "// Internal implementation detail -- do not use these members.\n\nstruct AGONES_EXPORT TableStruct_sdk_2eproto {\n\n static const ::PROTOBUF_NAMESPACE_ID::internal::ParseTableField entries[]\n\n PROTOBUF_SECTION_VARIABLE(protodesc_cold);\n\n static const ::PROTOBUF_NAMESPACE_ID::internal::AuxillaryParseTableField aux[]\n\n PROTOBUF_SECTION_VARIABLE(protodesc_cold);\n\n static const ::PROTOBUF_NAMESPACE_ID::internal::ParseTable schema[12]\n\n 
PROTOBUF_SECTION_VARIABLE(protodesc_cold);\n\n static const ::PROTOBUF_NAMESPACE_ID::internal::FieldMetadata field_metadata[];\n\n static const ::PROTOBUF_NAMESPACE_ID::internal::SerializationTable serialization_table[];\n\n static const ::PROTOBUF_NAMESPACE_ID::uint32 offsets[];\n\n};\n\nextern AGONES_EXPORT const ::PROTOBUF_NAMESPACE_ID::internal::DescriptorTable descriptor_table_sdk_2eproto;\n\nnamespace agones {\n\nnamespace dev {\n\nnamespace sdk {\n", "file_path": "sdks/cpp/include/agones/sdk.pb.h", "rank": 41, "score": 144091.79866984606 }, { "content": "\tclient alpha.SDKClient\n", "file_path": "sdks/go/alpha.go", "rank": 42, "score": 143878.5796445854 }, { "content": "\talpha *Alpha\n", "file_path": "sdks/go/sdk.go", "rank": 43, "score": 142743.90738409237 }, { "content": "\tclient sdk.SDKClient\n", "file_path": "sdks/go/sdk.go", "rank": 44, "score": 142171.6928598606 }, { "content": "\tType FleetAutoscalerSyncType `json:\"type\"`\n", "file_path": "pkg/apis/autoscaling/v1/fleetautoscaler.go", "rank": 45, "score": 139497.53786542502 }, { "content": "const Alpha = require('./alpha');\n", "file_path": "sdks/nodejs/src/agonesSDK.js", "rank": 46, "score": 138245.09375002177 }, { "content": "const Alpha = require('../src/alpha');\n", "file_path": "sdks/nodejs/spec/agonesSDK.spec.js", "rank": 47, "score": 136140.68162320185 }, { "content": "class GameServer_ObjectMeta_AnnotationsEntry_DoNotUseDefaultTypeInternal;\n\nAGONES_EXPORT extern GameServer_ObjectMeta_AnnotationsEntry_DoNotUseDefaultTypeInternal _GameServer_ObjectMeta_AnnotationsEntry_DoNotUse_default_instance_;\n", "file_path": "sdks/cpp/include/agones/sdk.pb.h", "rank": 48, "score": 130847.12690259052 }, { "content": "class GameServer_ObjectMeta_LabelsEntry_DoNotUseDefaultTypeInternal;\n\nAGONES_EXPORT extern GameServer_ObjectMeta_LabelsEntry_DoNotUseDefaultTypeInternal _GameServer_ObjectMeta_LabelsEntry_DoNotUse_default_instance_;\n", "file_path": "sdks/cpp/include/agones/sdk.pb.h", "rank": 49, "score": 130847.12690259052 }, { "content": "class GameServer_ObjectMeta_LabelsEntry_DoNotUseDefaultTypeInternal {\n\n public:\n\n ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed<GameServer_ObjectMeta_LabelsEntry_DoNotUse> _instance;\n\n} _GameServer_ObjectMeta_LabelsEntry_DoNotUse_default_instance_;\n", "file_path": "sdks/cpp/src/agones/sdk.pb.cc", "rank": 50, "score": 128356.75292448432 }, { "content": "class GameServer_ObjectMeta_AnnotationsEntry_DoNotUseDefaultTypeInternal {\n\n public:\n\n ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed<GameServer_ObjectMeta_AnnotationsEntry_DoNotUse> _instance;\n\n} _GameServer_ObjectMeta_AnnotationsEntry_DoNotUse_default_instance_;\n", "file_path": "sdks/cpp/src/agones/sdk.pb.cc", "rank": 51, "score": 128356.75292448432 }, { "content": "// Internal implementation detail -- do not use these members.\n\nstruct AGONES_EXPORT TableStruct_google_2fapi_2fhttp_2eproto {\n\n static const ::PROTOBUF_NAMESPACE_ID::internal::ParseTableField entries[]\n\n PROTOBUF_SECTION_VARIABLE(protodesc_cold);\n\n static const ::PROTOBUF_NAMESPACE_ID::internal::AuxillaryParseTableField aux[]\n\n PROTOBUF_SECTION_VARIABLE(protodesc_cold);\n\n static const ::PROTOBUF_NAMESPACE_ID::internal::ParseTable schema[3]\n\n PROTOBUF_SECTION_VARIABLE(protodesc_cold);\n\n static const ::PROTOBUF_NAMESPACE_ID::internal::FieldMetadata field_metadata[];\n\n static const ::PROTOBUF_NAMESPACE_ID::internal::SerializationTable serialization_table[];\n\n static const ::PROTOBUF_NAMESPACE_ID::uint32 offsets[];\n\n};\n\nextern 
AGONES_EXPORT const ::PROTOBUF_NAMESPACE_ID::internal::DescriptorTable descriptor_table_google_2fapi_2fhttp_2eproto;\n\nnamespace google {\n\nnamespace api {\n", "file_path": "sdks/cpp/include/google/api/http.pb.h", "rank": 52, "score": 127019.34949586666 }, { "content": "// Internal implementation detail -- do not use these members.\n\nstruct AGONES_EXPORT TableStruct_google_2fapi_2fannotations_2eproto {\n\n static const ::PROTOBUF_NAMESPACE_ID::internal::ParseTableField entries[]\n\n PROTOBUF_SECTION_VARIABLE(protodesc_cold);\n\n static const ::PROTOBUF_NAMESPACE_ID::internal::AuxillaryParseTableField aux[]\n\n PROTOBUF_SECTION_VARIABLE(protodesc_cold);\n\n static const ::PROTOBUF_NAMESPACE_ID::internal::ParseTable schema[1]\n\n PROTOBUF_SECTION_VARIABLE(protodesc_cold);\n\n static const ::PROTOBUF_NAMESPACE_ID::internal::FieldMetadata field_metadata[];\n\n static const ::PROTOBUF_NAMESPACE_ID::internal::SerializationTable serialization_table[];\n\n static const ::PROTOBUF_NAMESPACE_ID::uint32 offsets[];\n\n};\n\nextern AGONES_EXPORT const ::PROTOBUF_NAMESPACE_ID::internal::DescriptorTable descriptor_table_google_2fapi_2fannotations_2eproto;\n\nPROTOBUF_NAMESPACE_OPEN\n\nPROTOBUF_NAMESPACE_CLOSE\n\nnamespace google {\n\nnamespace api {\n\n\n\n// ===================================================================\n\n\n\n\n", "file_path": "sdks/cpp/include/google/api/annotations.pb.h", "rank": 53, "score": 127019.34949586666 }, { "content": "type SDKServer interface {\n\n\t// PlayerConnect increases the SDK’s stored player count by one, and appends this playerID to GameServer.Status.Players.IDs.\n\n\t//\n\n\t// GameServer.Status.Players.Count and GameServer.Status.Players.IDs are then set to update the player count and id list a second from now,\n\n\t// unless there is already an update pending, in which case the update joins that batch operation.\n\n\t//\n\n\t// PlayerConnect returns true and adds the playerID to the list of playerIDs if this playerID was not already in the\n\n\t// list of connected playerIDs.\n\n\t//\n\n\t// If the playerID exists within the list of connected playerIDs, PlayerConnect will return false, and the list of\n\n\t// connected playerIDs will be left unchanged.\n\n\t//\n\n\t// An error will be returned if the playerID was not already in the list of connected playerIDs but the player capacity for\n\n\t// the server has been reached. 
The playerID will not be added to the list of playerIDs.\n\n\t//\n\n\t// Warning: Do not use this method if you are manually managing GameServer.Status.Players.IDs and GameServer.Status.Players.Count\n\n\t// through the Kubernetes API, as indeterminate results will occur.\n\n\tPlayerConnect(context.Context, *PlayerID) (*Bool, error)\n\n\t// Decreases the SDK’s stored player count by one, and removes the playerID from GameServer.Status.Players.IDs.\n\n\t//\n\n\t// GameServer.Status.Players.Count and GameServer.Status.Players.IDs are then set to update the player count and id list a second from now,\n\n\t// unless there is already an update pending, in which case the update joins that batch operation.\n\n\t//\n\n\t// PlayerDisconnect will return true and remove the supplied playerID from the list of connected playerIDs if the\n\n\t// playerID value exists within the list.\n\n\t//\n\n\t// If the playerID was not in the list of connected playerIDs, the call will return false, and the connected playerID list\n\n\t// will be left unchanged.\n\n\t//\n\n\t// Warning: Do not use this method if you are manually managing GameServer.status.players.IDs and GameServer.status.players.Count\n\n\t// through the Kubernetes API, as indeterminate results will occur.\n\n\tPlayerDisconnect(context.Context, *PlayerID) (*Bool, error)\n\n\t// Update the GameServer.Status.Players.Capacity value with a new capacity.\n\n\tSetPlayerCapacity(context.Context, *Count) (*Empty, error)\n\n\t// Retrieves the current player capacity. This is always accurate from what has been set through this SDK,\n\n\t// even if the value has yet to be updated on the GameServer status resource.\n\n\t//\n\n\t// If GameServer.Status.Players.Capacity is set manually through the Kubernetes API, use SDK.GameServer() or SDK.WatchGameServer() instead to view this value.\n\n\tGetPlayerCapacity(context.Context, *Empty) (*Count, error)\n\n\t// Retrieves the current player count. This is always accurate from what has been set through this SDK,\n\n\t// even if the value has yet to be updated on the GameServer status resource.\n\n\t//\n\n\t// If GameServer.Status.Players.Count is set manually through the Kubernetes API, use SDK.GameServer() or SDK.WatchGameServer() instead to view this value.\n\n\tGetPlayerCount(context.Context, *Empty) (*Count, error)\n\n\t// Returns if the playerID is currently connected to the GameServer. This is always accurate from what has been set through this SDK,\n\n\t// even if the value has yet to be updated on the GameServer status resource.\n\n\t//\n\n\t// If GameServer.Status.Players.IDs is set manually through the Kubernetes API, use SDK.GameServer() or SDK.WatchGameServer() instead to determine connected status.\n\n\tIsPlayerConnected(context.Context, *PlayerID) (*Bool, error)\n\n\t// Returns the list of the currently connected player ids. 
This is always accurate from what has been set through this SDK,\n\n\t// even if the value has yet to be updated on the GameServer status resource.\n\n\t//\n\n\t// If GameServer.Status.Players.IDs is set manually through the Kubernetes API, use SDK.GameServer() or SDK.WatchGameServer() instead to view this value.\n\n\tGetConnectedPlayers(context.Context, *Empty) (*PlayerIDList, error)\n", "file_path": "pkg/sdk/alpha/alpha.pb.go", "rank": 54, "score": 122016.24110638372 }, { "content": "func RegisterSDKServer(s *grpc.Server, srv SDKServer) {\n\n\ts.RegisterService(&_SDK_serviceDesc, srv)\n", "file_path": "pkg/sdk/alpha/alpha.pb.go", "rank": 55, "score": 120369.84870021284 }, { "content": "var _SDK_serviceDesc = grpc.ServiceDesc{\n\n\tServiceName: \"agones.dev.sdk.alpha.SDK\",\n\n\tHandlerType: (*SDKServer)(nil),\n\n\tMethods: []grpc.MethodDesc{\n\n\t\t{\n\n\t\t\tMethodName: \"PlayerConnect\",\n\n\t\t\tHandler: _SDK_PlayerConnect_Handler,\n\n\t\t},\n\n\t\t{\n\n\t\t\tMethodName: \"PlayerDisconnect\",\n\n\t\t\tHandler: _SDK_PlayerDisconnect_Handler,\n\n\t\t},\n\n\t\t{\n\n\t\t\tMethodName: \"SetPlayerCapacity\",\n\n\t\t\tHandler: _SDK_SetPlayerCapacity_Handler,\n\n\t\t},\n\n\t\t{\n\n\t\t\tMethodName: \"GetPlayerCapacity\",\n\n\t\t\tHandler: _SDK_GetPlayerCapacity_Handler,\n\n\t\t},\n\n\t\t{\n\n\t\t\tMethodName: \"GetPlayerCount\",\n\n\t\t\tHandler: _SDK_GetPlayerCount_Handler,\n\n\t\t},\n\n\t\t{\n\n\t\t\tMethodName: \"IsPlayerConnected\",\n\n\t\t\tHandler: _SDK_IsPlayerConnected_Handler,\n\n\t\t},\n\n\t\t{\n\n\t\t\tMethodName: \"GetConnectedPlayers\",\n\n\t\t\tHandler: _SDK_GetConnectedPlayers_Handler,\n\n\t\t},\n\n\t},\n\n\tStreams: []grpc.StreamDesc{},\n\n\tMetadata: \"alpha.proto\",\n", "file_path": "pkg/sdk/alpha/alpha.pb.go", "rank": 56, "score": 120369.84870021284 }, { "content": "func RegisterSDKHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error {\n\n\treturn RegisterSDKHandlerClient(ctx, mux, NewSDKClient(conn))\n", "file_path": "pkg/sdk/alpha/alpha.pb.gw.go", "rank": 57, "score": 118767.29504917926 }, { "content": "func _SDK_PlayerDisconnect_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {\n\n\tin := new(PlayerID)\n\n\tif err := dec(in); err != nil {\n\n\t\treturn nil, err\n\n\t}\n\n\tif interceptor == nil {\n\n\t\treturn srv.(SDKServer).PlayerDisconnect(ctx, in)\n\n\t}\n\n\tinfo := &grpc.UnaryServerInfo{\n\n\t\tServer: srv,\n\n\t\tFullMethod: \"/agones.dev.sdk.alpha.SDK/PlayerDisconnect\",\n\n\t}\n\n\thandler := func(ctx context.Context, req interface{}) (interface{}, error) {\n\n\t\treturn srv.(SDKServer).PlayerDisconnect(ctx, req.(*PlayerID))\n\n\t}\n\n\treturn interceptor(ctx, in, info, handler)\n", "file_path": "pkg/sdk/alpha/alpha.pb.go", "rank": 58, "score": 118767.29504917926 }, { "content": "func _SDK_IsPlayerConnected_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {\n\n\tin := new(PlayerID)\n\n\tif err := dec(in); err != nil {\n\n\t\treturn nil, err\n\n\t}\n\n\tif interceptor == nil {\n\n\t\treturn srv.(SDKServer).IsPlayerConnected(ctx, in)\n\n\t}\n\n\tinfo := &grpc.UnaryServerInfo{\n\n\t\tServer: srv,\n\n\t\tFullMethod: \"/agones.dev.sdk.alpha.SDK/IsPlayerConnected\",\n\n\t}\n\n\thandler := func(ctx context.Context, req interface{}) (interface{}, error) {\n\n\t\treturn srv.(SDKServer).IsPlayerConnected(ctx, req.(*PlayerID))\n\n\t}\n\n\treturn interceptor(ctx, in, info, 
handler)\n", "file_path": "pkg/sdk/alpha/alpha.pb.go", "rank": 59, "score": 118767.29504917926 }, { "content": "func _SDK_PlayerConnect_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {\n\n\tin := new(PlayerID)\n\n\tif err := dec(in); err != nil {\n\n\t\treturn nil, err\n\n\t}\n\n\tif interceptor == nil {\n\n\t\treturn srv.(SDKServer).PlayerConnect(ctx, in)\n\n\t}\n\n\tinfo := &grpc.UnaryServerInfo{\n\n\t\tServer: srv,\n\n\t\tFullMethod: \"/agones.dev.sdk.alpha.SDK/PlayerConnect\",\n\n\t}\n\n\thandler := func(ctx context.Context, req interface{}) (interface{}, error) {\n\n\t\treturn srv.(SDKServer).PlayerConnect(ctx, req.(*PlayerID))\n\n\t}\n\n\treturn interceptor(ctx, in, info, handler)\n", "file_path": "pkg/sdk/alpha/alpha.pb.go", "rank": 60, "score": 118767.29504917926 }, { "content": "fn run_sync() -> Result<(), String> {\n\n use tokio::runtime::Handle;\n\n\n\n println!(\"rust: Creating SDK instance\");\n\n let mut sdk = Handle::current().block_on(async move {\n\n agones::Sdk::new(None /* default port */, None /* keep_alive */)\n\n .await\n\n .map_err(|e| format!(\"unable to create sdk client: {}\", e))\n\n })?;\n\n\n\n // Spawn a task that will send health checks every 2 seconds. If this current\n\n // thread/task panics or dropped, the health check will also be stopped\n\n let _health = {\n\n let health_tx = sdk.health_check();\n\n let (tx, mut rx) = tokio::sync::oneshot::channel::<()>();\n\n\n\n Handle::current().spawn(async move {\n\n let mut interval = tokio::time::interval(Duration::from_secs(2));\n\n\n\n loop {\n", "file_path": "test/sdk/rust/src/main.rs", "rank": 61, "score": 117515.66671703348 }, { "content": "func RegisterSDKHandlerServer(ctx context.Context, mux *runtime.ServeMux, server SDKServer) error {\n\n\n\n\tmux.Handle(\"POST\", pattern_SDK_PlayerConnect_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\n\t\tctx, cancel := context.WithCancel(req.Context())\n\n\t\tdefer cancel()\n\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\n\t\tif err != nil {\n\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\n\t\t\treturn\n\n\t\t}\n\n\t\tresp, md, err := local_request_SDK_PlayerConnect_0(rctx, inboundMarshaler, server, req, pathParams)\n\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\n\t\tif err != nil {\n\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\n\t\t\treturn\n\n\t\t}\n\n\n\n\t\tforward_SDK_PlayerConnect_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\n\n\t})\n\n\n\n\tmux.Handle(\"POST\", pattern_SDK_PlayerDisconnect_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\n\t\tctx, cancel := context.WithCancel(req.Context())\n\n\t\tdefer cancel()\n\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\n\t\tif err != nil {\n\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\n\t\t\treturn\n\n\t\t}\n\n\t\tresp, md, err := local_request_SDK_PlayerDisconnect_0(rctx, inboundMarshaler, server, req, pathParams)\n\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\n\t\tif err != nil {\n\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\n\t\t\treturn\n\n\t\t}\n\n\n\n\t\tforward_SDK_PlayerDisconnect_0(ctx, mux, 
outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\n\n\t})\n\n\n\n\tmux.Handle(\"PUT\", pattern_SDK_SetPlayerCapacity_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\n\t\tctx, cancel := context.WithCancel(req.Context())\n\n\t\tdefer cancel()\n\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\n\t\tif err != nil {\n\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\n\t\t\treturn\n\n\t\t}\n\n\t\tresp, md, err := local_request_SDK_SetPlayerCapacity_0(rctx, inboundMarshaler, server, req, pathParams)\n\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\n\t\tif err != nil {\n\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\n\t\t\treturn\n\n\t\t}\n\n\n\n\t\tforward_SDK_SetPlayerCapacity_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\n\n\t})\n\n\n\n\tmux.Handle(\"GET\", pattern_SDK_GetPlayerCapacity_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\n\t\tctx, cancel := context.WithCancel(req.Context())\n\n\t\tdefer cancel()\n\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\n\t\tif err != nil {\n\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\n\t\t\treturn\n\n\t\t}\n\n\t\tresp, md, err := local_request_SDK_GetPlayerCapacity_0(rctx, inboundMarshaler, server, req, pathParams)\n\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\n\t\tif err != nil {\n\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\n\t\t\treturn\n\n\t\t}\n\n\n\n\t\tforward_SDK_GetPlayerCapacity_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\n\n\t})\n\n\n\n\tmux.Handle(\"GET\", pattern_SDK_GetPlayerCount_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\n\t\tctx, cancel := context.WithCancel(req.Context())\n\n\t\tdefer cancel()\n\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\n\t\tif err != nil {\n\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\n\t\t\treturn\n\n\t\t}\n\n\t\tresp, md, err := local_request_SDK_GetPlayerCount_0(rctx, inboundMarshaler, server, req, pathParams)\n\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\n\t\tif err != nil {\n\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\n\t\t\treturn\n\n\t\t}\n\n\n\n\t\tforward_SDK_GetPlayerCount_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\n\n\t})\n\n\n\n\tmux.Handle(\"GET\", pattern_SDK_IsPlayerConnected_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\n\t\tctx, cancel := context.WithCancel(req.Context())\n\n\t\tdefer cancel()\n\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\n\t\tif err != nil {\n\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\n\t\t\treturn\n\n\t\t}\n\n\t\tresp, md, err := local_request_SDK_IsPlayerConnected_0(rctx, inboundMarshaler, server, req, pathParams)\n\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\n\t\tif err != nil {\n\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, 
err)\n\n\t\t\treturn\n\n\t\t}\n\n\n\n\t\tforward_SDK_IsPlayerConnected_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\n\n\t})\n\n\n\n\tmux.Handle(\"GET\", pattern_SDK_GetConnectedPlayers_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {\n\n\t\tctx, cancel := context.WithCancel(req.Context())\n\n\t\tdefer cancel()\n\n\t\tinboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)\n\n\t\trctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)\n\n\t\tif err != nil {\n\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\n\t\t\treturn\n\n\t\t}\n\n\t\tresp, md, err := local_request_SDK_GetConnectedPlayers_0(rctx, inboundMarshaler, server, req, pathParams)\n\n\t\tctx = runtime.NewServerMetadataContext(ctx, md)\n\n\t\tif err != nil {\n\n\t\t\truntime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)\n\n\t\t\treturn\n\n\t\t}\n\n\n\n\t\tforward_SDK_GetConnectedPlayers_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)\n\n\n\n\t})\n\n\n\n\treturn nil\n", "file_path": "pkg/sdk/alpha/alpha.pb.gw.go", "rank": 62, "score": 117206.85220608147 }, { "content": "\tforward_SDK_PlayerDisconnect_0 = runtime.ForwardResponseMessage\n", "file_path": "pkg/sdk/alpha/alpha.pb.gw.go", "rank": 63, "score": 117206.85220608147 }, { "content": "\tforward_SDK_PlayerConnect_0 = runtime.ForwardResponseMessage\n", "file_path": "pkg/sdk/alpha/alpha.pb.gw.go", "rank": 64, "score": 117206.85220608147 }, { "content": "func request_SDK_IsPlayerConnected_0(ctx context.Context, marshaler runtime.Marshaler, client SDKClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {\n\n\tvar protoReq PlayerID\n\n\tvar metadata runtime.ServerMetadata\n\n\n\n\tvar (\n\n\t\tval string\n\n\t\tok bool\n\n\t\terr error\n\n\t\t_ = err\n\n\t)\n\n\n\n\tval, ok = pathParams[\"playerID\"]\n\n\tif !ok {\n\n\t\treturn nil, metadata, status.Errorf(codes.InvalidArgument, \"missing parameter %s\", \"playerID\")\n\n\t}\n\n\n\n\tprotoReq.PlayerID, err = runtime.String(val)\n\n\n\n\tif err != nil {\n\n\t\treturn nil, metadata, status.Errorf(codes.InvalidArgument, \"type mismatch, parameter: %s, error: %v\", \"playerID\", err)\n\n\t}\n\n\n\n\tmsg, err := client.IsPlayerConnected(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))\n\n\treturn msg, metadata, err\n\n\n", "file_path": "pkg/sdk/alpha/alpha.pb.gw.go", "rank": 65, "score": 117206.85220608147 }, { "content": "func RegisterSDKHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) {\n\n\tconn, err := grpc.Dial(endpoint, opts...)\n\n\tif err != nil {\n\n\t\treturn err\n\n\t}\n\n\tdefer func() {\n\n\t\tif err != nil {\n\n\t\t\tif cerr := conn.Close(); cerr != nil {\n\n\t\t\t\tgrpclog.Infof(\"Failed to close conn to %s: %v\", endpoint, cerr)\n\n\t\t\t}\n\n\t\t\treturn\n\n\t\t}\n\n\t\tgo func() {\n\n\t\t\t<-ctx.Done()\n\n\t\t\tif cerr := conn.Close(); cerr != nil {\n\n\t\t\t\tgrpclog.Infof(\"Failed to close conn to %s: %v\", endpoint, cerr)\n\n\t\t\t}\n\n\t\t}()\n\n\t}()\n\n\n\n\treturn RegisterSDKHandler(ctx, mux, conn)\n", "file_path": "pkg/sdk/alpha/alpha.pb.gw.go", "rank": 66, "score": 117206.85220608147 }, { "content": "func request_SDK_PlayerDisconnect_0(ctx context.Context, marshaler runtime.Marshaler, client SDKClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {\n\n\tvar 
protoReq PlayerID\n\n\tvar metadata runtime.ServerMetadata\n\n\n\n\tnewReader, berr := utilities.IOReaderFactory(req.Body)\n\n\tif berr != nil {\n\n\t\treturn nil, metadata, status.Errorf(codes.InvalidArgument, \"%v\", berr)\n\n\t}\n\n\tif err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF {\n\n\t\treturn nil, metadata, status.Errorf(codes.InvalidArgument, \"%v\", err)\n\n\t}\n\n\n\n\tmsg, err := client.PlayerDisconnect(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))\n\n\treturn msg, metadata, err\n\n\n", "file_path": "pkg/sdk/alpha/alpha.pb.gw.go", "rank": 67, "score": 117206.85220608147 }, { "content": "func _SDK_GetPlayerCount_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {\n\n\tin := new(Empty)\n\n\tif err := dec(in); err != nil {\n\n\t\treturn nil, err\n\n\t}\n\n\tif interceptor == nil {\n\n\t\treturn srv.(SDKServer).GetPlayerCount(ctx, in)\n\n\t}\n\n\tinfo := &grpc.UnaryServerInfo{\n\n\t\tServer: srv,\n\n\t\tFullMethod: \"/agones.dev.sdk.alpha.SDK/GetPlayerCount\",\n\n\t}\n\n\thandler := func(ctx context.Context, req interface{}) (interface{}, error) {\n\n\t\treturn srv.(SDKServer).GetPlayerCount(ctx, req.(*Empty))\n\n\t}\n\n\treturn interceptor(ctx, in, info, handler)\n", "file_path": "pkg/sdk/alpha/alpha.pb.go", "rank": 68, "score": 117206.85220608147 }, { "content": "func _SDK_GetPlayerCapacity_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {\n\n\tin := new(Empty)\n\n\tif err := dec(in); err != nil {\n\n\t\treturn nil, err\n\n\t}\n\n\tif interceptor == nil {\n\n\t\treturn srv.(SDKServer).GetPlayerCapacity(ctx, in)\n\n\t}\n\n\tinfo := &grpc.UnaryServerInfo{\n\n\t\tServer: srv,\n\n\t\tFullMethod: \"/agones.dev.sdk.alpha.SDK/GetPlayerCapacity\",\n\n\t}\n\n\thandler := func(ctx context.Context, req interface{}) (interface{}, error) {\n\n\t\treturn srv.(SDKServer).GetPlayerCapacity(ctx, req.(*Empty))\n\n\t}\n\n\treturn interceptor(ctx, in, info, handler)\n", "file_path": "pkg/sdk/alpha/alpha.pb.go", "rank": 69, "score": 117206.85220608147 }, { "content": "\tforward_SDK_IsPlayerConnected_0 = runtime.ForwardResponseMessage\n", "file_path": "pkg/sdk/alpha/alpha.pb.gw.go", "rank": 70, "score": 117206.85220608147 }, { "content": "\tpattern_SDK_PlayerDisconnect_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{\"alpha\", \"player\", \"disconnect\"}, \"\", runtime.AssumeColonVerbOpt(true)))\n", "file_path": "pkg/sdk/alpha/alpha.pb.gw.go", "rank": 71, "score": 117206.85220608147 }, { "content": "\tpattern_SDK_PlayerConnect_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{\"alpha\", \"player\", \"connect\"}, \"\", runtime.AssumeColonVerbOpt(true)))\n", "file_path": "pkg/sdk/alpha/alpha.pb.gw.go", "rank": 72, "score": 117206.85220608147 }, { "content": "func _SDK_GetConnectedPlayers_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {\n\n\tin := new(Empty)\n\n\tif err := dec(in); err != nil {\n\n\t\treturn nil, err\n\n\t}\n\n\tif interceptor == nil {\n\n\t\treturn srv.(SDKServer).GetConnectedPlayers(ctx, in)\n\n\t}\n\n\tinfo := &grpc.UnaryServerInfo{\n\n\t\tServer: srv,\n\n\t\tFullMethod: \"/agones.dev.sdk.alpha.SDK/GetConnectedPlayers\",\n\n\t}\n\n\thandler := func(ctx context.Context, req 
interface{}) (interface{}, error) {\n\n\t\treturn srv.(SDKServer).GetConnectedPlayers(ctx, req.(*Empty))\n\n\t}\n\n\treturn interceptor(ctx, in, info, handler)\n", "file_path": "pkg/sdk/alpha/alpha.pb.go", "rank": 73, "score": 117206.85220608147 }, { "content": "func _SDK_SetPlayerCapacity_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {\n\n\tin := new(Count)\n\n\tif err := dec(in); err != nil {\n\n\t\treturn nil, err\n\n\t}\n\n\tif interceptor == nil {\n\n\t\treturn srv.(SDKServer).SetPlayerCapacity(ctx, in)\n\n\t}\n\n\tinfo := &grpc.UnaryServerInfo{\n\n\t\tServer: srv,\n\n\t\tFullMethod: \"/agones.dev.sdk.alpha.SDK/SetPlayerCapacity\",\n\n\t}\n\n\thandler := func(ctx context.Context, req interface{}) (interface{}, error) {\n\n\t\treturn srv.(SDKServer).SetPlayerCapacity(ctx, req.(*Count))\n\n\t}\n\n\treturn interceptor(ctx, in, info, handler)\n", "file_path": "pkg/sdk/alpha/alpha.pb.go", "rank": 74, "score": 117206.85220608147 }, { "content": "func request_SDK_PlayerConnect_0(ctx context.Context, marshaler runtime.Marshaler, client SDKClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {\n\n\tvar protoReq PlayerID\n\n\tvar metadata runtime.ServerMetadata\n\n\n\n\tnewReader, berr := utilities.IOReaderFactory(req.Body)\n\n\tif berr != nil {\n\n\t\treturn nil, metadata, status.Errorf(codes.InvalidArgument, \"%v\", berr)\n\n\t}\n\n\tif err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF {\n\n\t\treturn nil, metadata, status.Errorf(codes.InvalidArgument, \"%v\", err)\n\n\t}\n\n\n\n\tmsg, err := client.PlayerConnect(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))\n\n\treturn msg, metadata, err\n\n\n", "file_path": "pkg/sdk/alpha/alpha.pb.gw.go", "rank": 75, "score": 117206.85220608147 }, { "content": "\tpattern_SDK_IsPlayerConnected_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{\"alpha\", \"player\", \"connected\", \"playerID\"}, \"\", runtime.AssumeColonVerbOpt(true)))\n", "file_path": "pkg/sdk/alpha/alpha.pb.gw.go", "rank": 76, "score": 117206.85220608147 }, { "content": "{\n\n public interface IAgonesAlphaSDK : IDisposable\n\n {\n\n Task<long> GetPlayerCapacityAsync();\n\n Task<Status> SetPlayerCapacityAsync(long capacity);\n\n Task<bool> PlayerConnectAsync(string id);\n\n Task<bool> PlayerDisconnectAsync(string id);\n\n Task<long> GetPlayerCountAsync();\n\n Task<bool> IsPlayerConnectedAsync(string id);\n\n Task<List<string>> GetConnectedPlayersAsync();\n\n }\n\n}\n", "file_path": "sdks/csharp/sdk/IAgonesAlphaSDK.cs", "rank": 77, "score": 116769.03888949048 }, { "content": "// Copyright 2020 Google LLC All Rights Reserved.\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nusing System;\n\nusing System.Collections.Generic;\n\nusing System.Threading.Tasks;\n\nusing 
Grpc.Core;\n\n\n\nnamespace Agones\n", "file_path": "sdks/csharp/sdk/IAgonesAlphaSDK.cs", "rank": 78, "score": 116767.71511286555 }, { "content": "type SDKClient interface {\n\n\t// Call when the GameServer is ready\n\n\tReady(ctx context.Context, in *Empty, opts ...grpc.CallOption) (*Empty, error)\n\n\t// Call to self Allocation the GameServer\n\n\tAllocate(ctx context.Context, in *Empty, opts ...grpc.CallOption) (*Empty, error)\n\n\t// Call when the GameServer is shutting down\n\n\tShutdown(ctx context.Context, in *Empty, opts ...grpc.CallOption) (*Empty, error)\n\n\t// Send a Empty every d Duration to declare that this GameSever is healthy\n\n\tHealth(ctx context.Context, opts ...grpc.CallOption) (SDK_HealthClient, error)\n\n\t// Retrieve the current GameServer data\n\n\tGetGameServer(ctx context.Context, in *Empty, opts ...grpc.CallOption) (*GameServer, error)\n\n\t// Send GameServer details whenever the GameServer is updated\n\n\tWatchGameServer(ctx context.Context, in *Empty, opts ...grpc.CallOption) (SDK_WatchGameServerClient, error)\n\n\t// Apply a Label to the backing GameServer metadata\n\n\tSetLabel(ctx context.Context, in *KeyValue, opts ...grpc.CallOption) (*Empty, error)\n\n\t// Apply a Annotation to the backing GameServer metadata\n\n\tSetAnnotation(ctx context.Context, in *KeyValue, opts ...grpc.CallOption) (*Empty, error)\n\n\t// Marks the GameServer as the Reserved state for Duration\n\n\tReserve(ctx context.Context, in *Duration, opts ...grpc.CallOption) (*Empty, error)\n", "file_path": "pkg/sdk/sdk.pb.go", "rank": 79, "score": 115913.4337937469 }, { "content": "\tforward_SDK_GetPlayerCapacity_0 = runtime.ForwardResponseMessage\n", "file_path": "pkg/sdk/alpha/alpha.pb.gw.go", "rank": 80, "score": 115686.88185767573 }, { "content": "func request_SDK_SetPlayerCapacity_0(ctx context.Context, marshaler runtime.Marshaler, client SDKClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {\n\n\tvar protoReq Count\n\n\tvar metadata runtime.ServerMetadata\n\n\n\n\tnewReader, berr := utilities.IOReaderFactory(req.Body)\n\n\tif berr != nil {\n\n\t\treturn nil, metadata, status.Errorf(codes.InvalidArgument, \"%v\", berr)\n\n\t}\n\n\tif err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF {\n\n\t\treturn nil, metadata, status.Errorf(codes.InvalidArgument, \"%v\", err)\n\n\t}\n\n\n\n\tmsg, err := client.SetPlayerCapacity(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))\n\n\treturn msg, metadata, err\n\n\n", "file_path": "pkg/sdk/alpha/alpha.pb.gw.go", "rank": 81, "score": 115686.88185767573 }, { "content": "func request_SDK_GetPlayerCount_0(ctx context.Context, marshaler runtime.Marshaler, client SDKClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {\n\n\tvar protoReq Empty\n\n\tvar metadata runtime.ServerMetadata\n\n\n\n\tmsg, err := client.GetPlayerCount(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))\n\n\treturn msg, metadata, err\n\n\n", "file_path": "pkg/sdk/alpha/alpha.pb.gw.go", "rank": 82, "score": 115686.88185767573 }, { "content": "\tpattern_SDK_GetConnectedPlayers_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{\"alpha\", \"player\", \"connected\"}, \"\", runtime.AssumeColonVerbOpt(true)))\n", "file_path": "pkg/sdk/alpha/alpha.pb.gw.go", "rank": 83, "score": 115686.88185767573 }, { "content": "\tpattern_SDK_GetPlayerCapacity_0 = 
runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{\"alpha\", \"player\", \"capacity\"}, \"\", runtime.AssumeColonVerbOpt(true)))\n", "file_path": "pkg/sdk/alpha/alpha.pb.gw.go", "rank": 84, "score": 115686.88185767573 }, { "content": "func request_SDK_GetConnectedPlayers_0(ctx context.Context, marshaler runtime.Marshaler, client SDKClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {\n\n\tvar protoReq Empty\n\n\tvar metadata runtime.ServerMetadata\n\n\n\n\tmsg, err := client.GetConnectedPlayers(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))\n\n\treturn msg, metadata, err\n\n\n", "file_path": "pkg/sdk/alpha/alpha.pb.gw.go", "rank": 85, "score": 115686.88185767573 }, { "content": "\tforward_SDK_SetPlayerCapacity_0 = runtime.ForwardResponseMessage\n", "file_path": "pkg/sdk/alpha/alpha.pb.gw.go", "rank": 86, "score": 115686.88185767573 }, { "content": "func local_request_SDK_PlayerConnect_0(ctx context.Context, marshaler runtime.Marshaler, server SDKServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {\n\n\tvar protoReq PlayerID\n\n\tvar metadata runtime.ServerMetadata\n\n\n\n\tnewReader, berr := utilities.IOReaderFactory(req.Body)\n\n\tif berr != nil {\n\n\t\treturn nil, metadata, status.Errorf(codes.InvalidArgument, \"%v\", berr)\n\n\t}\n\n\tif err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF {\n\n\t\treturn nil, metadata, status.Errorf(codes.InvalidArgument, \"%v\", err)\n\n\t}\n\n\n\n\tmsg, err := server.PlayerConnect(ctx, &protoReq)\n\n\treturn msg, metadata, err\n\n\n", "file_path": "pkg/sdk/alpha/alpha.pb.gw.go", "rank": 87, "score": 115686.88185767573 }, { "content": "func local_request_SDK_IsPlayerConnected_0(ctx context.Context, marshaler runtime.Marshaler, server SDKServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {\n\n\tvar protoReq PlayerID\n\n\tvar metadata runtime.ServerMetadata\n\n\n\n\tvar (\n\n\t\tval string\n\n\t\tok bool\n\n\t\terr error\n\n\t\t_ = err\n\n\t)\n\n\n\n\tval, ok = pathParams[\"playerID\"]\n\n\tif !ok {\n\n\t\treturn nil, metadata, status.Errorf(codes.InvalidArgument, \"missing parameter %s\", \"playerID\")\n\n\t}\n\n\n\n\tprotoReq.PlayerID, err = runtime.String(val)\n\n\n\n\tif err != nil {\n\n\t\treturn nil, metadata, status.Errorf(codes.InvalidArgument, \"type mismatch, parameter: %s, error: %v\", \"playerID\", err)\n\n\t}\n\n\n\n\tmsg, err := server.IsPlayerConnected(ctx, &protoReq)\n\n\treturn msg, metadata, err\n\n\n", "file_path": "pkg/sdk/alpha/alpha.pb.gw.go", "rank": 88, "score": 115686.88185767573 }, { "content": "\tforward_SDK_GetConnectedPlayers_0 = runtime.ForwardResponseMessage\n", "file_path": "pkg/sdk/alpha/alpha.pb.gw.go", "rank": 89, "score": 115686.88185767573 }, { "content": "func request_SDK_GetPlayerCapacity_0(ctx context.Context, marshaler runtime.Marshaler, client SDKClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {\n\n\tvar protoReq Empty\n\n\tvar metadata runtime.ServerMetadata\n\n\n\n\tmsg, err := client.GetPlayerCapacity(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))\n\n\treturn msg, metadata, err\n\n\n", "file_path": "pkg/sdk/alpha/alpha.pb.gw.go", "rank": 90, "score": 115686.88185767573 }, { "content": "\tforward_SDK_GetPlayerCount_0 = runtime.ForwardResponseMessage\n", "file_path": 
"pkg/sdk/alpha/alpha.pb.gw.go", "rank": 91, "score": 115686.88185767573 }, { "content": "func local_request_SDK_PlayerDisconnect_0(ctx context.Context, marshaler runtime.Marshaler, server SDKServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {\n\n\tvar protoReq PlayerID\n\n\tvar metadata runtime.ServerMetadata\n\n\n\n\tnewReader, berr := utilities.IOReaderFactory(req.Body)\n\n\tif berr != nil {\n\n\t\treturn nil, metadata, status.Errorf(codes.InvalidArgument, \"%v\", berr)\n\n\t}\n\n\tif err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF {\n\n\t\treturn nil, metadata, status.Errorf(codes.InvalidArgument, \"%v\", err)\n\n\t}\n\n\n\n\tmsg, err := server.PlayerDisconnect(ctx, &protoReq)\n\n\treturn msg, metadata, err\n\n\n", "file_path": "pkg/sdk/alpha/alpha.pb.gw.go", "rank": 92, "score": 115686.88185767573 }, { "content": "\tpattern_SDK_SetPlayerCapacity_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{\"alpha\", \"player\", \"capacity\"}, \"\", runtime.AssumeColonVerbOpt(true)))\n", "file_path": "pkg/sdk/alpha/alpha.pb.gw.go", "rank": 93, "score": 115686.88185767573 }, { "content": "\tpattern_SDK_GetPlayerCount_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{\"alpha\", \"player\", \"count\"}, \"\", runtime.AssumeColonVerbOpt(true)))\n", "file_path": "pkg/sdk/alpha/alpha.pb.gw.go", "rank": 94, "score": 115686.88185767573 }, { "content": "func local_request_SDK_GetConnectedPlayers_0(ctx context.Context, marshaler runtime.Marshaler, server SDKServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {\n\n\tvar protoReq Empty\n\n\tvar metadata runtime.ServerMetadata\n\n\n\n\tmsg, err := server.GetConnectedPlayers(ctx, &protoReq)\n\n\treturn msg, metadata, err\n\n\n", "file_path": "pkg/sdk/alpha/alpha.pb.gw.go", "rank": 95, "score": 114205.8295870948 }, { "content": "func local_request_SDK_GetPlayerCapacity_0(ctx context.Context, marshaler runtime.Marshaler, server SDKServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {\n\n\tvar protoReq Empty\n\n\tvar metadata runtime.ServerMetadata\n\n\n\n\tmsg, err := server.GetPlayerCapacity(ctx, &protoReq)\n\n\treturn msg, metadata, err\n\n\n", "file_path": "pkg/sdk/alpha/alpha.pb.gw.go", "rank": 96, "score": 114205.8295870948 }, { "content": "func local_request_SDK_SetPlayerCapacity_0(ctx context.Context, marshaler runtime.Marshaler, server SDKServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {\n\n\tvar protoReq Count\n\n\tvar metadata runtime.ServerMetadata\n\n\n\n\tnewReader, berr := utilities.IOReaderFactory(req.Body)\n\n\tif berr != nil {\n\n\t\treturn nil, metadata, status.Errorf(codes.InvalidArgument, \"%v\", berr)\n\n\t}\n\n\tif err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF {\n\n\t\treturn nil, metadata, status.Errorf(codes.InvalidArgument, \"%v\", err)\n\n\t}\n\n\n\n\tmsg, err := server.SetPlayerCapacity(ctx, &protoReq)\n\n\treturn msg, metadata, err\n\n\n", "file_path": "pkg/sdk/alpha/alpha.pb.gw.go", "rank": 97, "score": 114205.8295870948 }, { "content": "func local_request_SDK_GetPlayerCount_0(ctx context.Context, marshaler runtime.Marshaler, server SDKServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {\n\n\tvar protoReq Empty\n\n\tvar metadata 
runtime.ServerMetadata\n\n\n\n\tmsg, err := server.GetPlayerCount(ctx, &protoReq)\n\n\treturn msg, metadata, err\n\n\n", "file_path": "pkg/sdk/alpha/alpha.pb.gw.go", "rank": 98, "score": 114205.8295870948 }, { "content": "\n\nuse api::sdk_client::SdkClient;\n\n\n\n/// Alpha is an instance of the Agones Alpha SDK\n\n#[derive(Clone)]\n\npub struct Alpha {\n\n client: SdkClient<Channel>,\n\n}\n\n\n\nimpl Alpha {\n\n /// new creates a new instance of the Alpha SDK\n\n pub(crate) fn new(ch: Channel) -> Self {\n\n Self {\n\n client: SdkClient::new(ch),\n\n }\n\n }\n\n\n\n /// This returns the last player capacity that was set through the SDK.\n\n /// If the player capacity is set from outside the SDK, use\n\n /// [`Sdk::get_gameserver`] instead.\n", "file_path": "sdks/rust/src/alpha.rs", "rank": 99, "score": 37.3785494206719 } ]
Rust
athena-coreserver/src/services/unit_service.rs
athena-intelli/athena-rs
7546137e14c3248fe20c2a68626beffc0c9ab7d3
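// Stub implementation of the tonic-generated `UnitService` gRPC trait for the Athena
// core server; every RPC handler below is currently an unimplemented `todo!()`.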
use tokio_stream::wrappers::ReceiverStream;
use tonic::{Request, Response, Status};

use athena_api::pb::structures::{ChangePriorityRequest, Unit};
use athena_api::pb::unit_service::*;
use athena_api::pb::unit_service::unit_service_server::UnitService;

#[derive(Default)]
pub struct UnitServiceImpl;

#[tonic::async_trait]
impl UnitService for UnitServiceImpl {
    async fn add_to_queue(&self, request: Request<AddUnitToQueueRequest>) -> Result<Response<Unit>, Status> { todo!() }
    async fn save_unit(&self, request: Request<Unit>) -> Result<Response<Unit>, Status> { todo!() }
    async fn cancel(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() }
    async fn close(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() }
    async fn finish(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() }
    async fn hold(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() }
    async fn pause(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() }
    async fn quarantine(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() }
    async fn release(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() }
    async fn ship(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() }
    async fn scrap(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() }
    async fn undo_close(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() }
    async fn undo_finish(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() }
    async fn undo_scrap(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() }
    async fn undo_ship(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() }
    async fn change_bom(&self, request: Request<UnitChangeBomRequest>) -> Result<Response<Unit>, Status> { todo!() }
    async fn change_bom_from_part(&self, request: Request<UnitChangeBomFromPartRequest>) -> Result<Response<Unit>, Status> { todo!() }
    async fn change_lot(&self, request: Request<ChangeLotRequest>) -> Result<Response<Unit>, Status> { todo!() }
    async fn change_part(&self, request: Request<UnitChangePartRequest>) -> Result<Response<Unit>, Status> { todo!() }
    async fn change_part_only(&self, request: Request<UnitChangePartRequest>) -> Result<Response<Unit>, Status> { todo!() }
    async fn change_priority(&self, request: Request<ChangePriorityRequest>) -> Result<Response<Unit>, Status> { todo!() }
    async fn change_production_line(&self, request: Request<ChangeProductionLineRequest>) -> Result<Response<Unit>, Status> { todo!() }
    async fn change_route(&self, request: Request<UnitChangeRouteRequest>) -> Result<Response<Unit>, Status> { todo!() }
    async fn change_serial_number(&self, request: Request<ChangeSerialNumberRequest>) -> Result<Response<Unit>, Status> { todo!() }
    async fn complete_at_route_step(&self, request: Request<UnitCompleteAtRouteStepRequest>) -> Result<Response<Unit>, Status> { todo!() }
    async fn start_at_route_step(&self, request: Request<UnitStartAtRouteStepRequest>) -> Result<Response<Unit>, Status> { todo!() }
    async fn restart(&self, request: Request<RestartUnitRequest>) -> Result<Response<Unit>, Status> { todo!() }
    async fn create_stand_alone_unit(&self, request: Request<CreateStandAloneUnitRequest>) -> Result<Response<Unit>, Status> { todo!() }

    type CreateStandAloneUnitsStream = ReceiverStream<Result<Unit, Status>>;

    async fn create_stand_alone_units(&self, request: Request<CreateStandAloneUnitRequest>) -> Result<Response<Self::CreateStandAloneUnitsStream>, Status> { todo!() }
}
use tokio_stream::wrappers::ReceiverStream; use tonic::{Request, Response, Status}; use athena_api::pb::structures::{ChangePriorityRequest, Unit}; use athena_api::pb::unit_service::*; use athena_api::pb::unit_service::unit_service_server::UnitService; #[derive(Default)] pub struct UnitServiceImpl; #[tonic::async_trait] impl UnitService for UnitServiceImpl { async fn add_to_queue(&self, request: Request<AddUnitToQueueRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn save_unit(&self, request: Request<Unit>) -> Result<Response<Unit>, Status> { todo!() } async fn cancel(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn close(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn finish(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn hold(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn pause(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn quarantine(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn release(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn ship(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn scrap(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn undo_close(&self, request: Request<TransitionUnitRequest>) -> Result<Respo
, Status> { todo!() } async fn complete_at_route_step(&self, request: Request<UnitCompleteAtRouteStepRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn start_at_route_step(&self, request: Request<UnitStartAtRouteStepRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn restart(&self, request: Request<RestartUnitRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn create_stand_alone_unit(&self, request: Request<CreateStandAloneUnitRequest>) -> Result<Response<Unit>, Status> { todo!() } type CreateStandAloneUnitsStream = ReceiverStream<Result<Unit, Status>>; async fn create_stand_alone_units(&self, request: Request<CreateStandAloneUnitRequest>) -> Result<Response<Self::CreateStandAloneUnitsStream>, Status> { todo!() } }
nse<Unit>, Status> { todo!() } async fn undo_finish(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn undo_scrap(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn undo_ship(&self, request: Request<TransitionUnitRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn change_bom(&self, request: Request<UnitChangeBomRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn change_bom_from_part(&self, request: Request<UnitChangeBomFromPartRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn change_lot(&self, request: Request<ChangeLotRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn change_part(&self, request: Request<UnitChangePartRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn change_part_only(&self, request: Request<UnitChangePartRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn change_priority(&self, request: Request<ChangePriorityRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn change_production_line(&self, request: Request<ChangeProductionLineRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn change_route(&self, request: Request<UnitChangeRouteRequest>) -> Result<Response<Unit>, Status> { todo!() } async fn change_serial_number(&self, request: Request<ChangeSerialNumberRequest>) -> Result<Response<Unit>
random
[ { "content": " #[doc = \"Generated trait containing gRPC methods that should be implemented for use with UnitServiceServer.\"]\n\n #[async_trait]\n\n pub trait UnitService: Send + Sync + 'static {\n\n async fn add_to_queue(\n\n &self,\n\n request: tonic::Request<super::AddUnitToQueueRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status>;\n\n async fn save_unit(\n\n &self,\n\n request: tonic::Request<super::super::structures::Unit>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status>;\n\n async fn cancel(\n\n &self,\n\n request: tonic::Request<super::TransitionUnitRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status>;\n\n async fn close(\n\n &self,\n\n request: tonic::Request<super::TransitionUnitRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status>;\n\n async fn finish(\n\n &self,\n\n request: tonic::Request<super::TransitionUnitRequest>,\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 0, "score": 84804.55003025028 }, { "content": "fn main() {\n\n let out_dir = PathBuf::from(\"src/pb\");\n\n tonic_build::configure()\n\n .out_dir(out_dir)\n\n .compile(&[\n\n \"proto/athenaapis/package_service.proto\",\n\n \"proto/athenaapis/lot_service.proto\",\n\n \"proto/athenaapis/unit_service.proto\",\n\n \"proto/athenaapis/work_order.proto\",\n\n \"proto/athenaapis/work_station.proto\",\n\n \"proto/athenaapis/security.proto\",\n\n \"proto/athenaapis/object_retrieval.proto\",\n\n \"proto/athenaapis/object_storage.proto\",\n\n ], &[\"proto\"])\n\n .unwrap();\n\n\n\n tonic_build::configure()\n\n .build_server(false)\n\n .compile(\n\n &[\"proto/googleapis/google/pubsub/v1/pubsub.proto\"],\n\n &[\"proto/googleapis\"],\n\n )\n\n .unwrap();\n\n}", "file_path": "athena-api/build.rs", "rank": 1, "score": 56901.13602717923 }, { "content": "fn main() {\n\n println!(\"Hello, world!\");\n\n}\n", "file_path": "athena-web/src/main.rs", "rank": 2, "score": 55656.977164243996 }, { "content": "fn main() {\n\n println!(\"Hello, world!\");\n\n}\n", "file_path": "athena-minserver/src/main.rs", "rank": 3, "score": 55656.977164243996 }, { "content": " #[doc = \"Generated trait containing gRPC methods that should be implemented for use with SecurityServiceServer.\"]\n\n #[async_trait]\n\n pub trait SecurityService: Send + Sync + 'static {\n\n #[doc = \" user login\"]\n\n async fn login(\n\n &self,\n\n request: tonic::Request<super::UsernameAndPasswordToken>,\n\n ) -> Result<tonic::Response<super::LoginResponse>, tonic::Status>;\n\n #[doc = \" user logout\"]\n\n async fn logout(\n\n &self,\n\n request: tonic::Request<super::UsernameAndPasswordToken>,\n\n ) -> Result<tonic::Response<()>, tonic::Status>;\n\n #[doc = \" change user password\"]\n\n async fn change_password(\n\n &self,\n\n request: tonic::Request<super::ChangePasswordRequest>,\n\n ) -> Result<tonic::Response<()>, tonic::Status>;\n\n }\n\n #[derive(Debug)]\n\n pub struct SecurityServiceServer<T: SecurityService> {\n\n inner: _Inner<T>,\n", "file_path": "athena-api/src/pb/security_service.rs", "rank": 4, "score": 41998.70037358366 }, { "content": " #[doc = \"Generated trait containing gRPC methods that should be implemented for use with PackageServiceServer.\"]\n\n #[async_trait]\n\n pub trait PackageService: Send + Sync + 'static {\n\n #[doc = \"This method is used to add a container to a container\"]\n\n async fn add_container_to_container(\n\n &self,\n\n request: tonic::Request<super::HandleContainerRequest>,\n\n ) 
-> Result<tonic::Response<super::super::structures::Container>, tonic::Status>;\n\n #[doc = \"This method is used to add a lot to a container\"]\n\n async fn add_lot_to_container(\n\n &self,\n\n request: tonic::Request<super::HandleLotRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Container>, tonic::Status>;\n\n #[doc = \" This method is used to record a transaction indicating that a container and all\"]\n\n #[doc = \" of its contents (any contained containers, lots and units) has been moved to\"]\n\n #[doc = \" the queue specified.\"]\n\n async fn add_container_to_queue(\n\n &self,\n\n request: tonic::Request<super::AddContainerToQueueRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Container>, tonic::Status>;\n\n #[doc = \"This method is used to add a unit to a container\"]\n\n async fn add_unit_to_container(\n", "file_path": "athena-api/src/pb/package_service.rs", "rank": 5, "score": 41998.70037358366 }, { "content": " #[doc = \"Generated trait containing gRPC methods that should be implemented for use with LotServiceServer.\"]\n\n #[async_trait]\n\n pub trait LotService: Send + Sync + 'static {\n\n async fn cancel(\n\n &self,\n\n request: tonic::Request<super::TransitionLotRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Lot>, tonic::Status>;\n\n async fn close(\n\n &self,\n\n request: tonic::Request<super::TransitionLotRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Lot>, tonic::Status>;\n\n async fn finish(\n\n &self,\n\n request: tonic::Request<super::TransitionLotRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Lot>, tonic::Status>;\n\n async fn hold(\n\n &self,\n\n request: tonic::Request<super::TransitionLotRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Lot>, tonic::Status>;\n\n async fn pause(\n\n &self,\n\n request: tonic::Request<super::TransitionLotRequest>,\n", "file_path": "athena-api/src/pb/lot_service.rs", "rank": 6, "score": 41998.70037358366 }, { "content": " #[doc = \"Generated trait containing gRPC methods that should be implemented for use with ObjectRetrievalServiceServer.\"]\n\n #[async_trait]\n\n pub trait ObjectRetrievalService: Send + Sync + 'static {\n\n async fn get_factory_by_id(\n\n &self,\n\n request: tonic::Request<super::super::structures::ObjectId>,\n\n ) -> Result<tonic::Response<super::super::structures::Factory>, tonic::Status>;\n\n async fn get_factory_by_code(\n\n &self,\n\n request: tonic::Request<super::super::structures::ObjectCode>,\n\n ) -> Result<tonic::Response<super::super::structures::Factory>, tonic::Status>;\n\n async fn get_workshop_by_id(\n\n &self,\n\n request: tonic::Request<super::super::structures::ObjectId>,\n\n ) -> Result<tonic::Response<super::super::structures::Workshop>, tonic::Status>;\n\n async fn get_workshop_by_code(\n\n &self,\n\n request: tonic::Request<super::super::structures::ObjectCode>,\n\n ) -> Result<tonic::Response<super::super::structures::Workshop>, tonic::Status>;\n\n async fn get_production_line_by_id(\n\n &self,\n\n request: tonic::Request<super::super::structures::ObjectId>,\n", "file_path": "athena-api/src/pb/object_retrieval_service.rs", "rank": 7, "score": 40436.74722880658 }, { "content": " #[doc = \"Generated trait containing gRPC methods that should be implemented for use with WorkOrderServiceServer.\"]\n\n #[async_trait]\n\n pub trait WorkOrderService: Send + Sync + 'static {\n\n async fn change_order_priority(\n\n &self,\n\n request: 
tonic::Request<super::super::structures::ChangePriorityRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::WorkOrder>, tonic::Status>;\n\n async fn close(\n\n &self,\n\n request: tonic::Request<super::TransitionOrderRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::WorkOrder>, tonic::Status>;\n\n async fn finish(\n\n &self,\n\n request: tonic::Request<super::TransitionOrderRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::WorkOrder>, tonic::Status>;\n\n async fn hold(\n\n &self,\n\n request: tonic::Request<super::TransitionOrderRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::WorkOrder>, tonic::Status>;\n\n async fn open(\n\n &self,\n\n request: tonic::Request<super::TransitionOrderRequest>,\n", "file_path": "athena-api/src/pb/work_order_service.rs", "rank": 8, "score": 40436.74722880658 }, { "content": " #[doc = \"Generated trait containing gRPC methods that should be implemented for use with ObjectStorageServiceServer.\"]\n\n #[async_trait]\n\n pub trait ObjectStorageService: Send + Sync + 'static {\n\n async fn remove_equipment(\n\n &self,\n\n request: tonic::Request<super::RemoveObjectRequest>,\n\n ) -> Result<tonic::Response<()>, tonic::Status>;\n\n async fn remove_location(\n\n &self,\n\n request: tonic::Request<super::RemoveObjectRequest>,\n\n ) -> Result<tonic::Response<()>, tonic::Status>;\n\n async fn remove_bom(\n\n &self,\n\n request: tonic::Request<super::RemoveObjectRequest>,\n\n ) -> Result<tonic::Response<()>, tonic::Status>;\n\n async fn remove_part(\n\n &self,\n\n request: tonic::Request<super::RemoveObjectRequest>,\n\n ) -> Result<tonic::Response<()>, tonic::Status>;\n\n async fn remove_production_line(\n\n &self,\n\n request: tonic::Request<super::RemoveObjectRequest>,\n", "file_path": "athena-api/src/pb/object_storage_service.rs", "rank": 9, "score": 40436.74722880658 }, { "content": " #[doc = \"Generated trait containing gRPC methods that should be implemented for use with WorkStationServiceServer.\"]\n\n #[async_trait]\n\n pub trait WorkStationService: Send + Sync + 'static {\n\n async fn add_tools(\n\n &self,\n\n request: tonic::Request<super::AddToolRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::WorkStation>, tonic::Status>;\n\n async fn change_priority(\n\n &self,\n\n request: tonic::Request<super::super::structures::ChangePriorityRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::WorkStation>, tonic::Status>;\n\n async fn remove_tools(\n\n &self,\n\n request: tonic::Request<super::RemoveToolRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::WorkStation>, tonic::Status>;\n\n }\n\n #[derive(Debug)]\n\n pub struct WorkStationServiceServer<T: WorkStationService> {\n\n inner: _Inner<T>,\n\n accept_compression_encodings: EnabledCompressionEncodings,\n\n send_compression_encodings: EnabledCompressionEncodings,\n\n }\n", "file_path": "athena-api/src/pb/work_station_service.rs", "rank": 10, "score": 40436.74722880658 }, { "content": " self.inner.unary(request.into_request(), path, codec).await\n\n }\n\n pub async fn release(\n\n &mut self,\n\n request: impl tonic::IntoRequest<super::TransitionUnitRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status> {\n\n self.inner.ready().await.map_err(|e| {\n\n tonic::Status::new(\n\n tonic::Code::Unknown,\n\n format!(\"Service was not ready: {}\", e.into()),\n\n )\n\n })?;\n\n let codec = tonic::codec::ProstCodec::default();\n\n let path = 
http::uri::PathAndQuery::from_static(\"/UnitService.UnitService/Release\");\n\n self.inner.unary(request.into_request(), path, codec).await\n\n }\n\n pub async fn ship(\n\n &mut self,\n\n request: impl tonic::IntoRequest<super::TransitionUnitRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status> {\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 17, "score": 37785.81538852993 }, { "content": " http::uri::PathAndQuery::from_static(\"/UnitService.UnitService/StartAtRouteStep\");\n\n self.inner.unary(request.into_request(), path, codec).await\n\n }\n\n pub async fn restart(\n\n &mut self,\n\n request: impl tonic::IntoRequest<super::RestartUnitRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status> {\n\n self.inner.ready().await.map_err(|e| {\n\n tonic::Status::new(\n\n tonic::Code::Unknown,\n\n format!(\"Service was not ready: {}\", e.into()),\n\n )\n\n })?;\n\n let codec = tonic::codec::ProstCodec::default();\n\n let path = http::uri::PathAndQuery::from_static(\"/UnitService.UnitService/Restart\");\n\n self.inner.unary(request.into_request(), path, codec).await\n\n }\n\n pub async fn create_stand_alone_unit(\n\n &mut self,\n\n request: impl tonic::IntoRequest<super::CreateStandAloneUnitRequest>,\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 18, "score": 37785.69316040903 }, { "content": " pub async fn hold(\n\n &mut self,\n\n request: impl tonic::IntoRequest<super::TransitionUnitRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status> {\n\n self.inner.ready().await.map_err(|e| {\n\n tonic::Status::new(\n\n tonic::Code::Unknown,\n\n format!(\"Service was not ready: {}\", e.into()),\n\n )\n\n })?;\n\n let codec = tonic::codec::ProstCodec::default();\n\n let path = http::uri::PathAndQuery::from_static(\"/UnitService.UnitService/Hold\");\n\n self.inner.unary(request.into_request(), path, codec).await\n\n }\n\n pub async fn pause(\n\n &mut self,\n\n request: impl tonic::IntoRequest<super::TransitionUnitRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status> {\n\n self.inner.ready().await.map_err(|e| {\n\n tonic::Status::new(\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 19, "score": 37785.63871218226 }, { "content": " let path = http::uri::PathAndQuery::from_static(\"/UnitService.UnitService/ChangeLot\");\n\n self.inner.unary(request.into_request(), path, codec).await\n\n }\n\n pub async fn change_part(\n\n &mut self,\n\n request: impl tonic::IntoRequest<super::UnitChangePartRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status> {\n\n self.inner.ready().await.map_err(|e| {\n\n tonic::Status::new(\n\n tonic::Code::Unknown,\n\n format!(\"Service was not ready: {}\", e.into()),\n\n )\n\n })?;\n\n let codec = tonic::codec::ProstCodec::default();\n\n let path = http::uri::PathAndQuery::from_static(\"/UnitService.UnitService/ChangePart\");\n\n self.inner.unary(request.into_request(), path, codec).await\n\n }\n\n pub async fn change_part_only(\n\n &mut self,\n\n request: impl tonic::IntoRequest<super::UnitChangePartRequest>,\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 20, "score": 37785.5822786459 }, { "content": " );\n\n self.inner.unary(request.into_request(), path, codec).await\n\n }\n\n pub async fn change_route(\n\n &mut self,\n\n request: impl tonic::IntoRequest<super::UnitChangeRouteRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, 
tonic::Status> {\n\n self.inner.ready().await.map_err(|e| {\n\n tonic::Status::new(\n\n tonic::Code::Unknown,\n\n format!(\"Service was not ready: {}\", e.into()),\n\n )\n\n })?;\n\n let codec = tonic::codec::ProstCodec::default();\n\n let path = http::uri::PathAndQuery::from_static(\"/UnitService.UnitService/ChangeRoute\");\n\n self.inner.unary(request.into_request(), path, codec).await\n\n }\n\n pub async fn change_serial_number(\n\n &mut self,\n\n request: impl tonic::IntoRequest<super::ChangeSerialNumberRequest>,\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 21, "score": 37785.54468280823 }, { "content": " pub async fn change_bom(\n\n &mut self,\n\n request: impl tonic::IntoRequest<super::UnitChangeBomRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status> {\n\n self.inner.ready().await.map_err(|e| {\n\n tonic::Status::new(\n\n tonic::Code::Unknown,\n\n format!(\"Service was not ready: {}\", e.into()),\n\n )\n\n })?;\n\n let codec = tonic::codec::ProstCodec::default();\n\n let path = http::uri::PathAndQuery::from_static(\"/UnitService.UnitService/ChangeBom\");\n\n self.inner.unary(request.into_request(), path, codec).await\n\n }\n\n pub async fn change_bom_from_part(\n\n &mut self,\n\n request: impl tonic::IntoRequest<super::UnitChangeBomFromPartRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status> {\n\n self.inner.ready().await.map_err(|e| {\n\n tonic::Status::new(\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 22, "score": 37785.30662437169 }, { "content": " let codec = tonic::codec::ProstCodec::default();\n\n let path = http::uri::PathAndQuery::from_static(\"/UnitService.UnitService/AddToQueue\");\n\n self.inner.unary(request.into_request(), path, codec).await\n\n }\n\n pub async fn save_unit(\n\n &mut self,\n\n request: impl tonic::IntoRequest<super::super::structures::Unit>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status> {\n\n self.inner.ready().await.map_err(|e| {\n\n tonic::Status::new(\n\n tonic::Code::Unknown,\n\n format!(\"Service was not ready: {}\", e.into()),\n\n )\n\n })?;\n\n let codec = tonic::codec::ProstCodec::default();\n\n let path = http::uri::PathAndQuery::from_static(\"/UnitService.UnitService/SaveUnit\");\n\n self.inner.unary(request.into_request(), path, codec).await\n\n }\n\n pub async fn cancel(\n\n &mut self,\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 23, "score": 37784.58914751129 }, { "content": " let codec = tonic::codec::ProstCodec::default();\n\n let path = http::uri::PathAndQuery::from_static(\"/UnitService.UnitService/Scrap\");\n\n self.inner.unary(request.into_request(), path, codec).await\n\n }\n\n pub async fn undo_close(\n\n &mut self,\n\n request: impl tonic::IntoRequest<super::TransitionUnitRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status> {\n\n self.inner.ready().await.map_err(|e| {\n\n tonic::Status::new(\n\n tonic::Code::Unknown,\n\n format!(\"Service was not ready: {}\", e.into()),\n\n )\n\n })?;\n\n let codec = tonic::codec::ProstCodec::default();\n\n let path = http::uri::PathAndQuery::from_static(\"/UnitService.UnitService/UndoClose\");\n\n self.inner.unary(request.into_request(), path, codec).await\n\n }\n\n pub async fn undo_finish(\n\n &mut self,\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 24, "score": 37784.45746251622 }, { "content": " #[doc = r\" error.\"]\n\n pub fn send_gzip(mut self) -> Self {\n\n self.inner = 
self.inner.send_gzip();\n\n self\n\n }\n\n #[doc = r\" Enable decompressing responses with `gzip`.\"]\n\n pub fn accept_gzip(mut self) -> Self {\n\n self.inner = self.inner.accept_gzip();\n\n self\n\n }\n\n pub async fn add_to_queue(\n\n &mut self,\n\n request: impl tonic::IntoRequest<super::AddUnitToQueueRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status> {\n\n self.inner.ready().await.map_err(|e| {\n\n tonic::Status::new(\n\n tonic::Code::Unknown,\n\n format!(\"Service was not ready: {}\", e.into()),\n\n )\n\n })?;\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 25, "score": 37783.79336758966 }, { "content": " request: impl tonic::IntoRequest<super::TransitionUnitRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status> {\n\n self.inner.ready().await.map_err(|e| {\n\n tonic::Status::new(\n\n tonic::Code::Unknown,\n\n format!(\"Service was not ready: {}\", e.into()),\n\n )\n\n })?;\n\n let codec = tonic::codec::ProstCodec::default();\n\n let path = http::uri::PathAndQuery::from_static(\"/UnitService.UnitService/Cancel\");\n\n self.inner.unary(request.into_request(), path, codec).await\n\n }\n\n pub async fn close(\n\n &mut self,\n\n request: impl tonic::IntoRequest<super::TransitionUnitRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status> {\n\n self.inner.ready().await.map_err(|e| {\n\n tonic::Status::new(\n\n tonic::Code::Unknown,\n\n format!(\"Service was not ready: {}\", e.into()),\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 26, "score": 37783.68865191477 }, { "content": " self.inner.ready().await.map_err(|e| {\n\n tonic::Status::new(\n\n tonic::Code::Unknown,\n\n format!(\"Service was not ready: {}\", e.into()),\n\n )\n\n })?;\n\n let codec = tonic::codec::ProstCodec::default();\n\n let path = http::uri::PathAndQuery::from_static(\n\n \"/UnitService.UnitService/CreateStandAloneUnits\",\n\n );\n\n self.inner\n\n .server_streaming(request.into_request(), path, codec)\n\n .await\n\n }\n\n }\n\n}\n\n#[doc = r\" Generated server implementations.\"]\n\npub mod unit_service_server {\n\n #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)]\n\n use tonic::codegen::*;\n\n #[doc = \"Generated trait containing gRPC methods that should be implemented for use with UnitServiceServer.\"]\n\n #[async_trait]\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 27, "score": 37783.627152023844 }, { "content": " request: impl tonic::IntoRequest<super::TransitionUnitRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status> {\n\n self.inner.ready().await.map_err(|e| {\n\n tonic::Status::new(\n\n tonic::Code::Unknown,\n\n format!(\"Service was not ready: {}\", e.into()),\n\n )\n\n })?;\n\n let codec = tonic::codec::ProstCodec::default();\n\n let path = http::uri::PathAndQuery::from_static(\"/UnitService.UnitService/UndoFinish\");\n\n self.inner.unary(request.into_request(), path, codec).await\n\n }\n\n pub async fn undo_scrap(\n\n &mut self,\n\n request: impl tonic::IntoRequest<super::TransitionUnitRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status> {\n\n self.inner.ready().await.map_err(|e| {\n\n tonic::Status::new(\n\n tonic::Code::Unknown,\n\n format!(\"Service was not ready: {}\", e.into()),\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 28, "score": 37783.592714554514 }, { "content": " }\n\n \"/UnitService.UnitService/Pause\" => {\n\n 
#[allow(non_camel_case_types)]\n\n struct PauseSvc<T: UnitService>(pub Arc<T>);\n\n impl<T: UnitService> tonic::server::UnaryService<super::TransitionUnitRequest> for PauseSvc<T> {\n\n type Response = super::super::structures::Unit;\n\n type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;\n\n fn call(\n\n &mut self,\n\n request: tonic::Request<super::TransitionUnitRequest>,\n\n ) -> Self::Future {\n\n let inner = self.0.clone();\n\n let fut = async move { (*inner).pause(request).await };\n\n Box::pin(fut)\n\n }\n\n }\n\n let accept_compression_encodings = self.accept_compression_encodings;\n\n let send_compression_encodings = self.send_compression_encodings;\n\n let inner = self.inner.clone();\n\n let fut = async move {\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 29, "score": 37783.504700515674 }, { "content": " #[allow(non_camel_case_types)]\n\n struct FinishSvc<T: UnitService>(pub Arc<T>);\n\n impl<T: UnitService> tonic::server::UnaryService<super::TransitionUnitRequest> for FinishSvc<T> {\n\n type Response = super::super::structures::Unit;\n\n type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;\n\n fn call(\n\n &mut self,\n\n request: tonic::Request<super::TransitionUnitRequest>,\n\n ) -> Self::Future {\n\n let inner = self.0.clone();\n\n let fut = async move { (*inner).finish(request).await };\n\n Box::pin(fut)\n\n }\n\n }\n\n let accept_compression_encodings = self.accept_compression_encodings;\n\n let send_compression_encodings = self.send_compression_encodings;\n\n let inner = self.inner.clone();\n\n let fut = async move {\n\n let inner = inner.0;\n\n let method = FinishSvc(inner);\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 30, "score": 37783.13893850092 }, { "content": " ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status> {\n\n self.inner.ready().await.map_err(|e| {\n\n tonic::Status::new(\n\n tonic::Code::Unknown,\n\n format!(\"Service was not ready: {}\", e.into()),\n\n )\n\n })?;\n\n let codec = tonic::codec::ProstCodec::default();\n\n let path = http::uri::PathAndQuery::from_static(\n\n \"/UnitService.UnitService/CreateStandAloneUnit\",\n\n );\n\n self.inner.unary(request.into_request(), path, codec).await\n\n }\n\n pub async fn create_stand_alone_units(\n\n &mut self,\n\n request: impl tonic::IntoRequest<super::CreateStandAloneUnitRequest>,\n\n ) -> Result<\n\n tonic::Response<tonic::codec::Streaming<super::super::structures::Unit>>,\n\n tonic::Status,\n\n > {\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 31, "score": 37783.0508777991 }, { "content": " )\n\n })?;\n\n let codec = tonic::codec::ProstCodec::default();\n\n let path = http::uri::PathAndQuery::from_static(\"/UnitService.UnitService/Close\");\n\n self.inner.unary(request.into_request(), path, codec).await\n\n }\n\n pub async fn finish(\n\n &mut self,\n\n request: impl tonic::IntoRequest<super::TransitionUnitRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status> {\n\n self.inner.ready().await.map_err(|e| {\n\n tonic::Status::new(\n\n tonic::Code::Unknown,\n\n format!(\"Service was not ready: {}\", e.into()),\n\n )\n\n })?;\n\n let codec = tonic::codec::ProstCodec::default();\n\n let path = http::uri::PathAndQuery::from_static(\"/UnitService.UnitService/Finish\");\n\n self.inner.unary(request.into_request(), path, codec).await\n\n }\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 32, "score": 37782.957086822214 }, { "content": " let res = 
grpc.unary(method, req).await;\n\n Ok(res)\n\n };\n\n Box::pin(fut)\n\n }\n\n \"/UnitService.UnitService/Release\" => {\n\n #[allow(non_camel_case_types)]\n\n struct ReleaseSvc<T: UnitService>(pub Arc<T>);\n\n impl<T: UnitService> tonic::server::UnaryService<super::TransitionUnitRequest> for ReleaseSvc<T> {\n\n type Response = super::super::structures::Unit;\n\n type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;\n\n fn call(\n\n &mut self,\n\n request: tonic::Request<super::TransitionUnitRequest>,\n\n ) -> Self::Future {\n\n let inner = self.0.clone();\n\n let fut = async move { (*inner).release(request).await };\n\n Box::pin(fut)\n\n }\n\n }\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 33, "score": 37782.943002811466 }, { "content": " )\n\n })?;\n\n let codec = tonic::codec::ProstCodec::default();\n\n let path = http::uri::PathAndQuery::from_static(\n\n \"/UnitService.UnitService/CompleteAtRouteStep\",\n\n );\n\n self.inner.unary(request.into_request(), path, codec).await\n\n }\n\n pub async fn start_at_route_step(\n\n &mut self,\n\n request: impl tonic::IntoRequest<super::UnitStartAtRouteStepRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status> {\n\n self.inner.ready().await.map_err(|e| {\n\n tonic::Status::new(\n\n tonic::Code::Unknown,\n\n format!(\"Service was not ready: {}\", e.into()),\n\n )\n\n })?;\n\n let codec = tonic::codec::ProstCodec::default();\n\n let path =\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 34, "score": 37782.933465172544 }, { "content": " struct UndoCloseSvc<T: UnitService>(pub Arc<T>);\n\n impl<T: UnitService> tonic::server::UnaryService<super::TransitionUnitRequest> for UndoCloseSvc<T> {\n\n type Response = super::super::structures::Unit;\n\n type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;\n\n fn call(\n\n &mut self,\n\n request: tonic::Request<super::TransitionUnitRequest>,\n\n ) -> Self::Future {\n\n let inner = self.0.clone();\n\n let fut = async move { (*inner).undo_close(request).await };\n\n Box::pin(fut)\n\n }\n\n }\n\n let accept_compression_encodings = self.accept_compression_encodings;\n\n let send_compression_encodings = self.send_compression_encodings;\n\n let inner = self.inner.clone();\n\n let fut = async move {\n\n let inner = inner.0;\n\n let method = UndoCloseSvc(inner);\n\n let codec = tonic::codec::ProstCodec::default();\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 35, "score": 37782.88552553167 }, { "content": " Ok(res)\n\n };\n\n Box::pin(fut)\n\n }\n\n \"/UnitService.UnitService/SaveUnit\" => {\n\n #[allow(non_camel_case_types)]\n\n struct SaveUnitSvc<T: UnitService>(pub Arc<T>);\n\n impl<T: UnitService> tonic::server::UnaryService<super::super::structures::Unit>\n\n for SaveUnitSvc<T>\n\n {\n\n type Response = super::super::structures::Unit;\n\n type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;\n\n fn call(\n\n &mut self,\n\n request: tonic::Request<super::super::structures::Unit>,\n\n ) -> Self::Future {\n\n let inner = self.0.clone();\n\n let fut = async move { (*inner).save_unit(request).await };\n\n Box::pin(fut)\n\n }\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 36, "score": 37782.874378664375 }, { "content": " self.inner.ready().await.map_err(|e| {\n\n tonic::Status::new(\n\n tonic::Code::Unknown,\n\n format!(\"Service was not ready: {}\", e.into()),\n\n )\n\n })?;\n\n let codec = tonic::codec::ProstCodec::default();\n\n let path = 
http::uri::PathAndQuery::from_static(\"/UnitService.UnitService/Ship\");\n\n self.inner.unary(request.into_request(), path, codec).await\n\n }\n\n pub async fn scrap(\n\n &mut self,\n\n request: impl tonic::IntoRequest<super::TransitionUnitRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status> {\n\n self.inner.ready().await.map_err(|e| {\n\n tonic::Status::new(\n\n tonic::Code::Unknown,\n\n format!(\"Service was not ready: {}\", e.into()),\n\n )\n\n })?;\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 37, "score": 37782.872641706555 }, { "content": " Box::pin(fut)\n\n }\n\n \"/UnitService.UnitService/CreateStandAloneUnit\" => {\n\n #[allow(non_camel_case_types)]\n\n struct CreateStandAloneUnitSvc<T: UnitService>(pub Arc<T>);\n\n impl<T: UnitService>\n\n tonic::server::UnaryService<super::CreateStandAloneUnitRequest>\n\n for CreateStandAloneUnitSvc<T>\n\n {\n\n type Response = super::super::structures::Unit;\n\n type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;\n\n fn call(\n\n &mut self,\n\n request: tonic::Request<super::CreateStandAloneUnitRequest>,\n\n ) -> Self::Future {\n\n let inner = self.0.clone();\n\n let fut =\n\n async move { (*inner).create_stand_alone_unit(request).await };\n\n Box::pin(fut)\n\n }\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 38, "score": 37782.85268830147 }, { "content": " )\n\n })?;\n\n let codec = tonic::codec::ProstCodec::default();\n\n let path = http::uri::PathAndQuery::from_static(\"/UnitService.UnitService/UndoScrap\");\n\n self.inner.unary(request.into_request(), path, codec).await\n\n }\n\n pub async fn undo_ship(\n\n &mut self,\n\n request: impl tonic::IntoRequest<super::TransitionUnitRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status> {\n\n self.inner.ready().await.map_err(|e| {\n\n tonic::Status::new(\n\n tonic::Code::Unknown,\n\n format!(\"Service was not ready: {}\", e.into()),\n\n )\n\n })?;\n\n let codec = tonic::codec::ProstCodec::default();\n\n let path = http::uri::PathAndQuery::from_static(\"/UnitService.UnitService/UndoShip\");\n\n self.inner.unary(request.into_request(), path, codec).await\n\n }\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 39, "score": 37782.803173521126 }, { "content": " accept_compression_encodings,\n\n send_compression_encodings,\n\n );\n\n let res = grpc.unary(method, req).await;\n\n Ok(res)\n\n };\n\n Box::pin(fut)\n\n }\n\n \"/UnitService.UnitService/Close\" => {\n\n #[allow(non_camel_case_types)]\n\n struct CloseSvc<T: UnitService>(pub Arc<T>);\n\n impl<T: UnitService> tonic::server::UnaryService<super::TransitionUnitRequest> for CloseSvc<T> {\n\n type Response = super::super::structures::Unit;\n\n type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;\n\n fn call(\n\n &mut self,\n\n request: tonic::Request<super::TransitionUnitRequest>,\n\n ) -> Self::Future {\n\n let inner = self.0.clone();\n\n let fut = async move { (*inner).close(request).await };\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 40, "score": 37782.784503069306 }, { "content": " send_compression_encodings,\n\n );\n\n let res = grpc.unary(method, req).await;\n\n Ok(res)\n\n };\n\n Box::pin(fut)\n\n }\n\n \"/UnitService.UnitService/Scrap\" => {\n\n #[allow(non_camel_case_types)]\n\n struct ScrapSvc<T: UnitService>(pub Arc<T>);\n\n impl<T: UnitService> tonic::server::UnaryService<super::TransitionUnitRequest> for ScrapSvc<T> {\n\n type Response = 
super::super::structures::Unit;\n\n type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;\n\n fn call(\n\n &mut self,\n\n request: tonic::Request<super::TransitionUnitRequest>,\n\n ) -> Self::Future {\n\n let inner = self.0.clone();\n\n let fut = async move { (*inner).scrap(request).await };\n\n Box::pin(fut)\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 41, "score": 37782.784503069306 }, { "content": " tonic::Code::Unknown,\n\n format!(\"Service was not ready: {}\", e.into()),\n\n )\n\n })?;\n\n let codec = tonic::codec::ProstCodec::default();\n\n let path = http::uri::PathAndQuery::from_static(\"/UnitService.UnitService/Pause\");\n\n self.inner.unary(request.into_request(), path, codec).await\n\n }\n\n pub async fn quarantine(\n\n &mut self,\n\n request: impl tonic::IntoRequest<super::TransitionUnitRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status> {\n\n self.inner.ready().await.map_err(|e| {\n\n tonic::Status::new(\n\n tonic::Code::Unknown,\n\n format!(\"Service was not ready: {}\", e.into()),\n\n )\n\n })?;\n\n let codec = tonic::codec::ProstCodec::default();\n\n let path = http::uri::PathAndQuery::from_static(\"/UnitService.UnitService/Quarantine\");\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 42, "score": 37782.75904514202 }, { "content": " };\n\n Box::pin(fut)\n\n }\n\n \"/UnitService.UnitService/CompleteAtRouteStep\" => {\n\n #[allow(non_camel_case_types)]\n\n struct CompleteAtRouteStepSvc<T: UnitService>(pub Arc<T>);\n\n impl<T: UnitService>\n\n tonic::server::UnaryService<super::UnitCompleteAtRouteStepRequest>\n\n for CompleteAtRouteStepSvc<T>\n\n {\n\n type Response = super::super::structures::Unit;\n\n type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;\n\n fn call(\n\n &mut self,\n\n request: tonic::Request<super::UnitCompleteAtRouteStepRequest>,\n\n ) -> Self::Future {\n\n let inner = self.0.clone();\n\n let fut = async move { (*inner).complete_at_route_step(request).await };\n\n Box::pin(fut)\n\n }\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 43, "score": 37782.732528500164 }, { "content": " Ok(res)\n\n };\n\n Box::pin(fut)\n\n }\n\n \"/UnitService.UnitService/ChangeBom\" => {\n\n #[allow(non_camel_case_types)]\n\n struct ChangeBomSvc<T: UnitService>(pub Arc<T>);\n\n impl<T: UnitService> tonic::server::UnaryService<super::UnitChangeBomRequest> for ChangeBomSvc<T> {\n\n type Response = super::super::structures::Unit;\n\n type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;\n\n fn call(\n\n &mut self,\n\n request: tonic::Request<super::UnitChangeBomRequest>,\n\n ) -> Self::Future {\n\n let inner = self.0.clone();\n\n let fut = async move { (*inner).change_bom(request).await };\n\n Box::pin(fut)\n\n }\n\n }\n\n let accept_compression_encodings = self.accept_compression_encodings;\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 44, "score": 37782.62983174255 }, { "content": " )\n\n })?;\n\n let codec = tonic::codec::ProstCodec::default();\n\n let path =\n\n http::uri::PathAndQuery::from_static(\"/UnitService.UnitService/ChangePriority\");\n\n self.inner.unary(request.into_request(), path, codec).await\n\n }\n\n pub async fn change_production_line(\n\n &mut self,\n\n request: impl tonic::IntoRequest<super::ChangeProductionLineRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status> {\n\n self.inner.ready().await.map_err(|e| {\n\n tonic::Status::new(\n\n tonic::Code::Unknown,\n\n 
format!(\"Service was not ready: {}\", e.into()),\n\n )\n\n })?;\n\n let codec = tonic::codec::ProstCodec::default();\n\n let path = http::uri::PathAndQuery::from_static(\n\n \"/UnitService.UnitService/ChangeProductionLine\",\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 45, "score": 37782.58646808189 }, { "content": " Box::pin(fut)\n\n }\n\n \"/UnitService.UnitService/ChangeProductionLine\" => {\n\n #[allow(non_camel_case_types)]\n\n struct ChangeProductionLineSvc<T: UnitService>(pub Arc<T>);\n\n impl<T: UnitService>\n\n tonic::server::UnaryService<super::ChangeProductionLineRequest>\n\n for ChangeProductionLineSvc<T>\n\n {\n\n type Response = super::super::structures::Unit;\n\n type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;\n\n fn call(\n\n &mut self,\n\n request: tonic::Request<super::ChangeProductionLineRequest>,\n\n ) -> Self::Future {\n\n let inner = self.0.clone();\n\n let fut = async move { (*inner).change_production_line(request).await };\n\n Box::pin(fut)\n\n }\n\n }\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 46, "score": 37782.53181894289 }, { "content": " tonic::Code::Unknown,\n\n format!(\"Service was not ready: {}\", e.into()),\n\n )\n\n })?;\n\n let codec = tonic::codec::ProstCodec::default();\n\n let path =\n\n http::uri::PathAndQuery::from_static(\"/UnitService.UnitService/ChangeBomFromPart\");\n\n self.inner.unary(request.into_request(), path, codec).await\n\n }\n\n pub async fn change_lot(\n\n &mut self,\n\n request: impl tonic::IntoRequest<super::ChangeLotRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status> {\n\n self.inner.ready().await.map_err(|e| {\n\n tonic::Status::new(\n\n tonic::Code::Unknown,\n\n format!(\"Service was not ready: {}\", e.into()),\n\n )\n\n })?;\n\n let codec = tonic::codec::ProstCodec::default();\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 47, "score": 37782.47427268331 }, { "content": " \"/UnitService.UnitService/ChangePart\" => {\n\n #[allow(non_camel_case_types)]\n\n struct ChangePartSvc<T: UnitService>(pub Arc<T>);\n\n impl<T: UnitService> tonic::server::UnaryService<super::UnitChangePartRequest>\n\n for ChangePartSvc<T>\n\n {\n\n type Response = super::super::structures::Unit;\n\n type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;\n\n fn call(\n\n &mut self,\n\n request: tonic::Request<super::UnitChangePartRequest>,\n\n ) -> Self::Future {\n\n let inner = self.0.clone();\n\n let fut = async move { (*inner).change_part(request).await };\n\n Box::pin(fut)\n\n }\n\n }\n\n let accept_compression_encodings = self.accept_compression_encodings;\n\n let send_compression_encodings = self.send_compression_encodings;\n\n let inner = self.inner.clone();\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 48, "score": 37782.429289365886 }, { "content": " Box::pin(fut)\n\n }\n\n \"/UnitService.UnitService/UndoScrap\" => {\n\n #[allow(non_camel_case_types)]\n\n struct UndoScrapSvc<T: UnitService>(pub Arc<T>);\n\n impl<T: UnitService> tonic::server::UnaryService<super::TransitionUnitRequest> for UndoScrapSvc<T> {\n\n type Response = super::super::structures::Unit;\n\n type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;\n\n fn call(\n\n &mut self,\n\n request: tonic::Request<super::TransitionUnitRequest>,\n\n ) -> Self::Future {\n\n let inner = self.0.clone();\n\n let fut = async move { (*inner).undo_scrap(request).await };\n\n Box::pin(fut)\n\n }\n\n }\n\n let accept_compression_encodings = 
self.accept_compression_encodings;\n\n let send_compression_encodings = self.send_compression_encodings;\n\n let inner = self.inner.clone();\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 49, "score": 37782.3801326789 }, { "content": " ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status> {\n\n self.inner.ready().await.map_err(|e| {\n\n tonic::Status::new(\n\n tonic::Code::Unknown,\n\n format!(\"Service was not ready: {}\", e.into()),\n\n )\n\n })?;\n\n let codec = tonic::codec::ProstCodec::default();\n\n let path =\n\n http::uri::PathAndQuery::from_static(\"/UnitService.UnitService/ChangeSerialNumber\");\n\n self.inner.unary(request.into_request(), path, codec).await\n\n }\n\n pub async fn complete_at_route_step(\n\n &mut self,\n\n request: impl tonic::IntoRequest<super::UnitCompleteAtRouteStepRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status> {\n\n self.inner.ready().await.map_err(|e| {\n\n tonic::Status::new(\n\n tonic::Code::Unknown,\n\n format!(\"Service was not ready: {}\", e.into()),\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 50, "score": 37782.26512082808 }, { "content": " ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status> {\n\n self.inner.ready().await.map_err(|e| {\n\n tonic::Status::new(\n\n tonic::Code::Unknown,\n\n format!(\"Service was not ready: {}\", e.into()),\n\n )\n\n })?;\n\n let codec = tonic::codec::ProstCodec::default();\n\n let path =\n\n http::uri::PathAndQuery::from_static(\"/UnitService.UnitService/ChangePartOnly\");\n\n self.inner.unary(request.into_request(), path, codec).await\n\n }\n\n pub async fn change_priority(\n\n &mut self,\n\n request: impl tonic::IntoRequest<super::super::structures::ChangePriorityRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status> {\n\n self.inner.ready().await.map_err(|e| {\n\n tonic::Status::new(\n\n tonic::Code::Unknown,\n\n format!(\"Service was not ready: {}\", e.into()),\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 51, "score": 37782.12718272122 }, { "content": " UnitQuarantine = 4,\n\n UnitRelease = 5,\n\n UnitShip = 6,\n\n UnitUndoClose = 7,\n\n UnitUndoShip = 8,\n\n UnitUndoFinish = 9,\n\n UnitScrap = 10,\n\n UnitUndoScrap = 11,\n\n UnitCancle = 12,\n\n}\n\n#[doc = r\" Generated client implementations.\"]\n\npub mod unit_service_client {\n\n #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)]\n\n use tonic::codegen::*;\n\n #[derive(Debug, Clone)]\n\n pub struct UnitServiceClient<T> {\n\n inner: tonic::client::Grpc<T>,\n\n }\n\n impl UnitServiceClient<tonic::transport::Channel> {\n\n #[doc = r\" Attempt to create a new client by connecting to a given endpoint.\"]\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 52, "score": 37781.09756885064 }, { "content": " impl<T: UnitService>\n\n tonic::server::UnaryService<super::UnitStartAtRouteStepRequest>\n\n for StartAtRouteStepSvc<T>\n\n {\n\n type Response = super::super::structures::Unit;\n\n type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;\n\n fn call(\n\n &mut self,\n\n request: tonic::Request<super::UnitStartAtRouteStepRequest>,\n\n ) -> Self::Future {\n\n let inner = self.0.clone();\n\n let fut = async move { (*inner).start_at_route_step(request).await };\n\n Box::pin(fut)\n\n }\n\n }\n\n let accept_compression_encodings = self.accept_compression_encodings;\n\n let send_compression_encodings = self.send_compression_encodings;\n\n let inner = 
self.inner.clone();\n\n let fut = async move {\n\n let inner = inner.0;\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 53, "score": 37780.94210051378 }, { "content": " let fut = async move {\n\n let inner = inner.0;\n\n let method = ChangePartSvc(inner);\n\n let codec = tonic::codec::ProstCodec::default();\n\n let mut grpc = tonic::server::Grpc::new(codec).apply_compression_config(\n\n accept_compression_encodings,\n\n send_compression_encodings,\n\n );\n\n let res = grpc.unary(method, req).await;\n\n Ok(res)\n\n };\n\n Box::pin(fut)\n\n }\n\n \"/UnitService.UnitService/ChangePartOnly\" => {\n\n #[allow(non_camel_case_types)]\n\n struct ChangePartOnlySvc<T: UnitService>(pub Arc<T>);\n\n impl<T: UnitService> tonic::server::UnaryService<super::UnitChangePartRequest>\n\n for ChangePartOnlySvc<T>\n\n {\n\n type Response = super::super::structures::Unit;\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 54, "score": 37780.74643776436 }, { "content": " ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status>;\n\n async fn hold(\n\n &self,\n\n request: tonic::Request<super::TransitionUnitRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status>;\n\n async fn pause(\n\n &self,\n\n request: tonic::Request<super::TransitionUnitRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status>;\n\n async fn quarantine(\n\n &self,\n\n request: tonic::Request<super::TransitionUnitRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status>;\n\n async fn release(\n\n &self,\n\n request: tonic::Request<super::TransitionUnitRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status>;\n\n async fn ship(\n\n &self,\n\n request: tonic::Request<super::TransitionUnitRequest>,\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 55, "score": 37780.54888189795 }, { "content": " ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status>;\n\n async fn scrap(\n\n &self,\n\n request: tonic::Request<super::TransitionUnitRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status>;\n\n async fn undo_close(\n\n &self,\n\n request: tonic::Request<super::TransitionUnitRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status>;\n\n async fn undo_finish(\n\n &self,\n\n request: tonic::Request<super::TransitionUnitRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status>;\n\n async fn undo_scrap(\n\n &self,\n\n request: tonic::Request<super::TransitionUnitRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status>;\n\n async fn undo_ship(\n\n &self,\n\n request: tonic::Request<super::TransitionUnitRequest>,\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 56, "score": 37780.48250943554 }, { "content": " impl<T: UnitService> tonic::server::UnaryService<super::TransitionUnitRequest> for CancelSvc<T> {\n\n type Response = super::super::structures::Unit;\n\n type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;\n\n fn call(\n\n &mut self,\n\n request: tonic::Request<super::TransitionUnitRequest>,\n\n ) -> Self::Future {\n\n let inner = self.0.clone();\n\n let fut = async move { (*inner).cancel(request).await };\n\n Box::pin(fut)\n\n }\n\n }\n\n let accept_compression_encodings = self.accept_compression_encodings;\n\n let send_compression_encodings = self.send_compression_encodings;\n\n let 
inner = self.inner.clone();\n\n let fut = async move {\n\n let inner = inner.0;\n\n let method = CancelSvc(inner);\n\n let codec = tonic::codec::ProstCodec::default();\n\n let mut grpc = tonic::server::Grpc::new(codec).apply_compression_config(\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 57, "score": 37780.4371002897 }, { "content": " let send_compression_encodings = self.send_compression_encodings;\n\n let inner = self.inner.clone();\n\n let fut = async move {\n\n let inner = inner.0;\n\n let method = ChangeBomSvc(inner);\n\n let codec = tonic::codec::ProstCodec::default();\n\n let mut grpc = tonic::server::Grpc::new(codec).apply_compression_config(\n\n accept_compression_encodings,\n\n send_compression_encodings,\n\n );\n\n let res = grpc.unary(method, req).await;\n\n Ok(res)\n\n };\n\n Box::pin(fut)\n\n }\n\n \"/UnitService.UnitService/ChangeBomFromPart\" => {\n\n #[allow(non_camel_case_types)]\n\n struct ChangeBomFromPartSvc<T: UnitService>(pub Arc<T>);\n\n impl<T: UnitService>\n\n tonic::server::UnaryService<super::UnitChangeBomFromPartRequest>\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 58, "score": 37780.400712826944 }, { "content": " impl<T: UnitService>\n\n tonic::server::ServerStreamingService<super::CreateStandAloneUnitRequest>\n\n for CreateStandAloneUnitsSvc<T>\n\n {\n\n type Response = super::super::structures::Unit;\n\n type ResponseStream = T::CreateStandAloneUnitsStream;\n\n type Future =\n\n BoxFuture<tonic::Response<Self::ResponseStream>, tonic::Status>;\n\n fn call(\n\n &mut self,\n\n request: tonic::Request<super::CreateStandAloneUnitRequest>,\n\n ) -> Self::Future {\n\n let inner = self.0.clone();\n\n let fut =\n\n async move { (*inner).create_stand_alone_units(request).await };\n\n Box::pin(fut)\n\n }\n\n }\n\n let accept_compression_encodings = self.accept_compression_encodings;\n\n let send_compression_encodings = self.send_compression_encodings;\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 59, "score": 37780.39297315898 }, { "content": " );\n\n let res = grpc.unary(method, req).await;\n\n Ok(res)\n\n };\n\n Box::pin(fut)\n\n }\n\n \"/UnitService.UnitService/ChangePriority\" => {\n\n #[allow(non_camel_case_types)]\n\n struct ChangePrioritySvc<T: UnitService>(pub Arc<T>);\n\n impl<T: UnitService>\n\n tonic::server::UnaryService<super::super::structures::ChangePriorityRequest>\n\n for ChangePrioritySvc<T>\n\n {\n\n type Response = super::super::structures::Unit;\n\n type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;\n\n fn call(\n\n &mut self,\n\n request: tonic::Request<\n\n super::super::structures::ChangePriorityRequest,\n\n >,\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 60, "score": 37780.33730344347 }, { "content": " ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status>;\n\n async fn change_bom(\n\n &self,\n\n request: tonic::Request<super::UnitChangeBomRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status>;\n\n async fn change_bom_from_part(\n\n &self,\n\n request: tonic::Request<super::UnitChangeBomFromPartRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status>;\n\n async fn change_lot(\n\n &self,\n\n request: tonic::Request<super::ChangeLotRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status>;\n\n async fn change_part(\n\n &self,\n\n request: tonic::Request<super::UnitChangePartRequest>,\n\n ) -> 
Result<tonic::Response<super::super::structures::Unit>, tonic::Status>;\n\n async fn change_part_only(\n\n &self,\n\n request: tonic::Request<super::UnitChangePartRequest>,\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 61, "score": 37780.29151928047 }, { "content": " let accept_compression_encodings = self.accept_compression_encodings;\n\n let send_compression_encodings = self.send_compression_encodings;\n\n let inner = self.inner.clone();\n\n let fut = async move {\n\n let inner = inner.0;\n\n let method = ReleaseSvc(inner);\n\n let codec = tonic::codec::ProstCodec::default();\n\n let mut grpc = tonic::server::Grpc::new(codec).apply_compression_config(\n\n accept_compression_encodings,\n\n send_compression_encodings,\n\n );\n\n let res = grpc.unary(method, req).await;\n\n Ok(res)\n\n };\n\n Box::pin(fut)\n\n }\n\n \"/UnitService.UnitService/Ship\" => {\n\n #[allow(non_camel_case_types)]\n\n struct ShipSvc<T: UnitService>(pub Arc<T>);\n\n impl<T: UnitService> tonic::server::UnaryService<super::TransitionUnitRequest> for ShipSvc<T> {\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 62, "score": 37780.28977351598 }, { "content": " let fut = async move {\n\n let inner = inner.0;\n\n let method = UndoScrapSvc(inner);\n\n let codec = tonic::codec::ProstCodec::default();\n\n let mut grpc = tonic::server::Grpc::new(codec).apply_compression_config(\n\n accept_compression_encodings,\n\n send_compression_encodings,\n\n );\n\n let res = grpc.unary(method, req).await;\n\n Ok(res)\n\n };\n\n Box::pin(fut)\n\n }\n\n \"/UnitService.UnitService/UndoShip\" => {\n\n #[allow(non_camel_case_types)]\n\n struct UndoShipSvc<T: UnitService>(pub Arc<T>);\n\n impl<T: UnitService> tonic::server::UnaryService<super::TransitionUnitRequest> for UndoShipSvc<T> {\n\n type Response = super::super::structures::Unit;\n\n type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;\n\n fn call(\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 63, "score": 37780.25774776847 }, { "content": " where\n\n F: tonic::service::Interceptor,\n\n {\n\n InterceptedService::new(Self::new(inner), interceptor)\n\n }\n\n #[doc = r\" Enable decompressing requests with `gzip`.\"]\n\n pub fn accept_gzip(mut self) -> Self {\n\n self.accept_compression_encodings.enable_gzip();\n\n self\n\n }\n\n #[doc = r\" Compress responses with `gzip`, if the client supports it.\"]\n\n pub fn send_gzip(mut self) -> Self {\n\n self.send_compression_encodings.enable_gzip();\n\n self\n\n }\n\n }\n\n impl<T, B> tonic::codegen::Service<http::Request<B>> for UnitServiceServer<T>\n\n where\n\n T: UnitService,\n\n B: Body + Send + 'static,\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 64, "score": 37780.19667045616 }, { "content": " let accept_compression_encodings = self.accept_compression_encodings;\n\n let send_compression_encodings = self.send_compression_encodings;\n\n let inner = self.inner.clone();\n\n let fut = async move {\n\n let inner = inner.0;\n\n let method = ChangeProductionLineSvc(inner);\n\n let codec = tonic::codec::ProstCodec::default();\n\n let mut grpc = tonic::server::Grpc::new(codec).apply_compression_config(\n\n accept_compression_encodings,\n\n send_compression_encodings,\n\n );\n\n let res = grpc.unary(method, req).await;\n\n Ok(res)\n\n };\n\n Box::pin(fut)\n\n }\n\n \"/UnitService.UnitService/ChangeRoute\" => {\n\n #[allow(non_camel_case_types)]\n\n struct ChangeRouteSvc<T: UnitService>(pub Arc<T>);\n\n impl<T: UnitService> 
tonic::server::UnaryService<super::UnitChangeRouteRequest>\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 65, "score": 37780.180773335574 }, { "content": " let mut grpc = tonic::server::Grpc::new(codec).apply_compression_config(\n\n accept_compression_encodings,\n\n send_compression_encodings,\n\n );\n\n let res = grpc.unary(method, req).await;\n\n Ok(res)\n\n };\n\n Box::pin(fut)\n\n }\n\n \"/UnitService.UnitService/UndoFinish\" => {\n\n #[allow(non_camel_case_types)]\n\n struct UndoFinishSvc<T: UnitService>(pub Arc<T>);\n\n impl<T: UnitService> tonic::server::UnaryService<super::TransitionUnitRequest>\n\n for UndoFinishSvc<T>\n\n {\n\n type Response = super::super::structures::Unit;\n\n type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;\n\n fn call(\n\n &mut self,\n\n request: tonic::Request<super::TransitionUnitRequest>,\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 66, "score": 37780.116607676726 }, { "content": " ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status>;\n\n async fn start_at_route_step(\n\n &self,\n\n request: tonic::Request<super::UnitStartAtRouteStepRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status>;\n\n async fn restart(\n\n &self,\n\n request: tonic::Request<super::RestartUnitRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status>;\n\n async fn create_stand_alone_unit(\n\n &self,\n\n request: tonic::Request<super::CreateStandAloneUnitRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status>;\n\n #[doc = \"Server streaming response type for the CreateStandAloneUnits method.\"]\n\n type CreateStandAloneUnitsStream: futures_core::Stream<Item = Result<super::super::structures::Unit, tonic::Status>>\n\n + Send\n\n + 'static;\n\n async fn create_stand_alone_units(\n\n &self,\n\n request: tonic::Request<super::CreateStandAloneUnitRequest>,\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 67, "score": 37780.026903672326 }, { "content": " ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status>;\n\n async fn change_priority(\n\n &self,\n\n request: tonic::Request<super::super::structures::ChangePriorityRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status>;\n\n async fn change_production_line(\n\n &self,\n\n request: tonic::Request<super::ChangeProductionLineRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status>;\n\n async fn change_route(\n\n &self,\n\n request: tonic::Request<super::UnitChangeRouteRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status>;\n\n async fn change_serial_number(\n\n &self,\n\n request: tonic::Request<super::ChangeSerialNumberRequest>,\n\n ) -> Result<tonic::Response<super::super::structures::Unit>, tonic::Status>;\n\n async fn complete_at_route_step(\n\n &self,\n\n request: tonic::Request<super::UnitCompleteAtRouteStepRequest>,\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 68, "score": 37780.01357499004 }, { "content": " let codec = tonic::codec::ProstCodec::default();\n\n let mut grpc = tonic::server::Grpc::new(codec).apply_compression_config(\n\n accept_compression_encodings,\n\n send_compression_encodings,\n\n );\n\n let res = grpc.unary(method, req).await;\n\n Ok(res)\n\n };\n\n Box::pin(fut)\n\n }\n\n \"/UnitService.UnitService/Hold\" => {\n\n #[allow(non_camel_case_types)]\n\n struct HoldSvc<T: UnitService>(pub 
Arc<T>);\n\n impl<T: UnitService> tonic::server::UnaryService<super::TransitionUnitRequest> for HoldSvc<T> {\n\n type Response = super::super::structures::Unit;\n\n type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;\n\n fn call(\n\n &mut self,\n\n request: tonic::Request<super::TransitionUnitRequest>,\n\n ) -> Self::Future {\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 69, "score": 37779.91791368333 }, { "content": " let method = StartAtRouteStepSvc(inner);\n\n let codec = tonic::codec::ProstCodec::default();\n\n let mut grpc = tonic::server::Grpc::new(codec).apply_compression_config(\n\n accept_compression_encodings,\n\n send_compression_encodings,\n\n );\n\n let res = grpc.unary(method, req).await;\n\n Ok(res)\n\n };\n\n Box::pin(fut)\n\n }\n\n \"/UnitService.UnitService/Restart\" => {\n\n #[allow(non_camel_case_types)]\n\n struct RestartSvc<T: UnitService>(pub Arc<T>);\n\n impl<T: UnitService> tonic::server::UnaryService<super::RestartUnitRequest> for RestartSvc<T> {\n\n type Response = super::super::structures::Unit;\n\n type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;\n\n fn call(\n\n &mut self,\n\n request: tonic::Request<super::RestartUnitRequest>,\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 70, "score": 37779.76411123217 }, { "content": " let mut grpc = tonic::server::Grpc::new(codec).apply_compression_config(\n\n accept_compression_encodings,\n\n send_compression_encodings,\n\n );\n\n let res = grpc.unary(method, req).await;\n\n Ok(res)\n\n };\n\n Box::pin(fut)\n\n }\n\n \"/UnitService.UnitService/ChangeLot\" => {\n\n #[allow(non_camel_case_types)]\n\n struct ChangeLotSvc<T: UnitService>(pub Arc<T>);\n\n impl<T: UnitService> tonic::server::UnaryService<super::ChangeLotRequest> for ChangeLotSvc<T> {\n\n type Response = super::super::structures::Unit;\n\n type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;\n\n fn call(\n\n &mut self,\n\n request: tonic::Request<super::ChangeLotRequest>,\n\n ) -> Self::Future {\n\n let inner = self.0.clone();\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 71, "score": 37779.63962288069 }, { "content": " #[prost(uint32, tag = \"10\")]\n\n pub quantity: u32,\n\n}\n\n#[derive(Clone, PartialEq, ::prost::Message)]\n\npub struct RestartUnitRequest {\n\n #[prost(uint64, tag = \"1\")]\n\n pub unit_id: u64,\n\n #[prost(string, tag = \"2\")]\n\n pub comment: ::prost::alloc::string::String,\n\n #[prost(message, optional, tag = \"3\")]\n\n pub transaction_time: ::core::option::Option<::prost_types::Timestamp>,\n\n}\n\n#[derive(Clone, PartialEq, ::prost::Message)]\n\npub struct UnitStartAtRouteStepRequest {\n\n #[prost(uint64, tag = \"1\")]\n\n pub unit_id: u64,\n\n #[prost(string, tag = \"2\")]\n\n pub route_step_number: ::prost::alloc::string::String,\n\n #[prost(string, tag = \"3\")]\n\n pub comment: ::prost::alloc::string::String,\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 72, "score": 37778.67355135975 }, { "content": " B::Error: Into<StdError> + Send + 'static,\n\n {\n\n type Response = http::Response<tonic::body::BoxBody>;\n\n type Error = Never;\n\n type Future = BoxFuture<Self::Response, Self::Error>;\n\n fn poll_ready(&mut self, _cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {\n\n Poll::Ready(Ok(()))\n\n }\n\n fn call(&mut self, req: http::Request<B>) -> Self::Future {\n\n let inner = self.inner.clone();\n\n match req.uri().path() {\n\n \"/UnitService.UnitService/AddToQueue\" => {\n\n 
#[allow(non_camel_case_types)]\n\n struct AddToQueueSvc<T: UnitService>(pub Arc<T>);\n\n impl<T: UnitService> tonic::server::UnaryService<super::AddUnitToQueueRequest>\n\n for AddToQueueSvc<T>\n\n {\n\n type Response = super::super::structures::Unit;\n\n type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;\n\n fn call(\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 73, "score": 37778.55587232527 }, { "content": "pub struct UnitChangeBomRequest {\n\n #[prost(uint64, tag = \"1\")]\n\n pub unit_id: u64,\n\n #[prost(string, tag = \"2\")]\n\n pub bom_name: ::prost::alloc::string::String,\n\n #[prost(string, tag = \"3\")]\n\n pub bom_revision: ::prost::alloc::string::String,\n\n #[prost(string, tag = \"4\")]\n\n pub comment: ::prost::alloc::string::String,\n\n #[prost(message, optional, tag = \"5\")]\n\n pub transaction_time: ::core::option::Option<::prost_types::Timestamp>,\n\n}\n\n#[derive(Clone, PartialEq, ::prost::Message)]\n\npub struct UnitChangeBomFromPartRequest {\n\n #[prost(uint64, tag = \"1\")]\n\n pub unit_id: u64,\n\n #[prost(string, tag = \"2\")]\n\n pub part_number: ::prost::alloc::string::String,\n\n #[prost(string, tag = \"3\")]\n\n pub part_revision: ::prost::alloc::string::String,\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 74, "score": 37778.499736041405 }, { "content": " let inner = inner.0;\n\n let method = PauseSvc(inner);\n\n let codec = tonic::codec::ProstCodec::default();\n\n let mut grpc = tonic::server::Grpc::new(codec).apply_compression_config(\n\n accept_compression_encodings,\n\n send_compression_encodings,\n\n );\n\n let res = grpc.unary(method, req).await;\n\n Ok(res)\n\n };\n\n Box::pin(fut)\n\n }\n\n \"/UnitService.UnitService/Quarantine\" => {\n\n #[allow(non_camel_case_types)]\n\n struct QuarantineSvc<T: UnitService>(pub Arc<T>);\n\n impl<T: UnitService> tonic::server::UnaryService<super::TransitionUnitRequest>\n\n for QuarantineSvc<T>\n\n {\n\n type Response = super::super::structures::Unit;\n\n type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 75, "score": 37778.3265617034 }, { "content": " let mut grpc = tonic::server::Grpc::new(codec).apply_compression_config(\n\n accept_compression_encodings,\n\n send_compression_encodings,\n\n );\n\n let res = grpc.unary(method, req).await;\n\n Ok(res)\n\n };\n\n Box::pin(fut)\n\n }\n\n \"/UnitService.UnitService/ChangeSerialNumber\" => {\n\n #[allow(non_camel_case_types)]\n\n struct ChangeSerialNumberSvc<T: UnitService>(pub Arc<T>);\n\n impl<T: UnitService>\n\n tonic::server::UnaryService<super::ChangeSerialNumberRequest>\n\n for ChangeSerialNumberSvc<T>\n\n {\n\n type Response = super::super::structures::Unit;\n\n type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;\n\n fn call(\n\n &mut self,\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 76, "score": 37778.28719461377 }, { "content": "#[derive(Clone, PartialEq, ::prost::Message)]\n\npub struct ChangeSerialNumberRequest {\n\n #[prost(uint64, tag = \"1\")]\n\n pub unit_id: u64,\n\n #[prost(string, tag = \"2\")]\n\n pub new_serial_number: ::prost::alloc::string::String,\n\n #[prost(string, tag = \"3\")]\n\n pub comment: ::prost::alloc::string::String,\n\n #[prost(message, optional, tag = \"4\")]\n\n pub transaction_time: ::core::option::Option<::prost_types::Timestamp>,\n\n}\n\n#[derive(Clone, PartialEq, ::prost::Message)]\n\npub struct UnitChangeRouteRequest {\n\n #[prost(uint64, tag = \"1\")]\n\n 
pub unit_id: u64,\n\n #[prost(uint64, tag = \"2\")]\n\n pub new_route_id: u64,\n\n #[prost(bool, tag = \"3\")]\n\n pub force_change_route: bool,\n\n #[prost(string, tag = \"4\")]\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 77, "score": 37778.207351373145 }, { "content": " pub override_route_enforcement: bool,\n\n}\n\n#[derive(Clone, PartialEq, ::prost::Message)]\n\npub struct AddUnitToQueueRequest {\n\n #[prost(uint64, tag = \"1\")]\n\n pub unit_id: u64,\n\n #[prost(string, tag = \"2\")]\n\n pub route_queue_name: ::prost::alloc::string::String,\n\n #[prost(string, tag = \"3\")]\n\n pub comment: ::prost::alloc::string::String,\n\n #[prost(message, optional, tag = \"4\")]\n\n pub transaction_time: ::core::option::Option<::prost_types::Timestamp>,\n\n}\n\n#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]\n\n#[repr(i32)]\n\npub enum TransitionUnitState {\n\n UnitClose = 0,\n\n UnitFinish = 1,\n\n UnitHold = 2,\n\n UnitOpen = 3,\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 78, "score": 37778.19835927756 }, { "content": " pub comment: ::prost::alloc::string::String,\n\n #[prost(message, optional, tag = \"5\")]\n\n pub transaction_time: ::core::option::Option<::prost_types::Timestamp>,\n\n}\n\n#[derive(Clone, PartialEq, ::prost::Message)]\n\npub struct ChangeProductionLineRequest {\n\n #[prost(uint64, tag = \"1\")]\n\n pub unit_id: u64,\n\n #[prost(uint64, tag = \"2\")]\n\n pub new_production_line_id: u64,\n\n #[prost(string, tag = \"3\")]\n\n pub comment: ::prost::alloc::string::String,\n\n #[prost(message, optional, tag = \"4\")]\n\n pub transaction_time: ::core::option::Option<::prost_types::Timestamp>,\n\n}\n\n#[derive(Clone, PartialEq, ::prost::Message)]\n\npub struct UnitChangePartRequest {\n\n #[prost(uint64, tag = \"1\")]\n\n pub unit_id: u64,\n\n #[prost(string, tag = \"2\")]\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 79, "score": 37778.179662549046 }, { "content": " ) -> Result<tonic::Response<Self::CreateStandAloneUnitsStream>, tonic::Status>;\n\n }\n\n #[derive(Debug)]\n\n pub struct UnitServiceServer<T: UnitService> {\n\n inner: _Inner<T>,\n\n accept_compression_encodings: EnabledCompressionEncodings,\n\n send_compression_encodings: EnabledCompressionEncodings,\n\n }\n\n struct _Inner<T>(Arc<T>);\n\n impl<T: UnitService> UnitServiceServer<T> {\n\n pub fn new(inner: T) -> Self {\n\n let inner = Arc::new(inner);\n\n let inner = _Inner(inner);\n\n Self {\n\n inner,\n\n accept_compression_encodings: Default::default(),\n\n send_compression_encodings: Default::default(),\n\n }\n\n }\n\n pub fn with_interceptor<F>(inner: T, interceptor: F) -> InterceptedService<Self, F>\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 80, "score": 37777.43091400533 }, { "content": " for ChangeRouteSvc<T>\n\n {\n\n type Response = super::super::structures::Unit;\n\n type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;\n\n fn call(\n\n &mut self,\n\n request: tonic::Request<super::UnitChangeRouteRequest>,\n\n ) -> Self::Future {\n\n let inner = self.0.clone();\n\n let fut = async move { (*inner).change_route(request).await };\n\n Box::pin(fut)\n\n }\n\n }\n\n let accept_compression_encodings = self.accept_compression_encodings;\n\n let send_compression_encodings = self.send_compression_encodings;\n\n let inner = self.inner.clone();\n\n let fut = async move {\n\n let inner = inner.0;\n\n let method = ChangeRouteSvc(inner);\n\n let codec = tonic::codec::ProstCodec::default();\n", 
"file_path": "athena-api/src/pb/unit_service.rs", "rank": 81, "score": 37777.42627126959 }, { "content": " for ChangeBomFromPartSvc<T>\n\n {\n\n type Response = super::super::structures::Unit;\n\n type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;\n\n fn call(\n\n &mut self,\n\n request: tonic::Request<super::UnitChangeBomFromPartRequest>,\n\n ) -> Self::Future {\n\n let inner = self.0.clone();\n\n let fut = async move { (*inner).change_bom_from_part(request).await };\n\n Box::pin(fut)\n\n }\n\n }\n\n let accept_compression_encodings = self.accept_compression_encodings;\n\n let send_compression_encodings = self.send_compression_encodings;\n\n let inner = self.inner.clone();\n\n let fut = async move {\n\n let inner = inner.0;\n\n let method = ChangeBomFromPartSvc(inner);\n\n let codec = tonic::codec::ProstCodec::default();\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 82, "score": 37777.28408504232 }, { "content": " pub async fn connect<D>(dst: D) -> Result<Self, tonic::transport::Error>\n\n where\n\n D: std::convert::TryInto<tonic::transport::Endpoint>,\n\n D::Error: Into<StdError>,\n\n {\n\n let conn = tonic::transport::Endpoint::new(dst)?.connect().await?;\n\n Ok(Self::new(conn))\n\n }\n\n }\n\n impl<T> UnitServiceClient<T>\n\n where\n\n T: tonic::client::GrpcService<tonic::body::BoxBody>,\n\n T::ResponseBody: Body + Send + 'static,\n\n T::Error: Into<StdError>,\n\n <T::ResponseBody as Body>::Error: Into<StdError> + Send,\n\n {\n\n pub fn new(inner: T) -> Self {\n\n let inner = tonic::client::Grpc::new(inner);\n\n Self { inner }\n\n }\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 83, "score": 37777.22782297545 }, { "content": " type Response = super::super::structures::Unit;\n\n type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;\n\n fn call(\n\n &mut self,\n\n request: tonic::Request<super::TransitionUnitRequest>,\n\n ) -> Self::Future {\n\n let inner = self.0.clone();\n\n let fut = async move { (*inner).ship(request).await };\n\n Box::pin(fut)\n\n }\n\n }\n\n let accept_compression_encodings = self.accept_compression_encodings;\n\n let send_compression_encodings = self.send_compression_encodings;\n\n let inner = self.inner.clone();\n\n let fut = async move {\n\n let inner = inner.0;\n\n let method = ShipSvc(inner);\n\n let codec = tonic::codec::ProstCodec::default();\n\n let mut grpc = tonic::server::Grpc::new(codec).apply_compression_config(\n\n accept_compression_encodings,\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 84, "score": 37777.21436599732 }, { "content": " #[prost(message, optional, tag = \"4\")]\n\n pub bom: ::core::option::Option<super::structures::Bom>,\n\n #[prost(string, tag = \"5\")]\n\n pub comment: ::prost::alloc::string::String,\n\n #[prost(message, optional, tag = \"6\")]\n\n pub transaction_time: ::core::option::Option<::prost_types::Timestamp>,\n\n}\n\n#[derive(Clone, PartialEq, ::prost::Message)]\n\npub struct TransitionUnitRequest {\n\n #[prost(uint64, tag = \"1\")]\n\n pub unit_id: u64,\n\n #[prost(string, tag = \"2\")]\n\n pub reason: ::prost::alloc::string::String,\n\n #[prost(string, tag = \"3\")]\n\n pub comment: ::prost::alloc::string::String,\n\n #[prost(message, optional, tag = \"4\")]\n\n pub transaction_time: ::core::option::Option<::prost_types::Timestamp>,\n\n #[prost(enumeration = \"TransitionUnitState\", tag = \"5\")]\n\n pub transition_state: i32,\n\n #[prost(bool, tag = \"6\")]\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 85, "score": 
37777.12663942656 }, { "content": " #[prost(message, optional, tag = \"4\")]\n\n pub transaction_time: ::core::option::Option<::prost_types::Timestamp>,\n\n #[prost(bool, tag = \"5\")]\n\n pub override_route_enforcement: bool,\n\n}\n\n#[derive(Clone, PartialEq, ::prost::Message)]\n\npub struct UnitCompleteAtRouteStepRequest {\n\n #[prost(uint64, tag = \"1\")]\n\n pub unit_id: u64,\n\n #[prost(string, tag = \"2\")]\n\n pub route_step_number: ::prost::alloc::string::String,\n\n #[prost(string, tag = \"3\")]\n\n pub comment: ::prost::alloc::string::String,\n\n #[prost(message, optional, tag = \"4\")]\n\n pub transaction_time: ::core::option::Option<::prost_types::Timestamp>,\n\n #[prost(bool, tag = \"5\")]\n\n pub override_route_enforcement: bool,\n\n #[prost(string, tag = \"6\")]\n\n pub complete_reason: ::prost::alloc::string::String,\n\n}\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 86, "score": 37776.65363791409 }, { "content": " fn call(\n\n &mut self,\n\n request: tonic::Request<super::TransitionUnitRequest>,\n\n ) -> Self::Future {\n\n let inner = self.0.clone();\n\n let fut = async move { (*inner).quarantine(request).await };\n\n Box::pin(fut)\n\n }\n\n }\n\n let accept_compression_encodings = self.accept_compression_encodings;\n\n let send_compression_encodings = self.send_compression_encodings;\n\n let inner = self.inner.clone();\n\n let fut = async move {\n\n let inner = inner.0;\n\n let method = QuarantineSvc(inner);\n\n let codec = tonic::codec::ProstCodec::default();\n\n let mut grpc = tonic::server::Grpc::new(codec).apply_compression_config(\n\n accept_compression_encodings,\n\n send_compression_encodings,\n\n );\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 87, "score": 37776.547974872024 }, { "content": " &mut self,\n\n request: tonic::Request<super::TransitionUnitRequest>,\n\n ) -> Self::Future {\n\n let inner = self.0.clone();\n\n let fut = async move { (*inner).undo_ship(request).await };\n\n Box::pin(fut)\n\n }\n\n }\n\n let accept_compression_encodings = self.accept_compression_encodings;\n\n let send_compression_encodings = self.send_compression_encodings;\n\n let inner = self.inner.clone();\n\n let fut = async move {\n\n let inner = inner.0;\n\n let method = UndoShipSvc(inner);\n\n let codec = tonic::codec::ProstCodec::default();\n\n let mut grpc = tonic::server::Grpc::new(codec).apply_compression_config(\n\n accept_compression_encodings,\n\n send_compression_encodings,\n\n );\n\n let res = grpc.unary(method, req).await;\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 88, "score": 37776.28514507073 }, { "content": " &mut self,\n\n request: tonic::Request<super::AddUnitToQueueRequest>,\n\n ) -> Self::Future {\n\n let inner = self.0.clone();\n\n let fut = async move { (*inner).add_to_queue(request).await };\n\n Box::pin(fut)\n\n }\n\n }\n\n let accept_compression_encodings = self.accept_compression_encodings;\n\n let send_compression_encodings = self.send_compression_encodings;\n\n let inner = self.inner.clone();\n\n let fut = async move {\n\n let inner = inner.0;\n\n let method = AddToQueueSvc(inner);\n\n let codec = tonic::codec::ProstCodec::default();\n\n let mut grpc = tonic::server::Grpc::new(codec).apply_compression_config(\n\n accept_compression_encodings,\n\n send_compression_encodings,\n\n );\n\n let res = grpc.unary(method, req).await;\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 89, "score": 37776.24878891887 }, { "content": " pub fn with_interceptor<F>(\n\n inner: T,\n\n interceptor: F,\n\n ) -> 
UnitServiceClient<InterceptedService<T, F>>\n\n where\n\n F: tonic::service::Interceptor,\n\n T: tonic::codegen::Service<\n\n http::Request<tonic::body::BoxBody>,\n\n Response = http::Response<\n\n <T as tonic::client::GrpcService<tonic::body::BoxBody>>::ResponseBody,\n\n >,\n\n >,\n\n <T as tonic::codegen::Service<http::Request<tonic::body::BoxBody>>>::Error:\n\n Into<StdError> + Send + Sync,\n\n {\n\n UnitServiceClient::new(InterceptedService::new(inner, interceptor))\n\n }\n\n #[doc = r\" Compress requests with `gzip`.\"]\n\n #[doc = r\"\"]\n\n #[doc = r\" This requires the server to support it otherwise it might respond with an\"]\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 90, "score": 37776.19608734868 }, { "content": " type Future = BoxFuture<tonic::Response<Self::Response>, tonic::Status>;\n\n fn call(\n\n &mut self,\n\n request: tonic::Request<super::UnitChangePartRequest>,\n\n ) -> Self::Future {\n\n let inner = self.0.clone();\n\n let fut = async move { (*inner).change_part_only(request).await };\n\n Box::pin(fut)\n\n }\n\n }\n\n let accept_compression_encodings = self.accept_compression_encodings;\n\n let send_compression_encodings = self.send_compression_encodings;\n\n let inner = self.inner.clone();\n\n let fut = async move {\n\n let inner = inner.0;\n\n let method = ChangePartOnlySvc(inner);\n\n let codec = tonic::codec::ProstCodec::default();\n\n let mut grpc = tonic::server::Grpc::new(codec).apply_compression_config(\n\n accept_compression_encodings,\n\n send_compression_encodings,\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 91, "score": 37776.10618949624 }, { "content": " }\n\n let accept_compression_encodings = self.accept_compression_encodings;\n\n let send_compression_encodings = self.send_compression_encodings;\n\n let inner = self.inner.clone();\n\n let fut = async move {\n\n let inner = inner.0;\n\n let method = CreateStandAloneUnitSvc(inner);\n\n let codec = tonic::codec::ProstCodec::default();\n\n let mut grpc = tonic::server::Grpc::new(codec).apply_compression_config(\n\n accept_compression_encodings,\n\n send_compression_encodings,\n\n );\n\n let res = grpc.unary(method, req).await;\n\n Ok(res)\n\n };\n\n Box::pin(fut)\n\n }\n\n \"/UnitService.UnitService/CreateStandAloneUnits\" => {\n\n #[allow(non_camel_case_types)]\n\n struct CreateStandAloneUnitsSvc<T: UnitService>(pub Arc<T>);\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 92, "score": 37775.94270615733 }, { "content": " }\n\n let accept_compression_encodings = self.accept_compression_encodings;\n\n let send_compression_encodings = self.send_compression_encodings;\n\n let inner = self.inner.clone();\n\n let fut = async move {\n\n let inner = inner.0;\n\n let method = SaveUnitSvc(inner);\n\n let codec = tonic::codec::ProstCodec::default();\n\n let mut grpc = tonic::server::Grpc::new(codec).apply_compression_config(\n\n accept_compression_encodings,\n\n send_compression_encodings,\n\n );\n\n let res = grpc.unary(method, req).await;\n\n Ok(res)\n\n };\n\n Box::pin(fut)\n\n }\n\n \"/UnitService.UnitService/Cancel\" => {\n\n #[allow(non_camel_case_types)]\n\n struct CancelSvc<T: UnitService>(pub Arc<T>);\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 93, "score": 37775.81381522466 }, { "content": " pub part_number: ::prost::alloc::string::String,\n\n #[prost(string, tag = \"3\")]\n\n pub part_revision: ::prost::alloc::string::String,\n\n #[prost(string, tag = \"4\")]\n\n pub comment: ::prost::alloc::string::String,\n\n #[prost(message, optional, 
tag = \"5\")]\n\n pub transaction_time: ::core::option::Option<::prost_types::Timestamp>,\n\n}\n\n#[derive(Clone, PartialEq, ::prost::Message)]\n\npub struct ChangeLotRequest {\n\n #[prost(uint64, tag = \"1\")]\n\n pub unit_id: u64,\n\n #[prost(uint64, tag = \"2\")]\n\n pub new_lot_id: u64,\n\n #[prost(string, tag = \"3\")]\n\n pub comment: ::prost::alloc::string::String,\n\n #[prost(message, optional, tag = \"4\")]\n\n pub transaction_time: ::core::option::Option<::prost_types::Timestamp>,\n\n}\n\n#[derive(Clone, PartialEq, ::prost::Message)]\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 94, "score": 37775.54645309261 }, { "content": " impl<T: std::fmt::Debug> std::fmt::Debug for _Inner<T> {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \"{:?}\", self.0)\n\n }\n\n }\n\n impl<T: UnitService> tonic::transport::NamedService for UnitServiceServer<T> {\n\n const NAME: &'static str = \"UnitService.UnitService\";\n\n }\n\n}\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 95, "score": 37775.52881738459 }, { "content": "#[derive(Clone, PartialEq, ::prost::Message)]\n\npub struct CreateStandAloneUnitRequest {\n\n #[prost(string, tag = \"1\")]\n\n pub serial_number: ::prost::alloc::string::String,\n\n #[prost(string, tag = \"2\")]\n\n pub part_number: ::prost::alloc::string::String,\n\n #[prost(string, tag = \"3\")]\n\n pub part_revision: ::prost::alloc::string::String,\n\n #[prost(string, tag = \"4\")]\n\n pub bom_name: ::prost::alloc::string::String,\n\n #[prost(string, tag = \"5\")]\n\n pub bom_revision: ::prost::alloc::string::String,\n\n #[prost(uint64, tag = \"6\")]\n\n pub route_id: u64,\n\n #[prost(int32, tag = \"7\")]\n\n pub priority: i32,\n\n #[prost(string, tag = \"8\")]\n\n pub comment: ::prost::alloc::string::String,\n\n #[prost(message, optional, tag = \"9\")]\n\n pub transaction_time: ::core::option::Option<::prost_types::Timestamp>,\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 96, "score": 37775.46905374827 }, { "content": " }\n\n let accept_compression_encodings = self.accept_compression_encodings;\n\n let send_compression_encodings = self.send_compression_encodings;\n\n let inner = self.inner.clone();\n\n let fut = async move {\n\n let inner = inner.0;\n\n let method = CompleteAtRouteStepSvc(inner);\n\n let codec = tonic::codec::ProstCodec::default();\n\n let mut grpc = tonic::server::Grpc::new(codec).apply_compression_config(\n\n accept_compression_encodings,\n\n send_compression_encodings,\n\n );\n\n let res = grpc.unary(method, req).await;\n\n Ok(res)\n\n };\n\n Box::pin(fut)\n\n }\n\n \"/UnitService.UnitService/StartAtRouteStep\" => {\n\n #[allow(non_camel_case_types)]\n\n struct StartAtRouteStepSvc<T: UnitService>(pub Arc<T>);\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 97, "score": 37775.28301192534 }, { "content": " .unwrap())\n\n }),\n\n }\n\n }\n\n }\n\n impl<T: UnitService> Clone for UnitServiceServer<T> {\n\n fn clone(&self) -> Self {\n\n let inner = self.inner.clone();\n\n Self {\n\n inner,\n\n accept_compression_encodings: self.accept_compression_encodings,\n\n send_compression_encodings: self.send_compression_encodings,\n\n }\n\n }\n\n }\n\n impl<T: UnitService> Clone for _Inner<T> {\n\n fn clone(&self) -> Self {\n\n Self(self.0.clone())\n\n }\n\n }\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 98, "score": 37775.201252763094 }, { "content": " request: tonic::Request<super::ChangeSerialNumberRequest>,\n\n ) -> Self::Future {\n\n let inner = 
self.0.clone();\n\n let fut = async move { (*inner).change_serial_number(request).await };\n\n Box::pin(fut)\n\n }\n\n }\n\n let accept_compression_encodings = self.accept_compression_encodings;\n\n let send_compression_encodings = self.send_compression_encodings;\n\n let inner = self.inner.clone();\n\n let fut = async move {\n\n let inner = inner.0;\n\n let method = ChangeSerialNumberSvc(inner);\n\n let codec = tonic::codec::ProstCodec::default();\n\n let mut grpc = tonic::server::Grpc::new(codec).apply_compression_config(\n\n accept_compression_encodings,\n\n send_compression_encodings,\n\n );\n\n let res = grpc.unary(method, req).await;\n\n Ok(res)\n", "file_path": "athena-api/src/pb/unit_service.rs", "rank": 99, "score": 37773.305207304846 } ]
Rust
layer_core/src/entry.rs
Sorenon/sorenon_openxr_layer
f32597269ac70ac63b7eb7ec9c6300cd8e6eca64
use std::ops::Deref;
use std::sync::atomic::AtomicBool;
use std::{ffi::CStr, sync::Arc};

use crate::loader_interfaces::*;
use crate::wrappers::instance::{InnerInstance, InstanceWrapper, Runtime};
use crate::wrappers::XrHandle;
use crate::ToResult;

use log::{debug, error, info};
use openxr::sys as xr;
use openxr::{ExtensionSet, InstanceExtensions, Result};

pub(crate) unsafe extern "system" fn create_api_layer_instance(
    instance_info: *const xr::InstanceCreateInfo,
    layer_info: *const ApiLayerCreateInfo,
    instance: *mut xr::Instance,
) -> xr::Result {
    std::panic::catch_unwind(|| create_instance(&*instance_info, &*layer_info, &mut *instance))
        .map_or(xr::Result::ERROR_RUNTIME_FAILURE, |res| match res {
            Ok(res) => res,
            Err(res) => res,
        })
}

fn create_instance(
    instance_info: &xr::InstanceCreateInfo,
    layer_info: &ApiLayerCreateInfo,
    instance: &mut xr::Instance,
) -> Result<xr::Result> {
    let next_info = &unsafe { *layer_info.next_info };

    if unsafe { CStr::from_ptr(std::mem::transmute(next_info.layer_name.as_ptr())) }
        .to_string_lossy()
        != crate::LAYER_NAME
    {
        error!(
            "Crate instance failed: Incorrect layer_name `{}`",
            unsafe { CStr::from_ptr(std::mem::transmute(next_info.layer_name.as_ptr())) }
                .to_string_lossy()
        );
        return Err(xr::Result::ERROR_VALIDATION_FAILURE);
    }

    debug!("Initializing OpenXR Entry");
    let entry = unsafe { openxr::Entry::from_proc_addr(next_info.next_get_instance_proc_addr)? };
    let available_extensions = entry.enumerate_extensions()?;

    let disable_opengl = true;

    let result = unsafe {
        let mut needs_opengl_replacement = false;
        let mut extensions = std::slice::from_raw_parts(
            instance_info.enabled_extension_names,
            instance_info.enabled_extension_count as usize,
        )
        .iter()
        .filter_map(|ext| {
            let ext_name = CStr::from_ptr(*ext).to_str().unwrap();
            if ext_name == "XR_KHR_opengl_enable" {
                if disable_opengl {
                    needs_opengl_replacement = true;
                } else if !available_extensions.khr_opengl_enable {
                    needs_opengl_replacement = true;
                    return None;
                }
            }
            Some(*ext)
        })
        .collect::<Vec<_>>();

        if needs_opengl_replacement {
            extensions.push("XR_KHR_vulkan_enable2\0".as_ptr() as *const i8);
        }

        let mut instance_info2 = *instance_info;
        instance_info2.enabled_extension_names = extensions.as_ptr();
        instance_info2.enabled_extension_count = extensions.len() as u32;

        let mut layer_info2 = *layer_info;
        layer_info2.next_info = (*layer_info2.next_info).next;

        (next_info.next_create_api_layer_instance)(&instance_info2, &layer_info2, instance).result()
    }?;

    let mut supported_extensions = ExtensionSet::default();
    supported_extensions.khr_vulkan_enable2 = true;

    let inner = unsafe {
        InnerInstance {
            poison: AtomicBool::new(false),
            core: openxr::raw::Instance::load(&entry, *instance)?,
            exts: InstanceExtensions::load(&entry, *instance, &supported_extensions)?,
        }
    };

    let runtime_name = unsafe {
        let mut instance_properties = xr::InstanceProperties::out(std::ptr::null_mut());
        (inner.core.get_instance_properties)(*instance, instance_properties.as_mut_ptr())
            .result()?;
        let instance_properties = instance_properties.assume_init();
        CStr::from_ptr(std::mem::transmute(
            instance_properties.runtime_name.as_ptr(),
        ))
        .to_string_lossy()
    };

    let runtime = match runtime_name.deref() {
        "SteamVR/OpenXR" => Runtime::SteamVR,
        "Oculus" => Runtime::Oculus,
        "Windows Mixed Reality Runtime" => Runtime::WMR,
        "Monado(XRT) by Collabora et al" => Runtime::Monado,
        _ => Runtime::Other(runtime_name.to_string()),
    };

    let wrapper = InstanceWrapper {
        handle: *instance,
        inner: Arc::new(inner),
        systems: Default::default(),
        sessions: Default::default(),
        runtime,
    };

    xr::Instance::all_wrappers().insert(*instance, Arc::new(wrapper));

    info!("Instance created with name `{}`", unsafe {
        CStr::from_ptr(&instance_info.application_info.application_name as _).to_string_lossy()
    });

    Ok(result)
}
use std::ops::Deref;
use std::sync::atomic::AtomicBool;
use std::{ffi::CStr, sync::Arc};

use crate::loader_interfaces::*;
use crate::wrappers::instance::{InnerInstance, InstanceWrapper, Runtime};
use crate::wrappers::XrHandle;
use crate::ToResult;

use log::{debug, error, info};
use openxr::sys as xr;
use openxr::{ExtensionSet, InstanceExtensions, Result};

pub(crate) unsafe extern "system" fn create_api_layer_instance(
    instance_info: *const xr::InstanceCreateInfo,
    layer_info: *const ApiLayerCreateInfo,
    instance: *mut xr::Instance,
) -> xr::Result {
    std::panic::catch_unwind(|| create_instance(&*instance_info, &*layer_info, &mut *instance))
        .map_or(xr::Result::ERROR_RUNTIME_FAILURE, |res| match res {
            Ok(res) => res,
            Err(res) => res,
        })
}

fn create_instance(
    instance_info: &xr::InstanceCreateInfo,
    layer_info: &ApiLayerCreateInfo,
    instance: &mut xr::Instance,
) -> Result<xr::Res
dows Mixed Reality Runtime" => Runtime::WMR,
        "Monado(XRT) by Collabora et al" => Runtime::Monado,
        _ => Runtime::Other(runtime_name.to_string()),
    };

    let wrapper = InstanceWrapper {
        handle: *instance,
        inner: Arc::new(inner),
        systems: Default::default(),
        sessions: Default::default(),
        runtime,
    };

    xr::Instance::all_wrappers().insert(*instance, Arc::new(wrapper));

    info!("Instance created with name `{}`", unsafe {
        CStr::from_ptr(&instance_info.application_info.application_name as _).to_string_lossy()
    });

    Ok(result)
}
ult> {
    let next_info = &unsafe { *layer_info.next_info };

    if unsafe { CStr::from_ptr(std::mem::transmute(next_info.layer_name.as_ptr())) }
        .to_string_lossy()
        != crate::LAYER_NAME
    {
        error!(
            "Crate instance failed: Incorrect layer_name `{}`",
            unsafe { CStr::from_ptr(std::mem::transmute(next_info.layer_name.as_ptr())) }
                .to_string_lossy()
        );
        return Err(xr::Result::ERROR_VALIDATION_FAILURE);
    }

    debug!("Initializing OpenXR Entry");
    let entry = unsafe { openxr::Entry::from_proc_addr(next_info.next_get_instance_proc_addr)? };
    let available_extensions = entry.enumerate_extensions()?;

    let disable_opengl = true;

    let result = unsafe {
        let mut needs_opengl_replacement = false;
        let mut extensions = std::slice::from_raw_parts(
            instance_info.enabled_extension_names,
            instance_info.enabled_extension_count as usize,
        )
        .iter()
        .filter_map(|ext| {
            let ext_name = CStr::from_ptr(*ext).to_str().unwrap();
            if ext_name == "XR_KHR_opengl_enable" {
                if disable_opengl {
                    needs_opengl_replacement = true;
                } else if !available_extensions.khr_opengl_enable {
                    needs_opengl_replacement = true;
                    return None;
                }
            }
            Some(*ext)
        })
        .collect::<Vec<_>>();

        if needs_opengl_replacement {
            extensions.push("XR_KHR_vulkan_enable2\0".as_ptr() as *const i8);
        }

        let mut instance_info2 = *instance_info;
        instance_info2.enabled_extension_names = extensions.as_ptr();
        instance_info2.enabled_extension_count = extensions.len() as u32;

        let mut layer_info2 = *layer_info;
        layer_info2.next_info = (*layer_info2.next_info).next;

        (next_info.next_create_api_layer_instance)(&instance_info2, &layer_info2, instance).result()
    }?;

    let mut supported_extensions = ExtensionSet::default();
    supported_extensions.khr_vulkan_enable2 = true;

    let inner = unsafe {
        InnerInstance {
            poison: AtomicBool::new(false),
            core: openxr::raw::Instance::load(&entry, *instance)?,
            exts: InstanceExtensions::load(&entry, *instance, &supported_extensions)?,
        }
    };

    let runtime_name = unsafe {
        let mut instance_properties = xr::InstanceProperties::out(std::ptr::null_mut());
        (inner.core.get_instance_properties)(*instance, instance_properties.as_mut_ptr())
            .result()?;
        let instance_properties = instance_properties.assume_init();
        CStr::from_ptr(std::mem::transmute(
            instance_properties.runtime_name.as_ptr(),
        ))
        .to_string_lossy()
    };

    let runtime = match runtime_name.deref() {
        "SteamVR/OpenXR" => Runtime::SteamVR,
        "Oculus" => Runtime::Oculus,
        "Win
random
[ { "content": "type Func<H, T> = unsafe extern \"system\" fn(\n\n handle: H,\n\n format_capacity_input: u32,\n\n format_count_output: *mut u32,\n\n out: *mut T,\n\n) -> xr::Result;\n\n\n\npub unsafe fn call_enumerate<H: Copy, T: Copy>(\n\n handle: H,\n\n f: Func<H, T>,\n\n default: T,\n\n) -> Result<Vec<T>> {\n\n let mut count = 0;\n\n\n\n f(handle, 0, &mut count, std::ptr::null_mut())\n\n .result()\n\n .map_err(|err| {\n\n error!(\"1{}\", err);\n\n xr::Result::ERROR_RUNTIME_FAILURE\n\n })?;\n", "file_path": "layer_core/src/interceptors/mod.rs", "rank": 0, "score": 121746.34701185198 }, { "content": "fn get_system(\n\n instance: &InstanceWrapper,\n\n get_info: &xr::SystemGetInfo,\n\n system_id: &mut xr::SystemId,\n\n) -> Result<xr::Result> {\n\n let success = unsafe { (instance.inner.core.get_system)(instance.handle, get_info, system_id) }\n\n .result()?;\n\n\n\n instance.systems.insert(\n\n *system_id,\n\n SystemMeta {\n\n form_factor: get_info.form_factor,\n\n requirements_called: GraphicsEnableFlags::empty(),\n\n },\n\n );\n\n\n\n debug!(\n\n \"Get system called: form_factor={:?}, id={}\",\n\n get_info.form_factor,\n\n system_id.into_raw()\n\n );\n\n\n\n Ok(success)\n\n}\n\n\n", "file_path": "layer_core/src/interceptors/instance.rs", "rank": 1, "score": 95108.64373650677 }, { "content": "pub fn needed_instance_extensions() -> Vec<*const i8> {\n\n vec![\n\n vk::KhrExternalMemoryCapabilitiesFn::name().as_ptr(),\n\n vk::KhrExternalSemaphoreCapabilitiesFn::name().as_ptr(),\n\n ]\n\n}\n\n\n", "file_path": "graphics_interop/src/apis/vulkan.rs", "rank": 2, "score": 90403.65016064749 }, { "content": "fn generate_gl_bindings<W>(dest: &mut W)\n\nwhere\n\n W: Write,\n\n{\n\n let gl_registry = Registry::new(\n\n Api::Gl,\n\n (4, 6),\n\n Profile::Core,\n\n Fallbacks::None,\n\n vec![\n\n \"GL_AMD_depth_clamp_separate\",\n\n \"GL_APPLE_vertex_array_object\",\n\n \"GL_ARB_bindless_texture\",\n\n \"GL_ARB_blend_func_extended\",\n\n \"GL_ARB_buffer_storage\",\n\n \"GL_ARB_compute_shader\",\n\n \"GL_ARB_copy_buffer\",\n\n \"GL_ARB_debug_output\",\n\n \"GL_ARB_depth_texture\",\n\n \"GL_ARB_direct_state_access\",\n", "file_path": "graphics_interop/build.rs", "rank": 3, "score": 69932.92032763663 }, { "content": "fn create_session(\n\n instance: &Arc<InstanceWrapper>,\n\n create_info: &xr::SessionCreateInfo,\n\n session: &mut xr::Session,\n\n) -> Result<xr::Result> {\n\n let mut needs_compat = false;\n\n\n\n let opengl_override = true;\n\n\n\n unsafe {\n\n if !create_info.next.is_null() {\n\n let next: *const xr::BaseInStructure = std::mem::transmute(create_info.next);\n\n\n\n if !(*next).next.is_null() {\n\n todo!();\n\n }\n\n\n\n needs_compat = match (*next).ty {\n\n xr::StructureType::GRAPHICS_BINDING_D3D11_KHR => false,\n\n xr::StructureType::GRAPHICS_BINDING_D3D12_KHR => false,\n", "file_path": "layer_core/src/interceptors/instance.rs", "rank": 5, "score": 67323.23197910027 }, { "content": "pub fn needed_device_extensions() -> Vec<*const i8> {\n\n vec![\n\n vk::KhrExternalMemoryFn::name().as_ptr(),\n\n #[cfg(target_os = \"windows\")]\n\n vk::KhrExternalMemoryWin32Fn::name().as_ptr(),\n\n #[cfg(target_os = \"linux\")]\n\n vk::KhrExternalMemoryFdFn::name().as_ptr(),\n\n ]\n\n}\n\n\n\npub struct VulkanInterop {\n\n // instance: Instance,\n\n // physical_device: vk::PhysicalDevice,\n\n device_memory_properties: vk::PhysicalDeviceMemoryProperties,\n\n device: Device,\n\n\n\n #[cfg(target_os = \"windows\")]\n\n khr_external_memory: vk::KhrExternalMemoryWin32Fn,\n\n\n\n #[cfg(target_os = \"linux\")]\n", 
"file_path": "graphics_interop/src/apis/vulkan.rs", "rank": 6, "score": 66981.29653530754 }, { "content": "fn instance_proc_addr(\n\n instance: &InstanceWrapper,\n\n name: *const c_char,\n\n function: &mut Option<pfn::VoidFunction>,\n\n) -> Result<xr::Result> {\n\n let name_str = unsafe { CStr::from_ptr(name) }.to_str().map_err(|err| {\n\n warn!(\n\n \"get_instance_proc_addr passed bad name ({}): `{}`\",\n\n unsafe { CStr::from_ptr(name) }.to_string_lossy(),\n\n err,\n\n );\n\n //We can't parse the function name so just let the runtime deal with it\n\n unsafe { (instance.inner.core.get_instance_proc_addr)(instance.handle, name, function) }\n\n })?;\n\n\n\n trace!(\"get_instance_proc_addr({})\", name_str);\n\n\n\n (*function) = unsafe { INTERCEPTORS.iter().find_map(|f| f(name_str)) };\n\n\n\n if function.is_some() {\n", "file_path": "layer_core/src/interceptors/mod.rs", "rank": 7, "score": 65616.52610346957 }, { "content": "fn install() {\n\n let layer_path = layer_path();\n\n if !Path::new(&layer_path).exists() {\n\n panic!(\"Could not find layer at `{}`\\nTry building crate in release mode (cargo run --release)\", layer_path)\n\n }\n\n let path = manifest_path().unwrap();\n\n std::fs::create_dir_all(path.parent().unwrap()).unwrap();\n\n let mut file = File::create(&path).unwrap();\n\n file.write_all(json_contents(&layer_path).as_bytes())\n\n .unwrap();\n\n println!(\"Successfully installed layer in `{}`\", path.display());\n\n}\n\n\n", "file_path": "installer/src/main.rs", "rank": 8, "score": 43024.57523243555 }, { "content": "#[cfg(target_os = \"linux\")]\n\nfn main() {\n\n let mut args = std::env::args();\n\n args.next().unwrap();\n\n\n\n if let Some(arg) = args.next() {\n\n match &arg[..] {\n\n \"install\" => install(),\n\n \"uninstall\" => uninstall(),\n\n _ => panic!(\"Unexpected argument `{}`\", arg),\n\n }\n\n } else {\n\n install()\n\n }\n\n}\n\n\n", "file_path": "installer/src/main.rs", "rank": 9, "score": 43024.57523243555 }, { "content": "fn uninstall() {\n\n let path = manifest_path().unwrap();\n\n if path.exists() {\n\n std::fs::remove_file(&path).unwrap();\n\n println!(\"Successfully deleted `{}`\", path.display());\n\n } else {\n\n eprintln!(\"Layer not installed\");\n\n }\n\n}\n\n\n", "file_path": "installer/src/main.rs", "rank": 10, "score": 43024.57523243555 }, { "content": "fn main() {\n\n let dest = env::var(\"OUT_DIR\").unwrap();\n\n let dest = Path::new(&dest);\n\n\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n\n\n let mut file_output = File::create(&dest.join(\"gl_bindings.rs\")).unwrap();\n\n generate_gl_bindings(&mut file_output);\n\n #[cfg(windows)]\n\n generate_wgl_bindings(&dest);\n\n}\n\n\n", "file_path": "graphics_interop/build.rs", "rank": 11, "score": 43024.57523243555 }, { "content": "fn create_swapchain(\n\n session: &Arc<SessionWrapper>,\n\n create_info: &xr::SwapchainCreateInfo,\n\n swapchain: &mut xr::Swapchain,\n\n) -> Result<xr::Result> {\n\n let swapchain_wrapper = if let SessionGraphics::Compat {\n\n frontend, backend, ..\n\n } = &session.graphics\n\n {\n\n //TODO SRGB\n\n let format = ImageFormat::from_gl(create_info.format as u32)\n\n .ok_or(xr::Result::ERROR_SWAPCHAIN_FORMAT_UNSUPPORTED)?;\n\n\n\n assert_eq!(create_info.sample_count, 1);\n\n assert_eq!(create_info.face_count, 1);\n\n assert_eq!(create_info.mip_count, 1);\n\n assert!(create_info.array_size <= 32);\n\n assert!(create_info.create_flags.is_empty());\n\n assert!(create_info.next.is_null());\n\n\n", "file_path": "layer_core/src/interceptors/session.rs", "rank": 12, "score": 
40102.617369896994 }, { "content": "fn layer_path() -> String {\n\n let workspace_path = Path::new(env!(\"CARGO_MANIFEST_DIR\")).parent().unwrap();\n\n workspace_path\n\n .join(Path::new(\"target/release/liblayer_entry.so\"))\n\n .to_str()\n\n .unwrap()\n\n .to_owned()\n\n}\n\n\n", "file_path": "installer/src/main.rs", "rank": 13, "score": 39335.28625830059 }, { "content": "fn acquire_swapchain_image(\n\n swapchain: &SwapchainWrapper,\n\n acquire_info: &xr::SwapchainImageAcquireInfo,\n\n index: &mut u32,\n\n) -> Result<xr::Result> {\n\n let success = unsafe {\n\n (swapchain.inner.core.acquire_swapchain_image)(swapchain.handle, acquire_info, index)\n\n }\n\n .result()?;\n\n\n\n swapchain\n\n .acquired_images\n\n .lock()\n\n .unwrap()\n\n .get_mut()\n\n .push_back(*index);\n\n\n\n Ok(success)\n\n}\n\n\n", "file_path": "layer_core/src/interceptors/swapchain.rs", "rank": 14, "score": 39251.01837983633 }, { "content": "fn release_swapchain_image(\n\n swapchain: &SwapchainWrapper,\n\n release_info: &xr::SwapchainImageReleaseInfo,\n\n) -> Result<xr::Result> {\n\n let mut lock = swapchain.acquired_images.lock().unwrap();\n\n let queue = lock.get_mut();\n\n\n\n if let SwapchainGraphics::Compat {\n\n frontend, backend, ..\n\n } = &swapchain.graphics\n\n {\n\n let index = *queue.front().unwrap();\n\n //TODO better sub resource memory format transitions\n\n frontend.release_image(index);\n\n backend.release_image(index as usize);\n\n // let runtime_image = backend.runtime_images[index as usize];\n\n // let image = backend.images[index as usize];\n\n // vk_base.record_submit_commandbuffer(\n\n // vk_base.command_buffer,\n\n // vk::Fence::null(),\n", "file_path": "layer_core/src/interceptors/swapchain.rs", "rank": 15, "score": 39251.01837983633 }, { "content": "pub fn static_initialize() -> (\n\n xr::pfn::GetInstanceProcAddr,\n\n loader_interfaces::FnCreateApiLayerInstance,\n\n) {\n\n wrappers::initialize();\n\n // let url = format!(\"vscode://vadimcn.vscode-lldb/launch/config?{{'request':'attach','pid':{}}}\", std::process::id());\n\n // std::process::Command::new(\"C:\\\\Program Files\\\\VSCodium\\\\VSCodium.exe\").arg(\"--open-url\").arg(url).output().unwrap();\n\n // std::thread::sleep(std::time::Duration::from_millis(2000)); // Wait for debugger to attach\n\n\n\n (\n\n interceptors::get_instance_proc_addr,\n\n entry::create_api_layer_instance,\n\n )\n\n}\n\n\n", "file_path": "layer_core/src/lib.rs", "rank": 16, "score": 38426.59618881958 }, { "content": "fn manifest_path() -> Option<PathBuf> {\n\n dirs::home_dir().map(|home| {\n\n home.join(Path::new(\n\n \".local/share/openxr/1/api_layers/implicit.d/sorenon_layer.json\",\n\n ))\n\n })\n\n}\n\n\n", "file_path": "installer/src/main.rs", "rank": 17, "score": 36281.94891655177 }, { "content": "#[cfg(windows)]\n\nfn generate_wgl_bindings(dest: &Path) {\n\n let wgl_registry = Registry::new(\n\n Api::Wgl,\n\n (1, 0),\n\n Profile::Core,\n\n Fallbacks::None,\n\n vec![\"WGL_NV_DX_interop\", \"WGL_NV_DX_interop2\"],\n\n );\n\n\n\n let mut dest = File::create(&dest.join(\"wgl_bindings.rs\")).unwrap();\n\n\n\n (wgl_registry)\n\n .write_bindings(gl_generator::StructGenerator, &mut dest)\n\n .unwrap();\n\n}\n", "file_path": "graphics_interop/build.rs", "rank": 18, "score": 36281.94891655177 }, { "content": "fn json_contents(shared_lib_path: &str) -> String {\n\nr#\"{\n\n \"file_format_version\" : \"1.0.0\",\n\n \"api_layer\": {\n\n \"name\": \"XR_APILAYER_SORENON_compat_layer\",\n\n \"library_path\": \"\"#\n\n .to_owned()\n\n + shared_lib_path\n\n + r#\"\",\n\n 
\"api_version\" : \"1.0\",\n\n \"implementation_version\" : \"1\",\n\n \"description\" : \"Provides OpenGL over Vulkan\",\n\n \"instance_extensions\": [\n\n {\n\n \"name\": \"XR_KHR_opengl_enable\",\n\n \"extension_version\": \"10\"\n\n }\n\n ],\n\n \"disable_environment\": \"DISABLE_SORENON_OPENXR_LAYER\"\n\n }\n\n}\"#\n\n}\n", "file_path": "installer/src/main.rs", "rank": 19, "score": 33701.89420210644 }, { "content": "unsafe extern \"system\" fn xr_get_opengl_graphics_requirements_khr(\n\n instance: xr::Instance,\n\n system_id: xr::SystemId,\n\n graphics_requirements: *mut xr::GraphicsRequirementsOpenGLKHR,\n\n) -> xr::Result {\n\n instance.run(|instance| {\n\n match instance.systems.get_mut(&system_id) {\n\n Some(mut system_meta) => {\n\n system_meta.requirements_called |= GraphicsEnableFlags::OPENGL_GL;\n\n }\n\n None => return Err(xr::Result::ERROR_SYSTEM_INVALID),\n\n }\n\n\n\n (*graphics_requirements).max_api_version_supported = openxr::Version::new(4, 6, 0);\n\n (*graphics_requirements).min_api_version_supported = openxr::Version::new(4, 5, 0);\n\n\n\n Ok(xr::Result::SUCCESS)\n\n })\n\n}\n\n\n\nunsafe extern \"system\" fn xr_create_session(\n\n instance: xr::Instance,\n\n create_info: *const xr::SessionCreateInfo,\n\n session: *mut xr::Session,\n\n) -> xr::Result {\n\n instance.run(|instance| create_session(instance, &*create_info, &mut *session))\n\n}\n\n\n", "file_path": "layer_core/src/interceptors/instance.rs", "rank": 20, "score": 29135.305366803277 }, { "content": " use std::mem::transmute;\n\n use xr::pfn::*;\n\n Some(match name {\n\n \"xrGetSystem\" => transmute(xr_get_system as GetSystem),\n\n \"xrGetOpenGLGraphicsRequirementsKHR\" => {\n\n transmute(xr_get_opengl_graphics_requirements_khr as GetOpenGLGraphicsRequirementsKHR)\n\n }\n\n \"xrCreateSession\" => transmute(xr_create_session as CreateSession),\n\n _ => return None,\n\n })\n\n}\n\n\n\nunsafe extern \"system\" fn xr_get_system(\n\n instance: xr::Instance,\n\n get_info: *const xr::SystemGetInfo,\n\n system_id: *mut xr::SystemId,\n\n) -> xr::Result {\n\n instance.run(|instance| get_system(instance, &*get_info, &mut *system_id))\n\n}\n\n\n", "file_path": "layer_core/src/interceptors/instance.rs", "rank": 21, "score": 29133.93627770432 }, { "content": "use std::sync::Arc;\n\n\n\nuse ash::vk::Handle;\n\nuse log::{debug, error, info};\n\nuse openxr::{\n\n sys::{self as xr, pfn},\n\n Result,\n\n};\n\n\n\nuse crate::{\n\n graphics::{opengl::frontend::OpenGLFrontend, vulkan},\n\n wrappers::{\n\n instance::{GraphicsEnableFlags, InstanceWrapper, SystemMeta},\n\n session::{SessionGraphics, SessionWrapper},\n\n XrHandle,\n\n },\n\n ToResult,\n\n};\n\n\n\npub(super) unsafe fn get_instance_interceptors(name: &str) -> Option<pfn::VoidFunction> {\n", "file_path": "layer_core/src/interceptors/instance.rs", "rank": 22, "score": 29128.47050471391 }, { "content": " opengl_context.make_current();\n\n\n\n let vk_backend = unsafe {\n\n vulkan::VkBackend::new_openxr(instance, create_info.system_id).map_err(|_| {\n\n error!(\"Backend vulkan base creation failed\");\n\n xr::Result::ERROR_RUNTIME_FAILURE\n\n })?\n\n };\n\n\n\n debug!(\"Created vulkan backend successfully!\");\n\n\n\n let vulkan = xr::GraphicsBindingVulkanKHR {\n\n ty: xr::GraphicsBindingVulkanKHR::TYPE,\n\n next: std::ptr::null(),\n\n instance: vk_backend.instance.handle().as_raw() as _,\n\n physical_device: vk_backend.physical_device.as_raw() as _,\n\n device: vk_backend.device.handle().as_raw() as _,\n\n queue_family_index: vk_backend.graphics_queue_family,\n\n 
queue_index: 0,\n\n };\n", "file_path": "layer_core/src/interceptors/instance.rs", "rank": 23, "score": 29124.911243686052 }, { "content": "use std::sync::{atomic::AtomicBool, Arc};\n\n\n\nuse bitflags::bitflags;\n\nuse dashmap::DashMap;\n\nuse openxr::sys as xr;\n\n\n\nuse super::{session::SessionWrapper, XrHandle, XrWrapper};\n\n\n\npub struct InstanceWrapper {\n\n pub handle: xr::Instance,\n\n pub inner: Arc<InnerInstance>,\n\n pub systems: DashMap<xr::SystemId, SystemMeta>,\n\n pub sessions: DashMap<xr::Session, Arc<SessionWrapper>>,\n\n pub runtime: Runtime,\n\n}\n\n\n\npub struct InnerInstance {\n\n pub poison: AtomicBool,\n\n pub core: openxr::raw::Instance,\n\n pub exts: openxr::InstanceExtensions,\n", "file_path": "layer_core/src/wrappers/instance.rs", "rank": 24, "score": 29124.81067506722 }, { "content": " }\n\n #[cfg(windows)]\n\n todo!()\n\n }\n\n xr::StructureType::GRAPHICS_BINDING_OPENGL_XCB_KHR => todo!(),\n\n xr::StructureType::GRAPHICS_BINDING_OPENGL_WAYLAND_KHR => todo!(),\n\n xr::StructureType::GRAPHICS_BINDING_OPENGL_ES_ANDROID_KHR => todo!(),\n\n _ => unreachable!(),\n\n }\n\n };\n\n if !instance\n\n .systems\n\n .get(&create_info.system_id)\n\n .ok_or(xr::Result::ERROR_SYSTEM_INVALID)?\n\n .requirements_called\n\n .contains(GraphicsEnableFlags::OPENGL_GL)\n\n {\n\n return Err(xr::Result::ERROR_GRAPHICS_REQUIREMENTS_CALL_MISSING);\n\n }\n\n\n", "file_path": "layer_core/src/interceptors/instance.rs", "rank": 25, "score": 29124.318469636804 }, { "content": "\n\n let create_info2 = xr::SessionCreateInfo {\n\n ty: xr::SessionCreateInfo::TYPE,\n\n next: &vulkan as *const _ as _,\n\n create_flags: xr::SessionCreateFlags::EMPTY,\n\n system_id: create_info.system_id,\n\n };\n\n\n\n unsafe { (instance.inner.core.create_session)(instance.handle, &create_info2, session) }\n\n .result()?;\n\n\n\n let swapchain_formats = unsafe {\n\n super::call_enumerate(*session, instance.inner.core.enumerate_swapchain_formats, 0)?\n\n .iter()\n\n .filter_map(|backend_format| {\n\n let vulkan_format = ash::vk::Format::from_raw(*backend_format as i32);\n\n graphics_interop::ImageFormat::from_vk(vulkan_format)\n\n .map(|format| {\n\n log::info!(\"{:?}\", format);\n\n format.to_gl()\n", "file_path": "layer_core/src/interceptors/instance.rs", "rank": 26, "score": 29123.629175300015 }, { "content": " .result()?;\n\n Arc::new(SessionWrapper {\n\n handle: *session,\n\n instance: Arc::downgrade(instance),\n\n inner: instance.inner.clone(),\n\n graphics: SessionGraphics::Direct,\n\n swapchains: Default::default(),\n\n })\n\n };\n\n\n\n *session = session_wrapper.handle;\n\n xr::Session::all_wrappers().insert(*session, session_wrapper.clone());\n\n instance.sessions.insert(*session, session_wrapper);\n\n info!(\"Session created: {:?}\", *session);\n\n\n\n Ok(xr::Result::SUCCESS)\n\n}\n", "file_path": "layer_core/src/interceptors/instance.rs", "rank": 27, "score": 29119.69955879211 }, { "content": " const VULKAN2 = 0b00000100;\n\n const D3D11 = 0b00001000;\n\n const D3D12 = 0b00010000;\n\n }\n\n}\n\n\n\nimpl XrWrapper for InstanceWrapper {\n\n fn inner_instance(&self) -> &Arc<InnerInstance> {\n\n &self.inner\n\n }\n\n}\n\n\n\nimpl XrHandle for xr::Instance {\n\n type Wrapper = InstanceWrapper;\n\n\n\n fn all_wrappers<'a>() -> &'a DashMap<Self, Arc<Self::Wrapper>>\n\n where\n\n Self: Sized + std::hash::Hash,\n\n {\n\n unsafe { super::INSTANCE_WRAPPERS.as_ref().unwrap() }\n\n }\n\n}\n", "file_path": "layer_core/src/wrappers/instance.rs", "rank": 28, "score": 29117.79413764804 }, { "content": " 
xr::StructureType::GRAPHICS_BINDING_EGL_MNDX => opengl_override,\n\n xr::StructureType::GRAPHICS_BINDING_OPENGL_WIN32_KHR => opengl_override,\n\n xr::StructureType::GRAPHICS_BINDING_OPENGL_XLIB_KHR => opengl_override,\n\n xr::StructureType::GRAPHICS_BINDING_OPENGL_XCB_KHR => opengl_override,\n\n xr::StructureType::GRAPHICS_BINDING_OPENGL_WAYLAND_KHR => opengl_override,\n\n xr::StructureType::GRAPHICS_BINDING_OPENGL_ES_ANDROID_KHR => opengl_override,\n\n xr::StructureType::GRAPHICS_BINDING_VULKAN_KHR => false,\n\n _ => false,\n\n };\n\n }\n\n }\n\n\n\n let session_wrapper = if needs_compat {\n\n let opengl_context = unsafe {\n\n use crate::graphics::opengl::*;\n\n let next: *const xr::BaseInStructure = std::mem::transmute(create_info.next);\n\n match (*next).ty {\n\n xr::StructureType::GRAPHICS_BINDING_EGL_MNDX => todo!(),\n\n xr::StructureType::GRAPHICS_BINDING_OPENGL_WIN32_KHR => {\n\n #[cfg(windows)]\n", "file_path": "layer_core/src/interceptors/instance.rs", "rank": 29, "score": 29117.7105550143 }, { "content": " })\n\n .flatten()\n\n .map(|f| f as i64)\n\n })\n\n .collect::<Vec<_>>()\n\n };\n\n\n\n Arc::new(SessionWrapper {\n\n handle: *session,\n\n instance: Arc::downgrade(instance),\n\n inner: instance.inner.clone(),\n\n graphics: SessionGraphics::Compat {\n\n frontend: Arc::new(OpenGLFrontend::load(opengl_context)),\n\n backend: Arc::new(vk_backend),\n\n swapchain_formats,\n\n },\n\n swapchains: Default::default(),\n\n })\n\n } else {\n\n unsafe { (instance.inner.core.create_session)(instance.handle, create_info, session) }\n", "file_path": "layer_core/src/interceptors/instance.rs", "rank": 30, "score": 29116.085410015097 }, { "content": "}\n\n\n\npub struct SystemMeta {\n\n pub form_factor: xr::FormFactor,\n\n pub requirements_called: GraphicsEnableFlags,\n\n // pub physical_device: Option<ash::vk::PhysicalDevice>,\n\n}\n\n\n\npub enum Runtime {\n\n SteamVR,\n\n Oculus,\n\n WMR,\n\n Monado,\n\n Other(String),\n\n}\n\n\n\nbitflags! 
{\n\n pub struct GraphicsEnableFlags: u8 {\n\n const OPENGL_GL = 0b00000001;\n\n const VULKAN = 0b00000010;\n", "file_path": "layer_core/src/wrappers/instance.rs", "rank": 31, "score": 29115.539255883 }, { "content": " {\n\n let binding =\n\n &*(create_info.next as *const xr::GraphicsBindingOpenGLWin32KHR);\n\n GLContext::Wgl(platform::windows::WGL::load(binding.h_dc, binding.h_glrc))\n\n }\n\n #[cfg(target_os = \"linux\")]\n\n todo!()\n\n }\n\n xr::StructureType::GRAPHICS_BINDING_OPENGL_XLIB_KHR => {\n\n #[cfg(target_os = \"linux\")]\n\n {\n\n let binding =\n\n &*(create_info.next as *const xr::GraphicsBindingOpenGLXlibKHR);\n\n GLContext::X11(platform::linux::X11 {\n\n x_display: binding.x_display as _,\n\n visualid: binding.visualid,\n\n glx_fb_config: binding.glx_fb_config,\n\n glx_drawable: binding.glx_drawable,\n\n glx_context: binding.glx_context,\n\n })\n", "file_path": "layer_core/src/interceptors/instance.rs", "rank": 32, "score": 29113.768146612085 }, { "content": "pub trait ToResult {\n\n fn result(self) -> Result<Self, Self>\n\n where\n\n Self: Sized + Copy,\n\n {\n\n ToResult::result2(self, self)\n\n }\n\n\n\n fn result2<T>(self, ok: T) -> Result<T, Self>\n\n where\n\n Self: Sized + Copy;\n\n}\n\n\n\nimpl ToResult for xr::Result {\n\n fn result2<T>(self, ok: T) -> Result<T, Self> {\n\n if self.into_raw() >= 0 {\n\n Ok(ok)\n\n } else {\n\n Err(self)\n\n }\n\n }\n\n}\n", "file_path": "layer_core/src/lib.rs", "rank": 33, "score": 28755.499330309474 }, { "content": "pub trait XrWrapper {\n\n fn inner_instance(&self) -> &Arc<InnerInstance>;\n\n}\n\n\n", "file_path": "layer_core/src/wrappers/mod.rs", "rank": 34, "score": 26912.562365382706 }, { "content": "pub trait XrHandle {\n\n type Wrapper: XrWrapper;\n\n\n\n fn all_wrappers<'a>() -> &'a DashMap<Self, Arc<Self::Wrapper>>\n\n where\n\n Self: Sized + Hash;\n\n\n\n fn run<F>(self, f: F) -> xr::Result\n\n where\n\n Self: Sized + Copy + Hash + Eq + RefUnwindSafe,\n\n F: FnOnce(&Arc<Self::Wrapper>) -> openxr::Result<xr::Result> + UnwindSafe,\n\n {\n\n match std::panic::catch_unwind(|| {\n\n let wrapper = match Self::all_wrappers().get(&self) {\n\n Some(wrapper_ref) => wrapper_ref,\n\n None => return xr::Result::ERROR_HANDLE_INVALID,\n\n };\n\n if wrapper.inner_instance().poison.load(Ordering::Relaxed) {\n\n xr::Result::ERROR_INSTANCE_LOST\n\n } else {\n", "file_path": "layer_core/src/wrappers/mod.rs", "rank": 35, "score": 26912.562365382706 }, { "content": "}\n\n\n\npub type FnNegotiateLoaderApiLayerInterface = unsafe extern \"system\" fn(loader_info: *const XrNegotiateLoaderInfo, api_layer_name: *const i8, api_layer_request: *mut XrNegotiateApiLayerRequest) -> Result;\n\n\n\npub type FnNegotiateLoaderRuntimeInterface = unsafe extern \"system\" fn(loader_info: *const XrNegotiateLoaderInfo, runtime_request: *mut XrNegotiateRuntimeRequest) -> Result;\n\n\n\n#[repr(C)]\n\n#[derive(Copy, Clone)]\n\npub struct XrApiLayerNextInfo {\n\n pub ty: StructureType,\n\n pub struct_version: u32,\n\n pub struct_size: usize,\n\n pub layer_name: [i8; MAX_API_LAYER_NAME_SIZE], \n\n pub next_get_instance_proc_addr: pfn::GetInstanceProcAddr,\n\n pub next_create_api_layer_instance : FnCreateApiLayerInstance,\n\n pub next: *mut XrApiLayerNextInfo,\n\n}\n\n\n\npub const XR_API_LAYER_MAX_SETTINGS_PATH_SIZE: usize = 512usize;\n\n\n", "file_path": "layer_core/src/loader_interfaces.rs", "rank": 37, "score": 24.52590043989236 }, { "content": "pub mod instance;\n\nmod session;\n\nmod swapchain;\n\n\n\nuse std::{ffi::CStr, os::raw::c_char};\n\n\n\nuse log::{error, 
trace, warn};\n\nuse openxr::sys::{self as xr, pfn};\n\nuse openxr::Result;\n\n\n\nuse crate::wrappers::instance::InstanceWrapper;\n\nuse crate::wrappers::XrHandle;\n\nuse crate::ToResult;\n\n\n\npub(crate) unsafe extern \"system\" fn get_instance_proc_addr(\n\n instance: xr::Instance,\n\n name: *const c_char,\n\n function: *mut Option<pfn::VoidFunction>,\n\n) -> xr::Result {\n\n instance.run(|instance| instance_proc_addr(instance, name, &mut *function))\n\n}\n\n\n\nconst INTERCEPTORS: [unsafe fn(&str) -> Option<pfn::VoidFunction>; 3] = [\n\n instance::get_instance_interceptors,\n\n session::get_session_interceptors,\n\n swapchain::get_swapchain_interceptors,\n\n];\n\n\n", "file_path": "layer_core/src/interceptors/mod.rs", "rank": 38, "score": 22.237325254704032 }, { "content": "use openxr::sys::*;\n\n\n\npub const XR_CURRENT_LOADER_API_LAYER_VERSION: u32 = 1;\n\npub const XR_CURRENT_LOADER_RUNTIME_VERSION: u32 = 1;\n\n\n\npub type FnCreateApiLayerInstance = unsafe extern \"system\" fn(info: *const InstanceCreateInfo, api_layer_info: *const ApiLayerCreateInfo, instance: *mut Instance) -> Result;\n\n\n\n#[repr(C)]\n\n#[derive(Copy, Clone, Debug)]\n\npub struct XrNegotiateLoaderInfo {\n\n pub ty: StructureType,\n\n pub struct_version: u32,\n\n pub struct_size: usize,\n\n pub min_interface_version: u32,\n\n pub max_interface_version: u32,\n\n pub min_api_version: Version,\n\n pub max_api_version: Version,\n\n}\n\n\n\n#[repr(C)]\n", "file_path": "layer_core/src/loader_interfaces.rs", "rank": 39, "score": 21.766707259024223 }, { "content": " &xr::VulkanDeviceCreateInfoKHR {\n\n ty: xr::VulkanDeviceCreateInfoKHR::TYPE,\n\n next: std::ptr::null_mut(),\n\n system_id,\n\n create_flags: xr::VulkanDeviceCreateFlagsKHR::EMPTY,\n\n pfn_get_instance_proc_addr: std::mem::transmute(\n\n entry.static_fn().get_instance_proc_addr,\n\n ),\n\n vulkan_physical_device: physical_device.as_raw() as _,\n\n vulkan_create_info: &device_info as *const _ as _,\n\n vulkan_allocator: std::ptr::null_mut(),\n\n },\n\n &mut device as *mut _ as _,\n\n &mut vk_result as *mut _ as _,\n\n );\n\n\n\n if xr_result.result().is_err() {\n\n error!(\"OpenXR error creating vulkan device: {}\", xr_result);\n\n return Err(());\n\n } else if vk_result.result().is_err() {\n", "file_path": "layer_core/src/graphics/vulkan.rs", "rank": 40, "score": 21.051387333788053 }, { "content": " images,\n\n )\n\n .result()\n\n }\n\n })\n\n}\n\n\n\nunsafe extern \"system\" fn xr_acquire_swapchain_image(\n\n swapchain: xr::Swapchain,\n\n acquire_info: *const xr::SwapchainImageAcquireInfo,\n\n index: *mut u32,\n\n) -> xr::Result {\n\n swapchain.run(|swapchain| acquire_swapchain_image(swapchain, &*acquire_info, &mut *index))\n\n}\n\n\n\nunsafe extern \"system\" fn xr_release_swapchain_image(\n\n swapchain: xr::Swapchain,\n\n release_info: *const xr::SwapchainImageReleaseInfo,\n\n) -> xr::Result {\n\n swapchain.run(|swapchain| release_swapchain_image(swapchain, &*release_info))\n\n}\n\n\n", "file_path": "layer_core/src/interceptors/swapchain.rs", "rank": 41, "score": 20.692899536024015 }, { "content": " },\n\n &mut vk_instance as *mut _ as _,\n\n &mut vk_result as *mut _ as _,\n\n );\n\n\n\n if xr_result.result().is_err() {\n\n error!(\"OpenXR error creating vulkan instance: {}\", xr_result);\n\n return Err(());\n\n } else if vk_result.result().is_err() {\n\n error!(\"Vulkan error creating vulkan instance: {}\", vk_result);\n\n return Err(());\n\n }\n\n\n\n Instance::load(entry.static_fn(), vk_instance)\n\n } else {\n\n todo!()\n\n };\n\n\n\n let 
(debug_utils, debug_messenger) = create_debug_callback(&entry, &vk_instance).unwrap();\n\n\n", "file_path": "layer_core/src/graphics/vulkan.rs", "rank": 42, "score": 19.696143178611003 }, { "content": " // } else {\n\n // instance_info\n\n // };\n\n\n\n let vk_instance = if let Some(vulkan) = exts.khr_vulkan_enable2 {\n\n let mut vk_instance = vk::Instance::null();\n\n let mut vk_result = vk::Result::default();\n\n\n\n let xr_result = (vulkan.create_vulkan_instance)(\n\n xr_instance,\n\n &xr::VulkanInstanceCreateInfoKHR {\n\n ty: xr::VulkanInstanceCreateInfoKHR::TYPE,\n\n next: std::ptr::null_mut(),\n\n system_id,\n\n create_flags: xr::VulkanInstanceCreateFlagsKHR::EMPTY,\n\n pfn_get_instance_proc_addr: Some(std::mem::transmute(\n\n entry.static_fn().get_instance_proc_addr,\n\n )),\n\n vulkan_create_info: &instance_info as *const _ as _,\n\n vulkan_allocator: std::ptr::null(),\n", "file_path": "layer_core/src/graphics/vulkan.rs", "rank": 43, "score": 19.134602994157312 }, { "content": " let physical_device = if let Some(vulkan) = exts.khr_vulkan_enable2 {\n\n let mut physical_device = vk::PhysicalDevice::null();\n\n let result = (vulkan.get_vulkan_graphics_device2)(\n\n xr_instance,\n\n &xr::VulkanGraphicsDeviceGetInfoKHR {\n\n ty: xr::VulkanGraphicsDeviceGetInfoKHR::TYPE,\n\n next: std::ptr::null(),\n\n system_id,\n\n vulkan_instance: vk_instance.handle().as_raw() as _,\n\n },\n\n &mut physical_device as *mut _ as _,\n\n );\n\n if result.result().is_err() {\n\n error!(\"OpenXR error getting physical device: {}\", result);\n\n return Err(());\n\n }\n\n physical_device\n\n } else {\n\n todo!()\n\n };\n", "file_path": "layer_core/src/graphics/vulkan.rs", "rank": 44, "score": 19.131410081379446 }, { "content": "use std::{ffi::CStr, fs::File, sync::atomic::AtomicBool, path::Path};\n\n\n\nuse layer_core::loader_interfaces::*;\n\nuse log::{debug, error, info};\n\nuse openxr::sys as xr;\n\nuse simplelog::*;\n\n\n\n//TODO set up a safer logging system\n\nstatic LOGGER_LOADED: AtomicBool = AtomicBool::new(false);\n\n\n\n#[no_mangle]\n\n#[allow(clippy::missing_safety_doc)]\n\npub unsafe extern \"system\" fn xrNegotiateLoaderApiLayerInterface(\n\n negotiate_info: *const XrNegotiateLoaderInfo,\n\n layer_name: *const i8,\n\n layer_request: *mut XrNegotiateApiLayerRequest,\n\n) -> xr::Result {\n\n if !LOGGER_LOADED.load(std::sync::atomic::Ordering::Relaxed) {\n\n let workspace_path = Path::new(env!(\"CARGO_MANIFEST_DIR\")).parent().unwrap();\n\n\n", "file_path": "layer_entry/src/lib.rs", "rank": 46, "score": 18.64801833819258 }, { "content": " Ok(xr::Result::SUCCESS)\n\n } else {\n\n unsafe { (instance.inner.core.get_instance_proc_addr)(instance.handle, name, function) }\n\n .result()\n\n }\n\n}\n\n\n\nunsafe fn enumerate<T: Copy>(\n\n capacity: u32,\n\n count_output: *mut u32,\n\n out: *mut T,\n\n data: &[T],\n\n) -> Result<xr::Result> {\n\n if capacity != 0 {\n\n if (capacity as usize) < data.len() {\n\n return Err(xr::Result::ERROR_SIZE_INSUFFICIENT);\n\n }\n\n if out.is_null() {\n\n return Err(xr::Result::ERROR_VALIDATION_FAILURE);\n\n }\n", "file_path": "layer_core/src/interceptors/mod.rs", "rank": 47, "score": 18.54125554803752 }, { "content": " use std::mem::transmute;\n\n use xr::pfn::*;\n\n Some(match name {\n\n \"xrEnumerateSwapchainFormats\" => {\n\n transmute(xr_enumerate_swapchain_formats as EnumerateSwapchainFormats)\n\n }\n\n \"xrCreateSwapchain\" => transmute(xr_create_swapchain as CreateSwapchain),\n\n // \"xrEndFrame\" => transmute(xr_end_frame as EndFrame),\n\n _ => return None,\n\n 
})\n\n}\n\n\n\npub(crate) unsafe extern \"system\" fn xr_enumerate_swapchain_formats(\n\n session: xr::Session,\n\n format_capacity_input: u32,\n\n format_count_output: *mut u32,\n\n formats: *mut i64,\n\n) -> xr::Result {\n\n session.run(|session| {\n\n if let SessionGraphics::Compat {\n", "file_path": "layer_core/src/interceptors/session.rs", "rank": 48, "score": 17.69044153452593 }, { "content": " swapchain_formats, ..\n\n } = &session.graphics\n\n {\n\n super::enumerate(\n\n format_capacity_input,\n\n format_count_output,\n\n formats,\n\n swapchain_formats,\n\n )\n\n } else {\n\n (session.inner.core.enumerate_swapchain_formats)(\n\n session.handle,\n\n format_capacity_input,\n\n format_count_output,\n\n formats,\n\n )\n\n .result()\n\n }\n\n })\n\n}\n\n\n\npub(crate) unsafe extern \"system\" fn xr_create_swapchain(\n\n session: xr::Session,\n\n create_info: *const xr::SwapchainCreateInfo,\n\n swapchain: *mut xr::Swapchain,\n\n) -> xr::Result {\n\n session.run(|session| create_swapchain(session, &*create_info, &mut *swapchain))\n\n}\n\n\n", "file_path": "layer_core/src/interceptors/session.rs", "rank": 50, "score": 16.933290499466878 }, { "content": " let exts = &xr_instance.inner.exts;\n\n let xr_instance = xr_instance.handle;\n\n\n\n let _requirements = if let Some(vulkan) = exts.khr_vulkan_enable2 {\n\n let mut reqs =\n\n xr::GraphicsRequirementsVulkanKHR::out(std::ptr::null_mut()).assume_init();\n\n let result =\n\n (vulkan.get_vulkan_graphics_requirements2)(xr_instance, system_id, &mut reqs);\n\n if result.result().is_err() {\n\n error!(\"get_vulkan_graphics_requirements2 returned: {}\", result);\n\n return Err(());\n\n }\n\n reqs\n\n } else {\n\n todo!()\n\n };\n\n\n\n //TODO actually check requirements\n\n\n\n let layer_names = [CStr::from_bytes_with_nul_unchecked(\n", "file_path": "layer_core/src/graphics/vulkan.rs", "rank": 51, "score": 16.743212684381643 }, { "content": "\n\n let mut vec = vec![default; count as usize];\n\n\n\n f(handle, count, &mut count, vec.as_mut_ptr())\n\n .result()\n\n .map_err(|err| {\n\n error!(\"2{} {}\", count, err);\n\n xr::Result::ERROR_RUNTIME_FAILURE\n\n })?;\n\n Ok(vec)\n\n}\n", "file_path": "layer_core/src/interceptors/mod.rs", "rank": 52, "score": 15.287079981242949 }, { "content": " );\n\n xr::Result::ERROR_INITIALIZATION_FAILED\n\n } else if (*negotiate_info).min_interface_version > XR_CURRENT_LOADER_API_LAYER_VERSION\n\n || (*negotiate_info).max_interface_version < XR_CURRENT_LOADER_API_LAYER_VERSION\n\n || (*negotiate_info).min_api_version > xr::CURRENT_API_VERSION\n\n || (*negotiate_info).max_api_version < xr::CURRENT_API_VERSION\n\n {\n\n error!(\n\n \"Layer negotiation failed: Incompatible negotiate info {:#?}\",\n\n (*negotiate_info)\n\n );\n\n xr::Result::ERROR_INITIALIZATION_FAILED\n\n } else {\n\n let (get_instance_proc_addr, create_api_layer_instance) = layer_core::static_initialize();\n\n\n\n (*layer_request).layer_interface_version = XR_CURRENT_LOADER_API_LAYER_VERSION;\n\n (*layer_request).layer_api_version = xr::CURRENT_API_VERSION;\n\n (*layer_request).get_instance_proc_addr = Some(get_instance_proc_addr);\n\n (*layer_request).create_api_layer_instance = Some(create_api_layer_instance);\n\n\n\n debug!(\"Negotiation complete\");\n\n\n\n xr::Result::SUCCESS\n\n }\n\n}\n", "file_path": "layer_entry/src/lib.rs", "rank": 53, "score": 14.635175078623835 }, { "content": "use openxr::{\n\n sys::{self as xr, pfn},\n\n Result,\n\n};\n\n\n\nuse crate::{\n\n wrappers::{\n\n swapchain::{SwapchainGraphics, SwapchainWrapper},\n\n 
XrHandle,\n\n },\n\n ToResult,\n\n};\n\n\n\npub(super) unsafe fn get_swapchain_interceptors(name: &str) -> Option<pfn::VoidFunction> {\n\n use std::mem::transmute;\n\n use xr::pfn::*;\n\n Some(match name {\n\n \"xrEnumerateSwapchainImages\" => {\n\n transmute(xr_enumerate_swapchain_images as EnumerateSwapchainImages)\n\n }\n", "file_path": "layer_core/src/interceptors/swapchain.rs", "rank": 54, "score": 14.004865986412582 }, { "content": "pub mod instance;\n\npub mod session;\n\npub mod swapchain;\n\n\n\nuse std::{\n\n hash::Hash,\n\n panic::{RefUnwindSafe, UnwindSafe},\n\n sync::{atomic::Ordering, Arc},\n\n};\n\n\n\nuse dashmap::DashMap;\n\nuse openxr::sys as xr;\n\n\n\nuse self::instance::InnerInstance;\n\n\n\nstatic mut INSTANCE_WRAPPERS: Option<DashMap<xr::Instance, Arc<instance::InstanceWrapper>>> = None;\n\nstatic mut SESSION_WRAPPERS: Option<DashMap<xr::Session, Arc<session::SessionWrapper>>> = None;\n\nstatic mut SWAPCHAIN_WRAPPERS: Option<DashMap<xr::Swapchain, Arc<swapchain::SwapchainWrapper>>> = None;\n\n\n\npub(crate) fn initialize() {\n\n unsafe {\n\n if INSTANCE_WRAPPERS.is_none() {\n\n INSTANCE_WRAPPERS = Some(DashMap::new());\n\n SESSION_WRAPPERS = Some(DashMap::new());\n\n SWAPCHAIN_WRAPPERS = Some(DashMap::new());\n\n }\n\n }\n\n}\n\n\n", "file_path": "layer_core/src/wrappers/mod.rs", "rank": 56, "score": 13.684045023025172 }, { "content": " &self,\n\n capacity: u32,\n\n count_output: *mut u32,\n\n out: *mut openxr::sys::SwapchainImageBaseHeader,\n\n ) -> openxr::Result<xr::Result> {\n\n if capacity != 0 {\n\n if (capacity as usize) < self.images.len() {\n\n return Err(xr::Result::ERROR_SIZE_INSUFFICIENT);\n\n }\n\n if out.is_null() {\n\n return Err(xr::Result::ERROR_VALIDATION_FAILURE);\n\n }\n\n let slice: &mut [xr::SwapchainImageOpenGLKHR] =\n\n std::slice::from_raw_parts_mut(std::mem::transmute(out), self.images.len());\n\n for (i, image_out) in slice.iter_mut().enumerate() {\n\n if image_out.ty != xr::SwapchainImageOpenGLKHR::TYPE {\n\n return Err(xr::Result::ERROR_VALIDATION_FAILURE);\n\n }\n\n image_out.image = self.images[i];\n\n }\n", "file_path": "layer_core/src/graphics/opengl/frontend.rs", "rank": 58, "score": 13.25665630377329 }, { "content": "use core::slice;\n\nuse std::{borrow::Cow, ffi::CStr, io::Cursor, os::raw::c_char};\n\n\n\nuse ash::{\n\n extensions::ext::DebugUtils,\n\n prelude::VkResult,\n\n vk::{self, DebugUtilsMessengerEXT, Handle},\n\n Device, Entry, Instance,\n\n};\n\nuse graphics_interop::apis::vulkan::VulkanInterop;\n\nuse log::error;\n\nuse openxr::sys as xr;\n\n\n\nuse crate::{wrappers::instance::InstanceWrapper, ToResult};\n\n\n\npub struct VkBackend {\n\n pub entry: Entry,\n\n pub instance: Instance,\n\n pub device: Device,\n\n pub debug_utils: DebugUtils,\n", "file_path": "layer_core/src/graphics/vulkan.rs", "rank": 59, "score": 13.18620699222134 }, { "content": " match f(wrapper.value()) {\n\n Ok(res) => res,\n\n Err(res) => res,\n\n }\n\n }\n\n }) {\n\n Ok(res) => res,\n\n Err(_) => {\n\n if let Some(wrapper) = Self::all_wrappers().get(&self) {\n\n wrapper\n\n .inner_instance()\n\n .poison\n\n .store(true, Ordering::Relaxed);\n\n }\n\n xr::Result::ERROR_INSTANCE_LOST\n\n }\n\n }\n\n }\n\n}\n", "file_path": "layer_core/src/wrappers/mod.rs", "rank": 60, "score": 13.079969097081374 }, { "content": "use std::sync::Arc;\n\n\n\nuse graphics_interop::ImageFormat;\n\nuse log::info;\n\nuse openxr::sys as xr;\n\nuse openxr::Result;\n\n\n\nuse crate::graphics::vulkan_backend::SwapchainBackendVulkan;\n\nuse 
crate::wrappers::swapchain::SwapchainBackend;\n\nuse crate::wrappers::swapchain::SwapchainGraphics;\n\nuse crate::wrappers::swapchain::SwapchainWrapper;\n\nuse crate::{\n\n wrappers::{\n\n session::{SessionGraphics, SessionWrapper},\n\n XrHandle,\n\n },\n\n ToResult,\n\n};\n\n\n\npub(super) unsafe fn get_session_interceptors(name: &str) -> Option<xr::pfn::VoidFunction> {\n", "file_path": "layer_core/src/interceptors/session.rs", "rank": 61, "score": 12.721845149674891 }, { "content": " .handle_type(vk::ExternalMemoryHandleTypeFlags::OPAQUE_WIN32)\n\n .build();\n\n\n\n self.khr_external_memory\n\n .get_memory_win32_handle_khr(self.device.handle(), &win32_handle_info, &mut handle)\n\n .result()?;\n\n Ok(handle)\n\n }\n\n\n\n #[cfg(target_os = \"linux\")]\n\n unsafe {\n\n let mut handle = 0;\n\n\n\n let handle_info = vk::MemoryGetFdInfoKHR::builder()\n\n .memory(memory)\n\n .handle_type(vk::ExternalMemoryHandleTypeFlags::OPAQUE_FD)\n\n .build();\n\n\n\n self.khr_external_memory\n\n .get_memory_fd_khr(self.device.handle(), &handle_info, &mut handle)\n", "file_path": "graphics_interop/src/apis/vulkan.rs", "rank": 62, "score": 12.712284642707356 }, { "content": " let queue_info = vk::DeviceQueueCreateInfo::builder()\n\n .queue_family_index(graphics_queue_family)\n\n .queue_priorities(&[1.0]);\n\n\n\n let features = vk::PhysicalDeviceFeatures {\n\n multi_viewport: vk::TRUE,\n\n ..Default::default()\n\n };\n\n\n\n let device_info = vk::DeviceCreateInfo::builder()\n\n .queue_create_infos(slice::from_ref(&queue_info))\n\n .enabled_extension_names(&device_extension_names[..])\n\n .enabled_features(&features);\n\n\n\n let device = if let Some(vulkan) = exts.khr_vulkan_enable2 {\n\n let mut device = vk::Device::null();\n\n let mut vk_result = vk::Result::default();\n\n\n\n let xr_result = (vulkan.create_vulkan_device)(\n\n xr_instance,\n", "file_path": "layer_core/src/graphics/vulkan.rs", "rank": 63, "score": 12.657936702059661 }, { "content": " khr_external_memory: vk::KhrExternalMemoryFdFn,\n\n}\n\n\n\nimpl VulkanInterop {\n\n pub fn new(instance: &Instance, physical_device: vk::PhysicalDevice, device: &Device) -> Self {\n\n let device_memory_properties =\n\n unsafe { instance.get_physical_device_memory_properties(physical_device) };\n\n\n\n let khr_external_memory = unsafe {\n\n let load_fn = |name: &std::ffi::CStr| {\n\n std::mem::transmute(instance.get_device_proc_addr(device.handle(), name.as_ptr()))\n\n };\n\n #[cfg(target_os = \"windows\")]\n\n {\n\n vk::KhrExternalMemoryWin32Fn::load(load_fn)\n\n }\n\n #[cfg(target_os = \"linux\")]\n\n {\n\n vk::KhrExternalMemoryFdFn::load(load_fn)\n\n }\n", "file_path": "graphics_interop/src/apis/vulkan.rs", "rank": 64, "score": 12.49153246079915 }, { "content": " \"xrAcquireSwapchainImage\" => transmute(xr_acquire_swapchain_image as AcquireSwapchainImage),\n\n \"xrReleaseSwapchainImage\" => transmute(xr_release_swapchain_image as ReleaseSwapchainImage),\n\n _ => return None,\n\n })\n\n}\n\n\n\nunsafe extern \"system\" fn xr_enumerate_swapchain_images(\n\n swapchain: xr::Swapchain,\n\n image_capacity_input: u32,\n\n image_count_output: *mut u32,\n\n images: *mut xr::SwapchainImageBaseHeader,\n\n) -> xr::Result {\n\n swapchain.run(|swapchain| {\n\n if let SwapchainGraphics::Compat { frontend, .. 
} = &swapchain.graphics {\n\n frontend.enumerate_images(image_capacity_input, image_count_output, images)\n\n } else {\n\n (swapchain.inner.core.enumerate_swapchain_images)(\n\n swapchain.handle,\n\n image_capacity_input,\n\n image_count_output,\n", "file_path": "layer_core/src/interceptors/swapchain.rs", "rank": 65, "score": 12.23189239584071 }, { "content": " let create_info2 = xr::SwapchainCreateInfo {\n\n ty: xr::SwapchainCreateInfo::TYPE,\n\n next: std::ptr::null(),\n\n create_flags: xr::SwapchainCreateFlags::EMPTY,\n\n usage_flags: xr::SwapchainUsageFlags::COLOR_ATTACHMENT,\n\n format: format\n\n .to_vk()\n\n .ok_or(xr::Result::ERROR_SWAPCHAIN_FORMAT_UNSUPPORTED)?\n\n .as_raw() as i64,\n\n sample_count: 1,\n\n width: create_info.width,\n\n height: create_info.height,\n\n face_count: 1,\n\n array_size: create_info.array_size,\n\n mip_count: 1,\n\n };\n\n\n\n unsafe {\n\n (session.inner.core.create_swapchain)(session.handle, &create_info2, swapchain)\n\n .result()?\n", "file_path": "layer_core/src/interceptors/session.rs", "rank": 66, "score": 12.157966089698743 }, { "content": " .ImportMemoryFdEXT(mem_obj, size, bindings::HANDLE_TYPE_OPAQUE_FD_EXT, handle);\n\n }\n\n if mem_obj == 0 {\n\n Err(unsafe { self.gl.GetError() })\n\n } else {\n\n Ok(mem_obj)\n\n }\n\n }\n\n\n\n pub fn import_image(\n\n &self,\n\n create_info: &ImageCreateInfo,\n\n mem_obj: u32,\n\n offset: u64,\n\n ) -> GlResult<u32> {\n\n let mut texture = 0;\n\n\n\n if create_info.layers == 1 {\n\n unsafe {\n\n self.gl\n", "file_path": "graphics_interop/src/apis/opengl.rs", "rank": 67, "score": 12.047543299948476 }, { "content": "use core::slice;\n\nuse std::sync::Arc;\n\n\n\nuse ash::{\n\n prelude::VkResult,\n\n vk::{self, Handle},\n\n};\n\nuse openxr::sys as xr;\n\n\n\nuse crate::wrappers::{instance::InnerInstance, swapchain::SwapchainBackend};\n\n\n\nuse super::vulkan::VkBackend;\n\n\n\npub struct SwapchainBackendVulkan {\n\n vk_backend: Arc<VkBackend>,\n\n images: Vec<vk::Image>,\n\n memory: Vec<(vk::DeviceMemory, u64)>,\n\n runtime_images: Vec<vk::Image>,\n\n pipeline: vk::Pipeline,\n\n pipeline_layout: vk::PipelineLayout,\n", "file_path": "layer_core/src/graphics/vulkan_backend.rs", "rank": 68, "score": 11.850926194867851 }, { "content": "use std::{\n\n cell::RefCell,\n\n collections::VecDeque,\n\n sync::{Arc, Mutex, Weak},\n\n};\n\n\n\nuse dashmap::DashMap;\n\nuse log::error;\n\nuse openxr::sys as xr;\n\n\n\nuse super::{instance::InnerInstance, session::SessionWrapper, XrHandle, XrWrapper};\n\n\n\npub struct SwapchainWrapper {\n\n pub handle: xr::Swapchain,\n\n pub session: Weak<SessionWrapper>,\n\n pub inner: Arc<InnerInstance>,\n\n pub width: u32,\n\n pub height: u32,\n\n pub layers: u32,\n\n pub graphics: SwapchainGraphics,\n", "file_path": "layer_core/src/wrappers/swapchain.rs", "rank": 69, "score": 11.735657287953035 }, { "content": " render_pass: vk::RenderPass,\n\n image_views: Vec<vk::ImageView>,\n\n runtime_image_views: Vec<vk::ImageView>,\n\n framebuffers: Vec<vk::Framebuffer>,\n\n command_buffers: Vec<vk::CommandBuffer>,\n\n descriptor_pool: vk::DescriptorPool,\n\n}\n\n\n\nimpl SwapchainBackendVulkan {\n\n pub fn load(\n\n swapchain: xr::Swapchain,\n\n inner: &InnerInstance,\n\n vk_backend: Arc<VkBackend>,\n\n image_info: &graphics_interop::ImageCreateInfo,\n\n ) -> Self {\n\n let runtime_images = unsafe {\n\n crate::interceptors::call_enumerate(\n\n swapchain,\n\n std::mem::transmute(inner.core.enumerate_swapchain_images),\n\n xr::SwapchainImageVulkanKHR::out(std::ptr::null_mut()).assume_init(),\n", 
"file_path": "layer_core/src/graphics/vulkan_backend.rs", "rank": 70, "score": 11.564485422780292 }, { "content": "\n\n println!(\n\n \"{:?}:\\n{:?} [{} ({})] : {}\\n\",\n\n message_severity,\n\n message_type,\n\n message_id_name,\n\n &message_id_number.to_string(),\n\n message,\n\n );\n\n\n\n vk::FALSE\n\n}\n\n\n\nunsafe fn create_debug_callback(\n\n entry: &Entry,\n\n instance: &Instance,\n\n) -> VkResult<(DebugUtils, DebugUtilsMessengerEXT)> {\n\n let debug_info = vk::DebugUtilsMessengerCreateInfoEXT::builder()\n\n .message_severity(\n\n vk::DebugUtilsMessageSeverityFlagsEXT::ERROR\n", "file_path": "layer_core/src/graphics/vulkan.rs", "rank": 71, "score": 11.336993209631892 }, { "content": " let slice = std::slice::from_raw_parts_mut(out, data.len());\n\n slice.copy_from_slice(data);\n\n }\n\n if count_output.is_null() {\n\n return Err(xr::Result::ERROR_VALIDATION_FAILURE);\n\n }\n\n *count_output = data.len() as u32;\n\n Ok(xr::Result::SUCCESS)\n\n}\n\n\n", "file_path": "layer_core/src/interceptors/mod.rs", "rank": 72, "score": 11.213795169829886 }, { "content": " }\n\n if count_output.is_null() {\n\n return Err(xr::Result::ERROR_VALIDATION_FAILURE);\n\n }\n\n *count_output = self.images.len() as u32;\n\n Ok(xr::Result::SUCCESS)\n\n }\n\n\n\n fn release_image(&self, _: u32) {\n\n self.opengl.context.make_current();\n\n unsafe {\n\n //We need to wait for all OpenGL calls to finish execution before copying the image\n\n self.opengl.interop.gl.Finish();\n\n }\n\n }\n\n\n\n fn destroy(&self) {\n\n unsafe {\n\n self.opengl\n\n .interop\n", "file_path": "layer_core/src/graphics/opengl/frontend.rs", "rank": 73, "score": 10.791125580184616 }, { "content": "use std::sync::{Arc, Weak};\n\n\n\nuse dashmap::DashMap;\n\nuse openxr::sys as xr;\n\n\n\nuse crate::{graphics::{vulkan, opengl::frontend::OpenGLFrontend}};\n\n\n\nuse super::{\n\n instance::{InnerInstance, InstanceWrapper},\n\n swapchain::SwapchainWrapper,\n\n XrHandle, XrWrapper,\n\n};\n\n\n\npub struct SessionWrapper {\n\n pub handle: xr::Session,\n\n pub instance: Weak<InstanceWrapper>,\n\n pub inner: Arc<InnerInstance>,\n\n pub graphics: SessionGraphics,\n\n pub swapchains: DashMap<xr::Swapchain, Arc<SwapchainWrapper>>,\n\n}\n", "file_path": "layer_core/src/wrappers/session.rs", "rank": 74, "score": 10.61873318468621 }, { "content": " winapi::um::handleapi::CloseHandle(handle);\n\n }\n\n #[cfg(target_os = \"linux\")]\n\n unsafe {\n\n if libc::close(handle) == -1 {\n\n error!(\n\n \"Failed to close swapchain fd `{}` with error `{:X}`\",\n\n handle,\n\n *libc::__errno_location()\n\n )\n\n }\n\n }\n\n }\n\n backend.destroy();\n\n }\n\n }\n\n}\n\n\n\nimpl XrWrapper for SwapchainWrapper {\n\n fn inner_instance(&self) -> &Arc<InnerInstance> {\n", "file_path": "layer_core/src/wrappers/swapchain.rs", "rank": 75, "score": 10.574672768750972 }, { "content": " pub debug_messenger: vk::DebugUtilsMessengerEXT,\n\n\n\n pub physical_device: vk::PhysicalDevice,\n\n pub device_memory_properties: vk::PhysicalDeviceMemoryProperties,\n\n pub graphics_queue_family: u32,\n\n pub graphics_queue: vk::Queue,\n\n\n\n pub command_pool: vk::CommandPool,\n\n pub nearest_sampler: vk::Sampler,\n\n pub descriptor_set_layout: vk::DescriptorSetLayout,\n\n\n\n pub interop: VulkanInterop,\n\n}\n\n\n\nimpl VkBackend {\n\n pub unsafe fn new_openxr(\n\n xr_instance: &InstanceWrapper,\n\n system_id: xr::SystemId,\n\n ) -> Result<VkBackend, ()> {\n\n let entry = Entry::load().unwrap();\n", "file_path": "layer_core/src/graphics/vulkan.rs", "rank": 76, "score": 10.41002941348749 
}, { "content": "#[repr(C)]\n\n#[derive(Copy, Clone)]\n\npub struct ApiLayerCreateInfo {\n\n pub ty: StructureType,\n\n pub struct_version: u32,\n\n pub struct_size: usize,\n\n pub loader_instance: *const (),\n\n pub settings_file_location: [i8; XR_API_LAYER_MAX_SETTINGS_PATH_SIZE],\n\n pub next_info : *mut XrApiLayerNextInfo,\n\n}", "file_path": "layer_core/src/loader_interfaces.rs", "rank": 77, "score": 10.202756965281594 }, { "content": "#[derive(Copy, Clone)]\n\npub struct XrNegotiateApiLayerRequest {\n\n pub ty: StructureType,\n\n pub struct_version: u32,\n\n pub struct_size: usize,\n\n pub layer_interface_version: u32,\n\n pub layer_api_version: Version,\n\n pub get_instance_proc_addr: Option<pfn::GetInstanceProcAddr>,\n\n pub create_api_layer_instance : Option<FnCreateApiLayerInstance>,\n\n}\n\n\n\n#[repr(C)]\n\n#[derive(Copy, Clone)]\n\npub struct XrNegotiateRuntimeRequest {\n\n pub ty: StructureType,\n\n pub struct_version: u32,\n\n pub struct_size: usize,\n\n pub runtime_interface_version: u32,\n\n pub runtime_api_version: Version,\n\n pub get_instance_proc_addr: Option<pfn::GetInstanceProcAddr>,\n", "file_path": "layer_core/src/loader_interfaces.rs", "rank": 78, "score": 10.080538250975172 }, { "content": " flags: vk::MemoryPropertyFlags,\n\n ) -> Option<u32> {\n\n self.device_memory_properties.memory_types\n\n [..self.device_memory_properties.memory_type_count as _]\n\n .iter()\n\n .enumerate()\n\n .find(|(index, memory_type)| {\n\n (1 << index) & memory_req.memory_type_bits != 0\n\n && memory_type.property_flags & flags == flags\n\n })\n\n .map(|(index, _memory_type)| index as _)\n\n }\n\n\n\n pub fn get_external_memory_handle(&self, memory: vk::DeviceMemory) -> VkResult<InteropHandle> {\n\n #[cfg(target_os = \"windows\")]\n\n unsafe {\n\n let mut handle = std::ptr::null_mut();\n\n\n\n let win32_handle_info = vk::MemoryGetWin32HandleInfoKHR::builder()\n\n .memory(memory)\n", "file_path": "graphics_interop/src/apis/vulkan.rs", "rank": 80, "score": 9.601114838665566 }, { "content": " )\n\n }\n\n .unwrap()\n\n .into_iter()\n\n .map(|image| vk::Image::from_raw(image.image))\n\n .collect::<Vec<_>>();\n\n\n\n let mut memory = Vec::with_capacity(runtime_images.len());\n\n let mut images = Vec::with_capacity(runtime_images.len());\n\n\n\n let cb_memory_barrier = unsafe {\n\n *vk_backend\n\n .device\n\n .allocate_command_buffers(&vk::CommandBufferAllocateInfo {\n\n command_pool: vk_backend.command_pool,\n\n level: vk::CommandBufferLevel::PRIMARY,\n\n command_buffer_count: 1,\n\n ..Default::default()\n\n })\n\n .unwrap()\n", "file_path": "layer_core/src/graphics/vulkan_backend.rs", "rank": 81, "score": 9.140747629844528 }, { "content": " };\n\n\n\n unsafe { self.device.create_image(&create_info, None) }\n\n }\n\n\n\n pub fn alloc_and_bind_external_image(\n\n &self,\n\n image: vk::Image,\n\n ) -> VkResult<(vk::DeviceMemory, u64)> {\n\n let texture_memory_req = unsafe { self.device.get_image_memory_requirements(image) };\n\n let texture_memory_index = self\n\n .find_memory_type_index(&texture_memory_req, vk::MemoryPropertyFlags::DEVICE_LOCAL)\n\n .expect(\"Unable to find suitable memory index for depth image.\");\n\n\n\n #[cfg(windows)]\n\n let export_mem_alloc_info = vk::ExportMemoryAllocateInfo::builder()\n\n .handle_types(vk::ExternalMemoryHandleTypeFlags::OPAQUE_WIN32)\n\n .build();\n\n #[cfg(target_os = \"linux\")]\n\n let export_mem_alloc_info = vk::ExportMemoryAllocateInfo::builder()\n", "file_path": "graphics_interop/src/apis/vulkan.rs", "rank": 82, "score": 9.052776900287913 }, { 
"content": " device.create_command_pool(&pool_create_info, None)\n\n}\n\n\n\nconst VERTEX: &[u8] = include_bytes!(\"../../../shaders/vert.spv\");\n\nconst FRAGMENT: &[u8] = include_bytes!(\"../../../shaders/frag.spv\");\n\n\n\nunsafe fn create_shader_module(device: &Device, code_bytes: &[u8]) -> VkResult<vk::ShaderModule> {\n\n let shader_code = ash::util::read_spv(&mut Cursor::new(code_bytes)).unwrap();\n\n\n\n let create_info = vk::ShaderModuleCreateInfo::builder().code(&shader_code);\n\n device.create_shader_module(&create_info, None)\n\n}\n\n\n\nunsafe fn create_render_pass(\n\n device: &Device,\n\n format: vk::Format,\n\n sample_count: vk::SampleCountFlags,\n\n) -> VkResult<vk::RenderPass> {\n\n let color_attachment = vk::AttachmentDescription::builder()\n\n .format(format)\n", "file_path": "layer_core/src/graphics/vulkan.rs", "rank": 83, "score": 8.587889193047781 }, { "content": " };\n\n\n\n Self {\n\n device_memory_properties,\n\n device: device.clone(),\n\n khr_external_memory,\n\n }\n\n }\n\n\n\n pub fn create_external_image(\n\n &self,\n\n image_create_info: &crate::ImageCreateInfo,\n\n ) -> VkResult<vk::Image> {\n\n let export_info = vk::ExternalMemoryImageCreateInfo {\n\n #[cfg(target_os = \"windows\")]\n\n handle_types: vk::ExternalMemoryHandleTypeFlags::OPAQUE_WIN32,\n\n #[cfg(target_os = \"linux\")]\n\n handle_types: vk::ExternalMemoryHandleTypeFlags::OPAQUE_FD,\n\n ..Default::default()\n\n };\n", "file_path": "graphics_interop/src/apis/vulkan.rs", "rank": 84, "score": 8.57627810111401 }, { "content": " b\"VK_LAYER_KHRONOS_validation\\0\",\n\n )];\n\n let layers_names_raw: Vec<*const c_char> = layer_names\n\n .iter()\n\n .map(|raw_name| raw_name.as_ptr())\n\n .collect();\n\n\n\n let instance_extensions = [DebugUtils::name().as_ptr()];\n\n\n\n let app_info = vk::ApplicationInfo::builder()\n\n .application_name(CStr::from_bytes_with_nul_unchecked(b\"SorenonOpenXRLayer\\0\"))\n\n .application_version(0)\n\n .api_version(vk::make_api_version(0, 1, 1, 0));\n\n\n\n let instance_info = vk::InstanceCreateInfo::builder()\n\n .application_info(&app_info)\n\n .enabled_extension_names(&instance_extensions);\n\n\n\n // let instance_info = if option_env!(\"SORENON_LAYER_VK_VALIDATION\").is_some() {\n\n // instance_info.enabled_layer_names(&layers_names_raw)\n", "file_path": "layer_core/src/graphics/vulkan.rs", "rank": 85, "score": 8.468761123617774 }, { "content": " pub fn new<F: Fn(&str) -> *const c_void>(f: F) -> Self {\n\n Self {\n\n gl: bindings::Gl::load_with(f),\n\n }\n\n }\n\n\n\n pub fn import_memory(&self, handle: InteropHandle, size: u64) -> GlResult<u32> {\n\n let mut mem_obj = 0;\n\n\n\n unsafe {\n\n self.gl.CreateMemoryObjectsEXT(1, &mut mem_obj);\n\n #[cfg(target_os = \"windows\")]\n\n self.gl.ImportMemoryWin32HandleEXT(\n\n mem_obj,\n\n size,\n\n bindings::HANDLE_TYPE_OPAQUE_WIN32_EXT,\n\n handle,\n\n );\n\n #[cfg(target_os = \"linux\")]\n\n self.gl\n", "file_path": "graphics_interop/src/apis/opengl.rs", "rank": 86, "score": 8.278113287530804 }, { "content": "\n\n let device_memory_properties =\n\n vk_instance.get_physical_device_memory_properties(physical_device);\n\n\n\n let graphics_queue_family = vk_instance\n\n .get_physical_device_queue_family_properties(physical_device)\n\n .into_iter()\n\n .enumerate()\n\n .find_map(|(queue_family_index, info)| {\n\n if info.queue_flags.contains(vk::QueueFlags::GRAPHICS) {\n\n Some(queue_family_index as u32)\n\n } else {\n\n None\n\n }\n\n })\n\n .expect(\"Vulkan device has no graphics queue\");\n\n\n\n let mut 
device_extension_names = graphics_interop::apis::vulkan::needed_device_extensions();\n\n device_extension_names.push(vk::ExtShaderViewportIndexLayerFn::name().as_ptr());\n\n\n", "file_path": "layer_core/src/graphics/vulkan.rs", "rank": 87, "score": 8.242729115192521 }, { "content": " (session.inner.core.create_swapchain)(session.handle, create_info, swapchain)\n\n .result()?;\n\n }\n\n Arc::new(SwapchainWrapper {\n\n handle: *swapchain,\n\n session: Arc::downgrade(session),\n\n inner: session.inner.clone(),\n\n graphics: SwapchainGraphics::Direct,\n\n acquired_images: Default::default(),\n\n width: create_info.width,\n\n height: create_info.height,\n\n layers: create_info.array_size,\n\n })\n\n };\n\n\n\n xr::Swapchain::all_wrappers().insert(*swapchain, swapchain_wrapper.clone());\n\n session.swapchains.insert(*swapchain, swapchain_wrapper);\n\n info!(\"Swapchain created: {:?}\", *swapchain);\n\n\n\n Ok(xr::Result::SUCCESS)\n\n}\n", "file_path": "layer_core/src/interceptors/session.rs", "rank": 88, "score": 8.140918048085041 }, { "content": " image_info.height,\n\n image_info.format.to_vk().unwrap(),\n\n vk::SampleCountFlags::TYPE_1,\n\n std::slice::from_ref(&vk_backend.descriptor_set_layout),\n\n )\n\n };\n\n\n\n let image_views = images\n\n .iter()\n\n .map(|&image| {\n\n vk_backend.create_image_view(\n\n image,\n\n image_info.format.to_vk().unwrap(),\n\n image_info.layers,\n\n )\n\n })\n\n .collect::<VkResult<Vec<_>>>()\n\n .unwrap();\n\n\n\n let runtime_image_views = runtime_images\n", "file_path": "layer_core/src/graphics/vulkan_backend.rs", "rank": 89, "score": 8.102694959161159 }, { "content": " .CreateTextures(bindings::TEXTURE_2D, 1, &mut texture);\n\n self.gl.TextureStorageMem2DEXT(\n\n texture,\n\n create_info.mip_count as i32,\n\n create_info.format.to_gl().unwrap(),\n\n create_info.width as i32,\n\n create_info.height as i32,\n\n mem_obj,\n\n offset,\n\n );\n\n }\n\n } else {\n\n unsafe {\n\n self.gl\n\n .CreateTextures(bindings::TEXTURE_2D_ARRAY, 1, &mut texture);\n\n self.gl.TextureStorageMem3DEXT(\n\n texture,\n\n create_info.mip_count as i32,\n\n create_info.format.to_gl().unwrap(),\n\n create_info.width as i32,\n", "file_path": "graphics_interop/src/apis/opengl.rs", "rank": 90, "score": 7.830356793069788 }, { "content": "# Sorenon OpenXR Layer\n\n(I'm not good with names)\n\n\n\n\n\nAs of writing, both the SteamVR and WMR OpenXR runtimes have a few significant issues / missing features. \n\nWMR does not provide any OpenGL / Vulkan extensions and SteamVR on Linux is essentially broken.\n\n\n\nWMR's issues can be fixed through the use of SteamVR. <br>\n\n[This existing layer](https://github.com/ChristophHaag/gl_context_fix_layer) also attempts to fix SteamVR's GLX support and may be more performant (but has some stability issues).\n\n\n\n\n\n## How the layer works\n\nWhen the application creates an OpenGL session, the layer creates a Vulkan session and uses external memory extensions to share swapchain images between the apis. \n\nThis adds the extra overhead of creating a second swapchain to expose to the application, and one draw call in `xrReleaseSwapchainImage` to copy and transfrom the image into the OpenXR swapchain.\n\n<br><br>\n\nThere are some notable areas that can be improved. 
Mainly using an interop semephore instead of a `glFinish` call and passing a fence to an async thread instead of a `vkQueueWaitIdle` call.\n\n\n\n## Fixes:\n\n- https://github.com/ValveSoftware/SteamVR-for-Linux/issues/421\n\n- https://github.com/ValveSoftware/SteamVR-for-Linux/issues/466\n\n\n\n## Does not fix:\n\n- General system wide deadlocks caused by SteamVR\n\n- SteamVR's overall worse performance compared to Monado\n\n- https://github.com/ValveSoftware/SteamVR-for-Linux/issues/422<br>^ Is it possible to fix this by calling xrDestroyInstance in its own thread?\n\n- https://github.com/ValveSoftware/SteamVR-for-Linux/issues/461\n\n- https://github.com/ValveSoftware/SteamVR-for-Linux/issues/479\n\n\n\n## Installation instructions\n\n- Install the [Rust toolchain](https://www.rust-lang.org/tools/install)\n\n- Clone this repository\n\n- Install with `cargo run --release`\n\n- Uninstall with `cargo run --release uninstall`\n\n\n\n## Current TODO:\n\n- [x] OpenGL Frontend\n\n- [x] Vulkan Backend\n\n- [x] Linux Installer\n\n- [ ] Correctly handle sRGB formats\n\n- [ ] Investigate improving performance\n\n\n\n## If perfomance impact can be minimized:\n\n- [ ] D3D11 Backend\n\n- [ ] Vulkan Frontend\n\n- [ ] Windows Installer\n\n- [ ] FSR / NIS\n\n- [ ] Attempt to deal with other runtime bugs\n", "file_path": "README.md", "rank": 91, "score": 7.796012398548305 }, { "content": " error!(\"Vulkan error creating vulkan device: {}\", vk_result);\n\n return Err(());\n\n }\n\n\n\n Device::load(vk_instance.fp_v1_0(), device)\n\n } else {\n\n todo!()\n\n };\n\n\n\n let graphics_queue = device.get_device_queue(graphics_queue_family, 0);\n\n let command_pool = create_command_pool(&device, graphics_queue_family).unwrap();\n\n\n\n let nearest_sampler = {\n\n let create_info = vk::SamplerCreateInfo::builder()\n\n .mag_filter(vk::Filter::NEAREST)\n\n .min_filter(vk::Filter::NEAREST)\n\n .address_mode_u(vk::SamplerAddressMode::REPEAT)\n\n .address_mode_v(vk::SamplerAddressMode::REPEAT)\n\n .address_mode_w(vk::SamplerAddressMode::REPEAT)\n\n .unnormalized_coordinates(false)//TODO research whether this is optimal\n", "file_path": "layer_core/src/graphics/vulkan.rs", "rank": 92, "score": 7.774713603738115 }, { "content": "\n\npub enum SessionGraphics {\n\n Headless,\n\n Direct,\n\n Compat {\n\n frontend: Arc<OpenGLFrontend>,\n\n backend: Arc<vulkan::VkBackend>,\n\n swapchain_formats: Vec<i64>,\n\n },\n\n}\n\n\n\nimpl XrWrapper for SessionWrapper {\n\n fn inner_instance(&self) -> &Arc<InnerInstance> {\n\n &self.inner\n\n }\n\n}\n\n\n\nimpl XrHandle for xr::Session {\n\n type Wrapper = SessionWrapper;\n\n\n\n fn all_wrappers<'a>() -> &'a DashMap<Self, Arc<Self::Wrapper>>\n\n where\n\n Self: Sized + std::hash::Hash,\n\n {\n\n unsafe { super::SESSION_WRAPPERS.as_ref().unwrap() }\n\n }\n\n}\n", "file_path": "layer_core/src/wrappers/session.rs", "rank": 93, "score": 7.553237966819265 }, { "content": "use std::{\n\n ffi::{c_void, CString, OsStr},\n\n os::windows::prelude::OsStrExt,\n\n};\n\n\n\nuse openxr::sys::platform::*;\n\nuse winapi::shared::minwindef::HMODULE;\n\n\n\n#[derive(Debug)]\n\npub enum GLContext {\n\n Wgl(WGL),\n\n Egl(super::Egl),\n\n}\n\n\n\nimpl GLContext {\n\n pub fn make_current(&self) {\n\n unsafe {\n\n match &self {\n\n GLContext::Wgl(wgl) => wgl.make_current().unwrap(),\n\n GLContext::Egl(_) => todo!(),\n", "file_path": "layer_core/src/graphics/opengl/platform/windows.rs", "rank": 94, "score": 7.414483832001615 }, { "content": " .iter()\n\n .map(|&image| {\n\n vk_backend.create_image_view(\n\n 
image,\n\n image_info.format.to_vk().unwrap(),\n\n image_info.layers,\n\n )\n\n })\n\n .collect::<VkResult<Vec<_>>>()\n\n .unwrap();\n\n\n\n let framebuffers = runtime_image_views\n\n .iter()\n\n .map(|image_view| {\n\n let create_info = vk::FramebufferCreateInfo::builder()\n\n .render_pass(render_pass)\n\n .attachments(std::slice::from_ref(image_view))\n\n .width(image_info.width)\n\n .height(image_info.height)\n\n .layers(image_info.layers);\n", "file_path": "layer_core/src/graphics/vulkan_backend.rs", "rank": 95, "score": 7.328257462966102 }, { "content": "use std::ffi::{c_void, CString};\n\n\n\nuse lazy_static::lazy_static;\n\n\n\npub enum GLContext {\n\n EGl,\n\n X11(X11),\n\n Xcb,\n\n Wayland,\n\n}\n\n\n\nimpl GLContext {\n\n pub fn make_current(&self) {\n\n unsafe {\n\n match &self {\n\n GLContext::EGl => todo!(),\n\n GLContext::X11(x11) => x11.make_current(),\n\n GLContext::Xcb => todo!(),\n\n GLContext::Wayland => todo!(),\n\n }\n", "file_path": "layer_core/src/graphics/opengl/platform/linux.rs", "rank": 96, "score": 7.247799540763337 }, { "content": " }\n\n }\n\n\n\n pub fn get_proc_address(&self, name: &str) -> *const c_void {\n\n unsafe {\n\n match &self {\n\n GLContext::EGl => todo!(),\n\n GLContext::X11(x11) => x11.get_proc_address(name),\n\n GLContext::Xcb => todo!(),\n\n GLContext::Wayland => todo!(),\n\n }\n\n }\n\n }\n\n}\n\n\n\nuse glutin_glx_sys::glx as glx_sys;\n\n\n", "file_path": "layer_core/src/graphics/opengl/platform/linux.rs", "rank": 97, "score": 6.9217999591137715 }, { "content": " create_info.height as i32,\n\n create_info.layers as i32,\n\n mem_obj,\n\n offset,\n\n );\n\n }\n\n }\n\n\n\n if texture == 0 {\n\n Err(unsafe { self.gl.GetError() })\n\n } else {\n\n Ok(texture)\n\n }\n\n }\n\n}\n\n\n\nimpl ImageFormat {\n\n pub fn to_gl(&self) -> Option<u32> {\n\n GL_FORMATS.get_by_left(self).copied()\n\n }\n\n\n\n pub fn from_gl(gl_format: u32) -> Option<Self> {\n\n GL_FORMATS.get_by_right(&gl_format).copied()\n\n }\n\n}\n", "file_path": "graphics_interop/src/apis/opengl.rs", "rank": 98, "score": 6.888979524450477 }, { "content": "\n\n device.destroy_shader_module(vert_shader, None);\n\n device.destroy_shader_module(frag_shader, None);\n\n\n\n (layout, render_pass, pipeline)\n\n }\n\n}\n\n\n\nimpl Drop for VkBackend {\n\n fn drop(&mut self) {\n\n unsafe {\n\n self.device.destroy_command_pool(self.command_pool, None);\n\n self.device.destroy_device(None);\n\n self.debug_utils\n\n .destroy_debug_utils_messenger(self.debug_messenger, None);\n\n self.instance.destroy_instance(None);\n\n }\n\n }\n\n}\n\n\n", "file_path": "layer_core/src/graphics/vulkan.rs", "rank": 99, "score": 6.856220018737126 } ]
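The README and `graphics_interop` snippets retrieved above describe one flow: create an exportable Vulkan image, export its memory as an OS handle, and import that memory into OpenGL as a texture the application renders into. The sketch below only strings together the functions whose signatures appear in the snippets (`create_external_image`, `alloc_and_bind_external_image`, `get_external_memory_handle`, `import_memory`, `import_image`); everything else is assumed — the `GlInterop` name for the OpenGL wrapper, the module paths, and all instance/device/context setup are not taken from the repository, and error handling is simplified.

```rust
// Sketch of the Vulkan -> OpenGL image-sharing flow described in the README above.
// Assumptions (not shown in the retrieved snippets): `vk: &VulkanInterop` and the
// OpenGL wrapper (called `GlInterop` here) are already constructed, and
// `create_info: &ImageCreateInfo` describes the swapchain image.
fn share_swapchain_image(
    vk: &VulkanInterop,
    gl: &GlInterop,
    create_info: &ImageCreateInfo,
) -> anyhow::Result<u32> {
    // 1. Create a Vulkan image flagged for external memory (OPAQUE_FD / OPAQUE_WIN32).
    let image = vk.create_external_image(create_info)?;

    // 2. Allocate exportable device-local memory and bind it to the image.
    let (memory, size) = vk.alloc_and_bind_external_image(image)?;

    // 3. Export the allocation as an OS handle (a file descriptor on Linux, a HANDLE on Windows).
    let handle = vk.get_external_memory_handle(memory)?;

    // 4. Import the same allocation into OpenGL via GL_EXT_memory_object_{fd,win32} ...
    let mem_obj = gl
        .import_memory(handle, size)
        .map_err(|gl_error| anyhow::anyhow!("memory import failed, GL error 0x{gl_error:X}"))?;

    // 5. ... and back a texture with it, which is what the application gets to render into.
    let texture = gl
        .import_image(create_info, mem_obj, 0)
        .map_err(|gl_error| anyhow::anyhow!("texture import failed, GL error 0x{gl_error:X}"))?;

    Ok(texture)
}
```

Per the snippets, the per-frame cost then comes from `release_image` calling `gl.Finish()` and from the copy pass in the Vulkan backend — the overhead the README suggests replacing with an interop semaphore and an async fence wait.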
Rust
src/handlers/common/global.rs
ALinuxPerson/try-drop
1b2ac32ba746747f3af51278a689d19a94261691
pub(crate) mod imports {} use crate::handlers::common::Handler; use crate::handlers::UninitializedError; use parking_lot::{ MappedRwLockReadGuard, MappedRwLockWriteGuard, RwLock, RwLockReadGuard, RwLockWriteGuard, }; use std::marker::PhantomData; pub trait GlobalDefinition: Handler { const UNINITIALIZED_ERROR: &'static str; type Global: 'static; fn global() -> &'static RwLock<Option<Self::Global>>; } pub trait DefaultGlobalDefinition: GlobalDefinition { fn default() -> Self::Global; } pub struct Global<T: GlobalDefinition>(PhantomData<T>); impl<T: GlobalDefinition> Global<T> { pub fn install_dyn(strategy: T::Global) { T::global().write().replace(strategy); } pub fn install(strategy: impl Into<T::Global>) { Self::install_dyn(strategy.into()) } pub fn try_read() -> Result<MappedRwLockReadGuard<'static, T::Global>, UninitializedError> { let global = T::global().read(); if global.is_some() { Ok(RwLockReadGuard::map(global, |global| { global.as_ref().unwrap() })) } else { Err(UninitializedError(())) } } pub fn read() -> MappedRwLockReadGuard<'static, T::Global> { Self::try_read().expect(T::UNINITIALIZED_ERROR) } pub fn try_write() -> Result<MappedRwLockWriteGuard<'static, T::Global>, UninitializedError> { let global = T::global().write(); if global.is_some() { Ok(RwLockWriteGuard::map(global, |global| { global.as_mut().unwrap() })) } else { Err(UninitializedError(())) } } pub fn write() -> MappedRwLockWriteGuard<'static, T::Global> { Self::try_write().expect(T::UNINITIALIZED_ERROR) } pub fn uninstall() { *T::global().write() = None } } impl<T: DefaultGlobalDefinition> Global<T> { pub fn read_or_default() -> MappedRwLockReadGuard<'static, T::Global> { drop(Self::write_or_default()); Self::read() } pub fn write_or_default() -> MappedRwLockWriteGuard<'static, T::Global> { RwLockWriteGuard::map(T::global().write(), |drop_strategy| { drop_strategy.get_or_insert_with(T::default) }) } } macro_rules! 
global_methods { ( Global = $global:ident; GenericStrategy = $generic_strategy:ident; DynStrategy = $dyn_strategy:ident; feature = $feature:literal; $(#[$($install_dyn_tt:tt)*])* install_dyn; $(#[$($install_tt:tt)*])* install; $(#[$($try_read_tt:tt)*])* try_read; $(#[$($read_tt:tt)*])* read; $(#[$($try_write_tt:tt)*])* try_write; $(#[$($write_tt:tt)*])* write; $(#[$($uninstall_tt:tt)*])* uninstall; $(#[$($read_or_default_tt:tt)*])* read_or_default; $(#[$($write_or_default_tt:tt)*])* write_or_default; ) => { #[allow(unused_imports)] use $crate::handlers::common::global::imports::*; $(#[$($install_dyn_tt)*])* pub fn install_dyn(strategy: $dyn_strategy) { $global::install_dyn(strategy) } $(#[$($install_tt)*])* pub fn install(strategy: impl $generic_strategy) { $global::install(strategy) } $(#[$($try_read_tt)*])* pub fn try_read() -> Result<MappedRwLockReadGuard<'static, $dyn_strategy>, UninitializedError> { $global::try_read() } $(#[$($read_tt)*])* pub fn read() -> MappedRwLockReadGuard<'static, $dyn_strategy> { $global::read() } $(#[$($try_write_tt)*])* pub fn try_write() -> Result<MappedRwLockWriteGuard<'static, $dyn_strategy>, UninitializedError> { $global::try_write() } $(#[$($write_tt)*])* pub fn write() -> MappedRwLockWriteGuard<'static, $dyn_strategy> { $global::write() } $(#[$($uninstall_tt)*])* pub fn uninstall() { $global::uninstall() } $(#[$($read_or_default_tt)*])* #[cfg(feature = $feature)] pub fn read_or_default() -> MappedRwLockReadGuard<'static, $dyn_strategy> { $global::read_or_default() } $(#[$($write_or_default_tt)*])* #[cfg(feature = $feature)] pub fn write_or_default() -> MappedRwLockWriteGuard<'static, $dyn_strategy> { $global::write_or_default() } }; }
pub(crate) mod imports {} use crate::handlers::common::Handler; use crate::handlers::UninitializedError; use parking_lot::{ MappedRwLockReadGuard, MappedRwLockWriteGuard, RwLock, RwLockReadGuard, RwLockWriteGuard, }; use std::marker::PhantomData; pub trait GlobalDefinition: Handler { const UNINITIALIZED_ERROR: &'static str; type Global: 'static; fn global() -> &'static RwLock<Option<Self::Global>>; } pub trait DefaultGlobalDefinition: GlobalDefinition { fn default() -> Self::Global; } pub struct Global<T: GlobalDefinition>(PhantomData<T>); impl<T: GlobalDefinition> Global<T> { pub fn install_dyn(strategy: T::Global) { T::global().write().replace(strategy); } pub fn install(strategy: impl Into<T::Global>) { Self::install_dyn(strategy.into()) } pub fn try_read() -> Result<MappedRwLockReadGuard<'static, T::Global>, UninitializedError> { let global = T::global().read(); if global.is_some() { Ok(RwLockReadGuard::map(global, |global| { global.as_ref().unwrap() })) } else { Err(UninitializedError(())) } } pub fn read() -> MappedRwLockReadGuard<'static, T::Global> { Self::try_read().expect(T::UNINITIALIZED_ERROR) } pub fn try_write() -> Result<MappedRwLockWriteGuard<'static, T::Global>, UninitializedError> { let global = T::global().write(); if glo
pub fn write() -> MappedRwLockWriteGuard<'static, T::Global> { Self::try_write().expect(T::UNINITIALIZED_ERROR) } pub fn uninstall() { *T::global().write() = None } } impl<T: DefaultGlobalDefinition> Global<T> { pub fn read_or_default() -> MappedRwLockReadGuard<'static, T::Global> { drop(Self::write_or_default()); Self::read() } pub fn write_or_default() -> MappedRwLockWriteGuard<'static, T::Global> { RwLockWriteGuard::map(T::global().write(), |drop_strategy| { drop_strategy.get_or_insert_with(T::default) }) } } macro_rules! global_methods { ( Global = $global:ident; GenericStrategy = $generic_strategy:ident; DynStrategy = $dyn_strategy:ident; feature = $feature:literal; $(#[$($install_dyn_tt:tt)*])* install_dyn; $(#[$($install_tt:tt)*])* install; $(#[$($try_read_tt:tt)*])* try_read; $(#[$($read_tt:tt)*])* read; $(#[$($try_write_tt:tt)*])* try_write; $(#[$($write_tt:tt)*])* write; $(#[$($uninstall_tt:tt)*])* uninstall; $(#[$($read_or_default_tt:tt)*])* read_or_default; $(#[$($write_or_default_tt:tt)*])* write_or_default; ) => { #[allow(unused_imports)] use $crate::handlers::common::global::imports::*; $(#[$($install_dyn_tt)*])* pub fn install_dyn(strategy: $dyn_strategy) { $global::install_dyn(strategy) } $(#[$($install_tt)*])* pub fn install(strategy: impl $generic_strategy) { $global::install(strategy) } $(#[$($try_read_tt)*])* pub fn try_read() -> Result<MappedRwLockReadGuard<'static, $dyn_strategy>, UninitializedError> { $global::try_read() } $(#[$($read_tt)*])* pub fn read() -> MappedRwLockReadGuard<'static, $dyn_strategy> { $global::read() } $(#[$($try_write_tt)*])* pub fn try_write() -> Result<MappedRwLockWriteGuard<'static, $dyn_strategy>, UninitializedError> { $global::try_write() } $(#[$($write_tt)*])* pub fn write() -> MappedRwLockWriteGuard<'static, $dyn_strategy> { $global::write() } $(#[$($uninstall_tt)*])* pub fn uninstall() { $global::uninstall() } $(#[$($read_or_default_tt)*])* #[cfg(feature = $feature)] pub fn read_or_default() -> MappedRwLockReadGuard<'static, $dyn_strategy> { $global::read_or_default() } $(#[$($write_or_default_tt)*])* #[cfg(feature = $feature)] pub fn write_or_default() -> MappedRwLockWriteGuard<'static, $dyn_strategy> { $global::write_or_default() } }; }
bal.is_some() { Ok(RwLockWriteGuard::map(global, |global| { global.as_mut().unwrap() })) } else { Err(UninitializedError(())) } }
function_block-function_prefixed
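The `file_code`, `prefix`, `suffix`, and `middle` fields of this row all revolve around one pattern: a global handler slot stored as `parking_lot::RwLock<Option<T>>` behind the `GlobalDefinition` trait, exposed through install/uninstall plus mapped read/write guards. The standalone sketch below illustrates that pattern only — the names (`HandlerFn`, `GLOBAL_HANDLER`, `install`, `try_read`) are invented and it is not the `try-drop` API itself; it needs only the `parking_lot` and `anyhow` crates.

```rust
// Minimal standalone sketch of the "global handler slot" pattern from the row above.
// All item names here are invented for illustration.
use parking_lot::{MappedRwLockReadGuard, RwLock, RwLockReadGuard};

type HandlerFn = Box<dyn Fn(anyhow::Error) + Send + Sync>;

// The slot starts empty; `install` fills it, `uninstall` clears it.
static GLOBAL_HANDLER: RwLock<Option<HandlerFn>> = RwLock::new(None);

fn install(handler: impl Fn(anyhow::Error) + Send + Sync + 'static) {
    GLOBAL_HANDLER.write().replace(Box::new(handler));
}

fn uninstall() {
    *GLOBAL_HANDLER.write() = None;
}

// Mirrors `Global::try_read`: map the guard so callers get `&HandlerFn`,
// not `&Option<HandlerFn>`, and report an empty slot instead of panicking.
fn try_read() -> Option<MappedRwLockReadGuard<'static, HandlerFn>> {
    let guard = GLOBAL_HANDLER.read();
    if guard.is_some() {
        Some(RwLockReadGuard::map(guard, |slot| slot.as_ref().unwrap()))
    } else {
        None
    }
}

fn main() {
    install(|error| eprintln!("drop failed: {error}"));
    if let Some(handler) = try_read() {
        (*handler)(anyhow::anyhow!("example error"));
    }
    uninstall();
    assert!(try_read().is_none());
}
```

The mapped-guard step is the design choice the row's `middle` completes: holding the lock while handing out a reference directly to the installed handler, so callers never see the `Option` and the uninitialized case is reported as an error instead of a panic inside the guard.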
[ { "content": "pub trait Handler: private::Sealed {}\n\n\n\npub enum Primary {}\n\nimpl private::Sealed for Primary {}\n\nimpl Handler for Primary {}\n\n\n\npub enum Fallback {}\n\nimpl private::Sealed for Fallback {}\n\nimpl Handler for Fallback {}\n\n\n", "file_path": "src/handlers/common/mod.rs", "rank": 2, "score": 176614.81600697216 }, { "content": "pub trait ThreadLocalDefinition: Handler {\n\n const UNINITIALIZED_ERROR: &'static str;\n\n const DYN: &'static str;\n\n type ThreadLocal: 'static;\n\n\n\n fn thread_local() -> &'static LocalKey<RefCell<Option<Self::ThreadLocal>>>;\n\n fn locked() -> &'static LocalKey<Cell<bool>>;\n\n}\n\n\n", "file_path": "src/handlers/common/thread_local/mod.rs", "rank": 3, "score": 171238.96820964746 }, { "content": "#[cfg(feature = \"global\")]\n\npub fn install_global_handlers(\n\n primary: impl GlobalDynFallibleTryDropStrategy,\n\n fallback: impl GlobalTryDropStrategy,\n\n) {\n\n install_global_handlers_dyn(Box::new(primary), Box::new(fallback))\n\n}\n\n\n\n/// This installs the primary and fallback global handlers. Must be a dynamic trait object.\n", "file_path": "src/handlers/fns.rs", "rank": 4, "score": 166986.593888169 }, { "content": "#[cfg(feature = \"global\")]\n\npub fn install_global_handlers_dyn(\n\n primary: Box<dyn GlobalDynFallibleTryDropStrategy>,\n\n fallback: Box<dyn GlobalTryDropStrategy>,\n\n) {\n\n primary::global::install_dyn(primary);\n\n fallback::global::install_dyn(fallback);\n\n}\n\n\n\n/// This uninstalls the primary and fallback global handlers.\n", "file_path": "src/handlers/fns.rs", "rank": 5, "score": 163414.0614425048 }, { "content": "#[cfg(feature = \"global\")]\n\npub fn uninstall_globally() {\n\n primary::global::uninstall();\n\n fallback::global::uninstall();\n\n}\n\n\n\n/// This installs the primary and fallback thread local handlers.\n", "file_path": "src/handlers/fns.rs", "rank": 6, "score": 162696.76014220147 }, { "content": " pub trait Sealed {}\n\n}\n\npub mod shim;\n\n\n\n#[macro_use]\n\n#[cfg(feature = \"thread-local\")]\n\npub mod thread_local;\n\n\n\n#[macro_use]\n\n#[cfg(feature = \"global\")]\n\npub mod global;\n\n\n\npub mod handler;\n\npub mod proxy;\n\n\n\nuse std::error::Error;\n\nuse std::fmt;\n\nuse std::fmt::Formatter;\n\n\n\n/// This error occurs when you attempt to use a scope guard in a nested scope.\n", "file_path": "src/handlers/common/mod.rs", "rank": 7, "score": 160231.10395298395 }, { "content": " pub trait Sealed {}\n\n}\n\n\n\nuse crate::handlers::common::handler::CommonHandler;\n\nuse crate::handlers::common::proxy::TheGreatAbstracter;\n\nuse crate::handlers::common::{Fallback, Scope};\n\nuse std::marker::PhantomData;\n\n\n\n/// The default thing to do when the fallback handler is not initialized.\n\n#[cfg(not(feature = \"ds-panic\"))]\n\npub type DefaultOnUninit = crate::handlers::on_uninit::PanicOnUninit;\n\n\n\n/// The default thing to do when the fallback handler is not initialized.\n\n#[cfg(feature = \"ds-panic\")]\n\npub type DefaultOnUninit = crate::handlers::on_uninit::UseDefaultOnUninit;\n\n\n", "file_path": "src/handlers/fallback/mod.rs", "rank": 8, "score": 160231.10395298395 }, { "content": "pub trait DefaultThreadLocalDefinition: ThreadLocalDefinition {\n\n fn default() -> Self::ThreadLocal;\n\n}\n\n\n\npub struct ThreadLocal<T: ThreadLocalDefinition>(PhantomData<T>);\n\n\n\nimpl<T: ThreadLocalDefinition> ThreadLocal<T> {\n\n pub fn read<R>(f: impl FnOnce(&T::ThreadLocal) -> R) -> R {\n\n Self::try_read(f).expect(T::UNINITIALIZED_ERROR)\n\n }\n\n\n\n pub fn try_read<R>(f: 
impl FnOnce(&T::ThreadLocal) -> R) -> Result<R, UninitializedError> {\n\n T::thread_local().with(|cell| {\n\n cell.borrow_mut()\n\n .as_ref()\n\n .map(f)\n\n .ok_or(UninitializedError(()))\n\n })\n\n }\n\n\n", "file_path": "src/handlers/common/thread_local/mod.rs", "rank": 9, "score": 156743.9544197523 }, { "content": "pub trait Scope: private::Sealed {}\n\n\n\npub enum Global {}\n\nimpl private::Sealed for Global {}\n\nimpl Scope for Global {}\n\n\n\npub enum ThreadLocal {}\n\nimpl private::Sealed for ThreadLocal {}\n\nimpl Scope for ThreadLocal {}\n", "file_path": "src/handlers/common/mod.rs", "rank": 10, "score": 147111.06586354997 }, { "content": "type Global = GenericGlobal<Primary>;\n\n\n\n/// A handy type alias to `Box<dyn GlobalDynFallibleTryDropStrategy>`.\n\npub type BoxDynGlobalFallibleTryDropStrategy = Box<dyn GlobalDynFallibleTryDropStrategy>;\n\n\n\nglobal_methods! {\n\n Global = Global;\n\n GenericStrategy = GlobalDynFallibleTryDropStrategy;\n\n DynStrategy = BoxDynGlobalFallibleTryDropStrategy;\n\n feature = \"ds-write\";\n\n\n\n /// Set the global primary handler. Must be a dynamic trait object.\n\n install_dyn;\n\n\n\n /// Get the global primary handler.\n\n install;\n\n\n\n /// Try and get a reference to the global primary handler.\n\n ///\n\n /// # Errors\n", "file_path": "src/handlers/primary/global.rs", "rank": 11, "score": 144744.63055920295 }, { "content": "type Global = GenericGlobal<Fallback>;\n", "file_path": "src/handlers/fallback/global.rs", "rank": 12, "score": 144744.63055920295 }, { "content": "/// Signifies that this type can be converted into an [`AdHocDropStrategy`].\n\npub trait IntoAdHocDropStrategy: Fn(crate::Error) + Sized {\n\n /// Convert this type into an [`AdHocDropStrategy`].\n\n fn into_drop_strategy(self) -> AdHocDropStrategy<Self> {\n\n AdHocDropStrategy(self)\n\n }\n\n}\n\n\n\nimpl<T: Fn(crate::Error)> IntoAdHocDropStrategy for T {}\n\n\n\n/// A quick and dirty fallible drop strategy which uses a function.\n\n#[cfg_attr(\n\n feature = \"derives\",\n\n derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Default)\n\n)]\n\n#[cfg_attr(feature = \"shrinkwraprs\", derive(Shrinkwrap))]\n\n#[cfg_attr(feature = \"shrinkwraprs\", shrinkwrap(mutable))]\n\npub struct AdHocFallibleDropStrategy<F, E>(pub F)\n\nwhere\n\n F: Fn(crate::Error) -> Result<(), E>,\n\n E: Into<anyhow::Error>;\n", "file_path": "src/drop_strategies/adhoc/mod.rs", "rank": 13, "score": 143366.45853283038 }, { "content": "/// A trait which signifies a thread safe type. Can be used in a `static`.\n\npub trait ThreadSafe: Send + Sync + 'static {}\n\n\n\nimpl<T: Send + Sync + 'static> ThreadSafe for T {}\n\n\n\n/// Marker trait signifying that the implementing type can repeatedly call its [`TryDrop::try_drop`]\n\n/// method.\n\n///\n\n/// # Safety\n\n/// The implementor must ensure that no undefined behavior will occur when calling\n\n/// [`TryDrop::try_drop`] multiple times.\n\npub unsafe trait RepeatableTryDrop: PureTryDrop {\n\n /// Safely try and drop the implementing type. 
You can call this function multiple times.\n\n fn safe_try_drop(&mut self) -> Result<(), Self::Error> {\n\n // SAFETY: This is safe because the implementing type has implemented `RepeatableTryDrop`,\n\n // which assures us that it is safe to call `try_drop` multiple times.\n\n unsafe { self.try_drop() }\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 14, "score": 138771.89278156622 }, { "content": "#[cfg(feature = \"thread-local\")]\n\npub fn install_thread_local_handlers(\n\n primary: impl ThreadLocalFallibleTryDropStrategy,\n\n fallback: impl ThreadLocalTryDropStrategy,\n\n) {\n\n install_thread_local_handlers_dyn(Box::new(primary), Box::new(fallback))\n\n}\n\n\n\n/// This installs the primary and fallback thread local handlers. Must be a dynamic trait\n\n/// object.\n", "file_path": "src/handlers/fns.rs", "rank": 15, "score": 134169.02981429905 }, { "content": " pub trait Sealed {}\n\n}\n\n\n\n#[cfg(any(feature = \"ds-write\", feature = \"ds-panic\"))]\n\nmod use_default {\n\n use super::*;\n\n\n\n /// Use the default drop strategy if uninitialized\n\n #[cfg_attr(\n\n feature = \"derives\",\n\n derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)\n\n )]\n\n pub enum UseDefaultOnUninit {}\n\n\n\n impl OnUninit for UseDefaultOnUninit {\n\n type ExtraData = ();\n\n }\n\n impl private::Sealed for UseDefaultOnUninit {}\n\n}\n\n\n\nuse core::sync::atomic::AtomicBool;\n\n#[cfg(any(feature = \"ds-write\", feature = \"ds-panic\"))]\n\npub use use_default::*;\n\n\n", "file_path": "src/handlers/on_uninit.rs", "rank": 16, "score": 132835.4721570253 }, { "content": "#[cfg(feature = \"thread-local\")]\n\npub trait ThreadLocalTryDropStrategy: TryDropStrategy + 'static {}\n\n\n\n#[cfg(feature = \"thread-local\")]\n\nimpl<T: TryDropStrategy + 'static> ThreadLocalTryDropStrategy for T {}\n\n\n", "file_path": "src/lib.rs", "rank": 17, "score": 132547.28066239995 }, { "content": "#[cfg(feature = \"thread-local\")]\n\npub fn install_thread_local_handlers_dyn(\n\n primary: Box<dyn ThreadLocalFallibleTryDropStrategy>,\n\n fallback: Box<dyn ThreadLocalTryDropStrategy>,\n\n) {\n\n primary::thread_local::install_dyn(primary);\n\n fallback::thread_local::install_dyn(fallback);\n\n}\n\n\n\n/// This installs the primary and fallback thread local handlers for this scope.\n", "file_path": "src/handlers/fns.rs", "rank": 18, "score": 131688.89466256072 }, { "content": "#[cfg(feature = \"thread-local\")]\n\npub fn install_thread_local_handlers_for_this_scope(\n\n primary: impl ThreadLocalFallibleTryDropStrategy,\n\n fallback: impl ThreadLocalTryDropStrategy,\n\n) -> (\n\n primary::thread_local::ScopeGuard,\n\n fallback::thread_local::ScopeGuard,\n\n) {\n\n install_thread_local_handlers_for_this_scope_dyn(Box::new(primary), Box::new(fallback))\n\n}\n\n\n\n/// This installs the primary and fallback thread local handlers for this scope. 
Must be a\n\n/// dynamic trait object.\n", "file_path": "src/handlers/fns.rs", "rank": 19, "score": 131688.89466256072 }, { "content": "#[cfg(feature = \"thread-local\")]\n\npub fn uninstall_for_thread() {\n\n primary::thread_local::uninstall();\n\n fallback::thread_local::uninstall();\n\n}\n", "file_path": "src/handlers/fns.rs", "rank": 20, "score": 131492.33957389637 }, { "content": " pub trait Sealed {}\n\n}\n\n#[cfg(any(feature = \"ds-write\", feature = \"ds-panic\"))]\n\nmod use_default {\n\n use super::private;\n\n use crate::handlers::common::shim::OnUninitShim;\n\n use crate::handlers::common::{Fallback, Handler, Primary};\n\n use once_cell::sync::Lazy;\n\n use std::marker::PhantomData;\n\n\n\n #[cfg_attr(\n\n feature = \"derives\",\n\n derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)\n\n )]\n\n pub struct UseDefaultOnUninitShim<H: Handler>(PhantomData<H>);\n\n\n\n #[cfg(feature = \"ds-write\")]\n\n impl OnUninitShim for UseDefaultOnUninitShim<Primary> {\n\n type ExtraData = Lazy<crate::drop_strategies::WriteDropStrategy<std::io::Stderr>>;\n\n }\n", "file_path": "src/handlers/common/shim.rs", "rank": 21, "score": 129685.58434585088 }, { "content": "#[cfg(feature = \"thread-local\")]\n\npub fn install_thread_local_handlers_for_this_scope_dyn(\n\n primary: Box<dyn ThreadLocalFallibleTryDropStrategy>,\n\n fallback: Box<dyn ThreadLocalTryDropStrategy>,\n\n) -> (\n\n primary::thread_local::ScopeGuard,\n\n fallback::thread_local::ScopeGuard,\n\n) {\n\n (\n\n primary::thread_local::scope_dyn(primary),\n\n fallback::thread_local::scope_dyn(fallback),\n\n )\n\n}\n\n\n\n/// This uninstalls the primary and fallback thread local handlers.\n", "file_path": "src/handlers/fns.rs", "rank": 22, "score": 129341.29961066453 }, { "content": " pub trait Sealed {}\n\n}\n\n\n\nuse crate::{FallibleTryDropStrategy, TryDropStrategy};\n\npub use once_cell::sync::OnceCell;\n\nuse std::error::Error as StdError;\n\nuse std::fmt;\n\nuse std::marker::PhantomData;\n\nuse std::sync::Arc;\n\npub use thread_unsafe::*;\n\n\n\n/// Ignore the occupied error value and continue.\n\n#[cfg_attr(\n\n feature = \"derives\",\n\n derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)\n\n)]\n\npub enum Ignore {}\n\n\n\nimpl Mode for Ignore {}\n\nimpl private::Sealed for Ignore {}\n", "file_path": "src/drop_strategies/once_cell/mod.rs", "rank": 23, "score": 128315.2762674069 }, { "content": "/// Signifies that this type can be converted into an [`AdHocMutDropStrategy`].\n\npub trait IntoAdHocMutDropStrategy: FnMut(crate::Error) + Sized {\n\n /// Convert this type into an [`AdHocMutDropStrategy`].\n\n fn into_drop_strategy(self) -> AdHocMutDropStrategy<Self> {\n\n AdHocMutDropStrategy::new(self)\n\n }\n\n}\n\n\n\nimpl<T: FnMut(crate::Error)> IntoAdHocMutDropStrategy for T {}\n\n\n\n/// A quick and dirty try drop strategy which uses a function.\n\n///\n\n/// This is more flexible compared to [`AdHocFallibleDropStrategy`], accepting also [`FnMut`]s\n\n/// instead of only [`Fn`]s, but the function is guarded by a [`Mutex`], which has more overhead.\n\n///\n\n/// [`AdHocFallibleDropStrategy`]: super::AdHocFallibleDropStrategy\n\n#[cfg_attr(feature = \"derives\", derive(Debug, Default))]\n\npub struct AdHocMutFallibleDropStrategy<F, E>\n\nwhere\n\n F: FnMut(crate::Error) -> Result<(), E>,\n\n E: Into<anyhow::Error>,\n", "file_path": "src/drop_strategies/adhoc/fn_mut/mod.rs", "rank": 24, "score": 126738.21775290134 }, { "content": "#[cfg(feature = \"thread-local\")]\n\npub trait 
ThreadLocalFallibleTryDropStrategy: DynFallibleTryDropStrategy + 'static {}\n\n\n\n#[cfg(feature = \"thread-local\")]\n\nimpl<T: DynFallibleTryDropStrategy + 'static> ThreadLocalFallibleTryDropStrategy for T {}\n\n\n", "file_path": "src/lib.rs", "rank": 25, "score": 126516.14171463862 }, { "content": "#[cfg(feature = \"global\")]\n\n#[cfg(feature = \"downcast-rs\")]\n\npub trait GlobalDynFallibleTryDropStrategy:\n\n ThreadSafe + downcast_rs::DowncastSync + DynFallibleTryDropStrategy\n\n{\n\n}\n\n\n\n#[cfg(feature = \"global\")]\n\n#[cfg(feature = \"downcast-rs\")]\n\ndowncast_rs::impl_downcast!(sync GlobalDynFallibleTryDropStrategy);\n\n\n\n#[cfg(feature = \"global\")]\n\nimpl<T: ThreadSafe + DynFallibleTryDropStrategy> GlobalDynFallibleTryDropStrategy for T {}\n\n\n\n/// A trait which signifies a try drop strategy which can be used in a thread local scenario. Must\n\n/// be dynamically dispatched and must live as long as the program does.\n", "file_path": "src/lib.rs", "rank": 26, "score": 123464.98372327539 }, { "content": "/// What to do when the global, thread local, or shim drop strategies is uninitialized.\n\npub trait OnUninit: private::Sealed {\n\n /// Any extra data that this type may neee.\n\n type ExtraData;\n\n}\n\n\n\n/// Just error on the drop strategy if uninitialized.\n\n#[cfg_attr(\n\n feature = \"derives\",\n\n derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)\n\n)]\n\npub enum ErrorOnUninit {}\n\n\n\nimpl OnUninit for ErrorOnUninit {\n\n type ExtraData = ();\n\n}\n\nimpl private::Sealed for ErrorOnUninit {}\n\n\n\n/// Panic on the drop strategy if uninitialized.\n\n#[cfg_attr(\n\n feature = \"derives\",\n", "file_path": "src/handlers/on_uninit.rs", "rank": 27, "score": 121248.15926878605 }, { "content": "/// Signifies that this type can be converted into an [`AdHocMutFallibleDropStrategy`].\n\npub trait IntoAdHocMutFallibleDropStrategy<E: Into<anyhow::Error>>:\n\n FnMut(crate::Error) -> Result<(), E> + Sized\n\n{\n\n /// Convert this type into an [`AdHocMutFallibleDropStrategy`].\n\n fn into_drop_strategy(self) -> AdHocMutFallibleDropStrategy<Self, E> {\n\n AdHocMutFallibleDropStrategy::new(self)\n\n }\n\n}\n\n\n\nimpl<T, E> IntoAdHocMutFallibleDropStrategy<E> for T\n\nwhere\n\n T: FnMut(crate::Error) -> Result<(), E>,\n\n E: Into<anyhow::Error>,\n\n{}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::sync::Arc;\n\n use std::sync::atomic::AtomicBool;\n\n use crate::drop_strategies::PanicDropStrategy;\n", "file_path": "src/drop_strategies/adhoc/fn_mut/mod.rs", "rank": 28, "score": 119448.45477893353 }, { "content": "pub trait TryDropTypes: private::Sealed {}\n\n\n\npub struct NotGiven;\n\n\n\nimpl TryDropTypes for NotGiven {}\n\nimpl private::Sealed for NotGiven {}\n\n\n", "file_path": "src/test_utils.rs", "rank": 29, "score": 118445.02954153181 }, { "content": "/// How to handle cases where the error value is already occupied.\n\npub trait Mode: private::Sealed {}\n\n\n\n/// An error which is returned if the cell is already occupied.\n\n#[derive(Debug)]\n\npub struct AlreadyOccupiedError(pub anyhow::Error);\n\n\n\nimpl StdError for AlreadyOccupiedError {\n\n fn source(&self) -> Option<&(dyn StdError + 'static)> {\n\n Some(self.0.as_ref())\n\n }\n\n}\n\n\n\nimpl fmt::Display for AlreadyOccupiedError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n f.write_str(\"an already existing error was occupied in this cell\")\n\n }\n\n}\n\n\n\n/// A try drop strategy which sets an error value once.\n\n//\n", "file_path": 
"src/drop_strategies/once_cell/mod.rs", "rank": 30, "score": 117370.65035126833 }, { "content": "pub trait OnUninitShim: private::Sealed {\n\n type ExtraData;\n\n}\n\n\n\nimpl<T: OnUninit> OnUninitShim for T {\n\n type ExtraData = T::ExtraData;\n\n}\n\nimpl<T: OnUninit> private::Sealed for T {}\n", "file_path": "src/handlers/common/shim.rs", "rank": 31, "score": 115891.21578823015 }, { "content": "#[cfg(feature = \"global\")]\n\n#[cfg(not(feature = \"downcast-rs\"))]\n\npub trait GlobalTryDropStrategy: ThreadSafe + TryDropStrategy {}\n\n\n\n/// A trait which signifies a try drop strategy which can be used as the primary or fallback\n\n/// handler. Can be downcast.\n", "file_path": "src/lib.rs", "rank": 32, "score": 110886.04045888915 }, { "content": "type BoxDynGlobalTryDropStrategy = Box<dyn GlobalTryDropStrategy>;\n\n\n\nglobal_methods! {\n\n Global = Global;\n\n GenericStrategy = GlobalTryDropStrategy;\n\n DynStrategy = BoxDynGlobalTryDropStrategy;\n\n feature = \"ds-panic\";\n\n\n\n /// Install a new global fallback handler. Must be a dynamic trait object.\n\n install_dyn;\n\n\n\n /// Install a new global fallback handler.\n\n install;\n\n\n\n /// Try and get a reference to the global fallback handler.\n\n ///\n\n /// # Errors\n\n /// If the global fallback handler is not initialized yet, an error is returned.\n\n try_read;\n\n\n", "file_path": "src/handlers/fallback/global.rs", "rank": 33, "score": 104285.01566816424 }, { "content": "#[cfg(feature = \"global\")]\n\n#[cfg(not(feature = \"downcast-rs\"))]\n\npub trait GlobalDynFallibleTryDropStrategy: ThreadSafe + DynFallibleTryDropStrategy {}\n\n\n\n/// A trait which signifies a try drop strategy which can fail, can be dynamically dispatched, and\n\n/// can be used as the global try drop strategy.\n", "file_path": "src/lib.rs", "rank": 34, "score": 103100.53092807543 }, { "content": " pub trait Sealed {}\n\n}\n\n\n\n/// A drop strategy which always fails.\n\npub struct FallibleDropStrategy;\n\n\n\nimpl FallibleTryDropStrategy for FallibleDropStrategy {\n\n type Error = anyhow::Error;\n\n\n\n fn try_handle_error(&self, error: Error) -> Result<(), Self::Error> {\n\n Err(error)\n\n }\n\n}\n\n\n", "file_path": "src/test_utils.rs", "rank": 35, "score": 102915.65564564173 }, { "content": "/// Signifies that this type can be converted into an [`AdHocFallibleDropStrategy`].\n\npub trait IntoAdHocFallibleDropStrategy<E: Into<anyhow::Error>>:\n\n Fn(crate::Error) -> Result<(), E> + Sized\n\n{\n\n /// Convert this type into an [`AdHocFallibleDropStrategy`].\n\n fn into_drop_strategy(self) -> AdHocFallibleDropStrategy<Self, E> {\n\n AdHocFallibleDropStrategy(self)\n\n }\n\n}\n\n\n\nimpl<T, E> IntoAdHocFallibleDropStrategy<E> for T\n\nwhere\n\n T: Fn(crate::Error) -> Result<(), E>,\n\n E: Into<anyhow::Error>,\n\n{}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::cell::Cell;\n\n use std::rc::Rc;\n\n use crate::drop_strategies::PanicDropStrategy;\n", "file_path": "src/drop_strategies/adhoc/mod.rs", "rank": 36, "score": 102138.55219882075 }, { "content": "/// A trait for types which can be dropped, but which may fail to do so.\n\n///\n\n/// This is a pure version of try drop, meaning that the drop strategies have to be explicitly\n\n/// specified, which means it does not depend on a global try drop strategy.\n\n///\n\n/// # Gotchas\n\n/// Implementing this trait is not enough to make it droppable. 
In order for the try drop strategy\n\n/// to be run, you need to put your type in a [`DropAdapter`].\n\n///\n\n/// An easier way to make your type droppable is to call [`PureTryDrop::adapt`] on it.\n\npub trait PureTryDrop {\n\n /// The type of the error that may occur during drop.\n\n type Error: Into<anyhow::Error>;\n\n\n\n /// The type which will be used if the drop strategy fails.\n\n type FallbackTryDropStrategy: TryDropStrategy;\n\n\n\n /// The type which will be used if dropping fails.\n\n type TryDropStrategy: FallibleTryDropStrategy;\n\n\n\n /// Get a reference to the fallback try drop strategy.\n\n fn fallback_try_drop_strategy(&self) -> &Self::FallbackTryDropStrategy;\n\n\n\n /// Get a reference to the try drop strategy.\n\n fn try_drop_strategy(&self) -> &Self::TryDropStrategy;\n\n\n\n /// Adapts this type to take advantage of the specified try drop strategies.\n\n ///\n\n /// # Notes\n\n /// If [`Self`] implements [`Copy`], and you call this function, at first it seems like there\n", "file_path": "src/lib.rs", "rank": 37, "score": 100778.13354319886 }, { "content": "/// A trait which signifies a try drop strategy. This can never fail. If it can, use\n\n/// [`FallibleTryDropStrategy`] instead.\n\npub trait TryDropStrategy {\n\n /// Handle the drop error.\n\n fn handle_error(&self, error: anyhow::Error);\n\n}\n\n\n\nimpl<TDS: TryDropStrategy> FallibleTryDropStrategy for TDS {\n\n type Error = Infallible;\n\n\n\n fn try_handle_error(&self, error: anyhow::Error) -> Result<(), Self::Error> {\n\n self.handle_error(error);\n\n Ok(())\n\n }\n\n}\n\n\n\n/// A trait which signifies a try drop strategy which can be used as the primary or fallback\n\n/// handler.\n", "file_path": "src/lib.rs", "rank": 38, "score": 100774.89996555207 }, { "content": "#[cfg(any(feature = \"global\", feature = \"thread-local\"))]\n\npub trait ImpureTryDrop {\n\n /// The type of the error that may occur during drop.\n\n type Error: Into<anyhow::Error>;\n\n\n\n /// Execute the fallible destructor for this type. This function is unsafe because if this is\n\n /// called outside of a [`Drop::drop`] context, once the scope of the object implementing trait\n\n /// ends, this function will be called twice, potentially resulting in a double-free.\n\n ///\n\n /// Use [`DropAdapter`] to ensure that the destructor is only called once.\n\n ///\n\n /// # Safety\n\n /// The caller must ensure that this function is called within a [`Drop::drop`] context.\n\n ///\n\n /// If the implementing type implements [`RepeatableTryDrop`], however, then this function is\n\n /// safe to call multiple times. 
If the `unsafe` seems ugly to you, you can use\n\n /// [`RepeatableTryDrop::safe_try_drop`].\n\n unsafe fn try_drop(&mut self) -> Result<(), Self::Error>;\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 39, "score": 100771.92959180917 }, { "content": " pub trait Sealed {}\n\n}\n\n\n\nuse crate::TryDropStrategy;\n\nuse core::marker::PhantomData;\n\n\n", "file_path": "src/drop_strategies/unreachable.rs", "rank": 40, "score": 100768.20003028843 }, { "content": " pub trait Sealed {}\n\n}\n\n\n\nuse crate::{FallibleTryDropStrategy, TryDropStrategy};\n\n\n\nuse std::marker::PhantomData;\n\n\n\nuse crate::adapters::ArcError;\n\npub use tokio::runtime::Handle;\n\nuse tokio::sync::broadcast;\n\nuse tokio::sync::broadcast::error::SendError;\n\nuse tokio::sync::broadcast::error::{RecvError, TryRecvError};\n\npub use tokio::sync::broadcast::Receiver as AsyncReceiver;\n\nuse tokio::sync::broadcast::{Receiver, Sender};\n\n\n\n/// An async receiver, which is made sync via blocking on a handle to the tokio runtime.\n\n#[cfg_attr(feature = \"derives\", derive(Debug))]\n\npub struct BlockingReceiver<T> {\n\n receiver: Receiver<T>,\n\n handle: Handle,\n", "file_path": "src/drop_strategies/broadcast.rs", "rank": 41, "score": 100768.20003028843 }, { "content": "/// Turn this type into a [`ThreadUnsafeAdHocMutDropStrategy`].\n\npub trait IntoThreadUnsafeAdHocMutDropStrategy: FnMut(crate::Error) + Sized {\n\n /// Turn this type into a [`ThreadUnsafeAdHocMutDropStrategy`].\n\n fn into_drop_strategy(self) -> ThreadUnsafeAdHocMutDropStrategy<Self> {\n\n ThreadUnsafeAdHocMutDropStrategy::new(self)\n\n }\n\n}\n\n\n\nimpl<F: FnMut(crate::Error)> IntoThreadUnsafeAdHocMutDropStrategy for F {}\n\n\n\n/// A fallible drop strategy which uses a function to handle errors. This is less flexible than its\n\n/// thread safe counterpart however there is less overhead.\n\n#[cfg_attr(\n\n feature = \"derives\",\n\n derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq)\n\n)]\n\npub struct ThreadUnsafeAdHocMutFallibleDropStrategy<F, E>(pub RefCell<F>)\n\nwhere\n\n F: FnMut(crate::Error) -> Result<(), E>,\n\n E: Into<crate::Error>;\n\n\n", "file_path": "src/drop_strategies/adhoc/fn_mut/thread_unsafe.rs", "rank": 42, "score": 100459.93506156046 }, { "content": "type Abstracter<S> = TheGreatAbstracter<Fallback, S>;\n\n\n\nimpl<S: Scope> CommonHandler<DefaultOnUninit, S, Fallback> {\n\n pub const DEFAULT: Self = Self {\n\n extra_data: (),\n\n _scope: PhantomData,\n\n };\n\n}\n\n\n\nimpl<S: Scope> Default for CommonHandler<DefaultOnUninit, S, Fallback> {\n\n fn default() -> Self {\n\n Self::DEFAULT\n\n }\n\n}\n", "file_path": "src/handlers/fallback/mod.rs", "rank": 43, "score": 99864.7388571664 }, { "content": "type Abstracter<S> = TheGreatAbstracter<Primary, S>;\n\n\n\nimpl<S: Scope> CommonHandler<ErrorOnUninit, S, Primary> {\n\n pub const ON_UNINIT_ERROR: Self = Self {\n\n extra_data: (),\n\n _scope: PhantomData,\n\n };\n\n\n\n pub fn error_on_uninit() -> Self {\n\n Self::ON_UNINIT_ERROR\n\n }\n\n}\n\n\n\nimpl<S: Scope> CommonHandler<DefaultOnUninit, S, Primary> {\n\n pub const DEFAULT: Self = Self {\n\n extra_data: (),\n\n _scope: PhantomData,\n\n };\n\n}\n", "file_path": "src/handlers/primary/mod.rs", "rank": 44, "score": 99864.7388571664 }, { "content": "#[cfg(feature = \"global\")]\n\n#[cfg(feature = \"downcast-rs\")]\n\npub trait GlobalTryDropStrategy: ThreadSafe + downcast_rs::DowncastSync + TryDropStrategy {}\n\n\n\n#[cfg(feature = \"global\")]\n\n#[cfg(feature = \"downcast-rs\")]\n\ndowncast_rs::impl_downcast!(sync 
GlobalTryDropStrategy);\n\n\n\n#[cfg(feature = \"global\")]\n\nimpl<T: ThreadSafe + TryDropStrategy> GlobalTryDropStrategy for T {}\n\n\n\n/// A trait which signifies an infallible try drop strategy which can be used in a thread local.\n", "file_path": "src/lib.rs", "rank": 45, "score": 98968.03517686337 }, { "content": "/// A trait which signifies a try drop strategy which can fail.\n\npub trait FallibleTryDropStrategy {\n\n /// The type of the error that may occur when handling a drop error.\n\n type Error: Into<anyhow::Error>;\n\n\n\n /// Try and handle a drop error.\n\n fn try_handle_error(&self, error: anyhow::Error) -> Result<(), Self::Error>;\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 46, "score": 98763.81423807761 }, { "content": "/// A trait which signifies a try drop strategy which can fail. Can be dynamically dispatched.\n\npub trait DynFallibleTryDropStrategy {\n\n /// Try to handle the drop error.\n\n fn dyn_try_handle_error(&self, error: anyhow::Error) -> anyhow::Result<()>;\n\n}\n\n\n\nimpl<T: FallibleTryDropStrategy> DynFallibleTryDropStrategy for T {\n\n fn dyn_try_handle_error(&self, error: anyhow::Error) -> anyhow::Result<()> {\n\n self.try_handle_error(error).map_err(Into::into)\n\n }\n\n}\n\n\n\n/// A trait which signifies a try drop strategy which can fail, can be dynamically dispatched, and\n\n/// can be used as the global try drop strategy.\n", "file_path": "src/lib.rs", "rank": 47, "score": 96881.57091644898 }, { "content": "fn main() {\n\n println!(\"install global handlers from main thread\");\n\n let global_fail = Arc::new(AtomicBool::new(false));\n\n let gf = Arc::clone(&global_fail);\n\n try_drop::install_global_handlers(\n\n AdHocFallibleDropStrategy(move |error| {\n\n println!(\"from primary global handler: {error}\");\n\n\n\n if gf.load(Ordering::Acquire) {\n\n println!(\"forcing failure\");\n\n anyhow::bail!(\"forced failure of primary global handler\")\n\n } else {\n\n Ok(())\n\n }\n\n }),\n\n AdHocDropStrategy(|error| println!(\"from fallback global handler: {error}\")),\n\n );\n\n\n\n println!(\"drop, don't fail for global handler\");\n\n let thing = ErrorsOnDrop::<Fallible, _>::not_given().adapt();\n", "file_path": "examples/global.rs", "rank": 48, "score": 93470.62997545251 }, { "content": "pub trait Mode: private::Sealed {}\n\n\n\npub enum Fallible {}\n\n\n\nimpl Mode for Fallible {}\n\nimpl private::Sealed for Fallible {}\n\n\n\npub enum Infallible {}\n\n\n\nimpl Mode for Infallible {}\n\nimpl private::Sealed for Infallible {}\n\n\n\npub enum Random {}\n\n\n\nimpl Mode for Random {}\n\nimpl private::Sealed for Random {}\n\n\n", "file_path": "src/test_utils.rs", "rank": 49, "score": 93264.52973681864 }, { "content": "/// Turn this type into a [`ThreadUnsafeAdHocMutFallibleDropStrategy`].\n\npub trait IntoThreadUnsafeAdHocMutFallibleDropStrategy<E: Into<anyhow::Error>>:\n\n FnMut(crate::Error) -> Result<(), E> + Sized\n\n{\n\n /// Turn this type into a [`ThreadUnsafeAdHocMutFallibleDropStrategy`].\n\n fn into_drop_strategy(self) -> ThreadUnsafeAdHocMutFallibleDropStrategy<Self, E> {\n\n ThreadUnsafeAdHocMutFallibleDropStrategy::new(self)\n\n }\n\n}\n\n\n\nimpl<T, E> IntoThreadUnsafeAdHocMutFallibleDropStrategy<E> for T\n\nwhere\n\n T: FnMut(crate::Error) -> Result<(), E>,\n\n E: Into<crate::Error>,\n\n{}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::cell::Cell;\n\n use std::rc::Rc;\n\n use crate::drop_strategies::PanicDropStrategy;\n", "file_path": "src/drop_strategies/adhoc/fn_mut/thread_unsafe.rs", "rank": 50, "score": 92480.79789022368 }, { 
"content": "/// How to handle errors when sending a message to all receivers.\n\npub trait Mode: private::Sealed {}\n\n\n\n/// Continue on sending errors to nobody if no receivers are available.\n\n#[cfg_attr(\n\n feature = \"derives\",\n\n derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)\n\n)]\n\npub enum OkIfAlone {}\n\n\n\nimpl Mode for OkIfAlone {}\n\n\n\nimpl private::Sealed for OkIfAlone {}\n\n\n\n/// Return an error if there are no receivers to send errors to.\n\n#[cfg_attr(\n\n feature = \"derives\",\n\n derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)\n\n)]\n\npub enum NeedsReceivers {}\n\n\n", "file_path": "src/drop_strategies/broadcast.rs", "rank": 51, "score": 91382.42193425531 }, { "content": "/// How safe will the [`UnreachableDropStrategy`] be.\n\npub trait Safety: private::Sealed {}\n\n\n\n/// Just panic when an error occurs.\n\n#[cfg_attr(\n\n feature = \"derives\",\n\n derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)\n\n)]\n\npub enum Safe {}\n\nimpl Safety for Safe {}\n\nimpl private::Sealed for Safe {}\n\n\n\n/// Tell to the compiler that this branch never happens, a.k.a. call\n\n/// [`core::hint::unreachable_unchecked`].\n\n///\n\n/// Note that when `debug_assertions` or the debug profile is used, this will just panic instead.\n\n#[cfg(feature = \"ds-unreachable-unsafe\")]\n\n#[cfg_attr(\n\n feature = \"derives\",\n\n derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)\n\n)]\n", "file_path": "src/drop_strategies/unreachable.rs", "rank": 52, "score": 91382.42193425531 }, { "content": "pub fn fallible() -> DropAdapter<ErrorsOnDrop<Fallible, NotGiven>> {\n\n ErrorsOnDrop::<Fallible, _>::not_given().adapt()\n\n}\n\n\n", "file_path": "src/test_utils.rs", "rank": 53, "score": 83603.272376764 }, { "content": "type ThreadLocal = GenericThreadLocal<Primary>;\n\n\n\n/// A scope guard for the thread local primary handler. It is used to set the thread local primary\n\n/// handler for the duration of the scope.\n\npub type ScopeGuard = GenericScopeGuard<Primary>;\n\n\n\n/// Handy type alias to `Box<dyn ThreadLocalFallibleTryDropStrategy>`.\n\npub type BoxDynFallibleTryDropStrategy = Box<dyn ThreadLocalFallibleTryDropStrategy>;\n\n\n\nthread_local_methods! {\n\n ThreadLocal = ThreadLocal;\n\n ScopeGuard = ScopeGuard;\n\n GenericStrategy = ThreadLocalFallibleTryDropStrategy;\n\n DynStrategy = BoxDynFallibleTryDropStrategy;\n\n feature = \"ds-write\";\n\n\n\n /// Install a new thread local primary handler.\n\n install;\n\n\n\n /// Install a new thread local primary handler. Must be a dynamic trait object.\n", "file_path": "src/handlers/primary/thread_local.rs", "rank": 54, "score": 74357.59882907532 }, { "content": "type ThreadLocal = GenericThreadLocal<Fallback>;\n\n\n\n/// A scope guard for the thread local fallback handler. This sets the thread local fallback handler\n\n/// to the one specified for the duration of the scope.\n\npub type ScopeGuard = GenericScopeGuard<Fallback>;\n\n\n\n/// A handy type alias for `Box<dyn ThreadLocalTryDropStrategy>`.\n\npub type BoxDynTryDropStrategy = Box<dyn ThreadLocalTryDropStrategy>;\n\n\n\nthread_local_methods! {\n\n ThreadLocal = ThreadLocal;\n\n ScopeGuard = ScopeGuard;\n\n GenericStrategy = ThreadLocalTryDropStrategy;\n\n DynStrategy = BoxDynTryDropStrategy;\n\n feature = \"ds-panic\";\n\n\n\n /// Install a new fallback thread local handler.\n\n install;\n\n\n\n /// Install a new fallback thread local handler. 
Must be a dynamic trait object.\n", "file_path": "src/handlers/fallback/thread_local.rs", "rank": 55, "score": 74357.59882907532 }, { "content": "#[cfg(all(feature = \"global\", not(feature = \"thread-local\")))]\n\npub use primary::global::GlobalPrimaryHandler as PrimaryHandler;\n\n\n\n#[cfg(all(feature = \"global\", not(feature = \"thread-local\")))]\n\npub use primary::global::DEFAULT_GLOBAL_PRIMARY_HANDLER as DEFAULT_PRIMARY_HANDLER;\n\n\n\n#[cfg(all(feature = \"thread-local\", not(feature = \"global\")))]\n\npub use primary::thread_local::ThreadLocalPrimaryHandler as PrimaryHandler;\n\n\n\n#[cfg(all(feature = \"thread-local\", not(feature = \"global\")))]\n\npub use primary::thread_local::DEFAULT_THREAD_LOCAL_PRIMARY_HANDLER as DEFAULT_PRIMARY_HANDLER;\n\n\n\n#[cfg(all(feature = \"thread-local\", feature = \"global\"))]\n\npub use primary::shim::ShimPrimaryHandler as PrimaryHandler;\n\n\n\n#[cfg(all(feature = \"thread-local\", feature = \"global\"))]\n\npub use primary::shim::DEFAULT_SHIM_PRIMARY_HANDLER as DEFAULT_PRIMARY_HANDLER;\n\n\n\n#[cfg(all(feature = \"global\", not(feature = \"thread-local\")))]\n\npub use fallback::global::GlobalFallbackHandler as FallbackHandler;\n", "file_path": "src/handlers/mod.rs", "rank": 56, "score": 66127.5259188066 }, { "content": "\n\n#[cfg(all(feature = \"global\", not(feature = \"thread-local\")))]\n\npub use fallback::global::DEFAULT_GLOBAL_FALLBACK_HANDLER as DEFAULT_FALLBACK_HANDLER;\n\n\n\n#[cfg(all(feature = \"thread-local\", not(feature = \"global\")))]\n\npub use fallback::thread_local::ThreadLocalFallbackHandler as FallbackHandler;\n\n\n\n#[cfg(all(feature = \"thread-local\", not(feature = \"global\")))]\n\npub use fallback::thread_local::DEFAULT_THREAD_LOCAL_FALLBACK_HANDLER as DEFAULT_FALLBACK_HANDLER;\n\n\n\n#[cfg(all(feature = \"thread-local\", feature = \"global\"))]\n\npub use fallback::shim::ShimFallbackHandler as FallbackHandler;\n\n\n\n#[cfg(all(feature = \"thread-local\", feature = \"global\"))]\n\npub use fallback::shim::DEFAULT_SHIM_FALLBACK_HANDLER as DEFAULT_FALLBACK_HANDLER;\n", "file_path": "src/handlers/mod.rs", "rank": 57, "score": 66127.30104742793 }, { "content": "//! 
Manage the primary and fallback handlers and their scopes.\n\n\n\n#[macro_use]\n\nmod common;\n\n\n\npub mod fallback;\n\npub(crate) mod fns;\n\npub mod primary;\n\n\n\n#[cfg(any(feature = \"global\", feature = \"thread-local\"))]\n\npub mod on_uninit;\n\n\n\n#[cfg(any(feature = \"global\", feature = \"thread-local\"))]\n\nmod uninit_error;\n\n\n\n#[cfg(any(feature = \"global\", feature = \"thread-local\"))]\n\npub use uninit_error::UninitializedError;\n\n\n\npub use fns::*;\n\n\n", "file_path": "src/handlers/mod.rs", "rank": 58, "score": 66125.85043340264 }, { "content": "\n\n/// A fallback handler which uses the global scope.\n\npub type GlobalFallbackHandler<OU = DefaultOnUninit> = CommonHandler<OU, GlobalScope, Fallback>;\n\n\n\n/// The default global fallback handler.\n\npub static DEFAULT_GLOBAL_FALLBACK_HANDLER: GlobalFallbackHandler = GlobalFallbackHandler::DEFAULT;\n\n\n\nstatic FALLBACK_HANDLER: RwLock<Option<Box<dyn GlobalTryDropStrategy>>> =\n\n parking_lot::const_rwlock(None);\n\n\n\nimpl_try_drop_strategy_for!(GlobalFallbackHandler where Scope: GlobalScope);\n\n\n\nimpl GlobalDefinition for Fallback {\n\n const UNINITIALIZED_ERROR: &'static str = \"the global fallback handler is not initialized yet\";\n\n type Global = Box<dyn GlobalTryDropStrategy>;\n\n\n\n fn global() -> &'static RwLock<Option<Self::Global>> {\n\n &FALLBACK_HANDLER\n\n }\n\n}\n", "file_path": "src/handlers/fallback/global.rs", "rank": 59, "score": 64462.23793005988 }, { "content": "/// The primary global handler which uses the global scope.\n\npub type GlobalPrimaryHandler<OU = DefaultOnUninit> = CommonHandler<OU, GlobalScope, Primary>;\n\n\n\n/// The default global primary handler.\n\npub static DEFAULT_GLOBAL_PRIMARY_HANDLER: GlobalPrimaryHandler = GlobalPrimaryHandler::DEFAULT;\n\n\n\nimpl_fallible_try_drop_strategy_for!(GlobalPrimaryHandler\n\nwhere\n\n Scope: GlobalScope,\n\n Definition: GlobalDefinition\n\n);\n\n\n\nstatic PRIMARY_HANDLER: RwLock<Option<Box<dyn GlobalDynFallibleTryDropStrategy>>> =\n\n parking_lot::const_rwlock(None);\n\n\n\nimpl GlobalDefinition for Primary {\n\n const UNINITIALIZED_ERROR: &'static str = \"the global primary handler is not initialized yet\";\n\n type Global = Box<dyn GlobalDynFallibleTryDropStrategy>;\n\n\n\n fn global() -> &'static RwLock<Option<Self::Global>> {\n", "file_path": "src/handlers/primary/global.rs", "rank": 60, "score": 64461.62311653745 }, { "content": "//! Manage the global fallback handler.\n\n\n\nuse super::DefaultOnUninit;\n\nuse crate::handlers::common::global::{Global as GenericGlobal, GlobalDefinition};\n\nuse crate::handlers::common::handler::CommonHandler;\n\nuse crate::handlers::common::Fallback;\n\nuse crate::handlers::common::Global as GlobalScope;\n\nuse crate::handlers::fallback::Abstracter;\n\nuse crate::handlers::on_uninit::{FlagOnUninit, PanicOnUninit};\n\nuse crate::handlers::uninit_error::UninitializedError;\n\nuse crate::{GlobalTryDropStrategy, TryDropStrategy};\n\nuse anyhow::Error;\n\nuse parking_lot::{MappedRwLockReadGuard, MappedRwLockWriteGuard, RwLock};\n\nuse std::boxed::Box;\n\n\n\n#[cfg(feature = \"ds-panic\")]\n\nuse crate::handlers::common::global::DefaultGlobalDefinition;\n\n\n\n#[cfg(feature = \"ds-panic\")]\n\nuse crate::handlers::on_uninit::UseDefaultOnUninit;\n", "file_path": "src/handlers/fallback/global.rs", "rank": 65, "score": 64453.176115952025 }, { "content": "//! 
Manage the primary global handler.\n\n\n\nuse crate::handlers::common::global::{Global as GenericGlobal, GlobalDefinition};\n\nuse crate::handlers::common::handler::CommonHandler;\n\nuse crate::handlers::common::{Global as GlobalScope, Primary};\n\nuse crate::handlers::on_uninit::{ErrorOnUninit, FlagOnUninit, PanicOnUninit};\n\nuse crate::handlers::primary::{Abstracter, DefaultOnUninit};\n\nuse crate::handlers::uninit_error::UninitializedError;\n\nuse crate::{FallibleTryDropStrategy, GlobalDynFallibleTryDropStrategy};\n\n\n\n#[cfg(feature = \"ds-write\")]\n\nuse crate::handlers::common::global::DefaultGlobalDefinition;\n\n\n\nuse parking_lot::{MappedRwLockReadGuard, MappedRwLockWriteGuard, RwLock};\n\nuse std::boxed::Box;\n\nuse std::convert;\n\n\n\n#[cfg(feature = \"ds-write\")]\n\nuse crate::handlers::on_uninit::UseDefaultOnUninit;\n\n\n", "file_path": "src/handlers/primary/global.rs", "rank": 67, "score": 64453.06642862539 }, { "content": " &PRIMARY_HANDLER\n\n }\n\n}\n\n\n\n#[cfg(feature = \"ds-write\")]\n\nimpl DefaultGlobalDefinition for Primary {\n\n fn default() -> Self::Global {\n\n let mut strategy = crate::drop_strategies::WriteDropStrategy::stderr();\n\n strategy.prelude(\"error: \");\n\n Box::new(strategy)\n\n }\n\n}\n\n\n\nimpl<T: GlobalDynFallibleTryDropStrategy + 'static> From<T>\n\n for Box<dyn GlobalDynFallibleTryDropStrategy>\n\n{\n\n fn from(handler: T) -> Self {\n\n Box::new(handler)\n\n }\n\n}\n\n\n", "file_path": "src/handlers/primary/global.rs", "rank": 68, "score": 64451.0626877262 }, { "content": "\n\n /// Get a reference to the global fallback handler.\n\n ///\n\n /// If the global fallback handler is not initialized yet, it is initialized with the default\n\n /// one.\n\n read_or_default;\n\n\n\n /// Get a mutable reference to the global fallback handler.\n\n ///\n\n /// If the global fallback handler is not initialized yet, it is initialized with the default\n\n /// one.\n\n write_or_default;\n\n}\n", "file_path": "src/handlers/fallback/global.rs", "rank": 69, "score": 64450.71628757645 }, { "content": "\n\n /// Uninstall the global primary handler.\n\n uninstall;\n\n\n\n /// Get a reference to the global primary handler.\n\n ///\n\n /// If the global primary handler is not initialized yet, it is initialized with the default\n\n /// value.\n\n read_or_default;\n\n\n\n /// Get a mutable reference to the global primary handler.\n\n ///\n\n /// If the global primary handler is not initialized yet, it is initialized with the default\n\n /// value.\n\n write_or_default;\n\n}\n", "file_path": "src/handlers/primary/global.rs", "rank": 70, "score": 64450.653938442185 }, { "content": " /// If the global primary handler is not initialized yet, an error is returned.\n\n try_read;\n\n\n\n /// Get a reference to the global primary handler.\n\n ///\n\n /// # Panics\n\n /// If the global primary handler is not initialized yet, a panic is raised.\n\n read;\n\n\n\n /// Try and get a mutable reference to the global primary handler.\n\n ///\n\n /// # Errors\n\n /// If the global primary handler is not initialized yet, an error is returned.\n\n try_write;\n\n\n\n /// Get a mutable reference to the global primary handler.\n\n ///\n\n /// # Panics\n\n /// If the global primary handler is not initialized yet, a panic is raised.\n\n write;\n", "file_path": "src/handlers/primary/global.rs", "rank": 72, "score": 64447.77208912398 }, { "content": " /// Get a reference to the global fallback handler.\n\n ///\n\n /// # Panics\n\n /// If the global fallback handler is not initialized yet, a 
panic is raised.\n\n read;\n\n\n\n /// Try and get a mutable reference to the global fallback handler.\n\n ///\n\n /// # Errors\n\n /// If the global fallback handler is not initialized yet, an error is returned.\n\n try_write;\n\n\n\n /// Get a mutable reference to the global fallback handler.\n\n ///\n\n /// # Panics\n\n /// If the global fallback handler is not initialized yet, a panic is raised.\n\n write;\n\n\n\n /// Uninstall the current global fallback handler.\n\n uninstall;\n", "file_path": "src/handlers/fallback/global.rs", "rank": 74, "score": 64446.928975822164 }, { "content": "\n\n#[cfg(feature = \"ds-panic\")]\n\nimpl DefaultGlobalDefinition for Fallback {\n\n fn default() -> Self::Global {\n\n Box::new(crate::drop_strategies::PanicDropStrategy::DEFAULT)\n\n }\n\n}\n\n\n\nimpl<T: GlobalTryDropStrategy> From<T> for Box<dyn GlobalTryDropStrategy> {\n\n fn from(t: T) -> Self {\n\n Box::new(t)\n\n }\n\n}\n\n\n", "file_path": "src/handlers/fallback/global.rs", "rank": 75, "score": 64446.625337247104 }, { "content": "//! Manage the primary handler.\n\n\n\nuse crate::handlers::common::handler::CommonHandler;\n\nuse crate::handlers::common::proxy::TheGreatAbstracter;\n\nuse crate::handlers::common::{Primary, Scope};\n\nuse crate::handlers::on_uninit::ErrorOnUninit;\n\nuse std::marker::PhantomData;\n\n\n\n#[macro_use]\n\nmod macros;\n\n\n\n#[cfg(feature = \"global\")]\n\npub mod global;\n\n\n\n#[cfg(feature = \"thread-local\")]\n\npub mod thread_local;\n\n\n\n#[cfg(all(feature = \"global\", feature = \"thread-local\"))]\n\npub mod shim;\n\n\n", "file_path": "src/handlers/primary/mod.rs", "rank": 77, "score": 63752.92036904709 }, { "content": "//! Manage the fallback handler.\n\n\n\n#[macro_use]\n\nmod macros;\n\n\n\n#[cfg(feature = \"global\")]\n\npub mod global;\n\n\n\n#[cfg(feature = \"thread-local\")]\n\npub mod thread_local;\n\n\n\n#[cfg(all(feature = \"global\", feature = \"thread-local\"))]\n\npub mod shim;\n\n\n\nmod private {\n", "file_path": "src/handlers/fallback/mod.rs", "rank": 78, "score": 63751.611736496016 }, { "content": "/// The default thing to do when the primary handler is uninitialized, that is\n\n/// to panic.\n\n#[cfg(not(feature = \"ds-write\"))]\n\npub type DefaultOnUninit = crate::handlers::on_uninit::PanicOnUninit;\n\n\n\n/// The default thing to do when the primary handler is uninitialized, that is\n\n/// to use the default strategy. 
Note that this mutates the primary handler.\n\n#[cfg(feature = \"ds-write\")]\n\npub type DefaultOnUninit = crate::handlers::on_uninit::UseDefaultOnUninit;\n\n\n", "file_path": "src/handlers/primary/mod.rs", "rank": 79, "score": 63748.109008810825 }, { "content": "///\n\n/// # Examples\n\n/// ```rust\n\n/// {\n\n/// let _guard = ScopeGuard::new(PanicDropStrategy::DEFAULT));\n\n/// {\n\n/// // this isn't allowed\n\n/// let _guard = ScopeGuard::new(PanicDropStrategy::DEFAULT));\n\n/// }\n\n/// }\n\n/// ```\n\n#[cfg_attr(\n\n feature = \"derives\",\n\n derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)\n\n)]\n\n#[derive(Debug)]\n\npub struct NestedScopeError(pub(crate) ());\n\n\n\nimpl Error for NestedScopeError {}\n\n\n\nimpl fmt::Display for NestedScopeError {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n f.write_str(\"you cannot nest scope guards\")\n\n }\n\n}\n\n\n", "file_path": "src/handlers/common/mod.rs", "rank": 80, "score": 63744.67443147479 }, { "content": "mod private {\n", "file_path": "src/handlers/common/mod.rs", "rank": 81, "score": 63737.629762681994 }, { "content": "mod thread_unsafe;\n\n\n\nuse crate::{FallibleTryDropStrategy, TryDropStrategy};\n\nuse anyhow::Error;\n\nuse parking_lot::Mutex;\n\nuse std::marker::PhantomData;\n\npub use thread_unsafe::*;\n\n\n\n/// A quick and dirty drop strategy which uses a function.\n\n///\n\n/// This is more flexible compared to [`AdHocDropStrategy`], accepting also [`FnMut`]s instead of\n\n/// only [`Fn`]s, but the function is guarded by a [`Mutex`], which has more overhead.\n\n///\n\n/// [`AdHocDropStrategy`]: super::AdHocDropStrategy\n\n#[cfg_attr(feature = \"derives\", derive(Debug, Default))]\n\npub struct AdHocMutDropStrategy<F: FnMut(crate::Error)>(pub Mutex<F>);\n\n\n\nimpl<F: FnMut(crate::Error)> AdHocMutDropStrategy<F> {\n\n /// Create a new ad-hoc try drop strategy.\n\n pub fn new(f: F) -> Self {\n", "file_path": "src/drop_strategies/adhoc/fn_mut/mod.rs", "rank": 82, "score": 59586.250516143875 }, { "content": " use crate::{LOAD_ORDERING, STORE_ORDERING};\n\n use crate::test_utils::fallible;\n\n use super::*;\n\n\n\n // we need this lock otherwise the test results will be inconsistent\n\n static LOCK: Mutex<()> = parking_lot::const_mutex(());\n\n\n\n #[test]\n\n fn test_adhoc_mut_drop_strategy() {\n\n let _lock = LOCK.lock();\n\n let works = Arc::new(AtomicBool::new(false));\n\n let w = Arc::clone(&works);\n\n let strategy = AdHocMutDropStrategy::new(move |_| w.store(true, STORE_ORDERING));\n\n crate::install_global_handlers(strategy, PanicDropStrategy::DEFAULT);\n\n drop(fallible());\n\n assert!(works.load(LOAD_ORDERING));\n\n }\n\n\n\n #[test]\n\n fn test_into_adhoc_mut_drop_strategy() {\n", "file_path": "src/drop_strategies/adhoc/fn_mut/mod.rs", "rank": 83, "score": 59581.72324760268 }, { "content": "{\n\n /// The function to call.\n\n pub f: Mutex<F>,\n\n _error: PhantomData<E>,\n\n}\n\n\n\nimpl<F, E> AdHocMutFallibleDropStrategy<F, E>\n\nwhere\n\n F: FnMut(crate::Error) -> Result<(), E>,\n\n E: Into<anyhow::Error>,\n\n{\n\n /// Create a new ad-hoc fallible drop strategy.\n\n pub fn new(f: F) -> Self {\n\n Self {\n\n f: Mutex::new(f),\n\n _error: PhantomData,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/drop_strategies/adhoc/fn_mut/mod.rs", "rank": 84, "score": 59578.740199739324 }, { "content": " let _lock = LOCK.lock();\n\n let works = Arc::new(AtomicBool::new(false));\n\n let w = Arc::clone(&works);\n\n let strategy = move |_| w.store(true, STORE_ORDERING);\n\n let strategy = 
IntoAdHocMutDropStrategy::into_drop_strategy(strategy);\n\n crate::install_global_handlers(strategy, PanicDropStrategy::DEFAULT);\n\n drop(fallible());\n\n assert!(works.load(LOAD_ORDERING));\n\n }\n\n\n\n #[test]\n\n fn test_adhoc_mut_fallible_drop_strategy() {\n\n let _lock = LOCK.lock();\n\n let works = Arc::new(AtomicBool::new(false));\n\n let w = Arc::clone(&works);\n\n let strategy = AdHocMutFallibleDropStrategy::<_, crate::Error>::new(move |_| {\n\n w.store(true, STORE_ORDERING);\n\n Ok(())\n\n });\n\n crate::install_global_handlers(strategy, PanicDropStrategy::DEFAULT);\n", "file_path": "src/drop_strategies/adhoc/fn_mut/mod.rs", "rank": 85, "score": 59578.732540404824 }, { "content": " drop(fallible());\n\n assert!(works.load(LOAD_ORDERING));\n\n }\n\n\n\n #[test]\n\n fn test_into_adhoc_mut_fallible_drop_strategy() {\n\n let _lock = LOCK.lock();\n\n let works = Arc::new(AtomicBool::new(false));\n\n let w = Arc::clone(&works);\n\n let strategy = move |_| {\n\n w.store(true, STORE_ORDERING);\n\n Ok::<_, crate::Error>(())\n\n };\n\n let strategy = IntoAdHocMutFallibleDropStrategy::into_drop_strategy(strategy);\n\n crate::install_global_handlers(strategy, PanicDropStrategy::DEFAULT);\n\n drop(fallible());\n\n assert!(works.load(LOAD_ORDERING));\n\n }\n\n}", "file_path": "src/drop_strategies/adhoc/fn_mut/mod.rs", "rank": 86, "score": 59577.19654462442 }, { "content": " Self(Mutex::new(f))\n\n }\n\n}\n\n\n\nimpl<F: FnMut(crate::Error)> TryDropStrategy for AdHocMutDropStrategy<F> {\n\n fn handle_error(&self, error: crate::Error) {\n\n self.0.lock()(error)\n\n }\n\n}\n\n\n\nimpl<F: FnMut(crate::Error)> From<F> for AdHocMutDropStrategy<F> {\n\n fn from(f: F) -> Self {\n\n Self::new(f)\n\n }\n\n}\n\n\n\n/// Signifies that this type can be converted into an [`AdHocMutDropStrategy`].\n", "file_path": "src/drop_strategies/adhoc/fn_mut/mod.rs", "rank": 87, "score": 59577.19321638466 }, { "content": "impl<F, E> FallibleTryDropStrategy for AdHocMutFallibleDropStrategy<F, E>\n\nwhere\n\n F: FnMut(crate::Error) -> Result<(), E>,\n\n E: Into<anyhow::Error>,\n\n{\n\n type Error = E;\n\n\n\n fn try_handle_error(&self, error: Error) -> Result<(), Self::Error> {\n\n self.f.lock()(error)\n\n }\n\n}\n\n\n\nimpl<F, E> From<F> for AdHocMutFallibleDropStrategy<F, E>\n\nwhere\n\n F: FnMut(crate::Error) -> Result<(), E>,\n\n E: Into<anyhow::Error>,\n\n{\n\n fn from(f: F) -> Self {\n\n Self::new(f)\n\n }\n\n}\n\n\n", "file_path": "src/drop_strategies/adhoc/fn_mut/mod.rs", "rank": 88, "score": 59576.79210081267 }, { "content": "\n\n $(#[$($scope_dyn_meta:meta)*])*\n\n scope_dyn;\n\n ) => {\n\n #[allow(unused_imports)]\n\n use $crate::handlers::common::thread_local::imports::*;\n\n\n\n $(#[$($install_meta)*])*\n\n pub fn install(strategy: impl $generic_strategy) {\n\n $thread_local::install(strategy)\n\n }\n\n\n\n $(#[$($install_dyn_meta)*])*\n\n pub fn install_dyn(strategy: $dyn_strategy) {\n\n $thread_local::install_dyn(strategy)\n\n }\n\n\n\n $(#[$($read_meta)*])*\n\n pub fn read<T>(f: impl FnOnce(&$dyn_strategy) -> T) -> T {\n\n $thread_local::read(f)\n", "file_path": "src/handlers/common/thread_local/mod.rs", "rank": 89, "score": 59483.339224547 }, { "content": " }\n\n\n\n $(#[$($try_read_meta)*])*\n\n pub fn try_read<T>(f: impl FnOnce(&$dyn_strategy) -> T) -> Result<T, UninitializedError> {\n\n $thread_local::try_read(f)\n\n }\n\n\n\n $(#[$($read_or_default_meta)*])*\n\n #[cfg(feature = $feature)]\n\n pub fn read_or_default<T>(f: impl FnOnce(&$dyn_strategy) -> T) -> T {\n\n 
$thread_local::read_or_default(f)\n\n }\n\n\n\n $(#[$($write_meta)*])*\n\n pub fn write<T>(f: impl FnOnce(&mut $dyn_strategy) -> T) -> T {\n\n $thread_local::write(f)\n\n }\n\n\n\n $(#[$($try_write_meta)*])*\n\n pub fn try_write<T>(f: impl FnOnce(&mut $dyn_strategy) -> T) -> Result<T, UninitializedError> {\n", "file_path": "src/handlers/common/thread_local/mod.rs", "rank": 90, "score": 59481.85693034742 }, { "content": "pub mod scope_guard;\n\npub(crate) mod imports {\n\n pub use crate::handlers::UninitializedError;\n\n pub use crate::{DynFallibleTryDropStrategy, ThreadLocalFallibleTryDropStrategy};\n\n pub use std::boxed::Box;\n\n}\n\n\n\nuse crate::handlers::common::thread_local::scope_guard::ScopeGuard;\n\nuse crate::handlers::common::Handler;\n\nuse crate::handlers::UninitializedError;\n\nuse std::cell::{Cell, RefCell};\n\nuse std::marker::PhantomData;\n\nuse std::thread::LocalKey;\n\n\n\nmacro_rules! thread_local_methods {\n\n (\n\n ThreadLocal = $thread_local:ident;\n\n ScopeGuard = $scope_guard:ident;\n\n GenericStrategy = $generic_strategy:ident;\n\n DynStrategy = $dyn_strategy:ident;\n", "file_path": "src/handlers/common/thread_local/mod.rs", "rank": 91, "score": 59478.87360333653 }, { "content": "\n\n pub fn scope_dyn(strategy: T::ThreadLocal) -> ScopeGuard<T> {\n\n ScopeGuard::new_dyn(strategy)\n\n }\n\n}\n\n\n\nimpl<T: DefaultThreadLocalDefinition> ThreadLocal<T> {\n\n pub fn read_or_default<R>(f: impl FnOnce(&T::ThreadLocal) -> R) -> R {\n\n T::thread_local().with(|cell| {\n\n let mut strategy = cell.borrow_mut();\n\n let strategy = strategy.get_or_insert_with(T::default);\n\n let strategy = &*strategy;\n\n f(strategy)\n\n })\n\n }\n\n\n\n pub fn write_or_default<R>(f: impl FnOnce(&mut T::ThreadLocal) -> R) -> R {\n\n T::thread_local().with(|cell| f(cell.borrow_mut().get_or_insert_with(T::default)))\n\n }\n\n}\n", "file_path": "src/handlers/common/thread_local/mod.rs", "rank": 92, "score": 59477.68142373792 }, { "content": " $thread_local::try_write(f)\n\n }\n\n\n\n $(#[$($write_or_default_meta)*])*\n\n #[cfg(feature = $feature)]\n\n pub fn write_or_default<T>(f: impl FnOnce(&mut $dyn_strategy) -> T) -> T {\n\n $thread_local::write_or_default(f)\n\n }\n\n\n\n $(#[$($uninstall_meta)*])*\n\n pub fn uninstall() {\n\n $thread_local::uninstall()\n\n }\n\n\n\n $(#[$($take_meta)*])*\n\n pub fn take() -> Option<$dyn_strategy> {\n\n $thread_local::take()\n\n }\n\n\n\n $(#[$($replace_meta)*])*\n", "file_path": "src/handlers/common/thread_local/mod.rs", "rank": 93, "score": 59475.418887168744 }, { "content": " pub fn write<R>(f: impl FnOnce(&mut T::ThreadLocal) -> R) -> R {\n\n Self::try_write(f).expect(T::UNINITIALIZED_ERROR)\n\n }\n\n\n\n pub fn try_write<R>(f: impl FnOnce(&mut T::ThreadLocal) -> R) -> Result<R, UninitializedError> {\n\n T::thread_local().with(|cell| {\n\n cell.borrow_mut()\n\n .as_mut()\n\n .map(f)\n\n .ok_or(UninitializedError(()))\n\n })\n\n }\n\n\n\n pub fn install(strategy: impl Into<T::ThreadLocal>) {\n\n Self::install_dyn(strategy.into())\n\n }\n\n\n\n pub fn install_dyn(strategy: T::ThreadLocal) {\n\n Self::replace_dyn(strategy);\n\n }\n", "file_path": "src/handlers/common/thread_local/mod.rs", "rank": 94, "score": 59473.02852124859 }, { "content": " feature = $feature:literal;\n\n\n\n $(#[$($install_meta:meta)*])*\n\n install;\n\n\n\n $(#[$($install_dyn_meta:meta)*])*\n\n install_dyn;\n\n\n\n $(#[$($read_meta:meta)*])*\n\n read;\n\n\n\n $(#[$($try_read_meta:meta)*])*\n\n try_read;\n\n\n\n $(#[$($read_or_default_meta:meta)*])*\n\n read_or_default;\n\n\n\n 
$(#[$($write_meta:meta)*])*\n\n write;\n\n\n", "file_path": "src/handlers/common/thread_local/mod.rs", "rank": 95, "score": 59471.244604071464 }, { "content": "\n\n pub fn uninstall() {\n\n Self::take();\n\n }\n\n\n\n pub fn take() -> Option<T::ThreadLocal> {\n\n T::thread_local().with(|cell| cell.borrow_mut().take())\n\n }\n\n\n\n pub fn replace(new: impl Into<T::ThreadLocal>) -> Option<T::ThreadLocal> {\n\n Self::replace_dyn(new.into())\n\n }\n\n\n\n pub fn replace_dyn(new: T::ThreadLocal) -> Option<T::ThreadLocal> {\n\n T::thread_local().with(|cell| cell.borrow_mut().replace(new))\n\n }\n\n\n\n pub fn scope(strategy: impl Into<T::ThreadLocal>) -> ScopeGuard<T> {\n\n Self::scope_dyn(strategy.into())\n\n }\n", "file_path": "src/handlers/common/thread_local/mod.rs", "rank": 96, "score": 59469.5638912284 }, { "content": " pub fn replace(strategy: impl $generic_strategy) -> Option<$dyn_strategy> {\n\n $thread_local::replace(strategy)\n\n }\n\n\n\n $(#[$($replace_dyn_meta)*])*\n\n pub fn replace_dyn(strategy: $dyn_strategy) -> Option<$dyn_strategy> {\n\n $thread_local::replace_dyn(strategy)\n\n }\n\n\n\n $(#[$($scope_meta)*])*\n\n pub fn scope(strategy: impl $generic_strategy) -> $scope_guard {\n\n $thread_local::scope(strategy)\n\n }\n\n\n\n $(#[$($scope_dyn_meta)*])*\n\n pub fn scope_dyn(strategy: $dyn_strategy) -> $scope_guard {\n\n $thread_local::scope_dyn(strategy)\n\n }\n\n };\n\n}\n\n\n", "file_path": "src/handlers/common/thread_local/mod.rs", "rank": 97, "score": 59469.543784637535 }, { "content": " $(#[$($try_write_meta:meta)*])*\n\n try_write;\n\n\n\n $(#[$($write_or_default_meta:meta)*])*\n\n write_or_default;\n\n\n\n $(#[$($uninstall_meta:meta)*])*\n\n uninstall;\n\n\n\n $(#[$($take_meta:meta)*])*\n\n take;\n\n\n\n $(#[$($replace_meta:meta)*])*\n\n replace;\n\n\n\n $(#[$($replace_dyn_meta:meta)*])*\n\n replace_dyn;\n\n\n\n $(#[$($scope_meta:meta)*])*\n\n scope;\n", "file_path": "src/handlers/common/thread_local/mod.rs", "rank": 98, "score": 59466.49766587738 }, { "content": "fn drops_value<M: Mode, TDT: TryDropTypes>(value: DropAdapter<ErrorsOnDrop<M, TDT>>)\n\nwhere\n\n ErrorsOnDrop<M, TDT>: PureTryDrop,\n\n{\n\n drop(value)\n\n}\n\n\n", "file_path": "examples/once_cell.rs", "rank": 99, "score": 59283.359114406594 } ]
Rust
crates/taplo-cli/src/commands/toml_test.rs
koalp/taplo
03044c5d4fd2ab64656d371ad2043ccd0146325b
use crate::Taplo;
use anyhow::anyhow;
use serde::{
    ser::{SerializeMap, SerializeSeq},
    Serialize,
};
use taplo::dom::{
    node::{DateTimeValue, DomNode},
    Node,
};
use taplo_common::environment::Environment;
use tokio::io::AsyncReadExt;

impl<E: Environment> Taplo<E> {
    pub async fn execute_toml_test(&self) -> Result<(), anyhow::Error> {
        let mut buf = String::new();
        self.env.stdin().read_to_string(&mut buf).await?;
        let parse = taplo::parser::parse(&buf);
        if !parse.errors.is_empty() {
            for err in parse.errors {
                eprintln!("{err}");
            }
            return Err(anyhow!("invalid toml"));
        }
        let dom = parse.into_dom();
        if let Err(err) = dom.validate() {
            for err in err {
                eprintln!("{err}");
            }
            return Err(anyhow!("invalid toml"));
        }
        serde_json::to_writer(std::io::stdout(), &TomlTestValue::new(&dom))?;
        Ok(())
    }
}

#[derive(Clone, Copy, Serialize)]
#[serde(rename_all = "lowercase")]
pub enum TomlTestType {
    String,
    Integer,
    Float,
    Bool,
    DateTime,
    #[serde(rename = "datetime-local")]
    DateTimeLocal,
    #[serde(rename = "date-local")]
    DateLocal,
    #[serde(rename = "time-local")]
    TimeLocal,
}

impl TomlTestType {
    fn of(node: &Node) -> Option<Self> {
        match node {
            Node::Bool(_) => Some(TomlTestType::Bool),
            Node::Integer(_) => Some(TomlTestType::Integer),
            Node::Float(_) => Some(TomlTestType::Float),
            Node::Str(_) => Some(TomlTestType::String),
            Node::Date(d) => match d.value() {
                DateTimeValue::OffsetDateTime(_) => Some(TomlTestType::DateTime),
                DateTimeValue::LocalDateTime(_) => Some(TomlTestType::DateTimeLocal),
                DateTimeValue::Date(_) => Some(TomlTestType::DateLocal),
                DateTimeValue::Time(_) => Some(TomlTestType::TimeLocal),
            },
            Node::Array(_) => None,
            Node::Table(_) => None,
            Node::Invalid(_) => unreachable!(),
        }
    }
}

pub struct TomlTestValue<'a> {
    r#type: Option<TomlTestType>,
    node: &'a Node,
}

impl<'a> TomlTestValue<'a> {
    pub fn new(node: &'a Node) -> Self {
        Self {
            r#type: TomlTestType::of(node),
            node,
        }
    }
}

impl<'a> Serialize for TomlTestValue<'a> {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        if let Some(ty) = self.r#type {
            let mut map = serializer.serialize_map(Some(2))?;
            map.serialize_entry("type", &ty)?;
            map.serialize_entry(
                "value",
                &match self.node {
                    Node::Str(d) => d.value().to_string(),
                    Node::Float(f) if f.value().is_nan() => String::from("nan"),
                    Node::Float(f) if f.value().is_infinite() => f.syntax().unwrap().to_string(),
                    _ => serde_json::to_string(&self.node).map_err(serde::ser::Error::custom)?,
                },
            )?;
            map.end()
        } else {
            match &self.node {
                Node::Array(array) => {
                    let items = array.items().read();
                    let mut seq = serializer.serialize_seq(Some(items.len()))?;
                    for value in &**items {
                        seq.serialize_element(&TomlTestValue::new(value))?;
                    }
                    seq.end()
                }
                Node::Table(table) => {
                    let entries = table.entries().read();
                    let mut map = serializer.serialize_map(Some(entries.len()))?;
                    for (key, value) in entries.iter() {
                        map.serialize_entry(key.value(), &TomlTestValue::new(value))?;
                    }
                    map.end()
                }
                _ => unreachable!(),
            }
        }
    }
}
use crate::Taplo;
use anyhow::anyhow;
use serde::{
    ser::{SerializeMap, SerializeSeq},
    Serialize,
};
use taplo::dom::{
    node::{DateTimeValue, DomNode},
    Node,
};
use taplo_common::environment::Environment;
use tokio::io::AsyncReadExt;

impl<E: Environment> Taplo<E> {
    pub async fn execute_toml_test(&self) -> Result<(), anyhow::Error> {
        let mut buf = String::new();
        self.env.stdin().read_to_string(&mut buf).await?;
        let parse = taplo::parser::parse(&buf);
        if !parse.errors.is_empty() {
            for err in parse.errors {
                eprintln!("{err}");
            }
            return Err(anyhow!("invalid toml"));
        }
        let dom = parse.into_dom();
        if let Err(err) = dom.validate() {
            for err in err {
                eprintln!("{err}");
            }
            return Err(anyhow!("invalid toml"));
        }
        serde_json::to_writer(std::io::stdout(), &TomlTestValue::new(&dom))?;
        Ok(())
    }
}

#[derive(Clone, Copy, Serialize)]
#[serde(rename_all = "lowercase")]
pub enum TomlTestType {
    String,
    Integer,
    Float,
    Bool,
    DateTime,
    #[serde(rename = "datetime-local")]
    DateTimeLocal,
    #[serde(rename = "date-local")]
    DateLocal,
    #[serde(rename = "time-local")]
    TimeLocal,
}

impl TomlTestType {
    fn of(node: &Node) -> Option<Self> {
        match node {
            Node::Bool(_) => Some(TomlTestType::Bool),
            Node::Integer(_) => Some(TomlTestType::Integer),
            Node::Float(_) => Some(TomlTestType::Float),
            Node::Str(_) => Some(TomlTestType::String),
            Node::Date(d) => match d.value() {
                DateTimeValue::OffsetDateTime(_) => Some(TomlTestType::DateTime),
                DateTimeValue::LocalDateTime(_) => Some(TomlTestType::DateTimeLocal),
                DateTimeValue::Date(_) => Some(TomlTestType::DateLocal),
                DateTimeValue::Time(_) => Some(TomlTestType::TimeLocal),
            },
            Node::Array(_) => None,
            Node::Table(_) => None,
            Node::Invalid(_) => unreachable!(),
        }
    }
}

pub struct TomlTestValue<'a> {
    r#type: Option<TomlTestType>,
    node: &'a Node,
}

impl<'a> TomlTestValue<'a> {
    pub fn new(node: &'a Node) -> Self {
        Self {
            r#type: TomlTestType::of(node),
            node,
        }
    }
}

impl<'a> Serialize for TomlTestValue<'a> {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Erro
                    Node::Float(f) if f.value().is_nan() => String::from("nan"),
                    Node::Float(f) if f.value().is_infinite() => f.syntax().unwrap().to_string(),
                    _ => serde_json::to_string(&self.node).map_err(serde::ser::Error::custom)?,
                },
            )?;
            map.end()
        } else {
            match &self.node {
                Node::Array(array) => {
                    let items = array.items().read();
                    let mut seq = serializer.serialize_seq(Some(items.len()))?;
                    for value in &**items {
                        seq.serialize_element(&TomlTestValue::new(value))?;
                    }
                    seq.end()
                }
                Node::Table(table) => {
                    let entries = table.entries().read();
                    let mut map = serializer.serialize_map(Some(entries.len()))?;
                    for (key, value) in entries.iter() {
                        map.serialize_entry(key.value(), &TomlTestValue::new(value))?;
                    }
                    map.end()
                }
                _ => unreachable!(),
            }
        }
    }
}
r>
    where
        S: serde::Serializer,
    {
        if let Some(ty) = self.r#type {
            let mut map = serializer.serialize_map(Some(2))?;
            map.serialize_entry("type", &ty)?;
            map.serialize_entry(
                "value",
                &match self.node {
                    Node::Str(d) => d.value().to_string(),
random
[ { "content": "fn extract_value(node: &Node) -> Result<String, anyhow::Error> {\n\n Ok(match node {\n\n Node::Table(_) => {\n\n return Err(anyhow!(\n\n r#\"cannot print tables with the given output format, specify a different output format (e.g. with `-o json`) \"#\n\n ))\n\n }\n\n Node::Array(arr) => {\n\n let mut s = String::new();\n\n\n\n let mut start = true;\n\n for item in &**arr.items().read() {\n\n if !start {\n\n s += \"\\n\";\n\n }\n\n start = false;\n\n\n\n s += &extract_value(item)?;\n\n }\n\n\n", "file_path": "crates/taplo-cli/src/commands/queries.rs", "rank": 0, "score": 293454.355543233 }, { "content": "/// Formats a DOM root node with given scopes.\n\n///\n\n/// **This doesn't check errors of the DOM.**\n\npub fn format_with_scopes(dom: Node, options: Options, scopes: ScopedOptions) -> String {\n\n let c = Context {\n\n scopes: Rc::new(scopes),\n\n ..Context::default()\n\n };\n\n\n\n let mut s = format_impl(\n\n dom.syntax().unwrap().clone().into_node().unwrap(),\n\n options.clone(),\n\n c,\n\n );\n\n\n\n s = s.trim_end().into();\n\n\n\n if options.trailing_newline {\n\n s += options.newline();\n\n }\n\n\n\n s\n\n}\n\n\n", "file_path": "crates/taplo/src/formatter/mod.rs", "rank": 1, "score": 291952.445655868 }, { "content": "pub fn add_all(node: SyntaxNode, builder: &mut GreenNodeBuilder) {\n\n builder.start_node(node.kind().into());\n\n\n\n for c in node.children_with_tokens() {\n\n match c {\n\n NodeOrToken::Node(n) => add_all(n, builder),\n\n NodeOrToken::Token(t) => builder.token(t.kind().into(), t.text()),\n\n }\n\n }\n\n\n\n builder.finish_node()\n\n}\n", "file_path": "crates/taplo/src/util/syntax.rs", "rank": 2, "score": 291694.7454115391 }, { "content": "pub fn json_to_toml(json: &str, inline: bool) -> Result<String, anyhow::Error> {\n\n let root: Node = serde_json::from_str(json)?;\n\n Ok(root.to_toml(inline))\n\n}\n\n\n", "file_path": "crates/taplo-common/src/convert.rs", "rank": 3, "score": 289149.131239196 }, { "content": "pub fn toml_to_json(toml: &str) -> Result<String, anyhow::Error> {\n\n let root = parse(toml).into_dom();\n\n Ok(serde_json::to_string_pretty(&root)?)\n\n}\n", "file_path": "crates/taplo-common/src/convert.rs", "rank": 4, "score": 284513.28454103234 }, { "content": "/// Formats a parsed TOML syntax tree.\n\npub fn format_syntax(node: SyntaxNode, options: Options) -> String {\n\n let mut s = format_impl(node, options.clone(), Context::default());\n\n\n\n s = s.trim_end().into();\n\n\n\n if options.trailing_newline {\n\n s += options.newline();\n\n }\n\n\n\n s\n\n}\n\n\n", "file_path": "crates/taplo/src/formatter/mod.rs", "rank": 5, "score": 283903.30220413016 }, { "content": "pub fn dom(c: &mut Criterion) {\n\n let source = include_str!(\"../../../test-data/example.toml\");\n\n c.bench_function(\"toml-dom\", |b| {\n\n b.iter(|| parse(black_box(source)).into_dom())\n\n });\n\n}\n\n\n\ncriterion_group!(\n\n name = benches;\n\n config = Criterion::default().with_profiler(PProfProfiler::new(100, Output::Flamegraph(None)));\n\n targets = syntax, dom\n\n);\n\ncriterion_main!(benches);\n", "file_path": "crates/taplo/benches/profile.rs", "rank": 6, "score": 261829.25928333777 }, { "content": "fn format_key(node: SyntaxNode, formatted: &mut String, _options: &Options, _context: &Context) {\n\n // Idents and periods without whitespace\n\n for c in node.children_with_tokens() {\n\n match c {\n\n NodeOrToken::Node(_) => {}\n\n NodeOrToken::Token(t) => match t.kind() {\n\n WHITESPACE | NEWLINE => {}\n\n _ => {\n\n *formatted += t.text();\n\n }\n\n },\n\n 
}\n\n }\n\n}\n\n\n", "file_path": "crates/taplo/src/formatter/mod.rs", "rank": 7, "score": 260972.06887676657 }, { "content": "fn format_impl(node: SyntaxNode, options: Options, context: Context) -> String {\n\n assert!(node.kind() == ROOT);\n\n let mut formatted = format_root(node, &options, &context);\n\n\n\n if formatted.ends_with(\"\\r\\n\") {\n\n formatted.truncate(formatted.len() - 2);\n\n } else if formatted.ends_with('\\n') {\n\n formatted.truncate(formatted.len() - 1);\n\n }\n\n\n\n if options.trailing_newline {\n\n formatted += options.newline();\n\n }\n\n\n\n formatted\n\n}\n\n\n", "file_path": "crates/taplo/src/formatter/mod.rs", "rank": 8, "score": 260501.28522021946 }, { "content": "// Check whether the array spans multiple lines in its current form.\n\nfn is_array_multiline(node: &SyntaxNode) -> bool {\n\n node.descendants_with_tokens().any(|n| n.kind() == NEWLINE)\n\n}\n\n\n", "file_path": "crates/taplo/src/formatter/mod.rs", "rank": 9, "score": 257592.05726824002 }, { "content": "fn can_collapse_array(node: &SyntaxNode) -> bool {\n\n !node.descendants_with_tokens().any(|n| n.kind() == COMMENT)\n\n}\n\n\n", "file_path": "crates/taplo/src/formatter/mod.rs", "rank": 10, "score": 253941.91119753598 }, { "content": "/// Formats a parsed TOML green tree.\n\npub fn format_green(green: GreenNode, options: Options) -> String {\n\n format_syntax(SyntaxNode::new_root(green), options)\n\n}\n\n\n", "file_path": "crates/taplo/src/formatter/mod.rs", "rank": 11, "score": 244501.7146557089 }, { "content": "pub fn setup_stderr_logging(e: impl Environment, taplo: &TaploArgs) {\n\n let span_events = if taplo.log_spans {\n\n FmtSpan::NEW | FmtSpan::CLOSE\n\n } else {\n\n FmtSpan::NONE\n\n };\n\n\n\n let registry = tracing_subscriber::registry();\n\n\n\n let env_filter = match e.env_var(\"RUST_LOG\") {\n\n Some(log) => EnvFilter::new(log),\n\n None => EnvFilter::default().add_directive(tracing::Level::INFO.into()),\n\n };\n\n\n\n if taplo.verbose {\n\n registry\n\n .with(env_filter)\n\n .with(\n\n tracing_subscriber::fmt::layer()\n\n .with_ansi(match taplo.colors {\n", "file_path": "crates/taplo-cli/src/log.rs", "rank": 12, "score": 239142.44392891767 }, { "content": "/// Unescape all supported sequences found in [Escape](Escape).\n\n///\n\n/// If it fails, the index of failure is returned.\n\npub fn unescape(s: &str) -> Result<String, usize> {\n\n let mut new_s = String::with_capacity(s.len());\n\n let mut lexer: Lexer<Escape> = Lexer::new(s);\n\n\n\n while let Some(t) = lexer.next() {\n\n match t {\n\n Backspace => new_s += \"\\u{0008}\",\n\n Tab => new_s += \"\\u{0009}\",\n\n LineFeed => new_s += \"\\u{000A}\",\n\n FormFeed => new_s += \"\\u{000C}\",\n\n CarriageReturn => new_s += \"\\u{000D}\",\n\n Quote => new_s += \"\\u{0022}\",\n\n Backslash => new_s += \"\\u{005C}\",\n\n Newline => {}\n\n Unicode => {\n\n new_s += &std::char::from_u32(\n\n u32::from_str_radix(&lexer.slice()[2..], 16).map_err(|_| lexer.span().start)?,\n\n )\n\n .ok_or(lexer.span().start)?\n\n .to_string();\n", "file_path": "crates/taplo/src/util/escape.rs", "rank": 13, "score": 238096.6801656612 }, { "content": "pub trait DomNode: Sized + Sealed {\n\n fn syntax(&self) -> Option<&SyntaxElement>;\n\n fn errors(&self) -> &Shared<Vec<Error>>;\n\n fn validate_node(&self) -> Result<(), &Shared<Vec<Error>>>;\n\n fn is_valid_node(&self) -> bool {\n\n self.validate_node().is_ok()\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub enum Node {\n\n Table(Table),\n\n Array(Array),\n\n Bool(Bool),\n\n Str(Str),\n\n Integer(Integer),\n\n 
Float(Float),\n\n Date(DateTime),\n\n Invalid(Invalid),\n\n}\n", "file_path": "crates/taplo/src/dom/node.rs", "rank": 14, "score": 231761.6606502635 }, { "content": "fn format_root(node: SyntaxNode, options: &Options, context: &Context) -> String {\n\n assert!(node.kind() == ROOT);\n\n let mut formatted = String::new();\n\n\n\n let mut entry_group: Vec<FormattedEntry> = Vec::new();\n\n\n\n // We defer printing the entries so that we can align them vertically.\n\n // Whenever an entry is added to the group, we skip its trailing newline,\n\n // otherwise the inserted new line would end up before the actual entries.\n\n let mut skip_newlines = 0;\n\n\n\n // We defer printing comments as well because we need to know\n\n // what comes after them for correct indentation.\n\n let mut comment_group: Vec<String> = Vec::new();\n\n\n\n let mut context = context.clone();\n\n\n\n // Table key for determining indents\n\n let mut last_table_key = None;\n\n let mut table_key_indent_history: Vec<(Keys, usize)> = Vec::new();\n", "file_path": "crates/taplo/src/formatter/mod.rs", "rank": 15, "score": 226089.31697698316 }, { "content": "#[derive(Default)]\n\nstruct TomlVisitor;\n\n\n\nimpl<'de> Visitor<'de> for TomlVisitor {\n\n type Value = Node;\n\n\n\n fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n write!(formatter, \"a TOML value\")\n\n }\n\n\n\n fn visit_bool<E>(self, v: bool) -> Result<Self::Value, E>\n\n where\n\n E: serde::de::Error,\n\n {\n\n Ok(BoolInner {\n\n errors: Default::default(),\n\n syntax: None,\n\n value: v.into(),\n\n }\n\n .wrap()\n\n .into())\n", "file_path": "crates/taplo/src/dom/serde.rs", "rank": 16, "score": 223225.69710640176 }, { "content": "fn lex_string(lex: &mut Lexer<SyntaxKind>) -> bool {\n\n let remainder: &str = lex.remainder();\n\n let mut escaped = false;\n\n\n\n let mut total_len = 0;\n\n\n\n for c in remainder.chars() {\n\n total_len += c.len_utf8();\n\n\n\n if c == '\\\\' {\n\n escaped = !escaped;\n\n continue;\n\n }\n\n\n\n if c == '\"' && !escaped {\n\n lex.bump(remainder[0..total_len].as_bytes().len());\n\n return true;\n\n }\n\n\n\n escaped = false;\n\n }\n\n false\n\n}\n\n\n", "file_path": "crates/taplo/src/syntax.rs", "rank": 17, "score": 222016.56470471536 }, { "content": "fn lex_string_literal(lex: &mut Lexer<SyntaxKind>) -> bool {\n\n let remainder: &str = lex.remainder();\n\n let mut total_len = 0;\n\n\n\n for c in remainder.chars() {\n\n total_len += c.len_utf8();\n\n\n\n if c == '\\'' {\n\n lex.bump(remainder[0..total_len].as_bytes().len());\n\n return true;\n\n }\n\n }\n\n false\n\n}\n\n\n", "file_path": "crates/taplo/src/syntax.rs", "rank": 18, "score": 218274.31776348926 }, { "content": "fn format_array(node: SyntaxNode, options: &Options, context: &Context) -> impl FormattedItem {\n\n let mut multiline = is_array_multiline(&node) || context.force_multiline;\n\n\n\n let mut formatted = String::new();\n\n\n\n // We always try to collapse it if possible.\n\n if can_collapse_array(&node) && options.array_auto_collapse && !context.force_multiline {\n\n multiline = false;\n\n }\n\n\n\n // We use the same strategy as for entries, refer to [`format_root`].\n\n let mut skip_newlines = 0;\n\n\n\n // Formatted value, and optional trailing comment.\n\n // The value should also include the comma at the end if needed.\n\n let mut value_group: Vec<(String, Option<String>)> = Vec::new();\n\n\n\n let add_values = |value_group: &mut Vec<(String, Option<String>)>,\n\n formatted: &mut String,\n\n context: &Context|\n", "file_path": 
"crates/taplo/src/formatter/mod.rs", "rank": 19, "score": 217381.61006674825 }, { "content": "fn format_value(node: SyntaxNode, options: &Options, context: &Context) -> impl FormattedItem {\n\n let mut value = String::new();\n\n let mut comment = None;\n\n for c in node.children_with_tokens() {\n\n match c {\n\n NodeOrToken::Node(n) => match n.kind() {\n\n ARRAY => {\n\n let formatted = format_array(n, options, context);\n\n\n\n let c = formatted.trailing_comment();\n\n\n\n if let Some(c) = c {\n\n debug_assert!(comment.is_none());\n\n comment = Some(c)\n\n }\n\n\n\n debug_assert!(value.is_empty());\n\n formatted.write_to(&mut value, options);\n\n }\n\n INLINE_TABLE => {\n", "file_path": "crates/taplo/src/formatter/mod.rs", "rank": 20, "score": 217381.61006674822 }, { "content": "fn lex_multi_line_string(lex: &mut Lexer<SyntaxKind>) -> bool {\n\n let remainder: &str = lex.remainder();\n\n\n\n let mut total_len = 0;\n\n let mut quote_count = 0;\n\n\n\n let mut escaped = false;\n\n\n\n // As the string can contain \",\n\n // we can end up with more than 3 \"-s at\n\n // the end, in that case we need to include all\n\n // in the string.\n\n let mut quotes_found = false;\n\n\n\n for c in remainder.chars() {\n\n if quotes_found {\n\n if c != '\"' {\n\n if quote_count >= 6 {\n\n return false;\n\n }\n", "file_path": "crates/taplo/src/syntax.rs", "rank": 21, "score": 214692.5073523561 }, { "content": "#[must_use]\n\npub fn lookup_keys(root: Node, keys: &Keys) -> Keys {\n\n let mut node = root;\n\n let mut new_keys = Keys::empty();\n\n\n\n for key in keys.iter().cloned() {\n\n node = node.get(&key);\n\n new_keys = new_keys.join(key);\n\n if let Some(arr) = node.as_array() {\n\n new_keys = new_keys.join(arr.items().read().len().saturating_sub(1));\n\n }\n\n }\n\n\n\n new_keys\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct PositionInfo {\n\n /// The narrowest syntax element that contains the position.\n\n pub syntax: SyntaxToken,\n\n /// The narrowest node that covers the position.\n\n pub dom_node: Option<(Keys, Node)>,\n\n}\n\n\n", "file_path": "crates/taplo-lsp/src/query.rs", "rank": 22, "score": 213178.28777816973 }, { "content": "fn lex_multi_line_string_literal(lex: &mut Lexer<SyntaxKind>) -> bool {\n\n let remainder: &str = lex.remainder();\n\n\n\n let mut total_len = 0;\n\n let mut quote_count = 0;\n\n\n\n // As the string can contain ',\n\n // we can end up with more than 3 '-s at\n\n // the end, in that case we need to include all\n\n // in the string.\n\n let mut quotes_found = false;\n\n\n\n for c in remainder.chars() {\n\n if quotes_found {\n\n if c != '\\'' {\n\n lex.bump(remainder[0..total_len].as_bytes().len());\n\n return true;\n\n } else {\n\n if quote_count > 4 {\n\n return false;\n", "file_path": "crates/taplo/src/syntax.rs", "rank": 23, "score": 211261.03270696645 }, { "content": "pub fn parsing(c: &mut Criterion) {\n\n let source = include_str!(\"../../../test-data/example.toml\");\n\n c.bench_function(\"parse taplo syntax\", |b| {\n\n b.iter(|| parse(black_box(source)))\n\n });\n\n c.bench_function(\"parse taplo dom\", |b| {\n\n b.iter(|| parse(black_box(source)).into_dom())\n\n });\n\n c.bench_function(\"parse taplo dom and validate\", |b| {\n\n b.iter(|| parse(black_box(source)).into_dom().validate())\n\n });\n\n c.bench_function(\"parse toml-rs\", |b| {\n\n b.iter(|| toml::from_str::<toml::Value>(black_box(source)))\n\n });\n\n}\n\n\n", "file_path": "crates/taplo/benches/taplo.rs", "rank": 24, "score": 210067.37593984677 }, { "content": "pub fn conversion(c: &mut 
Criterion) {\n\n let source = include_str!(\"../../../test-data/example.toml\");\n\n let v: serde_json::Value = toml::from_str(source).unwrap();\n\n\n\n c.bench_function(\"convert from JSON\", |b| {\n\n b.iter(|| {\n\n serde_json::from_value::<Node>(black_box(v.clone()))\n\n .unwrap()\n\n .to_toml(false)\n\n })\n\n });\n\n}\n\n\n\ncriterion_group!(benches, parsing, formatting, conversion);\n\ncriterion_main!(benches);\n", "file_path": "crates/taplo/benches/taplo.rs", "rank": 25, "score": 210067.37593984677 }, { "content": "pub fn formatting(c: &mut Criterion) {\n\n let source = include_str!(\"../../../test-data/example.toml\");\n\n\n\n let syntax = parse(source).into_syntax();\n\n c.bench_function(\"format syntax\", |b| {\n\n b.iter(|| format_syntax(black_box(syntax.clone()), Options::default()))\n\n });\n\n c.bench_function(\"parse and format\", |b| {\n\n b.iter(|| format(black_box(source), Options::default()))\n\n });\n\n}\n\n\n", "file_path": "crates/taplo/benches/taplo.rs", "rank": 26, "score": 210067.37593984677 }, { "content": "fn entry_from_syntax(syntax: SyntaxElement) -> (Key, Node) {\n\n assert!(syntax.kind() == ENTRY);\n\n\n\n let mut keys = keys_from_syntax(\n\n &syntax\n\n .as_node()\n\n .and_then(|n| n.first_child())\n\n .map(Into::into)\n\n .unwrap_or_else(|| syntax.clone()),\n\n );\n\n let first_key = keys\n\n .next()\n\n .unwrap_or_else(|| Key::from_syntax_invalid(syntax.clone()));\n\n\n\n let mut value = syntax\n\n .as_node()\n\n .and_then(|n| n.first_child())\n\n .and_then(|k| k.next_sibling())\n\n .map(|n| Node::from_syntax(n.into()))\n\n .unwrap_or_else(|| Invalid::from_syntax(syntax).into());\n", "file_path": "crates/taplo/src/dom/from_syntax.rs", "rank": 27, "score": 205471.68828100877 }, { "content": "pub fn syntax(c: &mut Criterion) {\n\n let source = include_str!(\"../../../test-data/example.toml\");\n\n c.bench_function(\"parse-toml\", |b| b.iter(|| parse(black_box(source))));\n\n}\n\n\n", "file_path": "crates/taplo/benches/profile.rs", "rank": 28, "score": 202725.19947306346 }, { "content": "fn full_range(keys: &Keys, node: &Node) -> TextRange {\n\n let last_key = match keys\n\n .iter()\n\n .filter_map(KeyOrIndex::as_key)\n\n .last()\n\n .map(Key::text_ranges)\n\n {\n\n Some(k) => k,\n\n None => {\n\n return join_ranges(node.text_ranges());\n\n }\n\n };\n\n\n\n join_ranges(last_key.chain(node.text_ranges()))\n\n}\n", "file_path": "crates/taplo-lsp/src/query.rs", "rank": 29, "score": 195546.3617359657 }, { "content": "/// Same as unescape, but doesn't create a new\n\n/// unescaped string, and returns all invalid escape indices.\n\npub fn check_escape(s: &str) -> Result<(), Vec<usize>> {\n\n let mut lexer: Lexer<Escape> = Lexer::new(s);\n\n let mut invalid = Vec::new();\n\n\n\n while let Some(t) = lexer.next() {\n\n match t {\n\n Backspace => {}\n\n Tab => {}\n\n LineFeed => {}\n\n FormFeed => {}\n\n CarriageReturn => {}\n\n Quote => {}\n\n Backslash => {}\n\n Newline => {}\n\n Unicode => {\n\n let char_val = match u32::from_str_radix(&lexer.slice()[2..], 16) {\n\n Ok(v) => v,\n\n Err(_) => {\n\n invalid.push(lexer.span().start);\n\n continue;\n", "file_path": "crates/taplo/src/util/escape.rs", "rank": 30, "score": 181148.23020213805 }, { "content": "/// Parses then formats a TOML document, ignoring errors.\n\npub fn format(src: &str, options: Options) -> String {\n\n let p = crate::parser::parse(src);\n\n\n\n let ctx = Context {\n\n errors: p.errors.iter().map(|err| err.range).collect(),\n\n ..Context::default()\n\n };\n\n\n\n format_impl(p.into_syntax(), 
options, ctx)\n\n}\n\n\n", "file_path": "crates/taplo/src/formatter/mod.rs", "rank": 31, "score": 180813.23992870605 }, { "content": "pub fn overlaps(range: TextRange, other: TextRange) -> bool {\n\n range.contains_range(other)\n\n || other.contains_range(range)\n\n || range.contains(other.start())\n\n || range.contains(other.end())\n\n || other.contains(range.start())\n\n || other.contains(range.end())\n\n}\n", "file_path": "crates/taplo/src/util/mod.rs", "rank": 32, "score": 178166.51054877666 }, { "content": "#[tracing::instrument(level = \"debug\", skip_all)]\n\npub fn create_tokens(syntax: &SyntaxNode, mapper: &Mapper) -> Vec<SemanticToken> {\n\n let mut builder = SemanticTokensBuilder::new(mapper);\n\n\n\n for element in syntax.descendants_with_tokens() {\n\n match element {\n\n SyntaxElement::Node(_node) => {}\n\n SyntaxElement::Token(token) => {\n\n if let IDENT = token.kind() {\n\n // look for an inline table value\n\n let is_table_key = token\n\n .parent()\n\n .and_then(|p| p.next_sibling())\n\n .and_then(|t| t.first_child())\n\n .map_or(false, |t| t.kind() == INLINE_TABLE);\n\n\n\n if is_table_key {\n\n builder.add_token(&token, TokenType::TomlTableKey, &[]);\n\n continue;\n\n }\n\n\n", "file_path": "crates/taplo-lsp/src/handlers/semantic_tokens.rs", "rank": 33, "score": 178014.7091588159 }, { "content": "#[tracing::instrument(level = \"debug\", skip_all)]\n\npub fn create_folding_ranges(syntax: &SyntaxNode, mapper: &Mapper) -> Vec<FoldingRange> {\n\n let mut folding_ranges = Vec::with_capacity(20);\n\n\n\n let mut comments_start: Option<TextRange> = None;\n\n let mut last_comment: Option<TextRange> = None;\n\n let mut was_comment: bool = false;\n\n\n\n let mut header_starts: Vec<(String, TextRange)> = Vec::new();\n\n\n\n let mut last_non_header: Option<TextRange> = None;\n\n\n\n for element in syntax.children_with_tokens() {\n\n let mut is_comment = false;\n\n\n\n match element.kind() {\n\n TABLE_ARRAY_HEADER | TABLE_HEADER => {\n\n let key = element\n\n .as_node()\n\n .unwrap()\n\n .first_child()\n", "file_path": "crates/taplo-lsp/src/handlers/folding_ranges.rs", "rank": 34, "score": 175112.32030875311 }, { "content": " }\n\n}\n\n\n\nimpl core::fmt::Display for IntegerValue {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n match self {\n\n IntegerValue::Negative(v) => v.fmt(f),\n\n IntegerValue::Positive(v) => v.fmt(f),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub(crate) struct FloatInner {\n\n pub(crate) errors: Shared<Vec<Error>>,\n\n pub(crate) syntax: Option<SyntaxElement>,\n\n pub(crate) value: OnceCell<f64>,\n\n}\n\n\n\nwrap_node! 
{\n", "file_path": "crates/taplo/src/dom/node/nodes.rs", "rank": 35, "score": 174563.86620002342 }, { "content": " Bin,\n\n Oct,\n\n Hex,\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\npub enum IntegerValue {\n\n Negative(i64),\n\n Positive(u64),\n\n}\n\n\n\nimpl IntegerValue {\n\n /// Returns `true` if the integer value is [`Negative`].\n\n ///\n\n /// [`Negative`]: IntegerValue::Negative\n\n pub fn is_negative(&self) -> bool {\n\n matches!(self, Self::Negative(..))\n\n }\n\n\n\n /// Returns `true` if the integer value is [`Positive`].\n", "file_path": "crates/taplo/src/dom/node/nodes.rs", "rank": 36, "score": 174563.53440447358 }, { "content": " ),\n\n }\n\n } else {\n\n IntegerValue::Positive(0)\n\n }\n\n })\n\n }\n\n\n\n fn validate_impl(&self) -> Result<(), &Shared<Vec<Error>>> {\n\n if self.errors().read().as_ref().is_empty() {\n\n Ok(())\n\n } else {\n\n Err(self.errors())\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Copy, Clone)]\n\npub enum IntegerRepr {\n\n Dec,\n", "file_path": "crates/taplo/src/dom/node/nodes.rs", "rank": 37, "score": 174560.4841788686 }, { "content": " ///\n\n /// [`Positive`]: IntegerValue::Positive\n\n pub fn is_positive(&self) -> bool {\n\n matches!(self, Self::Positive(..))\n\n }\n\n\n\n pub fn as_negative(&self) -> Option<i64> {\n\n if let Self::Negative(v) = self {\n\n Some(*v)\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n pub fn as_positive(&self) -> Option<u64> {\n\n if let Self::Positive(v) = self {\n\n Some(*v)\n\n } else {\n\n None\n\n }\n", "file_path": "crates/taplo/src/dom/node/nodes.rs", "rank": 38, "score": 174559.6026819002 }, { "content": " pub(crate) syntax: Option<SyntaxElement>,\n\n}\n\n\n\nwrap_node! {\n\n #[derive(Debug, Clone)]\n\n pub struct Invalid { inner: InvalidInner }\n\n}\n\n\n\nimpl Invalid {\n\n fn validate_impl(&self) -> Result<(), &Shared<Vec<Error>>> {\n\n if self.errors().read().as_ref().is_empty() {\n\n Ok(())\n\n } else {\n\n Err(self.errors())\n\n }\n\n }\n\n}\n", "file_path": "crates/taplo/src/dom/node/nodes.rs", "rank": 39, "score": 174559.498389582 }, { "content": " fn validate_impl(&self) -> Result<(), &Shared<Vec<Error>>> {\n\n if self.errors().read().as_ref().is_empty() {\n\n Ok(())\n\n } else {\n\n Err(self.errors())\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub(crate) struct StrInner {\n\n pub(crate) errors: Shared<Vec<Error>>,\n\n pub(crate) syntax: Option<SyntaxElement>,\n\n pub(crate) repr: StrRepr,\n\n pub(crate) value: OnceCell<String>,\n\n}\n\n\n\nwrap_node! 
{\n\n #[derive(Debug, Clone)]\n\n pub struct Str { inner: StrInner }\n", "file_path": "crates/taplo/src/dom/node/nodes.rs", "rank": 40, "score": 174558.80015658154 }, { "content": "\n\nimpl ArrayKind {\n\n /// Returns `true` if the array kind is [`Tables`].\n\n ///\n\n /// [`Tables`]: ArrayKind::Tables\n\n pub fn is_tables(&self) -> bool {\n\n matches!(self, Self::Tables)\n\n }\n\n\n\n /// Returns `true` if the array kind is [`Inline`].\n\n ///\n\n /// [`Inline`]: ArrayKind::Inline\n\n pub fn is_inline(&self) -> bool {\n\n matches!(self, Self::Inline)\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub(crate) struct BoolInner {\n\n pub(crate) errors: Shared<Vec<Error>>,\n", "file_path": "crates/taplo/src/dom/node/nodes.rs", "rank": 41, "score": 174558.58484915155 }, { "content": " pub(crate) is_valid: bool,\n\n pub(crate) value: OnceCell<String>,\n\n\n\n /// The same key can appear at multiple positions\n\n /// in a TOML document.\n\n ///\n\n /// # Example\n\n /// \n\n /// In the following both `table` and `inner` appear multiple times\n\n /// despite being the same key in the DOM.\n\n ///\n\n /// ```toml\n\n /// [table.inner.something]\n\n /// [table.inner.something_else]\n\n /// ```\n\n pub(crate) additional_syntaxes: Shared<Vec<SyntaxElement>>,\n\n}\n\n\n\nwrap_node! {\n\n #[derive(Debug, Clone)]\n", "file_path": "crates/taplo/src/dom/node/nodes.rs", "rank": 42, "score": 174557.07517378777 }, { "content": " pub(crate) syntax: Option<SyntaxElement>,\n\n pub(crate) value: OnceCell<bool>,\n\n}\n\n\n\nwrap_node! {\n\n #[derive(Debug, Clone)]\n\n pub struct Bool { inner: BoolInner }\n\n}\n\n\n\nimpl Bool {\n\n /// A boolean value.\n\n pub fn value(&self) -> bool {\n\n *self.inner.value.get_or_init(|| {\n\n self.syntax()\n\n .and_then(|s| s.as_token())\n\n .and_then(|s| s.text().parse().ok())\n\n .unwrap_or_default()\n\n })\n\n }\n\n\n", "file_path": "crates/taplo/src/dom/node/nodes.rs", "rank": 43, "score": 174556.5299017883 }, { "content": " $vis struct $name {\n\n pub(crate) inner: Arc<$inner>,\n\n }\n\n\n\n impl $crate::private::Sealed for $name {}\n\n impl $crate::dom::node::DomNode for $name {\n\n fn syntax(&self) -> Option<&$crate::syntax::SyntaxElement> {\n\n self.inner.syntax.as_ref()\n\n }\n\n\n\n fn errors(&self) -> &$crate::util::shared::Shared<Vec<$crate::dom::error::Error>> {\n\n &self.inner.errors\n\n }\n\n\n\n fn validate_node(&self) -> Result<(), &$crate::util::shared::Shared<Vec<$crate::dom::error::Error>>> {\n\n self.validate_impl()\n\n }\n\n }\n\n\n\n impl $inner {\n", "file_path": "crates/taplo/src/dom/node/nodes.rs", "rank": 44, "score": 174555.87023516963 }, { "content": " pub struct Key { inner: KeyInner }\n\n}\n\n\n\nimpl<S> From<S> for Key\n\nwhere\n\n S: Into<String>,\n\n{\n\n fn from(s: S) -> Self {\n\n Key::new(s)\n\n }\n\n}\n\n\n\nimpl Key {\n\n /// Return a new key with the given value.\n\n ///\n\n /// # Remarks\n\n ///\n\n /// This **does not** check or modify the input string.\n\n pub fn new(key: impl Into<String>) -> Self {\n\n KeyInner {\n", "file_path": "crates/taplo/src/dom/node/nodes.rs", "rank": 45, "score": 174555.06720703654 }, { "content": " }\n\n\n\n pub fn kind(&self) -> ArrayKind {\n\n self.inner.kind\n\n }\n\n\n\n fn validate_impl(&self) -> Result<(), &Shared<Vec<Error>>> {\n\n if self.errors().read().as_ref().is_empty() {\n\n Ok(())\n\n } else {\n\n Err(self.errors())\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\npub enum ArrayKind {\n\n Tables,\n\n Inline,\n\n}\n", "file_path": "crates/taplo/src/dom/node/nodes.rs", "rank": 46, 
"score": 174555.0136925484 }, { "content": " })\n\n }\n\n\n\n fn validate_impl(&self) -> Result<(), &Shared<Vec<Error>>> {\n\n let _ = self.value();\n\n if self.errors().read().as_ref().is_empty() {\n\n Ok(())\n\n } else {\n\n Err(self.errors())\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Copy, Clone)]\n\npub enum StrRepr {\n\n Basic,\n\n MultiLine,\n\n Literal,\n\n MultiLineLiteral,\n\n}\n", "file_path": "crates/taplo/src/dom/node/nodes.rs", "rank": 47, "score": 174554.61086662288 }, { "content": "}\n\n\n\nimpl Str {\n\n /// An unescaped value of the string.\n\n pub fn value(&self) -> &str {\n\n self.inner.value.get_or_init(|| {\n\n self.inner\n\n .syntax\n\n .as_ref()\n\n .map(|s| match self.inner.repr {\n\n StrRepr::Basic => {\n\n let string = s.as_token().unwrap().text();\n\n let string = string.strip_prefix('\"').unwrap_or(string);\n\n let string = string.strip_suffix('\"').unwrap_or(string);\n\n match unescape(string) {\n\n Ok(s) => s,\n\n Err(_) => {\n\n self.inner.errors.update(|errors| {\n\n errors.push(Error::InvalidEscapeSequence { string: s.clone() })\n\n });\n", "file_path": "crates/taplo/src/dom/node/nodes.rs", "rank": 48, "score": 174553.90148947775 }, { "content": " fn validate_impl(&self) -> Result<(), &Shared<Vec<Error>>> {\n\n if self.errors().read().as_ref().is_empty() {\n\n Ok(())\n\n } else {\n\n Err(self.errors())\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\npub enum TableKind {\n\n Regular,\n\n Inline,\n\n Pseudo,\n\n}\n\n\n\n#[derive(Debug)]\n\npub(crate) struct KeyInner {\n\n pub(crate) errors: Shared<Vec<Error>>,\n\n pub(crate) syntax: Option<SyntaxElement>,\n", "file_path": "crates/taplo/src/dom/node/nodes.rs", "rank": 49, "score": 174553.5691172 }, { "content": " pub(crate) header: bool,\n\n pub(crate) kind: TableKind,\n\n pub(crate) entries: Shared<Entries>,\n\n}\n\n\n\nwrap_node! 
{\n\n #[derive(Debug, Clone)]\n\n pub struct Table { inner: TableInner }\n\n}\n\n\n\nimpl Table {\n\n pub fn get(&self, key: impl Into<Key>) -> Option<Node> {\n\n let key = key.into();\n\n let entries = self.inner.entries.read();\n\n entries.lookup.get(&key).cloned()\n\n }\n\n\n\n pub fn entries(&self) -> &Shared<Entries> {\n\n &self.inner.entries\n\n }\n", "file_path": "crates/taplo/src/dom/node/nodes.rs", "rank": 50, "score": 174553.41181746894 }, { "content": "}\n\n\n\nimpl core::fmt::Display for Key {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n if let Some(s) = self.syntax() {\n\n return s.fmt(f);\n\n }\n\n\n\n if !matches!(\n\n Lexer::<SyntaxKind>::new(self.value()).next(),\n\n Some(SyntaxKind::IDENT) | None\n\n ) {\n\n f.write_char('\\'')?;\n\n self.value().fmt(f)?;\n\n f.write_char('\\'')?;\n\n return Ok(());\n\n }\n\n\n\n self.value().fmt(f)\n\n }\n", "file_path": "crates/taplo/src/dom/node/nodes.rs", "rank": 51, "score": 174553.07001067378 }, { "content": " #[derive(Debug, Clone)]\n\n pub struct Float { inner: FloatInner }\n\n}\n\n\n\nimpl Float {\n\n /// A float value.\n\n pub fn value(&self) -> f64 {\n\n *self.inner.value.get_or_init(|| {\n\n if let Some(text) = self.syntax().and_then(|s| s.as_token()).map(|s| s.text()) {\n\n text.replace('_', \"\").replace(\"nan\", \"NaN\").parse().unwrap()\n\n } else {\n\n 0_f64\n\n }\n\n })\n\n }\n\n\n\n fn validate_impl(&self) -> Result<(), &Shared<Vec<Error>>> {\n\n let _ = self.value();\n\n if self.errors().read().as_ref().is_empty() {\n\n Ok(())\n", "file_path": "crates/taplo/src/dom/node/nodes.rs", "rank": 52, "score": 174552.5849943937 }, { "content": "\n\n#[derive(Debug)]\n\npub(crate) struct IntegerInner {\n\n pub(crate) errors: Shared<Vec<Error>>,\n\n pub(crate) syntax: Option<SyntaxElement>,\n\n pub(crate) repr: IntegerRepr,\n\n pub(crate) value: OnceCell<IntegerValue>,\n\n}\n\n\n\nwrap_node! {\n\n #[derive(Debug, Clone)]\n\n pub struct Integer { inner: IntegerInner }\n\n}\n\n\n\nimpl Integer {\n\n /// An integer value.\n\n pub fn value(&self) -> IntegerValue {\n\n *self.inner.value.get_or_init(|| {\n\n if let Some(s) = self.syntax().and_then(|s| s.as_token()) {\n\n let int_text = s.text().replace('_', \"\");\n", "file_path": "crates/taplo/src/dom/node/nodes.rs", "rank": 53, "score": 174552.37665898394 }, { "content": "use super::{DomNode, Node};\n\nuse crate::{\n\n dom::{error::Error, Entries, KeyOrIndex, Keys},\n\n syntax::{SyntaxElement, SyntaxKind},\n\n util::{shared::Shared, unescape},\n\n};\n\nuse logos::Lexer;\n\nuse once_cell::unsync::OnceCell;\n\nuse rowan::{NodeOrToken, TextRange};\n\nuse std::{fmt::Write, iter::once, sync::Arc};\n\nuse time::macros::format_description;\n\n\n\nmacro_rules! 
wrap_node {\n\n (\n\n $(#[$attrs:meta])*\n\n $vis:vis struct $name:ident {\n\n inner: $inner:ident\n\n }\n\n ) => {\n\n $(#[$attrs])*\n", "file_path": "crates/taplo/src/dom/node/nodes.rs", "rank": 54, "score": 174550.78025744666 }, { "content": " errors: Default::default(),\n\n syntax: None,\n\n is_valid: true,\n\n value: OnceCell::from(key.into()),\n\n additional_syntaxes: Default::default(),\n\n }\n\n .wrap()\n\n }\n\n\n\n /// An unescaped value of the key.\n\n pub fn value(&self) -> &str {\n\n self.inner.value.get_or_init(|| {\n\n self.inner\n\n .syntax\n\n .as_ref()\n\n .and_then(NodeOrToken::as_token)\n\n .map(|s| {\n\n if s.text().starts_with('\\'') {\n\n let string = s.text();\n\n let string = string.strip_prefix('\\'').unwrap_or(string);\n", "file_path": "crates/taplo/src/dom/node/nodes.rs", "rank": 55, "score": 174550.62513233084 }, { "content": " self.value().hash(state)\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub(crate) struct ArrayInner {\n\n pub(crate) errors: Shared<Vec<Error>>,\n\n pub(crate) syntax: Option<SyntaxElement>,\n\n pub(crate) kind: ArrayKind,\n\n pub(crate) items: Shared<Vec<Node>>,\n\n}\n\n\n\nwrap_node! {\n\n #[derive(Debug, Clone)]\n\n pub struct Array { inner: ArrayInner }\n\n}\n\n\n\nimpl Array {\n\n pub fn items(&self) -> &Shared<Vec<Node>> {\n\n &self.inner.items\n", "file_path": "crates/taplo/src/dom/node/nodes.rs", "rank": 56, "score": 174550.47565383097 }, { "content": " } else {\n\n Err(self.errors())\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub(crate) struct DateTimeInner {\n\n pub(crate) errors: Shared<Vec<Error>>,\n\n pub(crate) syntax: Option<SyntaxElement>,\n\n pub(crate) value: OnceCell<DateTimeValue>,\n\n}\n\n\n\nwrap_node! {\n\n #[derive(Debug, Clone)]\n\n pub struct DateTime { inner: DateTimeInner }\n\n}\n\n\n\nimpl DateTime {\n\n pub fn value(&self) -> DateTimeValue {\n", "file_path": "crates/taplo/src/dom/node/nodes.rs", "rank": 57, "score": 174550.23440936036 }, { "content": " return Err(self.errors());\n\n }\n\n\n\n let _ = self.value();\n\n if self.errors().read().as_ref().is_empty() {\n\n Ok(())\n\n } else {\n\n Err(self.errors())\n\n }\n\n }\n\n\n\n pub fn join(&self, key: impl Into<KeyOrIndex>) -> Keys {\n\n Keys::new(once(self.clone().into()).chain(once(key.into())))\n\n }\n\n}\n\n\n\nimpl AsRef<str> for Key {\n\n fn as_ref(&self) -> &str {\n\n self.value()\n\n }\n", "file_path": "crates/taplo/src/dom/node/nodes.rs", "rank": 58, "score": 174549.1465009857 }, { "content": " DateTimeValue::OffsetDateTime(time::OffsetDateTime::UNIX_EPOCH)\n\n } else {\n\n DateTimeValue::OffsetDateTime(time::OffsetDateTime::UNIX_EPOCH)\n\n }\n\n })\n\n }\n\n\n\n fn validate_impl(&self) -> Result<(), &Shared<Vec<Error>>> {\n\n if self.errors().read().as_ref().is_empty() {\n\n Ok(())\n\n } else {\n\n Err(self.errors())\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash, Ord, PartialOrd)]\n\npub enum DateTimeValue {\n\n OffsetDateTime(time::OffsetDateTime),\n\n LocalDateTime(time::PrimitiveDateTime),\n", "file_path": "crates/taplo/src/dom/node/nodes.rs", "rank": 59, "score": 174548.57352189996 }, { "content": " String::new()\n\n }\n\n }\n\n }\n\n StrRepr::Literal => {\n\n let string = s.as_token().unwrap().text();\n\n let string = string.strip_prefix('\\'').unwrap_or(string);\n\n let string = string.strip_suffix('\\'').unwrap_or(string);\n\n string.to_string()\n\n }\n\n StrRepr::MultiLine => {\n\n let string = s.as_token().unwrap().text();\n\n let string = string.strip_prefix(r#\"\"\"\"\"#).unwrap_or(string);\n\n let string = match 
string.strip_prefix(\"\\r\\n\") {\n\n Some(s) => s,\n\n None => string.strip_prefix('\\n').unwrap_or(string),\n\n };\n\n let string = string.strip_suffix(r#\"\"\"\"\"#).unwrap_or(string);\n\n match unescape(string) {\n\n Ok(s) => s,\n", "file_path": "crates/taplo/src/dom/node/nodes.rs", "rank": 60, "score": 174548.39505581433 }, { "content": " let string = string.strip_suffix('\\'').unwrap_or(string);\n\n string.to_string()\n\n } else if s.text().starts_with('\"') {\n\n let string = s.text();\n\n let string = string.strip_prefix('\"').unwrap_or(string);\n\n let string = string.strip_suffix('\"').unwrap_or(string);\n\n match unescape(string) {\n\n Ok(s) => s,\n\n Err(_) => {\n\n self.inner.errors.update(|errors| {\n\n errors.push(Error::InvalidEscapeSequence {\n\n string: s.clone().into(),\n\n })\n\n });\n\n String::new()\n\n }\n\n }\n\n } else {\n\n s.text().to_string()\n\n }\n", "file_path": "crates/taplo/src/dom/node/nodes.rs", "rank": 61, "score": 174548.16789584624 }, { "content": "}\n\n\n\nimpl PartialEq for Key {\n\n fn eq(&self, other: &Self) -> bool {\n\n if !self.inner.is_valid || !other.inner.is_valid {\n\n return false;\n\n }\n\n\n\n self.value().eq(other.value())\n\n }\n\n}\n\n\n\nimpl Eq for Key {}\n\n\n\nimpl std::hash::Hash for Key {\n\n fn hash<H: std::hash::Hasher>(&self, state: &mut H) {\n\n if !self.inner.is_valid {\n\n return 0.hash(state);\n\n }\n\n\n", "file_path": "crates/taplo/src/dom/node/nodes.rs", "rank": 62, "score": 174548.00153635128 }, { "content": " Err(_) => {\n\n self.inner.errors.update(|errors| {\n\n errors.push(Error::InvalidEscapeSequence { string: s.clone() })\n\n });\n\n String::new()\n\n }\n\n }\n\n }\n\n StrRepr::MultiLineLiteral => {\n\n let string = s.as_token().unwrap().text();\n\n let string = string.strip_prefix(r#\"'''\"#).unwrap_or(string);\n\n let string = match string.strip_prefix(\"\\r\\n\") {\n\n Some(s) => s,\n\n None => string.strip_prefix('\\n').unwrap_or(string),\n\n };\n\n let string = string.strip_suffix(r#\"'''\"#).unwrap_or(string);\n\n string.to_string()\n\n }\n\n })\n\n .unwrap_or_default()\n", "file_path": "crates/taplo/src/dom/node/nodes.rs", "rank": 63, "score": 174547.66758027038 }, { "content": " *self.inner.value.get_or_init(|| {\n\n if let Some(token) = self.syntax().and_then(|s| s.as_token()) {\n\n let mut text = token.text().to_string();\n\n\n\n // SAFETY: we're replacing single-byte characters.\n\n unsafe {\n\n for b in text.as_bytes_mut() {\n\n if *b == b' ' || *b == b't' {\n\n *b = b'T';\n\n } else if *b == b'z' {\n\n *b = b'Z';\n\n } else if *b == b',' {\n\n *b = b'.';\n\n }\n\n }\n\n }\n\n\n\n match token.kind() {\n\n SyntaxKind::DATE_TIME_OFFSET => {\n\n if let Ok(d) = time::OffsetDateTime::parse(\n", "file_path": "crates/taplo/src/dom/node/nodes.rs", "rank": 64, "score": 174546.95395113924 }, { "content": " })\n\n .unwrap_or_default()\n\n })\n\n }\n\n\n\n pub fn text_ranges(&self) -> impl ExactSizeIterator<Item = TextRange> {\n\n let additional_syntaxes = self.inner.additional_syntaxes.read();\n\n\n\n let mut ranges = Vec::with_capacity(1 + additional_syntaxes.len());\n\n if let Some(s) = self.syntax() {\n\n ranges.push(s.text_range());\n\n }\n\n\n\n ranges.extend(additional_syntaxes.iter().map(|s| s.text_range()));\n\n\n\n ranges.into_iter()\n\n }\n\n\n\n fn validate_impl(&self) -> Result<(), &Shared<Vec<Error>>> {\n\n if !self.inner.is_valid {\n", "file_path": "crates/taplo/src/dom/node/nodes.rs", "rank": 65, "score": 174545.8683261208 }, { "content": "\n\n pub fn kind(&self) -> TableKind {\n\n 
self.inner.kind\n\n }\n\n\n\n /// Add an entry and also collect errors on conflicts.\n\n pub(crate) fn add_entry(&self, key: Key, node: Node) {\n\n self.inner.entries.update(|entries| {\n\n if let Some((existing_key, value)) = entries.lookup.get_key_value(&key) {\n\n // Merge the two pseudo-tables together.\n\n if let (Node::Table(existing_table), Node::Table(new_table)) = (value, &node) {\n\n if existing_table.inner.kind == TableKind::Pseudo\n\n && new_table.inner.kind == TableKind::Pseudo\n\n {\n\n let new_entries = new_table.entries().read();\n\n for (k, n) in new_entries.iter() {\n\n if let Some(additional_syntax) = k.syntax() {\n\n existing_key.inner.additional_syntaxes.update(|syntaxes| {\n\n syntaxes.push(additional_syntax.clone());\n\n });\n", "file_path": "crates/taplo/src/dom/node/nodes.rs", "rank": 66, "score": 174545.6241084465 }, { "content": "\n\n match self.inner.repr {\n\n IntegerRepr::Dec => {\n\n if s.text().starts_with('-') {\n\n IntegerValue::Negative(int_text.parse().unwrap_or_default())\n\n } else {\n\n IntegerValue::Positive(int_text.parse().unwrap_or_default())\n\n }\n\n }\n\n IntegerRepr::Bin => IntegerValue::Positive(\n\n u64::from_str_radix(int_text.trim_start_matches(\"0b\"), 2)\n\n .unwrap_or_default(),\n\n ),\n\n IntegerRepr::Oct => IntegerValue::Positive(\n\n u64::from_str_radix(int_text.trim_start_matches(\"0o\"), 8)\n\n .unwrap_or_default(),\n\n ),\n\n IntegerRepr::Hex => IntegerValue::Positive(\n\n u64::from_str_radix(int_text.trim_start_matches(\"0x\"), 16)\n\n .unwrap_or_default(),\n", "file_path": "crates/taplo/src/dom/node/nodes.rs", "rank": 67, "score": 174543.09168714358 }, { "content": " Date(time::Date),\n\n Time(time::Time),\n\n}\n\n\n\nimpl core::fmt::Display for DateTimeValue {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n match self {\n\n DateTimeValue::OffsetDateTime(dt) => dt\n\n .format(&time::format_description::well_known::Rfc3339)\n\n .unwrap()\n\n .fmt(f),\n\n DateTimeValue::LocalDateTime(dt) => dt\n\n .format(if dt.time().nanosecond() > 0 {\n\n &format_description!(\n\n \"[year]-[month]-[day]T[hour]:[minute]:[second].[subsecond]\"\n\n )\n\n } else {\n\n &format_description!(\"[year]-[month]-[day]T[hour]:[minute]:[second]\")\n\n })\n\n .unwrap()\n", "file_path": "crates/taplo/src/dom/node/nodes.rs", "rank": 68, "score": 174542.84134735318 }, { "content": " #[allow(dead_code)]\n\n pub(crate) fn wrap(self) -> $name {\n\n self.into()\n\n }\n\n }\n\n\n\n impl From<$inner> for $name {\n\n fn from(inner: $inner) -> $name {\n\n $name {\n\n inner: Arc::new(inner)\n\n }\n\n }\n\n }\n\n };\n\n}\n\n\n\n#[derive(Debug)]\n\npub(crate) struct TableInner {\n\n pub(crate) errors: Shared<Vec<Error>>,\n\n pub(crate) syntax: Option<SyntaxElement>,\n", "file_path": "crates/taplo/src/dom/node/nodes.rs", "rank": 69, "score": 174541.2524576936 }, { "content": " }\n\n\n\n existing_table.add_entry(k.clone(), n.clone());\n\n }\n\n return;\n\n }\n\n }\n\n\n\n self.inner.errors.update(|errors| {\n\n errors.push(Error::ConflictingKeys {\n\n key: key.clone(),\n\n other: existing_key.clone(),\n\n })\n\n });\n\n }\n\n\n\n entries.add(key, node);\n\n });\n\n }\n\n\n", "file_path": "crates/taplo/src/dom/node/nodes.rs", "rank": 70, "score": 174540.19069400942 }, { "content": " if let Ok(d) =\n\n time::Date::parse(&text, &format_description!(\"[year]-[month]-[day]\"))\n\n {\n\n return DateTimeValue::Date(d);\n\n }\n\n }\n\n SyntaxKind::TIME => {\n\n let desc = if text.contains('.') {\n\n 
format_description!(\"[hour]:[minute]:[second].[subsecond]\")\n\n } else {\n\n format_description!(\"[hour]:[minute]:[second]\")\n\n };\n\n\n\n if let Ok(d) = time::Time::parse(&text, &desc) {\n\n return DateTimeValue::Time(d);\n\n }\n\n }\n\n _ => {}\n\n }\n\n\n", "file_path": "crates/taplo/src/dom/node/nodes.rs", "rank": 71, "score": 174536.98720668402 }, { "content": " .fmt(f),\n\n DateTimeValue::Date(date) => date\n\n .format(&format_description!(\"[year]-[month]-[day]\"))\n\n .unwrap()\n\n .fmt(f),\n\n DateTimeValue::Time(time) => time\n\n .format(if time.nanosecond() > 0 {\n\n &format_description!(\"[hour]:[minute]:[second].[subsecond]\")\n\n } else {\n\n &format_description!(\"[hour]:[minute]:[second]\")\n\n })\n\n .unwrap()\n\n .fmt(f),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub(crate) struct InvalidInner {\n\n pub(crate) errors: Shared<Vec<Error>>,\n", "file_path": "crates/taplo/src/dom/node/nodes.rs", "rank": 72, "score": 174536.7227083186 }, { "content": " &text,\n\n &time::format_description::well_known::Rfc3339,\n\n ) {\n\n return DateTimeValue::OffsetDateTime(d);\n\n }\n\n }\n\n SyntaxKind::DATE_TIME_LOCAL => {\n\n let desc = if text.contains('.') {\n\n format_description!(\n\n \"[year]-[month]-[day]T[hour]:[minute]:[second].[subsecond]\"\n\n )\n\n } else {\n\n format_description!(\"[year]-[month]-[day]T[hour]:[minute]:[second]\")\n\n };\n\n\n\n if let Ok(d) = time::PrimitiveDateTime::parse(&text, &desc) {\n\n return DateTimeValue::LocalDateTime(d);\n\n }\n\n }\n\n SyntaxKind::DATE => {\n", "file_path": "crates/taplo/src/dom/node/nodes.rs", "rank": 73, "score": 174535.4901489827 }, { "content": "fn format_entry(node: SyntaxNode, options: &Options, context: &Context) -> FormattedEntry {\n\n let mut key = String::new();\n\n let mut value = String::new();\n\n let mut comment = None;\n\n\n\n for c in node.children_with_tokens() {\n\n match c {\n\n NodeOrToken::Node(n) => match n.kind() {\n\n KEY => {\n\n format_key(n, &mut key, options, context);\n\n }\n\n VALUE => {\n\n let val = format_value(n, options, context);\n\n let c = val.trailing_comment();\n\n\n\n if c.is_some() {\n\n debug_assert!(comment.is_none());\n\n comment = c;\n\n }\n\n\n", "file_path": "crates/taplo/src/formatter/mod.rs", "rank": 74, "score": 174086.4802018781 }, { "content": "#[must_use]\n\npub fn create_server<E: Environment>() -> Server<World<E>> {\n\n Server::new()\n\n .on_request::<request::Initialize, _>(handlers::initialize)\n\n .on_request::<request::FoldingRangeRequest, _>(handlers::folding_ranges)\n\n .on_request::<request::DocumentSymbolRequest, _>(handlers::document_symbols)\n\n .on_request::<request::Formatting, _>(handlers::format)\n\n .on_request::<request::Completion, _>(handlers::completion)\n\n .on_request::<request::HoverRequest, _>(handlers::hover)\n\n .on_request::<request::DocumentLinkRequest, _>(handlers::links)\n\n .on_request::<request::SemanticTokensFullRequest, _>(handlers::semantic_tokens)\n\n .on_request::<request::PrepareRenameRequest, _>(handlers::prepare_rename)\n\n .on_request::<request::Rename, _>(handlers::rename)\n\n // .on_request::<msg_ext::TomlToJsonRequest, _>(handlers::toml_to_json)\n\n // .on_request::<msg_ext::JsonToTomlRequest, _>(handlers::json_to_toml)\n\n .on_notification::<notification::DidOpenTextDocument, _>(handlers::document_open)\n\n .on_notification::<notification::DidChangeTextDocument, _>(handlers::document_change)\n\n .on_notification::<notification::DidCloseTextDocument, _>(handlers::document_close)\n\n 
.on_notification::<notification::DidChangeConfiguration, _>(handlers::configuration_change)\n\n .on_notification::<notification::DidChangeWorkspaceFolders, _>(handlers::workspace_change)\n\n .on_request::<lsp_ext::request::ListSchemasRequest, _>(handlers::list_schemas)\n\n .on_request::<lsp_ext::request::AssociatedSchemaRequest, _>(handlers::associated_schema)\n\n .on_notification::<lsp_ext::notification::AssociateSchema, _>(handlers::associate_schema)\n\n .build()\n\n}\n\n\n", "file_path": "crates/taplo-lsp/src/lib.rs", "rank": 75, "score": 161494.5956033833 }, { "content": " /// [`Integer`]: Node::Integer\n\n pub fn is_integer(&self) -> bool {\n\n matches!(self, Self::Integer(..))\n\n }\n\n\n\n /// Returns `true` if the node is [`Float`].\n\n ///\n\n /// [`Float`]: Node::Float\n\n pub fn is_float(&self) -> bool {\n\n matches!(self, Self::Float(..))\n\n }\n\n\n\n /// Returns `true` if the node is [`Date`].\n\n ///\n\n /// [`Date`]: Node::Date\n\n pub fn is_date(&self) -> bool {\n\n matches!(self, Self::Date(..))\n\n }\n\n\n\n /// Returns `true` if the node is [`Invalid`].\n", "file_path": "crates/taplo/src/dom/node.rs", "rank": 76, "score": 158376.85508075182 }, { "content": " pub fn is_array(&self) -> bool {\n\n matches!(self, Self::Array(..))\n\n }\n\n\n\n /// Returns `true` if the node is [`Bool`].\n\n ///\n\n /// [`Bool`]: Node::Bool\n\n pub fn is_bool(&self) -> bool {\n\n matches!(self, Self::Bool(..))\n\n }\n\n\n\n /// Returns `true` if the node is [`Str`].\n\n ///\n\n /// [`Str`]: Node::Str\n\n pub fn is_str(&self) -> bool {\n\n matches!(self, Self::Str(..))\n\n }\n\n\n\n /// Returns `true` if the node is [`Integer`].\n\n ///\n", "file_path": "crates/taplo/src/dom/node.rs", "rank": 77, "score": 158370.28931991625 }, { "content": " Node::Invalid(v) => {\n\n if let Err(errs) = v.validate_node() {\n\n errors.extend(errs.read().as_ref().iter().cloned())\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl Node {\n\n /// Returns `true` if the node is [`Table`].\n\n ///\n\n /// [`Table`]: Node::Table\n\n pub fn is_table(&self) -> bool {\n\n matches!(self, Self::Table(..))\n\n }\n\n\n\n /// Returns `true` if the node is [`Array`].\n\n ///\n\n /// [`Array`]: Node::Array\n", "file_path": "crates/taplo/src/dom/node.rs", "rank": 78, "score": 158368.0175069584 }, { "content": "\n\nimpl From<Float> for Node {\n\n fn from(v: Float) -> Self {\n\n Self::Float(v)\n\n }\n\n}\n\n\n\nimpl From<Integer> for Node {\n\n fn from(v: Integer) -> Self {\n\n Self::Integer(v)\n\n }\n\n}\n\n\n\nimpl From<Str> for Node {\n\n fn from(v: Str) -> Self {\n\n Self::Str(v)\n\n }\n\n}\n\n\n\nimpl From<Bool> for Node {\n", "file_path": "crates/taplo/src/dom/node.rs", "rank": 79, "score": 158367.0628041833 }, { "content": " }\n\n\n\n pub fn try_into_integer(self) -> Result<Integer, Self> {\n\n if let Self::Integer(v) = self {\n\n Ok(v)\n\n } else {\n\n Err(self)\n\n }\n\n }\n\n\n\n pub fn try_into_float(self) -> Result<Float, Self> {\n\n if let Self::Float(v) = self {\n\n Ok(v)\n\n } else {\n\n Err(self)\n\n }\n\n }\n\n\n\n pub fn try_into_date(self) -> Result<DateTime, Self> {\n\n if let Self::Date(v) = self {\n", "file_path": "crates/taplo/src/dom/node.rs", "rank": 80, "score": 158366.29137499668 }, { "content": " _ => {}\n\n }\n\n\n\n Ok(matched.into_iter())\n\n }\n\n\n\n /// Validate the node and then all children recursively.\n\n pub fn validate(&self) -> Result<(), impl Iterator<Item = Error> + core::fmt::Debug> {\n\n let mut errors = Vec::new();\n\n self.validate_all_impl(&mut errors);\n\n if errors.is_empty() {\n\n 
Ok(())\n\n } else {\n\n Err(errors.into_iter())\n\n }\n\n }\n\n\n\n pub fn flat_iter(&self) -> impl DoubleEndedIterator<Item = (Keys, Node)> {\n\n let mut all = Vec::new();\n\n\n", "file_path": "crates/taplo/src/dom/node.rs", "rank": 81, "score": 158365.60272465335 }, { "content": " Ok(v)\n\n } else {\n\n Err(self)\n\n }\n\n }\n\n\n\n pub fn try_into_invalid(self) -> Result<Invalid, Self> {\n\n if let Self::Invalid(v) = self {\n\n Ok(v)\n\n } else {\n\n Err(self)\n\n }\n\n }\n\n}\n\n\n\nimpl From<DateTime> for Node {\n\n fn from(v: DateTime) -> Self {\n\n Self::Date(v)\n\n }\n\n}\n", "file_path": "crates/taplo/src/dom/node.rs", "rank": 82, "score": 158365.04137267353 }, { "content": "\n\nimpl Sealed for Node {}\n\nimpl DomNode for Node {\n\n fn syntax(&self) -> Option<&SyntaxElement> {\n\n match self {\n\n Node::Table(n) => n.syntax(),\n\n Node::Array(n) => n.syntax(),\n\n Node::Bool(n) => n.syntax(),\n\n Node::Str(n) => n.syntax(),\n\n Node::Integer(n) => n.syntax(),\n\n Node::Float(n) => n.syntax(),\n\n Node::Date(n) => n.syntax(),\n\n Node::Invalid(n) => n.syntax(),\n\n }\n\n }\n\n\n\n fn errors(&self) -> &Shared<Vec<Error>> {\n\n match self {\n\n Node::Table(n) => n.errors(),\n\n Node::Array(n) => n.errors(),\n", "file_path": "crates/taplo/src/dom/node.rs", "rank": 83, "score": 158362.66439617434 }, { "content": " Ok(v)\n\n } else {\n\n Err(self)\n\n }\n\n }\n\n\n\n pub fn try_into_bool(self) -> Result<Bool, Self> {\n\n if let Self::Bool(v) = self {\n\n Ok(v)\n\n } else {\n\n Err(self)\n\n }\n\n }\n\n\n\n pub fn try_into_str(self) -> Result<Str, Self> {\n\n if let Self::Str(v) = self {\n\n Ok(v)\n\n } else {\n\n Err(self)\n\n }\n", "file_path": "crates/taplo/src/dom/node.rs", "rank": 84, "score": 158361.50641542088 }, { "content": " InvalidInner {\n\n errors: Shared::from(Vec::from([Error::Query(QueryError::NotFound)])),\n\n syntax: None,\n\n }\n\n .wrap(),\n\n )\n\n })\n\n }\n\n\n\n pub fn try_get(&self, idx: impl Index) -> Result<Node, Error> {\n\n idx.index_into(self)\n\n .ok_or(Error::Query(QueryError::NotFound))\n\n }\n\n\n\n pub fn get_matches(\n\n &self,\n\n pattern: &str,\n\n ) -> Result<impl Iterator<Item = (KeyOrIndex, Node)> + ExactSizeIterator, Error> {\n\n let glob = globset::Glob::new(pattern)\n\n .map_err(QueryError::from)?\n", "file_path": "crates/taplo/src/dom/node.rs", "rank": 85, "score": 158361.25332671235 }, { "content": " ///\n\n /// [`Invalid`]: Node::Invalid\n\n pub fn is_invalid(&self) -> bool {\n\n matches!(self, Self::Invalid(..))\n\n }\n\n\n\n pub fn as_table(&self) -> Option<&Table> {\n\n if let Self::Table(v) = self {\n\n Some(v)\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n pub fn as_array(&self) -> Option<&Array> {\n\n if let Self::Array(v) = self {\n\n Some(v)\n\n } else {\n\n None\n\n }\n", "file_path": "crates/taplo/src/dom/node.rs", "rank": 86, "score": 158360.51440116426 }, { "content": " Node::Bool(n) => n.errors(),\n\n Node::Str(n) => n.errors(),\n\n Node::Integer(n) => n.errors(),\n\n Node::Float(n) => n.errors(),\n\n Node::Date(n) => n.errors(),\n\n Node::Invalid(n) => n.errors(),\n\n }\n\n }\n\n\n\n fn validate_node(&self) -> Result<(), &Shared<Vec<Error>>> {\n\n match self {\n\n Node::Table(n) => n.validate_node(),\n\n Node::Array(n) => n.validate_node(),\n\n Node::Bool(n) => n.validate_node(),\n\n Node::Str(n) => n.validate_node(),\n\n Node::Integer(n) => n.validate_node(),\n\n Node::Float(n) => n.validate_node(),\n\n Node::Date(n) => n.validate_node(),\n\n Node::Invalid(n) => n.validate_node(),\n\n }\n", "file_path": 
"crates/taplo/src/dom/node.rs", "rank": 87, "score": 158359.92744274266 }, { "content": " if let Some(err) = err {\n\n return Err(err);\n\n }\n\n\n\n Ok(all.into_iter())\n\n }\n\n\n\n pub fn text_ranges(&self) -> impl ExactSizeIterator<Item = TextRange> {\n\n let mut ranges = Vec::with_capacity(1);\n\n\n\n match self {\n\n Node::Table(v) => {\n\n let entries = v.entries().read();\n\n\n\n for (k, entry) in entries.iter() {\n\n ranges.extend(k.text_ranges());\n\n ranges.extend(entry.text_ranges());\n\n }\n\n\n\n if let Some(mut r) = v.syntax().map(|s| s.text_range()) {\n", "file_path": "crates/taplo/src/dom/node.rs", "rank": 88, "score": 158358.02843270267 }, { "content": " &self,\n\n keys: Keys,\n\n include_children: bool,\n\n ) -> Result<impl Iterator<Item = (Keys, Node)> + ExactSizeIterator, Error> {\n\n let mut all = self.flat_iter_impl();\n\n\n\n let mut err: Option<Error> = None;\n\n\n\n all.retain(|(k, _)| {\n\n if k.len() < keys.len() {\n\n return false;\n\n }\n\n\n\n let search_keys = keys.clone();\n\n let keys = k.clone();\n\n\n\n for (search_key, key) in search_keys.iter().zip(keys.iter()) {\n\n match search_key {\n\n KeyOrIndex::Key(search_key) => {\n\n let glob = match globset::Glob::new(search_key.value()) {\n", "file_path": "crates/taplo/src/dom/node.rs", "rank": 89, "score": 158357.92768301073 }, { "content": " }\n\n}\n\n\n\nimpl Node {\n\n pub fn path(&self, keys: &Keys) -> Option<Node> {\n\n let mut node = self.clone();\n\n for key in keys.iter() {\n\n node = node.get(key);\n\n }\n\n\n\n if node.is_invalid() {\n\n None\n\n } else {\n\n Some(node)\n\n }\n\n }\n\n\n\n pub fn get(&self, idx: impl Index) -> Node {\n\n idx.index_into(self).unwrap_or_else(|| {\n\n Node::from(\n", "file_path": "crates/taplo/src/dom/node.rs", "rank": 90, "score": 158357.5438754765 }, { "content": " }\n\n\n\n pub fn as_invalid(&self) -> Option<&Invalid> {\n\n if let Self::Invalid(v) = self {\n\n Some(v)\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n pub fn try_into_table(self) -> Result<Table, Self> {\n\n if let Self::Table(v) = self {\n\n Ok(v)\n\n } else {\n\n Err(self)\n\n }\n\n }\n\n\n\n pub fn try_into_array(self) -> Result<Array, Self> {\n\n if let Self::Array(v) = self {\n", "file_path": "crates/taplo/src/dom/node.rs", "rank": 91, "score": 158357.1176222075 }, { "content": " }\n\n\n\n pub fn as_bool(&self) -> Option<&Bool> {\n\n if let Self::Bool(v) = self {\n\n Some(v)\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n pub fn as_str(&self) -> Option<&Str> {\n\n if let Self::Str(v) = self {\n\n Some(v)\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n pub fn as_integer(&self) -> Option<&Integer> {\n\n if let Self::Integer(v) = self {\n", "file_path": "crates/taplo/src/dom/node.rs", "rank": 92, "score": 158356.4329216796 }, { "content": " fn from(v: Bool) -> Self {\n\n Self::Bool(v)\n\n }\n\n}\n\n\n\nimpl From<Array> for Node {\n\n fn from(v: Array) -> Self {\n\n Self::Array(v)\n\n }\n\n}\n\n\n\nimpl From<Table> for Node {\n\n fn from(v: Table) -> Self {\n\n Self::Table(v)\n\n }\n\n}\n\n\n\nimpl From<Invalid> for Node {\n\n fn from(v: Invalid) -> Self {\n\n Self::Invalid(v)\n\n }\n\n}\n", "file_path": "crates/taplo/src/dom/node.rs", "rank": 93, "score": 158356.42796346117 }, { "content": " }\n\n }\n\n _ => {\n\n all.push((parent, self.clone()));\n\n }\n\n }\n\n }\n\n\n\n fn validate_all_impl(&self, errors: &mut Vec<Error>) {\n\n match self {\n\n Node::Table(v) => {\n\n if let Err(errs) = v.validate_node() {\n\n errors.extend(errs.read().as_ref().iter().cloned())\n\n }\n\n\n\n let items = v.inner.entries.read();\n\n 
for (k, entry) in items.as_ref().all.iter() {\n\n if let Err(errs) = k.validate_node() {\n\n errors.extend(errs.read().as_ref().iter().cloned())\n\n }\n", "file_path": "crates/taplo/src/dom/node.rs", "rank": 94, "score": 158354.55152484615 }, { "content": " }\n\n Node::Bool(v) => ranges.push(v.syntax().map(|s| s.text_range()).unwrap_or_default()),\n\n Node::Str(v) => ranges.push(v.syntax().map(|s| s.text_range()).unwrap_or_default()),\n\n Node::Integer(v) => ranges.push(v.syntax().map(|s| s.text_range()).unwrap_or_default()),\n\n Node::Float(v) => ranges.push(v.syntax().map(|s| s.text_range()).unwrap_or_default()),\n\n Node::Date(v) => ranges.push(v.syntax().map(|s| s.text_range()).unwrap_or_default()),\n\n Node::Invalid(v) => ranges.push(v.syntax().map(|s| s.text_range()).unwrap_or_default()),\n\n }\n\n\n\n ranges.into_iter()\n\n }\n\n\n\n /// All the comments in the tree, including header comments returned from [`Self::header_comments`].\n\n pub fn comments(&self) -> impl Iterator<Item = Comment> {\n\n if let Some(syntax) = self.syntax().cloned().and_then(|s| s.into_node()) {\n\n Either::Left(\n\n syntax\n\n .descendants_with_tokens()\n\n .filter(|t| t.kind() == SyntaxKind::COMMENT)\n\n .map(Comment::from_syntax),\n", "file_path": "crates/taplo/src/dom/node.rs", "rank": 95, "score": 158353.24277685993 }, { "content": " Node::Str(v) => {\n\n if let Err(errs) = v.validate_node() {\n\n errors.extend(errs.read().as_ref().iter().cloned())\n\n }\n\n }\n\n Node::Integer(v) => {\n\n if let Err(errs) = v.validate_node() {\n\n errors.extend(errs.read().as_ref().iter().cloned())\n\n }\n\n }\n\n Node::Float(v) => {\n\n if let Err(errs) = v.validate_node() {\n\n errors.extend(errs.read().as_ref().iter().cloned())\n\n }\n\n }\n\n Node::Date(v) => {\n\n if let Err(errs) = v.validate_node() {\n\n errors.extend(errs.read().as_ref().iter().cloned())\n\n }\n\n }\n", "file_path": "crates/taplo/src/dom/node.rs", "rank": 96, "score": 158353.11696644445 }, { "content": " None => Either::Right(self.comments()),\n\n }\n\n }\n\n\n\n fn flat_iter_impl(&self) -> Vec<(Keys, Node)> {\n\n let mut all = Vec::new();\n\n\n\n match self {\n\n Node::Table(t) => {\n\n let entries = t.inner.entries.read();\n\n for (key, entry) in &entries.all {\n\n entry.collect_flat(Keys::from(key.clone()), &mut all);\n\n }\n\n }\n\n Node::Array(arr) => {\n\n let items = arr.inner.items.read();\n\n for (idx, item) in items.iter().enumerate() {\n\n item.collect_flat(Keys::from(idx), &mut all);\n\n }\n\n }\n", "file_path": "crates/taplo/src/dom/node.rs", "rank": 97, "score": 158352.7932803701 }, { "content": " entry.validate_all_impl(errors);\n\n }\n\n }\n\n Node::Array(v) => {\n\n if let Err(errs) = v.validate_node() {\n\n errors.extend(errs.read().as_ref().iter().cloned())\n\n }\n\n\n\n let items = v.inner.items.read();\n\n for item in &**items.as_ref() {\n\n if let Err(errs) = item.validate_node() {\n\n errors.extend(errs.read().as_ref().iter().cloned())\n\n }\n\n }\n\n }\n\n Node::Bool(v) => {\n\n if let Err(errs) = v.validate_node() {\n\n errors.extend(errs.read().as_ref().iter().cloned())\n\n }\n\n }\n", "file_path": "crates/taplo/src/dom/node.rs", "rank": 98, "score": 158352.23007554002 }, { "content": " .compile_matcher();\n\n let mut matched = Vec::new();\n\n\n\n match self {\n\n Node::Table(t) => {\n\n let entries = t.entries().read();\n\n for (key, node) in entries.iter() {\n\n if glob.is_match(pattern) {\n\n matched.push((KeyOrIndex::from(key.clone()), node.clone()));\n\n }\n\n }\n\n }\n\n Node::Array(arr) => {\n\n let items = 
arr.items().read();\n\n for (idx, node) in items.iter().enumerate() {\n\n if glob.is_match(&idx.to_string()) {\n\n matched.push((KeyOrIndex::from(idx), node.clone()));\n\n }\n\n }\n\n }\n", "file_path": "crates/taplo/src/dom/node.rs", "rank": 99, "score": 158352.01714119618 } ]
Rust
midi/src/message/system_common.rs
alisomay/koto_midi
7cf4579bb8d3c6c8bd4f29d7b101b4d2e8501a08
use crate::impl_midi_message; use crate::Category; use crate::MidiMessage; #[derive(Debug)] pub struct SystemExclusive { bytes: Vec<u8>, pub manufacturer_id: Vec<u8>, pub category: Category, } impl SystemExclusive { pub fn new(manufacturer_id: &[u8], message_content: &[u8]) -> Self { let mut message = message_content.to_vec(); message.insert(0, 0xF0); if manufacturer_id.len() == 1 { message.insert(1, manufacturer_id[0]); } else { message.insert(1, manufacturer_id[0]); message.insert(2, manufacturer_id[1]); message.insert(3, manufacturer_id[2]); } message.push(0xF7); Self { bytes: message, manufacturer_id: manufacturer_id.to_vec(), category: Category::SystemCommon, } } } impl From<&[u8]> for SystemExclusive { fn from(raw_bytes: &[u8]) -> Self { let mut manufacturer_id: Vec<u8> = vec![]; if raw_bytes[1] != 0 { manufacturer_id.push(raw_bytes[1]); } else { manufacturer_id.push(raw_bytes[1]); manufacturer_id.push(raw_bytes[2]); manufacturer_id.push(raw_bytes[3]); } SystemExclusive { bytes: raw_bytes.to_vec(), manufacturer_id, category: Category::SystemCommon, } } } impl Default for SystemExclusive { fn default() -> Self { Self { bytes: vec![0xF0, 0x01, 0x0, 0x0, 0xF7], manufacturer_id: vec![0x01], category: Category::SystemCommon, } } } #[derive(Debug)] pub struct TimeCodeQuarterFrame { bytes: [u8; 2], message_type: u8, values: u8, pub category: Category, } impl TimeCodeQuarterFrame { pub fn new(message_type: u64, values: u64) -> Self { Self { bytes: [ 0xF1, (message_type.min(7) << 4) as u8 | values.min(15) as u8, ], message_type: message_type.min(7) as u8, values: values.min(15) as u8, category: Category::SystemCommon, } } pub fn message_type(&self) -> u8 { self.message_type } pub fn values(&self) -> u8 { self.values } pub fn change_message_type(&mut self, message_type: u8) { self.message_type = message_type; self.bytes[1] = (self.message_type.min(7) << 4) | self.values.min(15); } pub fn change_values(&mut self, values: u8) { self.values = values; self.bytes[1] = (self.message_type.min(7) << 4) | self.values.min(15); } } impl From<&[u8]> for TimeCodeQuarterFrame { fn from(raw_bytes: &[u8]) -> Self { TimeCodeQuarterFrame { bytes: [raw_bytes[0], raw_bytes[1]], message_type: (raw_bytes[1] & 0b0111_0000) >> 4, values: raw_bytes[1] & 0b0000_1111, category: Category::SystemCommon, } } } impl Default for TimeCodeQuarterFrame { fn default() -> Self { todo!() } } #[derive(Debug)] pub struct SongPosition { bytes: [u8; 3], midi_beats_elapsed: u16, pub category: Category, } impl SongPosition { pub fn new(midi_beats_elapsed: u64) -> Self { let midi_beats_elapsed = midi_beats_elapsed.min(16383) as u16; let msb = ((midi_beats_elapsed >> 7) as u8) & 0b0111_1111; let lsb = (midi_beats_elapsed as u8) & 0b0111_1111; Self { bytes: [0xF2, lsb.min(127) as u8, msb.min(127) as u8], midi_beats_elapsed, category: Category::SystemCommon, } } pub fn midi_beats_elapsed(&self) -> u16 { self.midi_beats_elapsed } pub fn change_midi_beats_elapsed(&mut self, midi_beats_elapsed: u16) { self.midi_beats_elapsed = midi_beats_elapsed.min(16383); let msb = ((midi_beats_elapsed >> 7) as u8) & 0b0111_1111; let lsb = (midi_beats_elapsed as u8) & 0b0111_1111; self.bytes[1] = lsb; self.bytes[2] = msb; } } impl From<&[u8]> for SongPosition { fn from(raw_bytes: &[u8]) -> Self { let midi_beats_elapsed = ((raw_bytes[2] as u16) << 7) | raw_bytes[1] as u16; SongPosition { bytes: [raw_bytes[0], raw_bytes[1], raw_bytes[2]], midi_beats_elapsed, category: Category::SystemCommon, } } } impl Default for SongPosition { fn default() -> Self { Self { 
bytes: [0xF2, 0, 0], midi_beats_elapsed: 0, category: Category::SystemCommon, } } } #[derive(Debug)] pub struct SongSelect { bytes: [u8; 2], number: u8, pub category: Category, } impl SongSelect { pub fn new(number: u64) -> Self { Self { bytes: [0xF3, number.min(127) as u8], number: number.min(127) as u8, category: Category::SystemCommon, } } pub fn number(&self) -> u8 { self.number } } impl From<&[u8]> for SongSelect { fn from(raw_bytes: &[u8]) -> Self { SongSelect { bytes: [raw_bytes[0], raw_bytes[1]], number: raw_bytes[1], category: Category::SystemCommon, } } } impl Default for SongSelect { fn default() -> Self { Self { bytes: [0xF3, 0], number: 0, category: Category::SystemCommon, } } } #[derive(Debug)] pub struct TuneRequest { bytes: [u8; 1], pub category: Category, } impl Default for TuneRequest { fn default() -> Self { TuneRequest { bytes: [0xF6], category: Category::SystemCommon, } } } impl TuneRequest { pub fn new() -> Self { TuneRequest::default() } } impl From<&[u8]> for TuneRequest { fn from(raw_bytes: &[u8]) -> Self { TuneRequest { bytes: [raw_bytes[0]], category: Category::SystemCommon, } } } #[derive(Debug)] pub struct EndOfExclusive { bytes: [u8; 1], pub category: Category, } impl Default for EndOfExclusive { fn default() -> Self { EndOfExclusive { bytes: [0xF7], category: Category::SystemCommon, } } } impl EndOfExclusive { pub fn new() -> Self { EndOfExclusive::default() } } impl From<&[u8]> for EndOfExclusive { fn from(raw_bytes: &[u8]) -> Self { EndOfExclusive { bytes: [raw_bytes[0]], category: Category::SystemCommon, } } } impl_midi_message!(SystemExclusive); impl_midi_message!(TimeCodeQuarterFrame); impl_midi_message!(SongPosition); impl_midi_message!(SongSelect); impl_midi_message!(TuneRequest); impl_midi_message!(EndOfExclusive);
use crate::impl_midi_message; use crate::Category; use crate::MidiMessage; #[derive(Debug)] pub struct SystemExclusive { bytes: Vec<u8>, pub manufacturer_id: Vec<u8>, pub category: Category, } impl SystemExclusive { pub fn new(manufacturer_id: &[u8], message_content: &[u8]) -> Self { let mut message = message_content.to_vec(); message.insert(0, 0xF0); if manufacturer_id.len() == 1 { message.insert(1, manufacturer_id[0]); } else { message.insert(1, manufacturer_id[0]); message.insert(2, manufacturer_id[1]); message.insert(3, manufacturer_id[2]); } message.push(0xF7); Self { bytes: message, manufacturer_id: manufacturer_id.to_vec(), category: Category::SystemCommon, } } } impl From<&[u8]> for SystemExclusive { fn from(raw_bytes: &[u8]) -> Self { let mut manufacturer_id: Vec<u8> = vec![]; if raw_bytes[1] != 0 { manufacturer_id.push(raw_bytes[1]); } else { manufacturer_id.push(raw_bytes[1]); manufacturer_id.push(raw_bytes[2]); manufacturer_id.push(raw_bytes[3]); } SystemExclusive { bytes: raw_bytes.to_vec(), manufacturer_id, category: Category::SystemCommon, } } } impl Default for SystemExclusive { fn default() -> Self { Self { bytes: vec![0xF0, 0x01, 0x0, 0x0, 0xF7], manufacturer_id: vec![0x01], category: Category::SystemCommon, } } } #[derive(Debug)] pub struct TimeCodeQuarterFrame { bytes: [u8; 2], message_type: u8, values: u8, pub category: Category, } impl TimeCodeQuarterFrame { pub fn new(message_type: u64, values: u64) -> Self { Self { bytes: [ 0xF1, (message_type.min(7) << 4) as u8 | values.min(15) as u8, ], message_type: message_type.min(7) as u8, values: values.min(15) as u8, category: Category::SystemCommon, } } pub fn message_type(&self) -> u8 { self.message_type } pub fn values(&self) -> u8 { self.values } pub fn change_message_type(&mut self, message_type: u8) { self.message_type = message_type; self.bytes[1] = (self.message_type.min(7) << 4) | self.values.min(15); } pub fn change_values(&mut self, values: u8) { self.values = values; self.bytes[1] = (self.message_type.min(7) << 4) | self.values.min(15); } } impl From<&[u8]> for TimeCodeQuarterFrame { fn from(raw_bytes: &[u8]) -> Self { TimeCodeQuarterFrame { bytes: [raw_bytes[0], raw_bytes[1]], message_type: (raw_bytes[1] & 0b0111_0000) >> 4, values: raw_bytes[1] & 0b0000_1111, category: Category::SystemCommon, } } } impl Default for TimeCodeQuarterFrame { fn default() -> Self { todo!() } } #[derive(Debug)] pub struct SongPosition { bytes: [u8; 3], midi_beats_elapsed: u16, pub category: Category, } impl SongPosition { pub fn new(midi_beats_elapsed: u64) -> Self { let midi_beats_elapsed = midi_beats_elapsed.min(16383) as u16; let msb = ((midi_beats_elapsed >> 7) as u8) & 0b0111_1111; let lsb = (midi_beats_elapsed as u8) & 0b0111_1111; Self { bytes: [0xF2, lsb.min(127) as u8, msb.min(127) as u8], midi_beats_elapsed, category: Category::SystemCommon, } } pub fn midi_beats_elapsed(&self) -> u16 { self.midi_beats_elapsed } pub fn change_midi_beats_elapsed(&mut self, midi_beats_elapsed: u16) { self.midi_beats_elapsed = midi_beats_elapsed.min(16383); let msb = ((midi_beats_elapsed >> 7) as u8) & 0b0111_1111; let lsb = (midi_beats_elapsed as u8) & 0b0111_1111; self.bytes[1] = lsb; self.bytes[2] = msb; } } impl From<&[u8]> for SongPosition { fn from(raw_bytes: &[u8]) -> Self { let midi_beats_elapsed = ((raw_bytes[2] as u16) << 7) | raw_bytes[1] as u16; SongPosition { bytes: [raw_bytes[0], raw_bytes[1], raw_bytes[2]], midi_beats_elapsed, category: Category::SystemCommon, } } } impl Default for SongPosition { fn default() -> Self { Self { 
bytes: [0xF2, 0, 0], midi_beats_elapsed: 0, category: Category::SystemCommon, } } } #[derive(Debug)] pub struct SongSelect { bytes: [u8; 2], number: u8, pub category: Category, } impl SongSelect { pub fn new(number: u64) -> Self { Self { bytes: [0xF3, number.min(127) as u8], number: number.min(127) as u8, category: Category::SystemCommon, } } pub fn number(&self) -> u8 { self.number } } impl From<&[u8]> for SongSelect { fn from(raw_bytes: &[u8]) -> Self { SongSelect { bytes: [raw_bytes[0], raw_bytes[1]], number: raw_bytes[1], category: Category::SystemCommon, } } } impl Default for SongSelect { fn default() -> Self { Self { bytes: [0xF3, 0], number: 0, category: Category::SystemCommon, } } } #[derive(Debug)] pub struct TuneRequest { bytes: [u8; 1], pub category: Category, } impl Default for TuneRequest { fn default() -> Self { TuneRequest { bytes: [0xF6], category: Category::SystemCommon, } } } impl TuneRequest { pub fn new() -> Self { TuneRequest::default() } } impl From<&[u8]> for TuneRequest { fn from(raw_bytes: &[u8]) -> Self { TuneRequest { bytes: [raw_bytes[0]], category: Category::SystemCommon, } } } #[derive(Debug)] pub struct EndOfExclusive { bytes: [u8; 1], pub category: Category, } impl Default for EndOfExclusive { fn default() -> Self { EndOfExclusive { bytes: [0xF7], category: Category::SystemCommon, } } } impl EndOfExclusive { pub fn new() -> Self { EndOfExclusive::default() } } impl From<&[u8]> for EndOfExclusive {
} impl_midi_message!(SystemExclusive); impl_midi_message!(TimeCodeQuarterFrame); impl_midi_message!(SongPosition); impl_midi_message!(SongSelect); impl_midi_message!(TuneRequest); impl_midi_message!(EndOfExclusive);
fn from(raw_bytes: &[u8]) -> Self { EndOfExclusive { bytes: [raw_bytes[0]], category: Category::SystemCommon, } }
function_block-full_function
[ { "content": "// TODO: Solve unnecessary repetition of list collectors for different types ot cases if there is.\n\npub fn collect_list_of_midi_bytes_as_u8(\n\n message: &ValueList,\n\n error: &str,\n\n) -> std::result::Result<Vec<u8>, RuntimeError> {\n\n let arguments = message\n\n .data()\n\n .iter()\n\n .map(|v| match v {\n\n Value::Number(num) => match num {\n\n // Truncate.\n\n ValueNumber::I64(midi_byte) if *midi_byte >= 0 && *midi_byte < 128 => Ok(*midi_byte as u8),\n\n _ => runtime_error!(error),\n\n },\n\n _ => {\n\n runtime_error!(error)\n\n }\n\n })\n\n .collect::<std::result::Result<Vec<u8>, RuntimeError>>();\n\n arguments\n\n}\n\n\n", "file_path": "midi/src/lib.rs", "rank": 0, "score": 97344.5762184057 }, { "content": "pub fn collect_list_of_u64(\n\n message: &ValueList,\n\n error: &str,\n\n) -> std::result::Result<Vec<u64>, RuntimeError> {\n\n let arguments = message\n\n .data()\n\n .iter()\n\n .map(|v| match v {\n\n Value::Number(num) => match num {\n\n // Truncate.\n\n ValueNumber::I64(midi_byte) if *midi_byte >= 0 => Ok(*midi_byte as u64),\n\n _ => runtime_error!(error),\n\n },\n\n _ => {\n\n runtime_error!(error)\n\n }\n\n })\n\n .collect::<std::result::Result<Vec<u64>, RuntimeError>>();\n\n arguments\n\n}\n\n\n", "file_path": "midi/src/lib.rs", "rank": 1, "score": 82563.5661706187 }, { "content": "pub fn collect_list_of_u8(\n\n message: &ValueList,\n\n error: &str,\n\n) -> std::result::Result<Vec<u8>, RuntimeError> {\n\n let arguments = message\n\n .data()\n\n .iter()\n\n .map(|v| match v {\n\n Value::Number(num) => match num {\n\n // Truncate.\n\n ValueNumber::I64(byte) if *byte >= 0 && *byte <= 255 => Ok(*byte as u8),\n\n _ => runtime_error!(error),\n\n },\n\n _ => {\n\n runtime_error!(error)\n\n }\n\n })\n\n .collect::<std::result::Result<Vec<u8>, RuntimeError>>();\n\n arguments\n\n}\n", "file_path": "midi/src/lib.rs", "rank": 2, "score": 82513.55422276192 }, { "content": "pub fn collect_list_of_value_list(\n\n message: &ValueList,\n\n error: &str,\n\n) -> std::result::Result<Vec<ValueList>, RuntimeError> {\n\n let arguments = message\n\n .data()\n\n .iter()\n\n .map(|v| match v {\n\n Value::List(list) => Ok(list.clone()),\n\n _ => {\n\n runtime_error!(error)\n\n }\n\n })\n\n .collect::<std::result::Result<Vec<ValueList>, RuntimeError>>();\n\n arguments\n\n}\n\n\n\n\n", "file_path": "midi/src/lib.rs", "rank": 3, "score": 79803.86875672557 }, { "content": "pub fn make_module() -> ValueMap {\n\n let mut module = ValueMap::new();\n\n\n\n let mut types = ValueMap::new();\n\n types!(\n\n types,\n\n \"note_off\",\n\n \"note_on\",\n\n \"poly_after_touch\",\n\n \"control_change\",\n\n \"program_change\",\n\n \"after_touch\",\n\n \"pitch_bend\",\n\n \"all_sound_off\",\n\n \"reset_all_controllers\",\n\n \"local_control\",\n\n \"all_notes_off\",\n\n \"omni_mode_off\",\n\n \"omni_mode_on\",\n\n \"mono_mode_on\",\n", "file_path": "midi/src/lib.rs", "rank": 4, "score": 76558.26804013741 }, { "content": "pub trait MidiMessage {\n\n fn pack(&self) -> &[u8];\n\n}\n\n\n\nmacro_rules! 
impl_midi_message {\n\n ($type:ty) => {\n\n impl MidiMessage for $type {\n\n fn pack(&self) -> &[u8] {\n\n &self.bytes\n\n }\n\n }\n\n };\n\n}\n\n\n\npub(crate) use impl_midi_message;\n", "file_path": "midi/src/lib.rs", "rank": 5, "score": 49505.277224270794 }, { "content": "fn load_and_run_script(script_path: &str) {\n\n let mut path = PathBuf::new();\n\n path.push(env!(\"CARGO_MANIFEST_DIR\"));\n\n path.push(\"../koto/tests\");\n\n path.push(script_path);\n\n if !path.exists() {\n\n panic!(\"Path doesn't exist: {:?}\", path);\n\n }\n\n let script =\n\n read_to_string(&path).unwrap_or_else(|_| panic!(\"Unable to load path '{:?}'\", &path));\n\n\n\n run_script(&script, Some(path), false);\n\n}\n\n\n\nmacro_rules! module_test {\n\n ($name:ident) => {\n\n #[test]\n\n fn $name() {\n\n load_and_run_script(&format!(\"{}.koto\", stringify!($name)));\n\n }\n\n };\n\n}\n\n\n\nmod lib_tests {\n\n use super::*;\n\n module_test!(midi);\n\n module_test!(api);\n\n}\n", "file_path": "test_runner/tests/test_runner.rs", "rank": 6, "score": 25467.238464083755 }, { "content": "fn pascal_case_to_underscore_separated_literal(string_to_process: &str) -> std::string::String {\n\n let mut literal = String::new();\n\n for (i,ch) in string_to_process.chars().enumerate() {\n\n if ch.is_uppercase() && i != 0 {\n\n literal.push('_');\n\n literal.push_str(&format!(\"{}\",ch.to_lowercase())[..]);\n\n continue;\n\n }\n\n else if ch.is_uppercase() {\n\n literal.push_str(&format!(\"{}\",ch.to_lowercase())[..]);\n\n continue;\n\n }\n\n literal.push(ch);\n\n }\n\n literal\n\n}\n\n\n\nmacro_rules! types {\n\n ($map:ident,$($type_literal:literal),*) => {\n\n $($map.add_value($type_literal, $type_literal.into());)*\n", "file_path": "midi/src/lib.rs", "rank": 7, "score": 21084.503155222552 }, { "content": " Undefined,\n\n Malformed,\n\n EndOfExclusive(EndOfExclusive),\n\n AllSoundOff(AllSoundOff),\n\n ResetAllControllers(ResetAllControllers),\n\n LocalControl(LocalControl),\n\n AllNotesOff(AllNotesOff),\n\n OmniModeOff(OmniModeOff),\n\n OmniModeOn(OmniModeOn),\n\n MonoModeOn(MonoModeOn),\n\n PolyModeOn(PolyModeOn),\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct ParsedMessage {\n\n pub message: Message,\n\n}\n\n\n\nimpl From<&[u8]> for ParsedMessage {\n\n fn from(raw_message: &[u8]) -> Self {\n", "file_path": "midi/src/message.rs", "rank": 8, "score": 20874.95174043833 }, { "content": " let status_byte = raw_message[0];\n\n let data_bytes = raw_message[1..].to_vec();\n\n let data_bytes_length = data_bytes.len();\n\n let message = match status_byte & 0xF0 {\n\n 0x80 => match data_bytes_length {\n\n 2 if data_bytes[0] <= 127 && data_bytes[1] <= 127 => {\n\n Message::NoteOff(raw_message.into())\n\n }\n\n _ => Message::Malformed,\n\n },\n\n\n\n 0x90 => match data_bytes_length {\n\n 2 if data_bytes[0] <= 127 && data_bytes[1] <= 127 => {\n\n Message::NoteOn(raw_message.into())\n\n }\n\n _ => Message::Malformed,\n\n },\n\n 0xA0 => match data_bytes_length {\n\n 2 if data_bytes[0] <= 127 && data_bytes[1] <= 127 => {\n\n Message::PolyAfterTouch(raw_message.into())\n", "file_path": "midi/src/message.rs", "rank": 9, "score": 20872.772682006642 }, { "content": "#![allow(unused)]\n\n\n\nmod channel_mode;\n\nmod channel_voice;\n\nmod system_common;\n\nmod system_realtime;\n\npub use channel_mode::*;\n\npub use channel_voice::*;\n\npub use system_common::*;\n\npub use system_realtime::*;\n\n\n\n#[derive(Debug)]\n\npub enum Category {\n\n ChannelVoice,\n\n ChannelMode,\n\n SystemCommon,\n\n SystemRealtime,\n\n Unknown,\n\n}\n\n\n", "file_path": 
"midi/src/message.rs", "rank": 10, "score": 20872.569613609427 }, { "content": "\n\n 0xC0 => match data_bytes_length {\n\n 1 if data_bytes[0] <= 127 => Message::ProgramChange(raw_message.into()),\n\n _ => Message::Malformed,\n\n },\n\n 0xD0 => match data_bytes_length {\n\n 1 if data_bytes[0] <= 127 => Message::AfterTouch(raw_message.into()),\n\n _ => Message::Malformed,\n\n },\n\n 0xE0 => match data_bytes_length {\n\n 2 if data_bytes[0] <= 127 && data_bytes[1] <= 127 => {\n\n Message::PitchBend(raw_message.into())\n\n }\n\n _ => Message::Malformed,\n\n },\n\n _ => match status_byte {\n\n 0xF0 => match data_bytes.last() {\n\n Some(0xF7) => Message::SystemExclusive(raw_message.into()),\n\n _ => Message::Malformed,\n\n },\n", "file_path": "midi/src/message.rs", "rank": 11, "score": 20871.12814853109 }, { "content": " 0xF1 => match data_bytes_length {\n\n 1 if data_bytes[0] <= 127 => Message::TimeCodeQuarterFrame(raw_message.into()),\n\n _ => Message::Malformed,\n\n },\n\n 0xF2 => match data_bytes_length {\n\n 2 if data_bytes[0] <= 127 && data_bytes[1] <= 127 => {\n\n Message::SongPosition(raw_message.into())\n\n }\n\n _ => Message::Malformed,\n\n },\n\n 0xF3 => match data_bytes_length {\n\n 1 if data_bytes[0] <= 127 => Message::SongSelect(raw_message.into()),\n\n _ => Message::Malformed,\n\n },\n\n 0xF4 | 0xF5 => Message::Undefined,\n\n 0xF6 => match data_bytes_length {\n\n 0 => Message::TuneRequest(TuneRequest::default()),\n\n _ => Message::Malformed,\n\n },\n\n 0xF7 => match data_bytes_length {\n", "file_path": "midi/src/message.rs", "rank": 12, "score": 20871.004062954613 }, { "content": " }\n\n _ => Message::Malformed,\n\n },\n\n 0xB0 => match data_bytes_length {\n\n 2 if data_bytes[0] <= 127 && data_bytes[1] <= 127 => {\n\n match data_bytes[0] & 0b0111_1111 {\n\n 0..120 => Message::ControlChange(raw_message.into()),\n\n 120 => Message::AllSoundOff(raw_message.into()),\n\n 121 => Message::ResetAllControllers(raw_message.into()),\n\n 122 => Message::LocalControl(raw_message.into()),\n\n 123 => Message::AllNotesOff(raw_message.into()),\n\n 124 => Message::OmniModeOff(raw_message.into()),\n\n 125 => Message::OmniModeOn(raw_message.into()),\n\n 126 => Message::MonoModeOn(raw_message.into()),\n\n 127 => Message::PolyModeOn(raw_message.into()),\n\n _ => Message::Malformed,\n\n }\n\n }\n\n _ => Message::Malformed,\n\n },\n", "file_path": "midi/src/message.rs", "rank": 13, "score": 20870.751708383486 }, { "content": " 0 => Message::EndOfExclusive(EndOfExclusive::default()),\n\n _ => Message::Malformed,\n\n },\n\n 0xF8 => match data_bytes_length {\n\n 0 => Message::TimingClock(TimingClock::default()),\n\n _ => Message::Malformed,\n\n },\n\n 0xF9 => Message::Undefined,\n\n 0xFA => match data_bytes_length {\n\n 0 => Message::Start(Start::default()),\n\n _ => Message::Malformed,\n\n },\n\n 0xFB => match data_bytes_length {\n\n 0 => Message::Continue(Continue::default()),\n\n _ => Message::Malformed,\n\n },\n\n 0xFC => match data_bytes_length {\n\n 0 => Message::Stop(Stop::default()),\n\n _ => Message::Malformed,\n\n },\n", "file_path": "midi/src/message.rs", "rank": 14, "score": 20870.581510010754 }, { "content": " 0xFD => Message::Undefined,\n\n 0xFE => match data_bytes_length {\n\n 0 => Message::ActiveSensing(ActiveSensing::default()),\n\n _ => Message::Malformed,\n\n },\n\n 0xFF => match data_bytes_length {\n\n 0 => Message::Reset(Reset::default()),\n\n _ => Message::Malformed,\n\n },\n\n _ => Message::Malformed,\n\n },\n\n };\n\n\n\n ParsedMessage { message }\n\n }\n\n}\n", "file_path": 
"midi/src/message.rs", "rank": 15, "score": 20870.44510899617 }, { "content": "#[derive(Debug)]\n\npub enum Message {\n\n NoteOn(NoteOn),\n\n NoteOff(NoteOff),\n\n ControlChange(ControlChange),\n\n ProgramChange(ProgramChange),\n\n PitchBend(PitchBend),\n\n AfterTouch(AfterTouch),\n\n PolyAfterTouch(PolyAfterTouch),\n\n SystemExclusive(SystemExclusive),\n\n SongPosition(SongPosition),\n\n SongSelect(SongSelect),\n\n TuneRequest(TuneRequest),\n\n TimeCodeQuarterFrame(TimeCodeQuarterFrame),\n\n TimingClock(TimingClock),\n\n Start(Start),\n\n Continue(Continue),\n\n Stop(Stop),\n\n ActiveSensing(ActiveSensing),\n\n Reset(Reset),\n", "file_path": "midi/src/message.rs", "rank": 16, "score": 20867.194634992644 }, { "content": "fn run_script(script: &str, path: Option<PathBuf>, should_fail_at_runtime: bool) {\n\n let mut koto = Koto::with_settings(KotoSettings {\n\n run_tests: true,\n\n ..Default::default()\n\n });\n\n koto.set_script_path(path);\n\n\n\n let mut prelude = koto.prelude();\n\n prelude.add_map(\"midi\", koto_midi::make_module());\n\n\n\n match koto.compile(script) {\n\n Ok(_) => match koto.run() {\n\n Ok(_) => {\n\n if should_fail_at_runtime {\n\n panic!(\"Expected failure\");\n\n }\n\n }\n\n Err(error) => {\n\n if !should_fail_at_runtime {\n\n panic!(\"{}\", error);\n\n }\n\n }\n\n },\n\n Err(error) => {\n\n panic!(\"{}\", error);\n\n }\n\n }\n\n}\n\n\n", "file_path": "test_runner/tests/test_runner.rs", "rank": 17, "score": 19165.214936913726 }, { "content": "use crate::impl_midi_message;\n\nuse crate::Category;\n\nuse crate::MidiMessage;\n\n\n\n#[derive(Debug)]\n\npub struct AllSoundOff {\n\n bytes: [u8; 3],\n\n pub category: Category,\n\n}\n\n\n\nimpl AllSoundOff {\n\n pub fn new(value: u64, channel: u64) -> Self {\n\n Self {\n\n bytes: [0xB0 | channel.min(15) as u8, 120, value.min(127) as u8],\n\n category: Category::ChannelMode,\n\n }\n\n }\n\n pub fn value(&self) -> u8 {\n\n self.bytes[2]\n\n }\n", "file_path": "midi/src/message/channel_mode.rs", "rank": 18, "score": 18894.33273370735 }, { "content": "use crate::impl_midi_message;\n\nuse crate::Category;\n\nuse crate::MidiMessage;\n\n\n\n#[derive(Debug)]\n\npub struct NoteOff {\n\n bytes: [u8; 3],\n\n pub category: Category,\n\n}\n\nimpl NoteOff {\n\n pub fn new(note: u64, velocity: u64, channel: u64) -> Self {\n\n Self {\n\n bytes: [\n\n 0x80 | channel.min(15) as u8,\n\n note.min(127) as u8,\n\n velocity.min(127) as u8,\n\n ],\n\n category: Category::ChannelVoice,\n\n }\n\n }\n", "file_path": "midi/src/message/channel_voice.rs", "rank": 19, "score": 18891.197547974334 }, { "content": " }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct PitchBend {\n\n bytes: [u8; 3],\n\n bend_amount: u16,\n\n pub category: Category,\n\n}\n\n\n\nimpl PitchBend {\n\n pub fn new(bend_amount: u64, channel: u64) -> Self {\n\n let bend_amount = bend_amount.min(16383) as u16;\n\n let msb = ((bend_amount >> 7) as u8) & 0b0111_1111;\n\n let lsb = (bend_amount as u8) & 0b0111_1111;\n\n Self {\n\n bytes: [\n\n 0xE0 | channel.min(15) as u8,\n\n lsb.min(127) as u8,\n", "file_path": "midi/src/message/channel_voice.rs", "rank": 20, "score": 18891.070146760405 }, { "content": " pub category: Category,\n\n}\n\n\n\nimpl PolyModeOn {\n\n pub fn new(value: u64, channel: u64) -> Self {\n\n Self {\n\n bytes: [0xB0 | channel.min(15) as u8, 127, value.min(127) as u8],\n\n category: Category::ChannelMode,\n\n }\n\n }\n\n pub fn value(&self) -> u8 {\n\n self.bytes[2]\n\n }\n\n pub fn channel(&self) -> u8 {\n\n self.bytes[0] & 0x0F\n\n }\n\n}\n\n\n\nimpl From<&[u8]> 
for PolyModeOn {\n\n fn from(raw_bytes: &[u8]) -> Self {\n", "file_path": "midi/src/message/channel_mode.rs", "rank": 22, "score": 18890.128417384 }, { "content": " bytes: [u8; 3],\n\n pub category: Category,\n\n}\n\n\n\nimpl MonoModeOn {\n\n pub fn new(value: u64, channel: u64) -> Self {\n\n Self {\n\n bytes: [0xB0 | channel.min(15) as u8, 126, value.min(127) as u8],\n\n category: Category::ChannelMode,\n\n }\n\n }\n\n pub fn value(&self) -> u8 {\n\n self.bytes[2]\n\n }\n\n pub fn channel(&self) -> u8 {\n\n self.bytes[0] & 0x0F\n\n }\n\n}\n\n\n\nimpl From<&[u8]> for MonoModeOn {\n", "file_path": "midi/src/message/channel_mode.rs", "rank": 24, "score": 18890.04031898266 }, { "content": "\n\n#[derive(Debug)]\n\npub struct AllNotesOff {\n\n bytes: [u8; 3],\n\n pub category: Category,\n\n}\n\n\n\nimpl AllNotesOff {\n\n pub fn new(value: u64, channel: u64) -> Self {\n\n Self {\n\n bytes: [0xB0 | channel.min(15) as u8, 123, value.min(127) as u8],\n\n category: Category::ChannelMode,\n\n }\n\n }\n\n pub fn value(&self) -> u8 {\n\n self.bytes[2]\n\n }\n\n pub fn channel(&self) -> u8 {\n\n self.bytes[0] & 0x0F\n\n }\n", "file_path": "midi/src/message/channel_mode.rs", "rank": 25, "score": 18889.595981190098 }, { "content": "pub struct OmniModeOn {\n\n bytes: [u8; 3],\n\n pub category: Category,\n\n}\n\n\n\nimpl OmniModeOn {\n\n pub fn new(value: u64, channel: u64) -> Self {\n\n Self {\n\n bytes: [0xB0 | channel.min(15) as u8, 125, value.min(127) as u8],\n\n category: Category::ChannelMode,\n\n }\n\n }\n\n pub fn value(&self) -> u8 {\n\n self.bytes[2]\n\n }\n\n pub fn channel(&self) -> u8 {\n\n self.bytes[0] & 0x0F\n\n }\n\n}\n\n\n", "file_path": "midi/src/message/channel_mode.rs", "rank": 26, "score": 18889.595981190098 }, { "content": " }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct ResetAllControllers {\n\n bytes: [u8; 3],\n\n pub category: Category,\n\n}\n\n\n\nimpl ResetAllControllers {\n\n pub fn new(value: u64, channel: u64) -> Self {\n\n Self {\n\n bytes: [0xB0 | channel.min(15) as u8, 121, value.min(127) as u8],\n\n category: Category::ChannelMode,\n\n }\n\n }\n\n pub fn value(&self) -> u8 {\n\n self.bytes[2]\n\n }\n\n pub fn channel(&self) -> u8 {\n", "file_path": "midi/src/message/channel_mode.rs", "rank": 27, "score": 18889.52613880149 }, { "content": "#[derive(Debug)]\n\npub struct OmniModeOff {\n\n bytes: [u8; 3],\n\n pub category: Category,\n\n}\n\n\n\nimpl OmniModeOff {\n\n pub fn new(value: u64, channel: u64) -> Self {\n\n Self {\n\n bytes: [0xB0 | channel.min(15) as u8, 124, value.min(127) as u8],\n\n category: Category::ChannelMode,\n\n }\n\n }\n\n pub fn value(&self) -> u8 {\n\n self.bytes[2]\n\n }\n\n pub fn channel(&self) -> u8 {\n\n self.bytes[0] & 0x0F\n\n }\n\n}\n", "file_path": "midi/src/message/channel_mode.rs", "rank": 28, "score": 18889.309860996465 }, { "content": "}\n\n\n\n#[derive(Debug)]\n\npub struct LocalControl {\n\n bytes: [u8; 3],\n\n pub category: Category,\n\n}\n\n\n\nimpl LocalControl {\n\n pub fn new(value: u64, channel: u64) -> Self {\n\n Self {\n\n bytes: [0xB0 | channel.min(15) as u8, 122, value.min(127) as u8],\n\n category: Category::ChannelMode,\n\n }\n\n }\n\n pub fn value(&self) -> u8 {\n\n self.bytes[2]\n\n }\n\n pub fn channel(&self) -> u8 {\n\n self.bytes[0] & 0x0F\n", "file_path": "midi/src/message/channel_mode.rs", "rank": 29, "score": 18889.309860996465 }, { "content": "impl Default for NoteOff {\n\n fn default() -> Self {\n\n Self {\n\n bytes: [0x80, 64, 0],\n\n category: Category::ChannelVoice,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub 
struct NoteOn {\n\n bytes: [u8; 3],\n\n pub category: Category,\n\n}\n\nimpl NoteOn {\n\n pub fn new(note: u64, velocity: u64, channel: u64) -> Self {\n\n Self {\n\n bytes: [\n\n 0x90 | channel.min(15) as u8,\n\n note.min(127) as u8,\n", "file_path": "midi/src/message/channel_voice.rs", "rank": 31, "score": 18889.182537264347 }, { "content": "#[derive(Debug)]\n\npub struct ControlChange {\n\n bytes: [u8; 3],\n\n pub category: Category,\n\n}\n\n\n\nimpl ControlChange {\n\n pub fn new(note: u64, value: u64, channel: u64) -> Self {\n\n Self {\n\n bytes: [\n\n 0xB0 | channel.min(15) as u8,\n\n note.min(127) as u8,\n\n value.min(127) as u8,\n\n ],\n\n category: Category::ChannelVoice,\n\n }\n\n }\n\n pub fn note(&self) -> u8 {\n\n self.bytes[1]\n\n }\n", "file_path": "midi/src/message/channel_voice.rs", "rank": 33, "score": 18889.09037411681 }, { "content": "use crate::impl_midi_message;\n\nuse crate::Category;\n\nuse crate::MidiMessage;\n\n\n\n#[derive(Debug)]\n\npub struct TimingClock {\n\n bytes: [u8; 1],\n\n pub category: Category,\n\n}\n\n\n\nimpl Default for TimingClock {\n\n fn default() -> Self {\n\n TimingClock {\n\n bytes: [0xF8],\n\n category: Category::SystemRealtime,\n\n }\n\n }\n\n}\n\n\n\nimpl TimingClock {\n", "file_path": "midi/src/message/system_realtime.rs", "rank": 35, "score": 18887.6596595846 }, { "content": "\n\nimpl From<&[u8]> for PitchBend {\n\n fn from(raw_bytes: &[u8]) -> Self {\n\n let bend_amount = ((raw_bytes[2] as u16) << 7) | raw_bytes[1] as u16;\n\n PitchBend {\n\n bytes: [raw_bytes[0], raw_bytes[1], raw_bytes[2]],\n\n bend_amount,\n\n category: Category::ChannelVoice,\n\n }\n\n }\n\n}\n\n\n\nimpl Default for PitchBend {\n\n fn default() -> Self {\n\n // Middle point\n\n let bend_amount = 8821_u16;\n\n let msb = ((bend_amount >> 7) as u8) & 0b0111_1111;\n\n let lsb = (bend_amount as u8) & 0b0111_1111;\n\n\n\n Self {\n", "file_path": "midi/src/message/channel_voice.rs", "rank": 37, "score": 18886.87381443716 }, { "content": " category: Category::ChannelVoice,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct AfterTouch {\n\n bytes: [u8; 2],\n\n pub category: Category,\n\n}\n\n\n\nimpl AfterTouch {\n\n pub fn new(pressure: u64, channel: u64) -> Self {\n\n Self {\n\n bytes: [0xC0 | channel.min(15) as u8, pressure.min(127) as u8],\n\n category: Category::ChannelVoice,\n\n }\n\n }\n\n pub fn pressure(&self) -> u8 {\n\n self.bytes[1]\n", "file_path": "midi/src/message/channel_voice.rs", "rank": 39, "score": 18886.677965143575 }, { "content": " msb.min(127) as u8,\n\n ],\n\n bend_amount,\n\n category: Category::ChannelVoice,\n\n }\n\n }\n\n pub fn bend_amount(&self) -> u16 {\n\n self.bend_amount\n\n }\n\n pub fn channel(&self) -> u8 {\n\n self.bytes[0] & 0x0F\n\n }\n\n pub fn change_bend_amount(&mut self, bend_amount: u16) {\n\n self.bend_amount = bend_amount.min(16383);\n\n let msb = ((bend_amount >> 7) as u8) & 0b0111_1111;\n\n let lsb = (bend_amount as u8) & 0b0111_1111;\n\n self.bytes[1] = lsb;\n\n self.bytes[2] = msb;\n\n }\n\n}\n", "file_path": "midi/src/message/channel_voice.rs", "rank": 40, "score": 18886.586290948504 }, { "content": "impl PolyAfterTouch {\n\n pub fn new(note: u64, pressure: u64, channel: u64) -> Self {\n\n Self {\n\n bytes: [\n\n 0xA0 | channel.min(15) as u8,\n\n note.min(127) as u8,\n\n pressure.min(127) as u8,\n\n ],\n\n category: Category::ChannelVoice,\n\n }\n\n }\n\n pub fn note(&self) -> u8 {\n\n self.bytes[1]\n\n }\n\n pub fn pressure(&self) -> u8 {\n\n self.bytes[2]\n\n }\n\n pub fn channel(&self) -> u8 {\n\n self.bytes[0] & 
0x0F\n\n }\n", "file_path": "midi/src/message/channel_voice.rs", "rank": 41, "score": 18886.374465510504 }, { "content": " bytes: [0xB0, 64, 127],\n\n category: Category::ChannelVoice,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct ProgramChange {\n\n bytes: [u8; 2],\n\n pub category: Category,\n\n}\n\n\n\nimpl ProgramChange {\n\n pub fn new(program: u64, channel: u64) -> Self {\n\n Self {\n\n bytes: [0xC0 | channel.min(15) as u8, program.min(127) as u8],\n\n category: Category::ChannelVoice,\n\n }\n\n }\n\n pub fn program(&self) -> u8 {\n", "file_path": "midi/src/message/channel_voice.rs", "rank": 42, "score": 18886.284517290795 }, { "content": " pub fn value(&self) -> u8 {\n\n self.bytes[2]\n\n }\n\n pub fn channel(&self) -> u8 {\n\n self.bytes[0] & 0x0F\n\n }\n\n}\n\n\n\nimpl From<&[u8]> for ControlChange {\n\n fn from(raw_bytes: &[u8]) -> Self {\n\n ControlChange {\n\n bytes: [raw_bytes[0], raw_bytes[1], raw_bytes[2]],\n\n category: Category::ChannelVoice,\n\n }\n\n }\n\n}\n\n\n\nimpl Default for ControlChange {\n\n fn default() -> Self {\n\n Self {\n", "file_path": "midi/src/message/channel_voice.rs", "rank": 43, "score": 18886.131192410816 }, { "content": "impl Default for Stop {\n\n fn default() -> Self {\n\n Stop {\n\n bytes: [0xFC],\n\n category: Category::SystemRealtime,\n\n }\n\n }\n\n}\n\n\n\nimpl Stop {\n\n pub fn new() -> Self {\n\n Stop::default()\n\n }\n\n}\n\n\n\nimpl From<&[u8]> for Stop {\n\n fn from(raw_bytes: &[u8]) -> Self {\n\n Stop {\n\n bytes: [raw_bytes[0]],\n\n category: Category::SystemRealtime,\n", "file_path": "midi/src/message/system_realtime.rs", "rank": 44, "score": 18886.01863560706 }, { "content": "impl Default for Start {\n\n fn default() -> Self {\n\n Start {\n\n bytes: [0xFA],\n\n category: Category::SystemRealtime,\n\n }\n\n }\n\n}\n\n\n\nimpl Start {\n\n pub fn new() -> Self {\n\n Start::default()\n\n }\n\n}\n\n\n\nimpl From<&[u8]> for Start {\n\n fn from(raw_bytes: &[u8]) -> Self {\n\n Start {\n\n bytes: [raw_bytes[0]],\n\n category: Category::SystemRealtime,\n", "file_path": "midi/src/message/system_realtime.rs", "rank": 45, "score": 18886.01863560706 }, { "content": "impl Default for Reset {\n\n fn default() -> Self {\n\n Reset {\n\n bytes: [0xFF],\n\n category: Category::SystemRealtime,\n\n }\n\n }\n\n}\n\n\n\nimpl Reset {\n\n pub fn new() -> Self {\n\n Reset::default()\n\n }\n\n}\n\n\n\nimpl From<&[u8]> for Reset {\n\n fn from(raw_bytes: &[u8]) -> Self {\n\n Reset {\n\n bytes: [raw_bytes[0]],\n\n category: Category::SystemRealtime,\n", "file_path": "midi/src/message/system_realtime.rs", "rank": 46, "score": 18886.01863560706 }, { "content": " pub fn new() -> Self {\n\n Continue::default()\n\n }\n\n}\n\n\n\nimpl From<&[u8]> for Continue {\n\n fn from(raw_bytes: &[u8]) -> Self {\n\n Continue {\n\n bytes: [raw_bytes[0]],\n\n category: Category::SystemRealtime,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Stop {\n\n bytes: [u8; 1],\n\n pub category: Category,\n\n}\n\n\n", "file_path": "midi/src/message/system_realtime.rs", "rank": 49, "score": 18884.776955138837 }, { "content": " pub fn new() -> Self {\n\n ActiveSensing::default()\n\n }\n\n}\n\n\n\nimpl From<&[u8]> for ActiveSensing {\n\n fn from(raw_bytes: &[u8]) -> Self {\n\n ActiveSensing {\n\n bytes: [raw_bytes[0]],\n\n category: Category::SystemRealtime,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Reset {\n\n bytes: [u8; 1],\n\n pub category: Category,\n\n}\n\n\n", "file_path": "midi/src/message/system_realtime.rs", "rank": 52, "score": 18884.37036133629 }, { 
"content": " pub fn new() -> Self {\n\n TimingClock::default()\n\n }\n\n}\n\n\n\nimpl From<&[u8]> for TimingClock {\n\n fn from(raw_bytes: &[u8]) -> Self {\n\n TimingClock {\n\n bytes: [raw_bytes[0]],\n\n category: Category::SystemRealtime,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Start {\n\n bytes: [u8; 1],\n\n pub category: Category,\n\n}\n\n\n", "file_path": "midi/src/message/system_realtime.rs", "rank": 53, "score": 18884.37036133629 }, { "content": " }\n\n pub fn channel(&self) -> u8 {\n\n self.bytes[0] & 0x0F\n\n }\n\n}\n\n\n\nimpl From<&[u8]> for AfterTouch {\n\n fn from(raw_bytes: &[u8]) -> Self {\n\n AfterTouch {\n\n bytes: [raw_bytes[0], raw_bytes[1]],\n\n category: Category::ChannelVoice,\n\n }\n\n }\n\n}\n\n\n\nimpl Default for AfterTouch {\n\n fn default() -> Self {\n\n Self {\n\n bytes: [0xD0, 0],\n\n category: Category::ChannelVoice,\n", "file_path": "midi/src/message/channel_voice.rs", "rank": 54, "score": 18884.19622371064 }, { "content": " pub fn channel(&self) -> u8 {\n\n self.bytes[0] & 0x0F\n\n }\n\n}\n\n\n\nimpl From<&[u8]> for AllSoundOff {\n\n fn from(raw_bytes: &[u8]) -> Self {\n\n AllSoundOff {\n\n bytes: [raw_bytes[0], raw_bytes[1], raw_bytes[2]],\n\n category: Category::ChannelMode,\n\n }\n\n }\n\n}\n\n\n\nimpl Default for AllSoundOff {\n\n fn default() -> Self {\n\n Self {\n\n bytes: [0xB0, 120, 127],\n\n category: Category::ChannelMode,\n\n }\n", "file_path": "midi/src/message/channel_mode.rs", "rank": 55, "score": 18884.032041189603 }, { "content": " }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Continue {\n\n bytes: [u8; 1],\n\n pub category: Category,\n\n}\n\n\n\nimpl Default for Continue {\n\n fn default() -> Self {\n\n Continue {\n\n bytes: [0xFB],\n\n category: Category::SystemRealtime,\n\n }\n\n }\n\n}\n\n\n\nimpl Continue {\n", "file_path": "midi/src/message/system_realtime.rs", "rank": 56, "score": 18883.92656175107 }, { "content": " self.bytes[1]\n\n }\n\n pub fn channel(&self) -> u8 {\n\n self.bytes[0] & 0x0F\n\n }\n\n}\n\n\n\nimpl From<&[u8]> for ProgramChange {\n\n fn from(raw_bytes: &[u8]) -> Self {\n\n ProgramChange {\n\n bytes: [raw_bytes[0], raw_bytes[1]],\n\n category: Category::ChannelVoice,\n\n }\n\n }\n\n}\n\n\n\nimpl Default for ProgramChange {\n\n fn default() -> Self {\n\n Self {\n\n bytes: [0xC0, 0],\n", "file_path": "midi/src/message/channel_voice.rs", "rank": 57, "score": 18883.739012435715 }, { "content": " category: Category::ChannelVoice,\n\n }\n\n }\n\n}\n\n\n\nimpl Default for NoteOn {\n\n fn default() -> Self {\n\n Self {\n\n bytes: [0x90, 64, 0],\n\n category: Category::ChannelVoice,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct PolyAfterTouch {\n\n bytes: [u8; 3],\n\n pub category: Category,\n\n}\n\n\n", "file_path": "midi/src/message/channel_voice.rs", "rank": 58, "score": 18883.66125220501 }, { "content": "impl From<&[u8]> for OmniModeOn {\n\n fn from(raw_bytes: &[u8]) -> Self {\n\n OmniModeOn {\n\n bytes: [raw_bytes[0], raw_bytes[1], raw_bytes[2]],\n\n category: Category::ChannelMode,\n\n }\n\n }\n\n}\n\n\n\nimpl Default for OmniModeOn {\n\n fn default() -> Self {\n\n Self {\n\n bytes: [0xB0, 125, 127],\n\n category: Category::ChannelMode,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct MonoModeOn {\n", "file_path": "midi/src/message/channel_mode.rs", "rank": 59, "score": 18883.35890668746 }, { "content": " }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct ActiveSensing {\n\n bytes: [u8; 1],\n\n pub category: Category,\n\n}\n\n\n\nimpl Default for ActiveSensing {\n\n fn default() -> Self 
{\n\n ActiveSensing {\n\n bytes: [0xFE],\n\n category: Category::SystemRealtime,\n\n }\n\n }\n\n}\n\n\n\nimpl ActiveSensing {\n", "file_path": "midi/src/message/system_realtime.rs", "rank": 60, "score": 18883.255256048684 }, { "content": " fn from(raw_bytes: &[u8]) -> Self {\n\n MonoModeOn {\n\n bytes: [raw_bytes[0], raw_bytes[1], raw_bytes[2]],\n\n category: Category::ChannelMode,\n\n }\n\n }\n\n}\n\n\n\nimpl Default for MonoModeOn {\n\n fn default() -> Self {\n\n Self {\n\n bytes: [0xB0, 126, 127],\n\n category: Category::ChannelMode,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct PolyModeOn {\n\n bytes: [u8; 3],\n", "file_path": "midi/src/message/channel_mode.rs", "rank": 61, "score": 18883.041922276043 }, { "content": "}\n\n\n\nimpl From<&[u8]> for AllNotesOff {\n\n fn from(raw_bytes: &[u8]) -> Self {\n\n AllNotesOff {\n\n bytes: [raw_bytes[0], raw_bytes[1], raw_bytes[2]],\n\n category: Category::ChannelMode,\n\n }\n\n }\n\n}\n\n\n\nimpl Default for AllNotesOff {\n\n fn default() -> Self {\n\n Self {\n\n bytes: [0xB0, 123, 127],\n\n category: Category::ChannelMode,\n\n }\n\n }\n\n}\n\n\n", "file_path": "midi/src/message/channel_mode.rs", "rank": 63, "score": 18882.43772744454 }, { "content": " }\n\n}\n\n\n\nimpl From<&[u8]> for LocalControl {\n\n fn from(raw_bytes: &[u8]) -> Self {\n\n LocalControl {\n\n bytes: [raw_bytes[0], raw_bytes[1], raw_bytes[2]],\n\n category: Category::ChannelMode,\n\n }\n\n }\n\n}\n\n\n\nimpl Default for LocalControl {\n\n fn default() -> Self {\n\n Self {\n\n bytes: [0xB0, 122, 127],\n\n category: Category::ChannelMode,\n\n }\n\n }\n\n}\n", "file_path": "midi/src/message/channel_mode.rs", "rank": 64, "score": 18882.112502526856 }, { "content": "}\n\n\n\nimpl From<&[u8]> for PolyAfterTouch {\n\n fn from(raw_bytes: &[u8]) -> Self {\n\n PolyAfterTouch {\n\n bytes: [raw_bytes[0], raw_bytes[1], raw_bytes[2]],\n\n category: Category::ChannelVoice,\n\n }\n\n }\n\n}\n\n\n\nimpl Default for PolyAfterTouch {\n\n fn default() -> Self {\n\n Self {\n\n bytes: [0xA0, 64, 127],\n\n category: Category::ChannelVoice,\n\n }\n\n }\n\n}\n\n\n", "file_path": "midi/src/message/channel_voice.rs", "rank": 65, "score": 18882.112502526856 }, { "content": "\n\nimpl From<&[u8]> for OmniModeOff {\n\n fn from(raw_bytes: &[u8]) -> Self {\n\n OmniModeOff {\n\n bytes: [raw_bytes[0], raw_bytes[1], raw_bytes[2]],\n\n category: Category::ChannelMode,\n\n }\n\n }\n\n}\n\n\n\nimpl Default for OmniModeOff {\n\n fn default() -> Self {\n\n Self {\n\n bytes: [0xB0, 124, 127],\n\n category: Category::ChannelMode,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n", "file_path": "midi/src/message/channel_mode.rs", "rank": 66, "score": 18881.903981902557 }, { "content": " self.bytes[0] & 0x0F\n\n }\n\n}\n\n\n\nimpl From<&[u8]> for ResetAllControllers {\n\n fn from(raw_bytes: &[u8]) -> Self {\n\n ResetAllControllers {\n\n bytes: [raw_bytes[0], raw_bytes[1], raw_bytes[2]],\n\n category: Category::ChannelMode,\n\n }\n\n }\n\n}\n\n\n\nimpl Default for ResetAllControllers {\n\n fn default() -> Self {\n\n Self {\n\n bytes: [0xB0, 121, 127],\n\n category: Category::ChannelMode,\n\n }\n\n }\n", "file_path": "midi/src/message/channel_mode.rs", "rank": 67, "score": 18881.88063837338 }, { "content": " pub fn note(&self) -> u8 {\n\n self.bytes[1]\n\n }\n\n pub fn velocity(&self) -> u8 {\n\n self.bytes[2]\n\n }\n\n pub fn channel(&self) -> u8 {\n\n self.bytes[0] & 0x0F\n\n }\n\n}\n\n\n\nimpl From<&[u8]> for NoteOff {\n\n fn from(raw_bytes: &[u8]) -> Self {\n\n NoteOff {\n\n bytes: [raw_bytes[0], raw_bytes[1], 
raw_bytes[2]],\n\n category: Category::ChannelVoice,\n\n }\n\n }\n\n}\n\n\n", "file_path": "midi/src/message/channel_voice.rs", "rank": 68, "score": 18881.79020738956 }, { "content": " velocity.min(127) as u8,\n\n ],\n\n category: Category::ChannelVoice,\n\n }\n\n }\n\n pub fn note(&self) -> u8 {\n\n self.bytes[1]\n\n }\n\n pub fn velocity(&self) -> u8 {\n\n self.bytes[2]\n\n }\n\n pub fn channel(&self) -> u8 {\n\n self.bytes[0] & 0x0F\n\n }\n\n}\n\n\n\nimpl From<&[u8]> for NoteOn {\n\n fn from(raw_bytes: &[u8]) -> Self {\n\n NoteOn {\n\n bytes: [raw_bytes[0], raw_bytes[1], raw_bytes[2]],\n", "file_path": "midi/src/message/channel_voice.rs", "rank": 69, "score": 18881.596335535683 }, { "content": " PolyModeOn {\n\n bytes: [raw_bytes[0], raw_bytes[1], raw_bytes[2]],\n\n category: Category::ChannelMode,\n\n }\n\n }\n\n}\n\n\n\nimpl Default for PolyModeOn {\n\n fn default() -> Self {\n\n Self {\n\n bytes: [0xB0, 127, 127],\n\n category: Category::ChannelMode,\n\n }\n\n }\n\n}\n\n\n\nimpl_midi_message!(AllSoundOff);\n\nimpl_midi_message!(ResetAllControllers);\n\nimpl_midi_message!(LocalControl);\n\nimpl_midi_message!(AllNotesOff);\n\nimpl_midi_message!(OmniModeOff);\n\nimpl_midi_message!(OmniModeOn);\n\nimpl_midi_message!(MonoModeOn);\n\nimpl_midi_message!(PolyModeOn);\n", "file_path": "midi/src/message/channel_mode.rs", "rank": 71, "score": 18881.460886645265 }, { "content": " bytes: [0xE0, lsb, msb],\n\n bend_amount,\n\n category: Category::ChannelVoice,\n\n }\n\n }\n\n}\n\n\n\nimpl_midi_message!(NoteOff);\n\nimpl_midi_message!(NoteOn);\n\nimpl_midi_message!(PolyAfterTouch);\n\nimpl_midi_message!(ControlChange);\n\nimpl_midi_message!(ProgramChange);\n\nimpl_midi_message!(AfterTouch);\n\nimpl_midi_message!(PitchBend);\n", "file_path": "midi/src/message/channel_voice.rs", "rank": 72, "score": 18880.61005289713 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl_midi_message!(TimingClock);\n\nimpl_midi_message!(Start);\n\nimpl_midi_message!(Continue);\n\nimpl_midi_message!(Stop);\n\nimpl_midi_message!(ActiveSensing);\n\nimpl_midi_message!(Reset);\n", "file_path": "midi/src/message/system_realtime.rs", "rank": 74, "score": 18873.22151462713 }, { "content": " if vm.get_args(&args).len() == 1 {\n\n match vm.get_args(&args) {\n\n [Value::List(message)] => {\n\n if let Ok(arguments) = collect_list_of_u64(message, $error_literal) {\n\n if let [$($field),*] = &arguments[..] {\n\n let mut message_koto = ValueMap::new();\n\n // dbg!($(*$field),*);\n\n let message = <$enum_key>::new($(*$field),*);\n\n // dbg!(&message);\n\n message_koto.add_value(\"type\", name_literal.clone().into());\n\n message_koto.add_value(\"category\", $category_literal.into());\n\n $(\n\n // dbg!($field);\n\n message_koto.add_value(stringify!($field), message.$field().into());\n\n )*\n\n impl_pack!(message_koto, message);\n\n Ok(Value::Map(message_koto))\n\n }\n\n else {\n\n runtime_error!($error_literal)\n", "file_path": "midi/src/lib.rs", "rank": 75, "score": 14.147329725418984 }, { "content": " match vm.get_args(&args) {\n\n [Value::List(message)] => {\n\n if let Ok(arguments) = collect_list_of_value_list(message, error_literal) {\n\n if let [manufacturer_id,message] = &arguments[..] 
{\n\n match manufacturer_id.len() {\n\n 1 | 3 => {\n\n match message.len() {\n\n 0 => runtime_error!(error_literal),\n\n _ => { \n\n if let Ok(m_id) = collect_list_of_u8(manufacturer_id, error_literal) {\n\n if let Ok(data) = collect_list_of_u8(message, error_literal) {\n\n let mut message_koto = ValueMap::new();\n\n let message = SystemExclusive::new(&m_id[..], &data[..]);\n\n message_koto.add_value(\"type\", \"system_exclusive\".into());\n\n message_koto.add_value(\"category\", \"system_common\".into());\n\n let m_id = m_id.iter().map(|&x| x.into()).collect::<Vec<Value>>();\n\n message_koto.add_value(\"manufacturer_id\", Value::List(ValueList::from_slice(&m_id[..])));\n\n impl_pack!(message_koto, message);\n\n Ok(Value::Map(message_koto))\n\n }\n", "file_path": "midi/src/lib.rs", "rank": 76, "score": 14.002573311673503 }, { "content": " }\n\n}\n\nmacro_rules! impl_pack {\n\n ($map:ident, $message:ident) => {\n\n $map.add_fn(\"pack\", move |_, _| {\n\n Ok(Value::List(ValueList::from_slice(\n\n &$message\n\n .pack()\n\n .into_iter()\n\n .map(|byte| byte.into())\n\n .collect::<Vec<Value>>()[..],\n\n )))\n\n });\n\n };\n\n}\n\n\n\nmacro_rules! make_koto_message_constructor {\n\n ($map:ident, $enum_key:ident, $category_literal:literal, $($field:ident),*, $error_literal:literal) => {\n\n let name_literal = pascal_case_to_underscore_separated_literal(stringify!($enum_key));\n\n $map.add_fn(&name_literal.clone(), move |vm, args| {\n", "file_path": "midi/src/lib.rs", "rank": 77, "score": 13.363373172748721 }, { "content": "#![feature(exclusive_range_pattern)]\n\n\n\nmod message;\n\nuse message::*;\n\n\n\nuse koto::runtime::{\n\n runtime_error, RuntimeError, Value, ValueList, ValueMap, ValueNumber,\n\n};\n\n\n\n// TODO: Solve unnecessary repetition of list collectors for different types ot cases if there is.\n", "file_path": "midi/src/lib.rs", "rank": 78, "score": 13.246796732112287 }, { "content": "\n\n module.add_fn(\"parse\", |vm, args| {\n\n if vm.get_args(&args).len() == 1 {\n\n match vm.get_args(&args) {\n\n [Value::List(message)] => {\n\n let mut message_koto = ValueMap::new();\n\n if let Ok(midi_message) = collect_list_of_u8(\n\n message,\n\n \"parse requires a single list of one or more positive integers as its argument\",\n\n ) {\n\n let parsed = ParsedMessage::from(&midi_message[..]);\n\n let message = parsed.message;\n\n\n\n match message {\n\n Message::NoteOn(_)\n\n | Message::NoteOff(_)\n\n | Message::ControlChange(_)\n\n | Message::ProgramChange(_)\n\n | Message::PitchBend(_)\n\n | Message::AfterTouch(_)\n", "file_path": "midi/src/lib.rs", "rank": 79, "score": 11.571977413592174 }, { "content": " }\n\n } else {\n\n Ok(Value::Empty)\n\n }\n\n }\n\n _ => runtime_error!($error_literal),\n\n }\n\n } else {\n\n runtime_error!($error_literal)\n\n }\n\n })\n\n };\n\n\n\n ($map:ident, $enum_key:ty, $category_literal:literal, $error_literal:literal) => {\n\n let name_literal = pascal_case_to_underscore_separated_literal(stringify!($enum_key));\n\n $map.add_fn(&name_literal.clone(), move |vm, args| {\n\n if vm.get_args(&args).len() == 0 {\n\n let mut message_koto = ValueMap::new();\n\n let message = <$enum_key>::default();\n\n message_koto.add_value(\"type\", name_literal.clone().into());\n", "file_path": "midi/src/lib.rs", "rank": 80, "score": 11.098918713485673 }, { "content": " message_koto.add_value(\"category\", $category_literal.into());\n\n impl_pack!(message_koto, message);\n\n Ok(Value::Map(message_koto))\n\n } else {\n\n runtime_error!($error_literal)\n\n }\n\n })\n\n 
}\n\n}\n\n\n\n\n\nmacro_rules! make_koto_message {\n\n ($map:ident, $message:ident, $name_literal:literal,$($field:ident),*) => { \n\n $map.add_value(\"type\", $name_literal.into());\n\n $(\n\n $map.add_value(stringify!($field),$message.$field().into());\n\n )*\n\n impl_pack!($map, $message);\n\n }\n\n}\n\n\n\n\n", "file_path": "midi/src/lib.rs", "rank": 81, "score": 10.515011842494037 }, { "content": " }\n\n Message::MonoModeOn(message) => {\n\n make_koto_message!(message_koto, message, \"mono_mode_on\", value, channel);\n\n message_koto.add_value(\"note\", 126.into());\n\n }\n\n Message::PolyModeOn(message) => {\n\n make_koto_message!(message_koto, message, \"poly_mode_on\", value, channel);\n\n message_koto.add_value(\"note\", 127.into());\n\n }\n\n Message::SystemExclusive(message) => {\n\n message_koto.add_value(\"type\", \"system_exclusive\".into());\n\n let m_id = message.manufacturer_id.iter().map(|&x| x.into()).collect::<Vec<Value>>();\n\n message_koto.add_value(\"manufacturer_id\", Value::List(ValueList::from_slice(&m_id[..])));\n\n impl_pack!(message_koto, message);\n\n }\n\n Message::SongPosition(message) => {\n\n make_koto_message!(message_koto, message, \"song_position\", midi_beats_elapsed);\n\n }\n\n Message::SongSelect(message) => {\n\n make_koto_message!(message_koto, message, \"song_select\", number);\n", "file_path": "midi/src/lib.rs", "rank": 82, "score": 10.262039172730955 }, { "content": " Message::Continue(message) => {\n\n message_koto.add_value(\"type\", \"continue\".into());\n\n impl_pack!(message_koto, message);\n\n }\n\n Message::Stop(message) => {\n\n message_koto.add_value(\"type\", \"stop\".into());\n\n impl_pack!(message_koto, message);\n\n }\n\n Message::ActiveSensing(message) => {\n\n message_koto.add_value(\"type\", \"active_sensing\".into());\n\n impl_pack!(message_koto, message);\n\n }\n\n Message::Reset(message) => {\n\n message_koto.add_value(\"type\", \"reset\".into());\n\n impl_pack!(message_koto, message);\n\n }\n\n Message::Undefined => {\n\n message_koto.add_value(\"type\", \"undefined\".into());\n\n }\n\n Message::Malformed => {\n", "file_path": "midi/src/lib.rs", "rank": 83, "score": 9.290515782930395 }, { "content": "# koto_midi\n\n\n\n## Introduction\n\n\n\n`koto_midi` is a module for working with midi messages in koto scripts.\n\n\n\nFor the summary of the api please run `just tests` or `cargo watch -x \"test --test test_runner\"` and check the `stdout`.\n\n\n\n## Embedding\n\n\n\nIn the application which embeds `koto`,\n\n\n\n```rust\n\n\n\n// ..\n\n\n\nlet mut koto = Koto::new();\n\nlet mut prelude = koto.prelude();\n\nprelude.add_map(\"midi\", koto_midi::make_module());\n\n\n\n// ..\n\n\n\n```\n\n\n\nIn the koto script which `koto_midi` wants the be used in, it could be brought to scope by,\n\n\n\n```coffee\n\n\n\nimport midi\n\n# ..\n\n\n\n```\n\n\n\n| For more on using and embedding `koto` in your rust applications please visit [koto repository](https://github.com/koto-lang/koto).\n", "file_path": "README.md", "rank": 84, "score": 9.10246762852232 }, { "content": " }\n\n Message::TuneRequest(message) => {\n\n message_koto.add_value(\"type\", \"tune_request\".into());\n\n impl_pack!(message_koto, message);\n\n }\n\n Message::EndOfExclusive(message) => {\n\n message_koto.add_value(\"type\", \"end_of_exclusive\".into());\n\n impl_pack!(message_koto, message);\n\n }\n\n Message::TimeCodeQuarterFrame(message) => {\n\n make_koto_message!(message_koto, message, \"time_code_quarter_frame\", message_type, values);\n\n }\n\n 
Message::TimingClock(message) => {\n\n message_koto.add_value(\"type\", \"timing_clock\".into());\n\n impl_pack!(message_koto, message);\n\n }\n\n Message::Start(message) => {\n\n message_koto.add_value(\"type\", \"start\".into());\n\n impl_pack!(message_koto, message);\n\n }\n", "file_path": "midi/src/lib.rs", "rank": 85, "score": 9.027126318183983 }, { "content": " MonoModeOn,\n\n \"channel_mode\",\n\n value,\n\n channel,\n\n \"mono_mode_on requires a single list of exactly two positive integers as its argument\"\n\n );\n\n make_koto_message_constructor!(\n\n message_constructors,\n\n PolyModeOn,\n\n \"channel_mode\",\n\n value,\n\n channel,\n\n \"poly_mode_on requires a single list of exactly two positive integers as its argument\"\n\n );\n\n\n\n // TODO: This is a very basic sysex implementation. It might be extended later. Find out if it is necessary.\n\n\n\n message_constructors.add_fn(\"system_exclusive\", |vm, args| {\n\n let error_literal = \"system_exclusive requires a list with single or 3 bytes for its first argument and a list with one or more bytes for its second argument\";\n\n if vm.get_args(&args).len() == 2 {\n", "file_path": "midi/src/lib.rs", "rank": 86, "score": 8.967335219245113 }, { "content": " \"channel_voice\",\n\n \"channel_mode\",\n\n \"system_common\",\n\n \"system_realtime\",\n\n \"unknown\"\n\n );\n\n\n\n let mut message_constructors = ValueMap::new();\n\n\n\n make_koto_message_constructor!(\n\n message_constructors,\n\n NoteOff,\n\n \"channel_voice\",\n\n note,\n\n velocity,\n\n channel,\n\n \"note_off requires a single list of exactly three integers as its argument\"\n\n );\n\n make_koto_message_constructor!(\n\n message_constructors,\n", "file_path": "midi/src/lib.rs", "rank": 87, "score": 8.845262262420736 }, { "content": " message_koto.add_value(\"type\", \"malformed\".into());\n\n }\n\n }\n\n\n\n Ok(Value::Map(message_koto))\n\n } else {\n\n message_koto.add_value(\"type\", \"malformed\".into());\n\n message_koto.add_value(\"category\", \"unknown\".into());\n\n // Returns an empty value if the message is malformed.\n\n Ok(Value::Map(message_koto))\n\n }\n\n }\n\n _ => runtime_error!(\n\n \"parse requires a single list of one or more positive integers as its argument\"\n\n ),\n\n }\n\n } else {\n\n runtime_error!(\"parse requires a single list of one or more positive integers as its argument\")\n\n }\n\n });\n\n\n\n module.add_map(\"types\", types);\n\n module.add_map(\"categories\", categories);\n\n module.add_map(\"message\", message_constructors);\n\n module\n\n}\n\n\n", "file_path": "midi/src/lib.rs", "rank": 88, "score": 8.733617660822041 }, { "content": " \"poly_mode_on\",\n\n \"system_exclusive\",\n\n \"time_code_quarter_frame\",\n\n \"song_position\",\n\n \"song_select\",\n\n \"tune_request\",\n\n \"end_of_exclusive\",\n\n \"timing_clock\",\n\n \"start\",\n\n \"continue\",\n\n \"stop\",\n\n \"active_sensing\",\n\n \"reset\",\n\n \"undefined\",\n\n \"malformed\"\n\n );\n\n\n\n let mut categories = ValueMap::new();\n\n types!(\n\n categories,\n", "file_path": "midi/src/lib.rs", "rank": 89, "score": 8.56066699616916 }, { "content": " | Message::PolyAfterTouch(_) => {\n\n message_koto.add_value(\"category\", \"channel_voice\".into())\n\n }\n\n Message::AllSoundOff(_)\n\n | Message::ResetAllControllers(_)\n\n | Message::LocalControl(_)\n\n | Message::AllNotesOff(_)\n\n | Message::OmniModeOff(_)\n\n | Message::OmniModeOn(_)\n\n | Message::MonoModeOn(_)\n\n | Message::PolyModeOn(_) => {\n\n message_koto.add_value(\"category\", 
\"channel_mode\".into())\n\n }\n\n Message::SystemExclusive(_)\n\n | Message::SongPosition(_)\n\n | Message::SongSelect(_)\n\n | Message::TuneRequest(_)\n\n | Message::EndOfExclusive(_)\n\n | Message::TimeCodeQuarterFrame(_) => {\n\n message_koto.add_value(\"category\", \"system_common\".into())\n", "file_path": "midi/src/lib.rs", "rank": 90, "score": 8.384983757998326 }, { "content": " }\n\n Message::TimingClock(_)\n\n | Message::Start(_)\n\n | Message::Continue(_)\n\n | Message::Stop(_)\n\n | Message::ActiveSensing(_)\n\n | Message::Reset(_) => {\n\n message_koto.add_value(\"category\", \"system_realtime\".into())\n\n }\n\n Message::Undefined | Message::Malformed => {\n\n message_koto.add_value(\"category\", \"unknown\".into())\n\n }\n\n };\n\n\n\n match message {\n\n Message::NoteOff(message) => {\n\n make_koto_message!(message_koto, message, \"note_off\", note, velocity, channel);\n\n }\n\n Message::NoteOn(message) => {\n\n make_koto_message!(message_koto, message, \"note_on\", note, velocity, channel);\n", "file_path": "midi/src/lib.rs", "rank": 91, "score": 7.92846139193941 }, { "content": " _ => runtime_error!(error_literal),\n\n }\n\n } else {\n\n runtime_error!(error_literal)\n\n }\n\n });\n\n\n\n // TODO: Find out what are possible message types and values for time_code_quarter_frame\n\n\n\n make_koto_message_constructor!(\n\n message_constructors,\n\n TimeCodeQuarterFrame,\n\n \"system_common\",\n\n message_type,\n\n values,\n\n \"time_code_quarter_frame requires a single list of exactly two positive integers as its argument\"\n\n );\n\n make_koto_message_constructor!(\n\n message_constructors,\n\n SongPosition,\n", "file_path": "midi/src/lib.rs", "rank": 92, "score": 7.325401980464087 }, { "content": " }\n\n Message::ResetAllControllers(message) => {\n\n make_koto_message!(message_koto, message, \"reset_all_controllers\", value, channel);\n\n message_koto.add_value(\"note\", 121.into());\n\n }\n\n Message::LocalControl(message) => {\n\n make_koto_message!(message_koto, message, \"local_control\", value, channel);\n\n message_koto.add_value(\"note\", 122.into());\n\n }\n\n Message::AllNotesOff(message) => {\n\n make_koto_message!(message_koto, message, \"all_notes_off\", value, channel);\n\n message_koto.add_value(\"note\", 123.into());\n\n }\n\n Message::OmniModeOff(message) => {\n\n make_koto_message!(message_koto, message, \"omni_mode_off\", value, channel);\n\n message_koto.add_value(\"note\", 124.into());\n\n }\n\n Message::OmniModeOn(message) => {\n\n make_koto_message!(message_koto, message, \"omni_mode_on\", value, channel);\n\n message_koto.add_value(\"note\", 125.into());\n", "file_path": "midi/src/lib.rs", "rank": 93, "score": 7.091761628991339 }, { "content": " }\n\n Message::ControlChange(message) => {\n\n make_koto_message!(message_koto, message, \"control_change\", note, value, channel);\n\n }\n\n Message::ProgramChange(message) => {\n\n make_koto_message!(message_koto, message, \"program_change\", program, channel);\n\n }\n\n\n\n Message::AfterTouch(message) => {\n\n make_koto_message!(message_koto, message, \"after_touch\", pressure, channel);\n\n }\n\n Message::PolyAfterTouch(message) => {\n\n make_koto_message!(message_koto, message, \"poly_after_touch\", note, pressure, channel);\n\n }\n\n Message::PitchBend(message) => {\n\n make_koto_message!(message_koto, message, \"pitch_bend\", bend_amount, channel);\n\n }\n\n Message::AllSoundOff(message) => {\n\n make_koto_message!(message_koto, message, \"all_sound_off\", value, channel);\n\n 
message_koto.add_value(\"note\", 120.into());\n", "file_path": "midi/src/lib.rs", "rank": 94, "score": 6.097627846123013 }, { "content": " ResetAllControllers,\n\n \"channel_mode\",\n\n value,\n\n channel,\n\n \"reset_all_controllers requires a single list of exactly two positive integers as its argument\"\n\n );\n\n make_koto_message_constructor!(\n\n message_constructors,\n\n LocalControl,\n\n \"channel_mode\",\n\n value,\n\n channel,\n\n \"local_control requires a single list of exactly two positive integers as its argument\"\n\n );\n\n make_koto_message_constructor!(\n\n message_constructors,\n\n AllNotesOff,\n\n \"channel_mode\",\n\n value,\n\n channel,\n", "file_path": "midi/src/lib.rs", "rank": 95, "score": 5.743208207074657 }, { "content": " \"all_notes_off requires a single list of exactly two positive integers as its argument\"\n\n );\n\n make_koto_message_constructor!(\n\n message_constructors,\n\n OmniModeOff,\n\n \"channel_mode\",\n\n value,\n\n channel,\n\n \"omni_mode_off requires a single list of exactly two positive integers as its argument\"\n\n );\n\n make_koto_message_constructor!(\n\n message_constructors,\n\n OmniModeOn,\n\n \"channel_mode\",\n\n value,\n\n channel,\n\n \"omni_mode_on requires a single list of exactly two positive integers as its argument\"\n\n );\n\n make_koto_message_constructor!(\n\n message_constructors,\n", "file_path": "midi/src/lib.rs", "rank": 96, "score": 5.390622634249553 }, { "content": " );\n\n\n\n make_koto_message_constructor!(\n\n message_constructors,\n\n PitchBend,\n\n \"channel_voice\",\n\n bend_amount,\n\n channel,\n\n \"pitch_bend requires a single list of exactly two positive integers as its argument\"\n\n );\n\n make_koto_message_constructor!(\n\n message_constructors,\n\n AllSoundOff,\n\n \"channel_mode\",\n\n value,\n\n channel,\n\n \"all_sound_off requires a single list of exactly two positive integers as its argument\"\n\n );\n\n make_koto_message_constructor!(\n\n message_constructors,\n", "file_path": "midi/src/lib.rs", "rank": 97, "score": 4.943031529372066 }, { "content": " \"system_common\",\n\n midi_beats_elapsed,\n\n \"song_position requires a single list of exactly one positive integer as its argument\"\n\n );\n\n make_koto_message_constructor!(\n\n message_constructors,\n\n SongSelect,\n\n \"system_common\",\n\n number,\n\n \"song_select requires a single list of exactly one positive integer as its argument\"\n\n );\n\n\n\n make_koto_message_constructor!(\n\n message_constructors,\n\n TuneRequest,\n\n \"system_common\",\n\n \"tune_request does not take any arguments\"\n\n );\n\n make_koto_message_constructor!(\n\n message_constructors, \n", "file_path": "midi/src/lib.rs", "rank": 98, "score": 4.868343861095686 }, { "content": " note,\n\n value,\n\n channel,\n\n \"control_change requires a single list of exactly three integers as its argument\"\n\n );\n\n make_koto_message_constructor!(\n\n message_constructors,\n\n ProgramChange,\n\n \"channel_voice\",\n\n program,\n\n channel,\n\n \"program_change requires a single list of exactly two positive integers as its argument\"\n\n );\n\n make_koto_message_constructor!(\n\n message_constructors,\n\n AfterTouch,\n\n \"channel_voice\",\n\n pressure,\n\n channel,\n\n \"after_touch requires a single list of exactly two positive integers as its argument\"\n", "file_path": "midi/src/lib.rs", "rank": 99, "score": 4.4673964192710365 } ]
Rust
avr-hal-generic/src/wdt.rs
IamTheCarl/avr-hal
10d311ba74a07289d0ad3afa05f4b22800a2ec7a
use core::marker::PhantomData; #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] pub enum Timeout { Ms16, Ms32, Ms64, Ms125, Ms250, Ms500, Ms1000, Ms2000, Ms4000, Ms8000, } pub trait WdtOps<H> { type MCUSR; fn raw_init(&mut self, m: &Self::MCUSR); fn raw_start(&mut self, timeout: Timeout) -> Result<(), ()>; fn raw_feed(&mut self); fn raw_stop(&mut self); } pub struct Wdt<H, WDT> { p: WDT, _h: PhantomData<H>, } impl<H, WDT: WdtOps<H>> Wdt<H, WDT> { pub fn new(mut p: WDT, m: &WDT::MCUSR) -> Self { p.raw_init(m); Self { p, _h: PhantomData } } pub fn start(&mut self, timeout: Timeout) -> Result<(), ()> { self.p.raw_start(timeout) } pub fn feed(&mut self) { self.p.raw_feed() } pub fn stop(&mut self) { self.p.raw_stop() } } #[macro_export] macro_rules! impl_wdt { ( hal: $HAL:ty, peripheral: $WDT:ty, mcusr: $MCUSR:ty, timeout: |$to:ident, $w:ident| $to_match:expr, ) => { impl $crate::wdt::WdtOps<$HAL> for $WDT { type MCUSR = $MCUSR; #[inline] fn raw_init(&mut self, m: &Self::MCUSR) { m.modify(|_, w| w.wdrf().clear_bit()); } #[inline] fn raw_start(&mut self, timeout: Timeout) -> Result<(), ()> { $crate::avr_device::interrupt::free(|_| { self.raw_feed(); self.wdtcsr .modify(|_, w| w.wdce().set_bit().wde().set_bit()); self.wdtcsr.write(|w| { let $to = timeout; let $w = w; ($to_match).wde().set_bit().wdce().clear_bit() }); Ok(()) }) } #[inline] fn raw_feed(&mut self) { avr_device::asm::wdr(); } #[inline] fn raw_stop(&mut self) { $crate::avr_device::interrupt::free(|_| { self.raw_feed(); self.wdtcsr .modify(|_, w| w.wdce().set_bit().wde().set_bit()); self.wdtcsr.reset(); }) } } }; }
use core::marker::PhantomData; #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] pub enum Timeout { Ms16, Ms32, Ms64, Ms125, Ms250, Ms500, Ms1000, Ms2000, Ms4000, Ms8000, } pub trait WdtOps<H> { type MCUSR; fn raw_init(&mut self, m: &Self::MCUSR); fn raw_start(&mut self, timeout: Timeout) -> Result<(), ()>; fn raw_feed(&mut self); fn raw_stop(&mut self); } pub struct Wdt<H, WDT> { p: WDT, _h: PhantomData<H>, } impl<H, WDT: WdtOps<H>> Wdt<H, WDT> { pub fn new(mut p: WDT, m: &WDT::MCUSR) -> Self { p.raw_init(m); Self { p, _h: PhantomData } } pub fn start(&mut self, timeout: Timeout) -> Result<(), ()> { self.p.raw_start(timeout) } pub fn feed(&mut self) { self.p.raw_feed() } pub fn stop(&mut self) { self.p.raw_stop() } } #[macro_export] macro_rules! impl_wdt { ( hal: $HAL:ty, peripheral: $WDT:ty, mcusr: $MCUSR:ty,
$crate::avr_device::interrupt::free(|_| { self.raw_feed(); self.wdtcsr .modify(|_, w| w.wdce().set_bit().wde().set_bit()); self.wdtcsr.reset(); }) } } }; }
timeout: |$to:ident, $w:ident| $to_match:expr, ) => { impl $crate::wdt::WdtOps<$HAL> for $WDT { type MCUSR = $MCUSR; #[inline] fn raw_init(&mut self, m: &Self::MCUSR) { m.modify(|_, w| w.wdrf().clear_bit()); } #[inline] fn raw_start(&mut self, timeout: Timeout) -> Result<(), ()> { $crate::avr_device::interrupt::free(|_| { self.raw_feed(); self.wdtcsr .modify(|_, w| w.wdce().set_bit().wde().set_bit()); self.wdtcsr.write(|w| { let $to = timeout; let $w = w; ($to_match).wde().set_bit().wdce().clear_bit() }); Ok(()) }) } #[inline] fn raw_feed(&mut self) { avr_device::asm::wdr(); } #[inline] fn raw_stop(&mut self) {
random
[ { "content": " pub trait Sealed {}\n\n}\n\npub(crate) use sealed::Sealed;\n", "file_path": "avr-hal-generic/src/lib.rs", "rank": 1, "score": 114647.06503214789 }, { "content": "/// A clock speed\n\npub trait Clock {\n\n /// Frequency of this clock in Hz\n\n const FREQ: u32;\n\n}\n\n\n\n/// 24 MHz Clock\n\n#[derive(ufmt::derive::uDebug, Debug)]\n\npub struct MHz24;\n\nimpl Clock for MHz24 {\n\n const FREQ: u32 = 24_000_000;\n\n}\n\n\n\n/// 20 MHz Clock\n\n#[derive(ufmt::derive::uDebug, Debug)]\n\npub struct MHz20;\n\nimpl Clock for MHz20 {\n\n const FREQ: u32 = 20_000_000;\n\n}\n\n\n\n/// 16 MHz Clock\n", "file_path": "avr-hal-generic/src/clock.rs", "rank": 2, "score": 114647.06503214789 }, { "content": "/// Provide a `into_baudrate()` method for integers.\n\n///\n\n/// This extension trait allows conveniently initializing a baudrate by using\n\n///\n\n/// ```\n\n/// let mut serial = arduino_uno::Serial::new(\n\n/// dp.USART0,\n\n/// pins.d0,\n\n/// pins.d1.into_output(&mut pins.ddr),\n\n/// 57600.into_baudrate(),\n\n/// );\n\n/// ```\n\n///\n\n/// instead of having to call [`Baudrate::new(57600)`](Baudrate::new).\n\npub trait BaudrateExt {\n\n /// Calculate baudrate parameters from this number.\n\n fn into_baudrate<CLOCK: crate::clock::Clock>(self) -> Baudrate<CLOCK>;\n\n}\n\n\n\nimpl BaudrateExt for u32 {\n\n fn into_baudrate<CLOCK: crate::clock::Clock>(self) -> Baudrate<CLOCK> {\n\n Baudrate::new(self)\n\n }\n\n}\n\n\n", "file_path": "avr-hal-generic/src/usart.rs", "rank": 3, "score": 112069.8816241304 }, { "content": "pub trait PinOps {\n\n type Dynamic;\n\n\n\n fn into_dynamic(self) -> Self::Dynamic;\n\n\n\n unsafe fn out_set(&mut self);\n\n unsafe fn out_clear(&mut self);\n\n unsafe fn out_toggle(&mut self);\n\n unsafe fn out_get(&self) -> bool;\n\n\n\n unsafe fn in_get(&self) -> bool;\n\n\n\n unsafe fn make_output(&mut self);\n\n unsafe fn make_input(&mut self, pull_up: bool);\n\n}\n\n\n\n/// Representation of an MCU pin.\n\n///\n\n/// # Design Rationale\n\n/// We want individual types per pin to model constraints which depend on a specific pin. 
For\n", "file_path": "avr-hal-generic/src/port.rs", "rank": 4, "score": 112064.27028891878 }, { "content": "/// Same as [`BaudrateExt`] but accounts for an errata of certain Arduino boards:\n\n///\n\n/// The affected boards where this trait should be used instead are:\n\n///\n\n/// - Duemilanove\n\n/// - Uno\n\n/// - Mega 2560\n\npub trait BaudrateArduinoExt {\n\n /// Calculate baudrate parameters from this number (with Arduino errata).\n\n fn into_baudrate<CLOCK: crate::clock::Clock>(self) -> Baudrate<CLOCK>;\n\n}\n\n\n\nimpl BaudrateArduinoExt for u32 {\n\n fn into_baudrate<CLOCK: crate::clock::Clock>(self) -> Baudrate<CLOCK> {\n\n let br = Baudrate::new(self);\n\n\n\n // hardcoded exception for 57600 for compatibility with the bootloader\n\n // shipped with the Duemilanove and previous boards and the firmware\n\n // on the 8U2 on the Uno and Mega 2560.\n\n //\n\n // https://github.com/arduino/ArduinoCore-avr/blob/3055c1efa3c6980c864f661e6c8cc5d5ac773af4/cores/arduino/HardwareSerial.cpp#L123-L132\n\n if CLOCK::FREQ == 16_000_000 && br.ubrr == 34 && br.u2x {\n\n // (CLOCK::FREQ / 8 / 57600 - 1) / 2 == 16\n\n Baudrate::with_exact(false, 16)\n\n } else {\n\n br\n\n }\n", "file_path": "avr-hal-generic/src/usart.rs", "rank": 5, "score": 109647.04582998554 }, { "content": "pub fn open(port: &std::path::Path, baudrate: u32) -> anyhow::Result<()> {\n\n let mut rx = serialport::new(port.to_string_lossy(), baudrate)\n\n .timeout(std::time::Duration::from_secs(2))\n\n .open_native()\n\n .with_context(|| format!(\"failed to open serial port `{}`\", port.display()))?;\n\n let mut tx = rx.try_clone_native()?;\n\n\n\n let mut stdin = std::io::stdin();\n\n let mut stdout = std::io::stdout();\n\n\n\n // Spawn a thread for the receiving end because stdio is not portably non-blocking...\n\n std::thread::spawn(move || loop {\n\n let mut buf = [0u8; 4098];\n\n match rx.read(&mut buf) {\n\n Ok(count) => {\n\n stdout.write(&buf[..count]).unwrap();\n\n stdout.flush().unwrap();\n\n }\n\n Err(e) => {\n\n assert!(e.kind() == std::io::ErrorKind::TimedOut);\n", "file_path": "ravedude/src/console.rs", "rank": 6, "score": 107041.81374640265 }, { "content": "/// Internal trait for the low-level ADC peripheral.\n\n///\n\n/// **Prefer using the [`Adc`] API instead of this trait.**\n\npub trait AdcOps<H> {\n\n /// Channel ID type for this ADC.\n\n type Channel: PartialEq + Copy;\n\n\n\n /// Settings type for this ADC.\n\n type Settings: PartialEq + Copy;\n\n\n\n /// Initialize the ADC peripheral with the specified settings.\n\n ///\n\n /// **Warning**: This is a low-level method and should not be called directly from user code.\n\n fn raw_init(&mut self, settings: Self::Settings);\n\n\n\n /// Read out the ADC data register.\n\n ///\n\n /// This method must only be called after a conversion completed.\n\n ///\n\n /// **Warning**: This is a low-level method and should not be called directly from user code.\n\n fn raw_read_adc(&self) -> u16;\n\n\n\n /// Check whether the ADC is currently converting a signal.\n", "file_path": "avr-hal-generic/src/adc.rs", "rank": 7, "score": 106482.3324061917 }, { "content": "/// Delay execution for a number of microseconds.\n\n///\n\n/// Busy-loop for the given time. 
This function assumes the default clock speed defined by\n\n/// [`arduino_hal::DefaultClock`][crate::DefaultClock].\n\npub fn delay_us(us: u32) {\n\n Delay::new().delay_us(us)\n\n}\n", "file_path": "arduino-hal/src/delay.rs", "rank": 8, "score": 104865.15745363609 }, { "content": "/// Delay execution for a number of milliseconds.\n\n///\n\n/// Busy-loop for the given time. This function assumes the default clock speed defined by\n\n/// [`arduino_hal::DefaultClock`][crate::DefaultClock].\n\npub fn delay_ms(ms: u16) {\n\n Delay::new().delay_ms(ms)\n\n}\n\n\n", "file_path": "arduino-hal/src/delay.rs", "rank": 9, "score": 104865.15745363609 }, { "content": "pub trait PinMode: crate::Sealed {}\n\n/// GPIO pin modes\n\npub mod mode {\n\n use core::marker::PhantomData;\n\n\n", "file_path": "avr-hal-generic/src/port.rs", "rank": 10, "score": 101581.23156626459 }, { "content": " pub trait InputMode: crate::Sealed {}\n\n\n\n /// Pin is configured as digital input (floating or pulled-up).\n\n pub struct Input<IMODE = AnyInput> {\n\n pub(crate) _imode: PhantomData<IMODE>,\n\n }\n\n impl<IMODE: InputMode> super::PinMode for Input<IMODE> {}\n\n impl<IMODE: InputMode> Io for Input<IMODE> {}\n\n impl<IMODE: InputMode> crate::Sealed for Input<IMODE> {}\n\n\n\n /// Floating input, used like `Input<Floating>`.\n\n pub struct Floating;\n\n impl InputMode for Floating {}\n\n impl crate::Sealed for Floating {}\n\n\n\n /// Pulled-up input, used like `Input<PullUp>`.\n\n pub struct PullUp;\n\n impl InputMode for PullUp {}\n\n impl crate::Sealed for PullUp {}\n\n\n\n /// Any input (floating or pulled-up), used like `Input<AnyInput>`.\n\n pub struct AnyInput;\n\n impl InputMode for AnyInput {}\n\n impl crate::Sealed for AnyInput {}\n\n\n\n /// Pin is configured as an analog input (for the ADC).\n\n pub struct Analog;\n\n}\n\n\n", "file_path": "avr-hal-generic/src/port.rs", "rank": 11, "score": 101581.23156626459 }, { "content": "pub trait Board {\n\n fn display_name(&self) -> &str;\n\n fn needs_reset(&self) -> Option<&str>;\n\n fn avrdude_options(&self) -> avrdude::AvrdudeOptions;\n\n fn guess_port(&self) -> Option<anyhow::Result<std::path::PathBuf>>;\n\n}\n\n\n", "file_path": "ravedude/src/board.rs", "rank": 12, "score": 99544.89386680021 }, { "content": "fn ravedude() -> anyhow::Result<()> {\n\n let args: Args = structopt::StructOpt::from_args();\n\n\n\n let board = board::get_board(&args.board).expect(\"board not found\");\n\n\n\n task_message!(\"Board\", \"{}\", board.display_name());\n\n\n\n if let Some(msg) = board.needs_reset() {\n\n warning!(\"this board cannot reset itself.\");\n\n eprintln!(\"\");\n\n eprintln!(\" {}\", msg);\n\n eprintln!(\"\");\n\n eprint!(\"Once reset, press ENTER here: \");\n\n std::io::stdin().read_line(&mut String::new())?;\n\n }\n\n\n\n let port = match args.port {\n\n Some(port) => Ok(Some(port)),\n\n None => match board.guess_port() {\n\n Some(Ok(port)) => Ok(Some(port)),\n", "file_path": "ravedude/src/main.rs", "rank": 13, "score": 98369.7627473585 }, { "content": "/// Internal trait for low-level USART peripherals.\n\n///\n\n/// This trait defines the common interface for all USART peripheral variants. It is used as an\n\n/// intermediate abstraction ontop of which the [`Usart`] API is built. 
**Prefer using the\n\n/// [`Usart`] API instead of this trait.**\n\npub trait UsartOps<H, RX, TX> {\n\n /// Enable & initialize this USART peripheral to the given baudrate.\n\n ///\n\n /// **Warning**: This is a low-level method and should not be called directly from user code.\n\n fn raw_init<CLOCK>(&mut self, baudrate: Baudrate<CLOCK>);\n\n /// Disable this USART peripheral such that the pins can be used for other purposes again.\n\n ///\n\n /// **Warning**: This is a low-level method and should not be called directly from user code.\n\n fn raw_deinit(&mut self);\n\n\n\n /// Flush all remaining data in the TX buffer.\n\n ///\n\n /// This operation must be non-blocking and return [`nb::Error::WouldBlock`] if not all data\n\n /// was flushed yet.\n\n ///\n\n /// **Warning**: This is a low-level method and should not be called directly from user code.\n\n fn raw_flush(&mut self) -> nb::Result<(), void::Void>;\n\n /// Write a byte to the TX buffer.\n\n ///\n\n /// This operation must be non-blocking and return [`nb::Error::WouldBlock`] until the byte is\n", "file_path": "avr-hal-generic/src/usart.rs", "rank": 14, "score": 97247.6524359101 }, { "content": "/// Internal trait for low-level I2C peripherals.\n\n///\n\n/// This trait defines the common interface for all I2C peripheral variants. It is used as an\n\n/// intermediate abstraction ontop of which the [`I2c`] API is built. **Prefer using the\n\n/// [`I2c`] API instead of this trait.**\n\npub trait I2cOps<H, SDA, SCL> {\n\n /// Setup the bus for operation at a certain speed.\n\n ///\n\n /// **Warning**: This is a low-level method and should not be called directly from user code.\n\n fn raw_setup<CLOCK: crate::clock::Clock>(&mut self, speed: u32);\n\n\n\n /// Start a bus transaction to a certain `address` in either read or write mode.\n\n ///\n\n /// If a previous transaction was not stopped via `raw_stop()`, this should generate a repeated\n\n /// start condition.\n\n ///\n\n /// **Warning**: This is a low-level method and should not be called directly from user code.\n\n fn raw_start(&mut self, address: u8, direction: Direction) -> Result<(), Error>;\n\n\n\n /// Write some bytes to the bus.\n\n ///\n\n /// This method must only be called after a transaction in write mode was successfully started.\n\n ///\n\n /// **Warning**: This is a low-level method and should not be called directly from user code.\n\n fn raw_write(&mut self, bytes: &[u8]) -> Result<(), Error>;\n", "file_path": "avr-hal-generic/src/i2c.rs", "rank": 15, "score": 97247.558179124 }, { "content": " pub trait Io: crate::Sealed + super::PinMode {}\n\n\n\n /// Pin is configured as a digital output.\n\n pub struct Output;\n\n impl super::PinMode for Output {}\n\n impl Io for Output {}\n\n impl crate::Sealed for Output {}\n\n\n", "file_path": "avr-hal-generic/src/port.rs", "rank": 16, "score": 93344.20382005596 }, { "content": "/// Trait marking a type as an ADC channel for a certain ADC.\n\npub trait AdcChannel<H, ADC: AdcOps<H>> {\n\n fn channel(&self) -> ADC::Channel;\n\n}\n\n\n\n/// Representation of any ADC Channel.\n\n///\n\n/// Typically, distinct types are used per channel, like for example `Pin<mode::Analog, PC0>`. In\n\n/// some situations, however, a type is needed which can represent _any_ channel. This is required\n\n/// to, for example, store multiple channels in an array.\n\n///\n\n/// `Channel` is such a type. 
It can be created by calling the [`into_channel()`][into-channel]\n\n/// method of a distinct type:\n\n///\n\n/// ```\n\n/// let a0 = pins.a0.into_analog_input(&mut adc);\n\n/// let a1 = pins.a1.into_analog_input(&mut adc);\n\n///\n\n/// let channels: [atmega_hal::adc::Channel; 2] = [\n\n/// a0.into_channel(),\n\n/// a1.into_channel(),\n", "file_path": "avr-hal-generic/src/adc.rs", "rank": 17, "score": 91435.9619573237 }, { "content": "/// Internal trait for low-level SPI peripherals\n\n///\n\n/// This trait defines the common interface for all SPI peripheral variants. It is used as an\n\n/// intermediate abstraction ontop of which the [`Spi`] API is built. **Prefer using the\n\n/// [`Spi`] API instead of this trait.**\n\npub trait SpiOps<H, SCLK, MOSI, MISO, CS> {\n\n fn raw_setup(&mut self, settings: &Settings);\n\n fn raw_release(&mut self);\n\n\n\n fn raw_check_iflag(&self) -> bool;\n\n fn raw_read(&self) -> u8;\n\n fn raw_write(&mut self, byte: u8);\n\n}\n\n\n\n/// Wrapper for the CS pin\n\n///\n\n/// Used to contain the chip-select pin during operation to prevent its mode from being\n\n/// changed from Output. This is necessary because the SPI state machine would otherwise\n\n/// reset itself to SPI slave mode immediately. This wrapper can be used just like an\n\n/// output pin, because it implements all the same traits from embedded-hal.\n\npub struct ChipSelectPin<CSPIN>(port::Pin<port::mode::Output, CSPIN>);\n\n\n\nimpl<CSPIN: port::PinOps> hal::digital::v2::OutputPin for ChipSelectPin<CSPIN> {\n\n type Error = crate::void::Void;\n\n fn set_low(&mut self) -> Result<(), Self::Error> {\n", "file_path": "avr-hal-generic/src/spi.rs", "rank": 18, "score": 89826.52884098647 }, { "content": "pub fn print_error(e: anyhow::Error) {\n\n use colored::Colorize as _;\n\n\n\n eprintln!(\n\n \"{}{}{}\",\n\n \"Error\".red().bold(),\n\n \": \".bold(),\n\n e.to_string().bold()\n\n );\n\n\n\n eprintln!(\"\");\n\n\n\n for cause in e.chain().skip(1) {\n\n eprintln!(\n\n \"{}{}{}\",\n\n \"Caused by\".yellow().bold(),\n\n \": \".bold(),\n\n cause.to_string().bold()\n\n );\n\n }\n\n}\n", "file_path": "ravedude/src/ui.rs", "rank": 19, "score": 86190.68326634835 }, { "content": "pub fn get_board(board: &str) -> Option<Box<dyn Board>> {\n\n Some(match board {\n\n \"uno\" => Box::new(ArduinoUno),\n\n \"nano\" => Box::new(ArduinoNano),\n\n \"leonardo\" => Box::new(ArduinoLeonardo),\n\n \"micro\" => Box::new(ArduinoMicro),\n\n \"mega2560\" => Box::new(ArduinoMega2560),\n\n \"diecimila\" => Box::new(ArduinoDiecimila),\n\n \"promicro\" => Box::new(SparkFunProMicro),\n\n \"trinket-pro\" => Box::new(TrinketPro),\n\n \"trinket\" => Box::new(Trinket),\n\n \"nano168\" => Box::new(Nano168),\n\n _ => return None,\n\n })\n\n}\n\n\n\n// ----------------------------------------------------------------------------\n\n\n", "file_path": "ravedude/src/board.rs", "rank": 20, "score": 74378.09249970216 }, { "content": "fn find_port_from_vid_pid_list(list: &[(u16, u16)]) -> anyhow::Result<std::path::PathBuf> {\n\n for serialport::SerialPortInfo {\n\n port_name,\n\n port_type,\n\n } in serialport::available_ports().unwrap()\n\n {\n\n if let serialport::SerialPortType::UsbPort(usb_info) = port_type {\n\n for (vid, pid) in list.iter() {\n\n if usb_info.vid == *vid && usb_info.pid == *pid {\n\n return Ok(port_name.into());\n\n }\n\n }\n\n }\n\n }\n\n Err(anyhow::anyhow!(\"Serial port not found.\"))\n\n}\n\n\n\n// ----------------------------------------------------------------------------\n\n\n", "file_path": 
"ravedude/src/board.rs", "rank": 21, "score": 68543.68527154245 }, { "content": "fn main() {\n\n match ravedude() {\n\n Ok(()) => (),\n\n Err(e) => {\n\n ui::print_error(e);\n\n std::process::exit(1);\n\n }\n\n }\n\n}\n\n\n", "file_path": "ravedude/src/main.rs", "rank": 22, "score": 54950.2731042652 }, { "content": "struct Trinket;\n\n\n\nimpl Board for Trinket {\n\n fn display_name(&self) -> &str {\n\n \"Trinket\"\n\n }\n\n\n\n fn needs_reset(&self) -> Option<&str> {\n\n Some(\"Reset the board by pressing the reset button once.\")\n\n }\n\n\n\n fn avrdude_options(&self) -> avrdude::AvrdudeOptions {\n\n avrdude::AvrdudeOptions {\n\n programmer: \"usbtiny\",\n\n partno: \"attiny85\",\n\n baudrate: None,\n\n do_chip_erase: true,\n\n }\n\n }\n\n\n\n fn guess_port(&self) -> Option<anyhow::Result<std::path::PathBuf>> {\n\n None // The Trinket does not have USB-to-Serial.\n\n }\n\n}\n\n\n", "file_path": "ravedude/src/board.rs", "rank": 23, "score": 54531.240067055005 }, { "content": "struct Args {\n\n /// After sucessfully flashing the program, open a serial console to see output sent by the\n\n /// board and possibly interact with it.\n\n #[structopt(short = \"c\", long = \"open-console\")]\n\n open_console: bool,\n\n\n\n /// Baudrate which should be used for the serial console.\n\n #[structopt(short = \"b\", long = \"baudrate\")]\n\n baudrate: Option<u32>,\n\n\n\n /// Overwrite which port to use. By default ravedude will try to find a connected board by\n\n /// itself.\n\n #[structopt(short = \"P\", long = \"port\", parse(from_os_str), env = \"RAVEDUDE_PORT\")]\n\n port: Option<std::path::PathBuf>,\n\n\n\n /// Which board to interact with.\n\n ///\n\n /// Must be one of the known board identifiers:\n\n ///\n\n /// * uno\n", "file_path": "ravedude/src/main.rs", "rank": 24, "score": 54531.240067055005 }, { "content": "struct Nano168;\n\n\n\nimpl Board for Nano168 {\n\n fn display_name(&self) -> &str {\n\n \"Nano Clone (ATmega168)\"\n\n }\n\n\n\n fn needs_reset(&self) -> Option<&str> {\n\n None\n\n }\n\n\n\n fn avrdude_options(&self) -> avrdude::AvrdudeOptions {\n\n avrdude::AvrdudeOptions {\n\n programmer: \"arduino\",\n\n partno: \"atmega168\",\n\n baudrate: Some(19200),\n\n do_chip_erase: false,\n\n }\n\n }\n\n\n\n fn guess_port(&self) -> Option<anyhow::Result<std::path::PathBuf>> {\n\n Some(Err(anyhow::anyhow!(\"Not able to guess port\")))\n\n }\n\n}\n", "file_path": "ravedude/src/board.rs", "rank": 25, "score": 54531.240067055005 }, { "content": "#[allow(unused_imports)]\n\npub use avr_hal_generic::wdt::{Timeout, WdtOps};\n\n\n\npub type Wdt = avr_hal_generic::wdt::Wdt<crate::Atmega, crate::pac::WDT>;\n\n\n\navr_hal_generic::impl_wdt! 
{\n\n hal: crate::Atmega,\n\n peripheral: crate::pac::WDT,\n\n mcusr: crate::pac::cpu::MCUSR,\n\n timeout: |to, w| match to {\n\n Timeout::Ms16 => w.wdpl().cycles_2k_512k(),\n\n Timeout::Ms32 => w.wdpl().cycles_4k_1024k(),\n\n Timeout::Ms64 => w.wdpl().cycles_8k(),\n\n Timeout::Ms125 => w.wdpl().cycles_16k(),\n\n Timeout::Ms250 => w.wdpl().cycles_32k(),\n\n Timeout::Ms500 => w.wdpl().cycles_64k(),\n\n Timeout::Ms1000 => w.wdpl().cycles_128k(),\n\n Timeout::Ms2000 => w.wdpl().cycles_256k(),\n\n Timeout::Ms4000 => w.wdph().set_bit().wdpl().cycles_2k_512k(),\n\n Timeout::Ms8000 => w.wdph().set_bit().wdpl().cycles_4k_1024k(),\n\n },\n\n}\n", "file_path": "mcu/atmega-hal/src/wdt.rs", "rank": 27, "score": 54219.490506347305 }, { "content": "struct TrinketPro;\n\n\n\nimpl Board for TrinketPro {\n\n fn display_name(&self) -> &str {\n\n \"Trinket Pro\"\n\n }\n\n\n\n fn needs_reset(&self) -> Option<&str> {\n\n Some(\"Reset the board by pressing the reset button once.\")\n\n }\n\n\n\n fn avrdude_options(&self) -> avrdude::AvrdudeOptions {\n\n avrdude::AvrdudeOptions {\n\n programmer: \"usbtiny\",\n\n partno: \"atmega328p\",\n\n baudrate: None,\n\n do_chip_erase: false,\n\n }\n\n }\n\n\n\n fn guess_port(&self) -> Option<anyhow::Result<std::path::PathBuf>> {\n\n None // The TrinketPro does not have USB-to-Serial.\n\n }\n\n}\n\n\n", "file_path": "ravedude/src/board.rs", "rank": 34, "score": 53321.922171032005 }, { "content": "struct ArduinoMicro;\n\n\n\nimpl Board for ArduinoMicro {\n\n fn display_name(&self) -> &str {\n\n \"Arduino Micro\"\n\n }\n\n\n\n fn needs_reset(&self) -> Option<&str> {\n\n Some(\"Reset the board by pressing the reset button once.\")\n\n }\n\n\n\n fn avrdude_options(&self) -> avrdude::AvrdudeOptions {\n\n avrdude::AvrdudeOptions {\n\n programmer: \"avr109\",\n\n partno: \"atmega32u4\",\n\n baudrate: Some(115200),\n\n do_chip_erase: true,\n\n }\n\n }\n\n\n", "file_path": "ravedude/src/board.rs", "rank": 35, "score": 53321.922171032005 }, { "content": "struct ArduinoDiecimila;\n\n\n\nimpl Board for ArduinoDiecimila {\n\n fn display_name(&self) -> &str {\n\n \"Arduino Diecimila\"\n\n }\n\n\n\n fn needs_reset(&self) -> Option<&str> {\n\n None\n\n }\n\n\n\n fn avrdude_options(&self) -> avrdude::AvrdudeOptions {\n\n avrdude::AvrdudeOptions {\n\n programmer: \"arduino\",\n\n partno: \"atmega168\",\n\n baudrate: Some(19200),\n\n do_chip_erase: false,\n\n }\n\n }\n\n\n\n fn guess_port(&self) -> Option<anyhow::Result<std::path::PathBuf>> {\n\n Some(Err(anyhow::anyhow!(\"Not able to guess port\")))\n\n }\n\n}\n\n\n", "file_path": "ravedude/src/board.rs", "rank": 36, "score": 53321.922171032005 }, { "content": "struct ArduinoMega2560;\n\n\n\nimpl Board for ArduinoMega2560 {\n\n fn display_name(&self) -> &str {\n\n \"Arduino Mega 2560\"\n\n }\n\n\n\n fn needs_reset(&self) -> Option<&str> {\n\n None\n\n }\n\n\n\n fn avrdude_options(&self) -> avrdude::AvrdudeOptions {\n\n avrdude::AvrdudeOptions {\n\n programmer: \"wiring\",\n\n partno: \"atmega2560\",\n\n baudrate: Some(115200),\n\n do_chip_erase: false,\n\n }\n\n }\n\n\n", "file_path": "ravedude/src/board.rs", "rank": 37, "score": 53321.922171032005 }, { "content": "struct ArduinoNano;\n\n\n\nimpl Board for ArduinoNano {\n\n fn display_name(&self) -> &str {\n\n \"Arduino Nano\"\n\n }\n\n\n\n fn needs_reset(&self) -> Option<&str> {\n\n None\n\n }\n\n\n\n fn avrdude_options(&self) -> avrdude::AvrdudeOptions {\n\n avrdude::AvrdudeOptions {\n\n programmer: \"arduino\",\n\n partno: \"atmega328p\",\n\n baudrate: Some(57600),\n\n do_chip_erase: 
true,\n\n }\n\n }\n\n\n\n fn guess_port(&self) -> Option<anyhow::Result<std::path::PathBuf>> {\n\n Some(Err(anyhow::anyhow!(\"Not able to guess port\")))\n\n }\n\n}\n\n\n", "file_path": "ravedude/src/board.rs", "rank": 38, "score": 53321.922171032005 }, { "content": "struct ArduinoLeonardo;\n\n\n\nimpl Board for ArduinoLeonardo {\n\n fn display_name(&self) -> &str {\n\n \"Arduino Leonardo\"\n\n }\n\n\n\n fn needs_reset(&self) -> Option<&str> {\n\n let a = self.guess_port();\n\n match a {\n\n Some(Ok(name)) => {\n\n match serialport::new(name.to_str().unwrap(), 1200).open() {\n\n Ok(_) => {\n\n std::thread::sleep(core::time::Duration::from_secs(1));\n\n None\n\n },\n\n Err(_) => Some(\"Reset the board by pressing the reset button once.\")\n\n }\n\n },\n\n _ => Some(\"Reset the board by pressing the reset button once.\")\n", "file_path": "ravedude/src/board.rs", "rank": 39, "score": 53321.922171032005 }, { "content": "struct ArduinoUno;\n\n\n\nimpl Board for ArduinoUno {\n\n fn display_name(&self) -> &str {\n\n \"Arduino Uno\"\n\n }\n\n\n\n fn needs_reset(&self) -> Option<&str> {\n\n None\n\n }\n\n\n\n fn avrdude_options(&self) -> avrdude::AvrdudeOptions {\n\n avrdude::AvrdudeOptions {\n\n programmer: \"arduino\",\n\n partno: \"atmega328p\",\n\n baudrate: None,\n\n do_chip_erase: true,\n\n }\n\n }\n\n\n\n fn guess_port(&self) -> Option<anyhow::Result<std::path::PathBuf>> {\n\n Some(find_port_from_vid_pid_list(&[\n\n (0x2341, 0x0043),\n\n (0x2341, 0x0001),\n\n (0x2A03, 0x0043),\n\n (0x2341, 0x0243),\n\n ]))\n\n }\n\n}\n\n\n", "file_path": "ravedude/src/board.rs", "rank": 40, "score": 53321.922171032005 }, { "content": "fn apply_settings(peripheral: &crate::pac::ADC, settings: AdcSettings) {\n\n peripheral.adcsra.write(|w| {\n\n w.aden().set_bit();\n\n match settings.clock_divider {\n\n ClockDivider::Factor2 => w.adps().prescaler_2(),\n\n ClockDivider::Factor4 => w.adps().prescaler_4(),\n\n ClockDivider::Factor8 => w.adps().prescaler_8(),\n\n ClockDivider::Factor16 => w.adps().prescaler_16(),\n\n ClockDivider::Factor32 => w.adps().prescaler_32(),\n\n ClockDivider::Factor64 => w.adps().prescaler_64(),\n\n ClockDivider::Factor128 => w.adps().prescaler_128(),\n\n }\n\n });\n\n peripheral.admux.write(|w| match settings.ref_voltage {\n\n ReferenceVoltage::Aref => w.refs().aref(),\n\n ReferenceVoltage::AVcc => w.refs().avcc(),\n\n ReferenceVoltage::Internal => w.refs().internal(),\n\n });\n\n}\n\n\n", "file_path": "mcu/atmega-hal/src/adc.rs", "rank": 41, "score": 53118.65918701464 }, { "content": "struct SparkFunProMicro;\n\n\n\nimpl Board for SparkFunProMicro {\n\n fn display_name(&self) -> &str {\n\n \"SparkFun Pro Micro\"\n\n }\n\n\n\n fn needs_reset(&self) -> Option<&str> {\n\n Some(\"Reset the board by quickly pressing the reset button **twice**.\")\n\n }\n\n\n\n fn avrdude_options(&self) -> avrdude::AvrdudeOptions {\n\n avrdude::AvrdudeOptions {\n\n programmer: \"avr109\",\n\n partno: \"atmega32u4\",\n\n baudrate: None,\n\n do_chip_erase: true,\n\n }\n\n }\n\n\n\n fn guess_port(&self) -> Option<anyhow::Result<std::path::PathBuf>> {\n\n Some(find_port_from_vid_pid_list(&[\n\n (0x1B4F, 0x9205), //5V\n\n (0x1B4F, 0x9206), //5V\n\n (0x1B4F, 0x9203), //3.3V\n\n (0x1B4F, 0x9204), //3.3V\n\n ]))\n\n }\n\n}\n\n\n", "file_path": "ravedude/src/board.rs", "rank": 42, "score": 51140.693271741846 }, { "content": "#!/usr/bin/env python3\n\nimport copy\n\nimport json\n\nimport subprocess\n\n\n\nSPECS = {\n\n \"atmega32u4\": {\n\n \"cpu\": \"atmega32u4\",\n\n },\n\n \"atmega48p\": {\n\n \"cpu\": 
\"atmega48p\",\n\n },\n\n \"atmega168\": {\n\n \"cpu\": \"atmega168\",\n\n },\n\n \"atmega328p\": {\n\n \"cpu\": \"atmega328p\",\n\n },\n\n \"atmega1280\": {\n\n \"cpu\": \"atmega1280\",\n\n },\n\n \"atmega2560\": {\n\n \"cpu\": \"atmega2560\",\n\n },\n\n \"attiny85\": {\n\n \"cpu\": \"attiny85\",\n\n },\n\n \"attiny88\": {\n\n \"cpu\": \"attiny88\",\n\n },\n\n}\n\n\n\nCOMMON = {\n\n # needed because we currently rely on avr-libc\n\n \"no-default-libraries\": False,\n\n # 8-bit operations on AVR are atomic\n\n \"max-atomic-width\": 8,\n\n}\n\n\n\n\n\ndef main():\n\n rustc_version = subprocess.run(\n\n [\"rustc\", \"--version\"],\n\n check=True,\n\n stdout=subprocess.PIPE,\n\n ).stdout.decode()\n\n\n\n if \"nightly\" not in rustc_version:\n\n raise Exception(\"You need nightly rustc!\")\n\n\n\n upstream_spec_string = subprocess.run(\n\n [\n\n \"rustc\",\n\n \"--print\",\n\n \"target-spec-json\",\n\n \"-Z\",\n\n \"unstable-options\",\n\n \"--target\",\n\n \"avr-unknown-gnu-atmega328\",\n\n ],\n\n check=True,\n\n stdout=subprocess.PIPE,\n\n ).stdout\n\n\n\n upstream_spec = json.loads(upstream_spec_string)\n\n\n\n # our targets are of course not built into rustc\n\n del upstream_spec[\"is-builtin\"]\n\n\n\n for mcu, settings in SPECS.items():\n\n spec = copy.deepcopy(upstream_spec)\n\n spec.update(COMMON)\n\n spec.update(settings)\n\n spec[\"pre-link-args\"][\"gcc\"][0] = f\"-mmcu={mcu}\"\n\n\n\n with open(f\"avr-specs/avr-{mcu}.json\", \"w\") as f:\n\n json.dump(spec, f, sort_keys=True, indent=2)\n\n f.write(\"\\n\")\n\n\n\n\n\nif __name__ == \"__main__\":\n\n main()\n", "file_path": "avr-specs/sync-from-upstream.py", "rank": 43, "score": 30141.824275600375 }, { "content": " }\n\n }\n\n\n\n pub use ufmt::uWrite as _;\n\n pub use void::ResultVoidErrExt as _;\n\n pub use void::ResultVoidExt as _;\n\n}\n\n\n\n/// Convenience macro to instanciate the [`Pins`] struct for this board.\n\n///\n\n/// # Example\n\n/// ```no_run\n\n/// let dp = arduino_hal::Peripherals::take().unwrap();\n\n/// let pins = arduino_hal::pins!(dp);\n\n/// ```\n\n#[cfg(feature = \"board-selected\")]\n\n#[macro_export]\n\nmacro_rules! 
pins {\n\n ($p:expr) => {\n\n $crate::Pins::with_mcu_pins($crate::hal::pins!($p))\n", "file_path": "arduino-hal/src/lib.rs", "rank": 44, "score": 21579.4611873993 }, { "content": "/// (runtime) feature is selected (it is by default).\n\n#[cfg(any(feature = \"rt\", doc))]\n\n#[doc(cfg(feature = \"rt\"))]\n\npub use avr_device::entry;\n\n\n\n#[doc(no_inline)]\n\n#[cfg(feature = \"mcu-atmega\")]\n\npub use atmega_hal as hal;\n\n#[doc(no_inline)]\n\n#[cfg(feature = \"mcu-atmega\")]\n\npub use atmega_hal::pac;\n\n\n\n#[doc(no_inline)]\n\n#[cfg(feature = \"mcu-attiny\")]\n\npub use attiny_hal as hal;\n\n#[doc(no_inline)]\n\n#[cfg(feature = \"mcu-attiny\")]\n\npub use attiny_hal::pac;\n\n\n\n#[doc(no_inline)]\n", "file_path": "arduino-hal/src/lib.rs", "rank": 45, "score": 21577.51209152646 }, { "content": "#[doc(no_inline)]\n\n#[cfg(feature = \"mcu-atmega\")]\n\npub use i2c::I2c;\n\n\n\n/// SPI controller.\n\n#[cfg(feature = \"mcu-atmega\")]\n\npub mod spi {\n\n pub use crate::hal::spi::*;\n\n\n\n pub type Spi = crate::hal::spi::Spi;\n\n}\n\n#[doc(no_inline)]\n\n#[cfg(feature = \"mcu-atmega\")]\n\npub use spi::Spi;\n\n\n\n#[cfg(feature = \"mcu-atmega\")]\n\npub mod usart {\n\n pub use crate::hal::usart::{Baudrate, UsartOps};\n\n\n\n pub type Usart<USART, RX, TX> = crate::hal::usart::Usart<USART, RX, TX, crate::DefaultClock>;\n", "file_path": "arduino-hal/src/lib.rs", "rank": 46, "score": 21576.881810541177 }, { "content": "#[cfg(feature = \"mcu-atmega\")]\n\npub mod adc {\n\n pub use crate::hal::adc::{\n\n channel, AdcChannel, AdcOps, AdcSettings, Channel, ClockDivider, ReferenceVoltage,\n\n };\n\n\n\n /// Check the [`avr_hal_generic::adc::Adc`] documentation.\n\n pub type Adc = crate::hal::Adc<crate::DefaultClock>;\n\n}\n\n#[doc(no_inline)]\n\n#[cfg(feature = \"mcu-atmega\")]\n\npub use adc::Adc;\n\n\n\n/// I2C bus controller.\n\n#[cfg(feature = \"mcu-atmega\")]\n\npub mod i2c {\n\n pub use crate::hal::i2c::*;\n\n\n\n pub type I2c = crate::hal::i2c::I2c<crate::DefaultClock>;\n\n}\n", "file_path": "arduino-hal/src/lib.rs", "rank": 47, "score": 21576.504845286247 }, { "content": " pub type UsartWriter<USART, RX, TX> =\n\n crate::hal::usart::UsartWriter<USART, RX, TX, crate::DefaultClock>;\n\n pub type UsartReader<USART, RX, TX> =\n\n crate::hal::usart::UsartReader<USART, RX, TX, crate::DefaultClock>;\n\n}\n\n#[doc(no_inline)]\n\n#[cfg(feature = \"mcu-atmega\")]\n\npub use usart::Usart;\n\n\n\n#[cfg(feature = \"mcu-atmega\")]\n\npub mod prelude {\n\n cfg_if::cfg_if! {\n\n if #[cfg(any(\n\n feature = \"arduino-diecimila\",\n\n feature = \"arduino-mega2560\",\n\n feature = \"arduino-uno\"\n\n ))] {\n\n pub use crate::hal::usart::BaudrateArduinoExt as _;\n\n } else {\n\n pub use crate::hal::usart::BaudrateExt as _;\n", "file_path": "arduino-hal/src/lib.rs", "rank": 48, "score": 21576.0225374412 }, { "content": "//! MCU core clock support.\n\n//!\n\n//! This module contains common definitions to abtract over the MCU core clock speed. `avr-hal`\n\n//! does not support changing the clock-speed at runtime.\n\n//!\n\n//! Most items in this module are re-exported from [`avr_hal_generic::clock`].\n\npub use avr_hal_generic::clock::*;\n\n\n\npub(crate) mod default {\n\n /// Default clock speed for this board.\n\n ///\n\n /// `arduino-hal` contains a lot of type aliases for assuming this clock speed. 
As such it is\n\n /// easiest to keep the processor at the selected default speed.\n\n ///\n\n /// However, you can of course still use other clock speeds but you'll then need to correctly\n\n /// name the types from the HAL crate using your own clock definition.\n\n #[cfg(feature = \"mhz16\")]\n\n pub type DefaultClock = avr_hal_generic::clock::MHz16;\n\n\n\n #[cfg(feature = \"mhz8\")]\n\n pub type DefaultClock = avr_hal_generic::clock::MHz8;\n\n}\n", "file_path": "arduino-hal/src/clock.rs", "rank": 49, "score": 21575.592948820835 }, { "content": "#[cfg(feature = \"board-selected\")]\n\npub use hal::Peripherals;\n\n\n\n#[cfg(feature = \"board-selected\")]\n\npub mod clock;\n\n#[cfg(feature = \"board-selected\")]\n\npub use clock::default::DefaultClock;\n\n\n\n#[cfg(feature = \"board-selected\")]\n\nmod delay;\n\n#[cfg(feature = \"board-selected\")]\n\npub use delay::{delay_ms, delay_us, Delay};\n\n\n\n#[cfg(feature = \"board-selected\")]\n\npub mod port;\n\n#[doc(no_inline)]\n\n#[cfg(feature = \"board-selected\")]\n\npub use port::Pins;\n\n\n\n/// Analog to Digital converter.\n", "file_path": "arduino-hal/src/lib.rs", "rank": 50, "score": 21575.245682585923 }, { "content": "use embedded_hal::blocking::delay::{DelayMs, DelayUs};\n\n\n\n/// Delay type for `embedded-hal` compatibility.\n\n///\n\n/// This type can be used to pass a generic delay utility to `embedded-hal` drivers. For direct\n\n/// use in `arduino-hal` code, usage of [`delay_ms`] or [`delay_us`] is preferred.\n\npub type Delay = avr_hal_generic::delay::Delay<crate::DefaultClock>;\n\n\n\n/// Delay execution for a number of milliseconds.\n\n///\n\n/// Busy-loop for the given time. This function assumes the default clock speed defined by\n\n/// [`arduino_hal::DefaultClock`][crate::DefaultClock].\n", "file_path": "arduino-hal/src/delay.rs", "rank": 51, "score": 21574.872363804723 }, { "content": " $p.USART1,\n\n $pins.rx,\n\n $pins.tx.into_output(),\n\n $crate::hal::usart::BaudrateExt::into_baudrate($baud),\n\n )\n\n };\n\n}\n\n// See comment in avr-hal-generic/src/usart.rs for why these boards use\n\n// the BaudrateArduinoExt trait instead of BaudrateExt\n\n#[cfg(any(\n\n feature = \"arduino-diecimila\",\n\n feature = \"arduino-mega2560\",\n\n feature = \"arduino-uno\"\n\n))]\n\n#[macro_export]\n\nmacro_rules! default_serial {\n\n ($p:expr, $pins:expr, $baud:expr) => {\n\n $crate::Usart::new(\n\n $p.USART0,\n\n $pins.d0,\n", "file_path": "arduino-hal/src/lib.rs", "rank": 52, "score": 21572.923655634437 }, { "content": "//!\n\n//! ```text\n\n//! cargo doc --open\n\n//! ```\n\n//!\n\n//! in your project (where `arduino-hal` is included with the feature-flag for your board).\n\n//!\n\n//! ## Usage\n\n//! For setting up a new project, the [`avr-hal-template`](https://github.com/Rahix/avr-hal-template)\n\n//! is the recommended baseline. Applications should be built ontop of the following skeleton:\n\n//!\n\n//! ```no_run\n\n//! #![no_std]\n\n//! #![no_main]\n\n//!\n\n//! use panic_halt as _;\n\n//!\n\n//! #[arduino_hal::entry]\n\n//! fn main() -> ! {\n\n//! let dp = arduino_hal::Peripherals::take().unwrap();\n", "file_path": "arduino-hal/src/lib.rs", "rank": 53, "score": 21571.821781374514 }, { "content": "#![no_std]\n\n#![feature(doc_cfg)]\n\n\n\n//! `arduino-hal`\n\n//! =============\n\n//! Common HAL (hardware abstraction layer) for Arduino boards.\n\n//!\n\n//! 
**Note**: This version of the documentation was built for\n\n#![cfg_attr(feature = \"arduino-diecimila\", doc = \"**Arduino Diecimila**.\")]\n\n#![cfg_attr(feature = \"arduino-leonardo\", doc = \"**Arduino Leonardo**.\")]\n\n#![cfg_attr(feature = \"arduino-mega2560\", doc = \"**Arduino Mega 2560**.\")]\n\n#![cfg_attr(feature = \"arduino-nano\", doc = \"**Arduino Nano**.\")]\n\n#![cfg_attr(feature = \"arduino-uno\", doc = \"**Arduino Uno**.\")]\n\n#![cfg_attr(feature = \"arduino-uno-no-crystal\", doc = \"**Arduino Uno without external osculator**.\")]\n\n#![cfg_attr(feature = \"sparkfun-promicro\", doc = \"**SparkFun ProMicro**.\")]\n\n#![cfg_attr(feature = \"trinket-pro\", doc = \"**Trinket Pro**.\")]\n\n#![cfg_attr(feature = \"trinket\", doc = \"**Trinket**.\")]\n\n#![cfg_attr(feature = \"nano168\", doc = \"**Nano clone (ATmega168)**.\")]\n\n//! This means that only items which are available for this board are visible. If you are using a\n\n//! different board, try building the documentation locally with\n", "file_path": "arduino-hal/src/lib.rs", "rank": 54, "score": 21569.382568630586 }, { "content": "//! let pins = arduino_hal::pins!(dp);\n\n//!\n\n//! loop { }\n\n//! }\n\n//! ```\n\n//!\n\n//! For examples, please check the `avr-hal` examples: <https://github.com/Rahix/avr-hal/tree/main/examples>\n\n\n\n#[cfg(not(feature = \"board-selected\"))]\n\ncompile_error!(\n\n \"This crate requires you to specify your target Arduino board as a feature.\n\n\n\n Please select one of the following\n\n\n\n * arduino-diecimila\n\n * arduino-leonardo\n\n * arduino-mega2560\n\n * arduino-nano\n\n * arduino-uno\n\n * sparkfun-promicro\n", "file_path": "arduino-hal/src/lib.rs", "rank": 55, "score": 21568.859094204327 }, { "content": " $pins.d1.into_output(),\n\n $crate::hal::usart::BaudrateArduinoExt::into_baudrate($baud),\n\n )\n\n };\n\n}\n\n#[cfg(any(feature = \"arduino-nano\", feature = \"nano168\"))]\n\n#[macro_export]\n\nmacro_rules! default_serial {\n\n ($p:expr, $pins:expr, $baud:expr) => {\n\n $crate::Usart::new(\n\n $p.USART0,\n\n $pins.d0,\n\n $pins.d1.into_output(),\n\n $crate::hal::usart::BaudrateExt::into_baudrate($baud),\n\n )\n\n };\n\n}\n", "file_path": "arduino-hal/src/lib.rs", "rank": 56, "score": 21568.518811878926 }, { "content": " * trinket-pro\n\n * trinket\n\n * nano168\n\n \"\n\n);\n\n\n\n/// Attribute to declare the entry point of the program\n\n///\n\n/// Exactly one entry point must be declared in the entire dependency tree.\n\n///\n\n/// ```\n\n/// #[arduino_hal::entry]\n\n/// fn main() -> ! {\n\n/// // ...\n\n/// }\n\n/// ```\n\n///\n\n/// The entry function must have a signature of `[unsafe] fn() -> !`.\n\n///\n\n/// This macro is a reexport of [`avr_device::entry`]. It is only available when the `rt`\n", "file_path": "arduino-hal/src/lib.rs", "rank": 57, "score": 21567.89815635964 }, { "content": " };\n\n}\n\n\n\n#[cfg(any(feature = \"arduino-leonardo\"))]\n\n#[macro_export]\n\nmacro_rules! default_serial {\n\n ($p:expr, $pins:expr, $baud:expr) => {\n\n $crate::Usart::new(\n\n $p.USART1,\n\n $pins.d0,\n\n $pins.d1.into_output(),\n\n $crate::hal::usart::BaudrateExt::into_baudrate($baud),\n\n )\n\n };\n\n}\n\n#[cfg(any(feature = \"sparkfun-promicro\"))]\n\n#[macro_export]\n\nmacro_rules! 
default_serial {\n\n ($p:expr, $pins:expr, $baud:expr) => {\n\n $crate::Usart::new(\n", "file_path": "arduino-hal/src/lib.rs", "rank": 58, "score": 21567.57069700543 }, { "content": "pub mod i2c;\n\npub mod spi;\n\npub mod adc;\n\npub mod pwm;\n\npub mod wdt;\n\n\n\n/// Prelude containing all HAL traits\n\npub mod prelude {\n\n pub use hal::prelude::*;\n\n pub use hal::digital::v2::OutputPin as _;\n\n pub use hal::digital::v2::InputPin as _;\n\n pub use hal::digital::v2::StatefulOutputPin as _;\n\n pub use hal::digital::v2::ToggleableOutputPin as _;\n\n pub use void::ResultVoidExt as _;\n\n pub use void::ResultVoidErrExt as _;\n\n pub use ufmt::uWrite as _;\n\n}\n\n\n\n// For making certain traits unimplementable from outside this crate.\n\nmod sealed {\n", "file_path": "avr-hal-generic/src/lib.rs", "rank": 59, "score": 20830.767329595186 }, { "content": "/// Analog-to-Digial converter\n\nuse core::marker::PhantomData;\n\n\n\n/// The division factor between the system clock frequency and the input clock to the AD converter.\n\n///\n\n/// To get 10-bit precision, clock from 50kHz to 200kHz must be supplied. If you need less\n\n/// precision, you can supply a higher clock.\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\n#[repr(u8)]\n\npub enum ClockDivider {\n\n Factor2,\n\n Factor4,\n\n Factor8,\n\n Factor16,\n\n Factor32,\n\n Factor64,\n\n /// (default)\n\n Factor128,\n\n}\n\n\n\nimpl Default for ClockDivider {\n\n fn default() -> Self {\n\n Self::Factor128\n\n }\n\n}\n\n\n\n/// Internal trait for the low-level ADC peripheral.\n\n///\n\n/// **Prefer using the [`Adc`] API instead of this trait.**\n", "file_path": "avr-hal-generic/src/adc.rs", "rank": 60, "score": 20826.48381076531 }, { "content": "/// I2C Transfer Direction\n\n#[derive(ufmt::derive::uDebug, Debug, Clone, Copy, Eq, PartialEq)]\n\n#[repr(u8)]\n\npub enum Direction {\n\n /// Write to a slave (LSB is 0)\n\n Write,\n\n /// Read from a slave (LSB is 1)\n\n Read,\n\n}\n\n\n\n/// Internal trait for low-level I2C peripherals.\n\n///\n\n/// This trait defines the common interface for all I2C peripheral variants. It is used as an\n\n/// intermediate abstraction ontop of which the [`I2c`] API is built. **Prefer using the\n\n/// [`I2c`] API instead of this trait.**\n", "file_path": "avr-hal-generic/src/i2c.rs", "rank": 61, "score": 20825.55140011737 }, { "content": "impl Default for ReferenceVoltage {\n\n fn default() -> Self {\n\n Self::AVcc\n\n }\n\n}\n\n\n\n/// Configuration for the ADC peripheral.\n\n#[derive(Default, Debug, Clone, Copy, PartialEq, Eq)]\n\npub struct AdcSettings {\n\n pub clock_divider: ClockDivider,\n\n pub ref_voltage: ReferenceVoltage,\n\n}\n\n\n", "file_path": "mcu/atmega-hal/src/adc.rs", "rank": 62, "score": 20824.831465893545 }, { "content": "//! Analog-to-Digital Converter\n\n\n\nuse crate::port;\n\npub use avr_hal_generic::adc::{AdcChannel, AdcOps, ClockDivider};\n\n\n\n/// Select the voltage reference for the ADC peripheral\n\n///\n\n/// The internal voltage reference options may not be used if an external reference voltage is\n\n/// being applied to the AREF pin.\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\n#[repr(u8)]\n\npub enum ReferenceVoltage {\n\n /// Voltage applied to AREF pin.\n\n Aref,\n\n /// Default reference voltage (default).\n\n AVcc,\n\n /// Internal reference voltage.\n\n Internal,\n\n}\n\n\n", "file_path": "mcu/atmega-hal/src/adc.rs", "rank": 63, "score": 20824.460704310688 }, { "content": "//! HAL abstractions for USART/Serial\n\n//!\n\n//! 
Check the documentation of [`Usart`] for details.\n\n\n\nuse core::cmp::Ordering;\n\nuse core::marker;\n\nuse void::ResultVoidExt;\n\n\n\nuse crate::port;\n\n\n\n/// Representation of a USART baudrate\n\n///\n\n/// Precalculated parameters for configuring a certain USART baudrate.\n\n#[derive(Debug, Clone, Copy)]\n\npub struct Baudrate<CLOCK> {\n\n /// Value of the `UBRR#` register\n\n pub ubrr: u16,\n\n /// Value of the `U2X#` bit\n\n pub u2x: bool,\n\n /// The baudrate calculation depends on the configured clock rate, thus a `CLOCK` generic\n", "file_path": "avr-hal-generic/src/usart.rs", "rank": 64, "score": 20824.235523247968 }, { "content": " /// parameter is needed.\n\n pub _clock: marker::PhantomData<CLOCK>,\n\n}\n\n\n\nimpl<CLOCK: crate::clock::Clock> PartialEq for Baudrate<CLOCK> {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.compare_value() == other.compare_value()\n\n }\n\n}\n\n\n\nimpl<CLOCK: crate::clock::Clock> Eq for Baudrate<CLOCK> {}\n\n\n\nimpl<CLOCK: crate::clock::Clock> PartialOrd for Baudrate<CLOCK> {\n\n fn partial_cmp(&self, other: &Self) -> Option<Ordering> {\n\n Some(self.compare_value().cmp(&other.compare_value()))\n\n }\n\n}\n\n\n\nimpl<CLOCK: crate::clock::Clock> Ord for Baudrate<CLOCK> {\n\n fn cmp(&self, other: &Self) -> Ordering {\n", "file_path": "avr-hal-generic/src/usart.rs", "rank": 65, "score": 20823.152046255498 }, { "content": " /// each pin gets its own distinct type in `avr-hal`, but by\n\n /// [downgrading][avr_hal_generic::port::Pin#downgrading], you can cast them into this\n\n /// \"dynamic\" type. Do note, however, that using this dynamic type has a runtime cost.\n\n pub type Pin<MODE, PIN = Dynamic> = $crate::port::Pin<MODE, PIN>;\n\n\n\n $(#[$pins_attr])*\n\n pub struct Pins {\n\n $(pub $pin: Pin<\n\n mode::Input<mode::Floating>,\n\n $Pin,\n\n >,)+\n\n }\n\n\n\n impl Pins {\n\n pub fn new(\n\n $(_: $Port,)+\n\n ) -> Self {\n\n Self {\n\n $($pin: $crate::port::Pin::new(\n\n $Pin { _private: (), }\n", "file_path": "avr-hal-generic/src/port.rs", "rank": 66, "score": 20822.535413164725 }, { "content": "/// use atmega_hal::port::{Pin, mode, self};\n\n///\n\n/// let dp = atmega_hal::Peripherals::take().unwrap();\n\n/// let pins = atmega_hal::pins!(dp);\n\n///\n\n/// let output: Pin<mode::Output, port::PD3> = pins.pd3.into_output();\n\n/// ```\n\npub struct Pin<MODE, PIN> {\n\n pub(crate) pin: PIN,\n\n pub(crate) _mode: PhantomData<MODE>,\n\n}\n\n\n\nimpl<PIN: PinOps> Pin<mode::Input<mode::Floating>, PIN> {\n\n #[doc(hidden)]\n\n pub fn new(pin: PIN) -> Self {\n\n Pin {\n\n pin,\n\n _mode: PhantomData,\n\n }\n\n }\n", "file_path": "avr-hal-generic/src/port.rs", "rank": 67, "score": 20821.79803896673 }, { "content": "//! PWM Implementation\n\n\n\n/// Clock prescaler for PWM\n\n///\n\n/// The prescaler dictates the PWM frequency, together with the IO clock. 
The formula is as\n\n/// follows:\n\n///\n\n/// ```text\n\n/// F_pwm = CLK_io / (Prescaler * 256);\n\n/// ```\n\n///\n\n/// | Prescaler | 16 MHz Clock | 8 MHz Clock |\n\n/// | --- | --- | ---|\n\n/// | `Direct` | 62.5 kHz | 31.3 kHz |\n\n/// | `Prescale8` | 7.81 kHz | 3.91 kHz |\n\n/// | `Prescale64` | 977 Hz | 488 Hz |\n\n/// | `Prescale256` | 244 Hz | 122 Hz |\n\n/// | `Prescale1024` | 61.0 Hz | 30.5 Hz |\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]\n\npub enum Prescaler {\n", "file_path": "avr-hal-generic/src/pwm.rs", "rank": 68, "score": 20821.753784193505 }, { "content": "pub use avr_device::entry;\n\n\n\n#[cfg(feature = \"device-selected\")]\n\npub use pac::Peripherals;\n\n\n\npub use avr_hal_generic::clock;\n\npub use avr_hal_generic::delay;\n\n\n\n#[cfg(feature = \"device-selected\")]\n\npub mod port;\n\n#[cfg(feature = \"device-selected\")]\n\npub use port::Pins;\n\n\n\npub struct Attiny;\n\n\n\n#[cfg(feature = \"attiny85\")]\n\n#[macro_export]\n\nmacro_rules! pins {\n\n ($p:expr) => {\n\n $crate::Pins::new($p.PORTB)\n", "file_path": "mcu/attiny-hal/src/lib.rs", "rank": 69, "score": 20821.700362371103 }, { "content": "pub use atmega_hal::port::mode;\n\npub use atmega_hal::port::Pin;\n\n\n\navr_hal_generic::renamed_pins! {\n\n type Pin = Pin;\n\n\n\n /// Pins of the **SparkFun ProMicro**.\n\n ///\n\n /// This struct is best initialized via the [`arduino_hal::pins!()`][pins] macro.\n\n pub struct Pins from atmega_hal::Pins {\n\n /// `RX`\n\n ///\n\n /// `RX` (UART)\n\n pub rx: atmega_hal::port::PD2 = pd2,\n\n /// `TX`\n\n ///\n\n /// `TX` (UART)\n\n pub tx: atmega_hal::port::PD3 = pd3,\n\n /// `D2` / `SDA`\n\n ///\n", "file_path": "arduino-hal/src/port/promicro.rs", "rank": 70, "score": 20821.485669413596 }, { "content": "pub use attiny_hal::port::mode;\n\npub use attiny_hal::port::Pin;\n\n\n\navr_hal_generic::renamed_pins! {\n\n type Pin = Pin;\n\n\n\n pub struct Pins from attiny_hal::Pins {\n\n /// `#0`: `PB0`, `DI`(SPI), `SDA`(I2C)\n\n pub d0: attiny_hal::port::PB0 = pb0,\n\n /// `#1`: `PB1`, `DO`(SPI), Builtin LED\n\n pub d1: attiny_hal::port::PB1 = pb1,\n\n /// `#2`: `PB2`, `SCK`(SPI), `SCL`(I2C)\n\n pub d2: attiny_hal::port::PB2 = pb2,\n\n /// `#3`: `PB3`\n\n pub d3: attiny_hal::port::PB3 = pb3,\n\n /// `#4`: `PB4`\n\n pub d4: attiny_hal::port::PB4 = pb4,\n\n }\n\n}\n", "file_path": "arduino-hal/src/port/trinket.rs", "rank": 71, "score": 20821.427877862556 }, { "content": "pub use atmega_hal::port::mode;\n\npub use atmega_hal::port::Pin;\n\n\n\navr_hal_generic::renamed_pins! {\n\n type Pin = Pin;\n\n\n\n /// Pins of the **Arduino Mega 2560**.\n\n ///\n\n /// This struct is best initialized via the [`arduino_hal::pins!()`][pins] macro.\n\n pub struct Pins from atmega_hal::Pins {\n\n /// `D0` / `RX0`\n\n ///\n\n /// * `RXD0` (USART0)\n\n /// * `PCINT8`: External Interrupt (Pin Change)\n\n pub d0: atmega_hal::port::PE0 = pe0,\n\n /// `D1` / `TX0`\n\n ///\n\n /// * `TXD0` (USART0)\n\n pub d1: atmega_hal::port::PE1 = pe1,\n\n /// `D2`\n", "file_path": "arduino-hal/src/port/mega2560.rs", "rank": 72, "score": 20821.336777236247 }, { "content": "pub use atmega_hal::port::mode;\n\npub use atmega_hal::port::Pin;\n\n\n\navr_hal_generic::renamed_pins! 
{\n\n type Pin = Pin;\n\n\n\n /// Pins of the **Arduino Leonardo**.\n\n ///\n\n /// This struct is best initialized via the [`arduino_hal::pins!()`][pins] macro.\n\n pub struct Pins from atmega_hal::Pins {\n\n /// `D0` / `RX`\n\n ///\n\n /// * `RX` (UART)\n\n /// * `INT2`: External Interrupt\n\n pub d0: atmega_hal::port::PD2 = pd2,\n\n /// `D1` / `TX`\n\n ///\n\n /// * `TX` (UART)\n\n /// * `INT3`: External Interrupt\n\n pub d1: atmega_hal::port::PD3 = pd3,\n", "file_path": "arduino-hal/src/port/leonardo.rs", "rank": 73, "score": 20821.336777236247 }, { "content": " ///\n\n /// ```text\n\n /// - 0 1 2 3 4 5 6 7 8 9 a b c d e f\n\n /// 00: -- -- -- -- -- -- -- -- -- -- -- -- -- --\n\n /// 10: -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --\n\n /// 20: -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --\n\n /// 30: -- -- -- -- -- -- -- -- 38 39 -- -- -- -- -- --\n\n /// 40: -- -- -- -- -- -- -- -- 48 -- -- -- -- -- -- --\n\n /// 50: -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --\n\n /// 60: -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --\n\n /// 70: -- -- -- -- -- -- -- --\n\n /// ```\n\n ///\n\n /// [i2cdetect-linux]: https://man.archlinux.org/man/community/i2c-tools/i2cdetect.8.en\n\n pub fn i2cdetect<W: ufmt::uWrite>(\n\n &mut self,\n\n w: &mut W,\n\n direction: Direction,\n\n ) -> Result<(), W::Error> {\n\n use hal::blocking::delay::DelayMs;\n", "file_path": "avr-hal-generic/src/i2c.rs", "rank": 74, "score": 20821.302774883246 }, { "content": "pub use avr_device::atmega48p as pac;\n\n\n\n/// See [`avr_device::entry`](https://docs.rs/avr-device/latest/avr_device/attr.entry.html).\n\n#[cfg(feature = \"rt\")]\n\npub use avr_device::entry;\n\n\n\n#[cfg(feature = \"device-selected\")]\n\npub use pac::Peripherals;\n\n\n\npub use avr_hal_generic::clock;\n\npub use avr_hal_generic::delay;\n\n\n\n#[cfg(feature = \"device-selected\")]\n\npub mod adc;\n\n#[cfg(feature = \"device-selected\")]\n\npub use adc::Adc;\n\n\n\n#[cfg(feature = \"device-selected\")]\n\npub mod i2c;\n\n#[cfg(feature = \"device-selected\")]\n", "file_path": "mcu/atmega-hal/src/lib.rs", "rank": 75, "score": 20821.25831849513 }, { "content": " CSPIN: port::PinOps,\n\n{\n\n type Error = void::Void;\n\n\n\n /// Sets up the device for transmission and sends the data\n\n fn send(&mut self, byte: u8) -> nb::Result<(), Self::Error> {\n\n self.flush()?;\n\n self.write(byte);\n\n Ok(())\n\n }\n\n\n\n /// Reads and returns the response in the data register\n\n fn read(&mut self) -> nb::Result<u8, Self::Error> {\n\n self.flush()?;\n\n Ok(self.receive())\n\n }\n\n}\n\n\n\n/// Default Transfer trait implementation. Only 8-bit word size is supported for now.\n\nimpl<H, SPI, SCLKPIN, MOSIPIN, MISOPIN, CSPIN> hal::blocking::spi::transfer::Default<u8>\n", "file_path": "avr-hal-generic/src/spi.rs", "rank": 76, "score": 20821.195936511715 }, { "content": "pub use atmega_hal::port::mode;\n\npub use atmega_hal::port::Pin;\n\n\n\navr_hal_generic::renamed_pins! 
{\n\n type Pin = Pin;\n\n\n\n /// Pins of the **Arduino Uno**.\n\n ///\n\n /// This struct is best initialized via the [`arduino_hal::pins!()`][pins] macro.\n\n pub struct Pins from atmega_hal::Pins {\n\n /// `A0`\n\n ///\n\n /// * ADC0 (ADC input channel 0)\n\n /// * PCINT8 (pin change interrupt 8)\n\n pub a0: atmega_hal::port::PC0 = pc0,\n\n /// `A1`\n\n ///\n\n /// * ADC1 (ADC input channel 1)\n\n /// * PCINT9 (pin change interrupt 9)\n\n pub a1: atmega_hal::port::PC1 = pc1,\n", "file_path": "arduino-hal/src/port/uno.rs", "rank": 77, "score": 20821.192385817376 }, { "content": "pub use atmega_hal::port::mode;\n\npub use atmega_hal::port::Pin;\n\n\n\navr_hal_generic::renamed_pins! {\n\n type Pin = Pin;\n\n\n\n /// Pins of the **Arduino Diecimila**.\n\n ///\n\n /// This struct is best initialized via the [`arduino_hal::pins!()`][pins] macro.\n\n pub struct Pins from atmega_hal::Pins {\n\n /// `A0`\n\n ///\n\n /// * ADC0 (ADC input channel 0)\n\n /// * PCINT8 (pin change interrupt 8)\n\n pub a0: atmega_hal::port::PC0 = pc0,\n\n /// `A1`\n\n ///\n\n /// * ADC1 (ADC input channel 1)\n\n /// * PCINT9 (pin change interrupt 9)\n\n pub a1: atmega_hal::port::PC1 = pc1,\n", "file_path": "arduino-hal/src/port/diecimila.rs", "rank": 78, "score": 20821.192385817376 }, { "content": "//! Delay implementations\n\n\n\nuse core::marker;\n\nuse hal::blocking::delay;\n\n\n\n/// A busy-loop delay implementation\n\n///\n\n/// # Example\n\n/// ```rust\n\n/// let mut delay = delay::Delay::<clock::MHz16>::new();\n\n///\n\n/// // Wait 1 second\n\n/// delay.delay_ms(1000);\n\n/// ```\n\n///\n\n/// # Warning\n\n/// The delay is not accurate for values above 4095µs because of a loop whose\n\n/// overhead is not accounted for. This will be fixed in a future version.\n\n#[derive(Debug, Clone, Copy)]\n\npub struct Delay<SPEED> {\n", "file_path": "avr-hal-generic/src/delay.rs", "rank": 79, "score": 20820.84111478338 }, { "content": "pub use i2c::I2c;\n\n\n\n#[cfg(feature = \"device-selected\")]\n\npub mod spi;\n\n#[cfg(feature = \"device-selected\")]\n\npub use spi::Spi;\n\n\n\n#[cfg(feature = \"device-selected\")]\n\npub mod port;\n\n#[cfg(feature = \"device-selected\")]\n\npub use port::Pins;\n\n\n\n#[cfg(feature = \"device-selected\")]\n\npub mod usart;\n\n#[cfg(feature = \"device-selected\")]\n\npub use usart::Usart;\n\n\n\n#[cfg(feature = \"device-selected\")]\n\npub mod wdt;\n\n#[cfg(feature = \"device-selected\")]\n", "file_path": "mcu/atmega-hal/src/lib.rs", "rank": 80, "score": 20820.790565724303 }, { "content": " $($(#[$pin_attr])*\n\n pub $pin: $PinType<\n\n $crate::port::mode::Input<$crate::port::mode::Floating>,\n\n $Pin,\n\n >,)+\n\n }\n\n\n\n impl Pins {\n\n pub fn with_mcu_pins(pins: $McuPins) -> Self {\n\n Self {\n\n $($pin: pins.$pin_orig,)+\n\n }\n\n }\n\n }\n\n };\n\n}\n", "file_path": "avr-hal-generic/src/port.rs", "rank": 81, "score": 20820.7750737189 }, { "content": " self.0.set_low();\n\n Ok(())\n\n }\n\n fn set_high(&mut self) -> Result<(), Self::Error> {\n\n self.0.set_high();\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl<CSPIN: port::PinOps> hal::digital::v2::StatefulOutputPin for ChipSelectPin<CSPIN> {\n\n fn is_set_low(&self) -> Result<bool, Self::Error> {\n\n Ok(self.0.is_set_low())\n\n }\n\n fn is_set_high(&self) -> Result<bool, Self::Error> {\n\n Ok(self.0.is_set_high())\n\n }\n\n}\n\n\n\nimpl<CSPIN: port::PinOps> hal::digital::v2::ToggleableOutputPin for ChipSelectPin<CSPIN> {\n\n type Error = crate::void::Void;\n", "file_path": "avr-hal-generic/src/spi.rs", "rank": 82, "score": 20820.59455645864 }, 
{ "content": "/// (for Arduino Uno)\n\n/// ```\n\n/// let dp = arduino_hal::Peripherals::take().unwrap();\n\n/// let pins = arduino_hal::pins!(dp);\n\n///\n\n/// let mut i2c = arduino_hal::I2c::new(\n\n/// dp.TWI,\n\n/// pins.a4.into_pull_up_input(),\n\n/// pins.a5.into_pull_up_input(),\n\n/// 50000,\n\n/// );\n\n///\n\n/// // i2c implements the embedded-hal traits so it can be used with generic drivers.\n\n/// ```\n\npub struct I2c<H, I2C: I2cOps<H, SDA, SCL>, SDA, SCL, CLOCK> {\n\n p: I2C,\n\n #[allow(dead_code)]\n\n sda: SDA,\n\n #[allow(dead_code)]\n\n scl: SCL,\n", "file_path": "avr-hal-generic/src/i2c.rs", "rank": 83, "score": 20820.386689717925 }, { "content": "/// | `OscfOver8` | 2 MHz | 1 MHz |\n\n/// | `OscfOver16` | 1 MHz | 500 kHz |\n\n/// | `OscfOver32` | 500 kHz | 250 kHz |\n\n/// | `OscfOver64` | 250 kHz | 125 kHz |\n\n/// | `OscfOver128` | 125 kHz | 62.5 kHz |\n\n#[derive(Clone, Copy, Debug, PartialEq, Eq)]\n\npub enum SerialClockRate {\n\n OscfOver2,\n\n OscfOver4,\n\n OscfOver8,\n\n OscfOver16,\n\n OscfOver32,\n\n OscfOver64,\n\n OscfOver128,\n\n}\n\n\n\n/// Order of data transmission, either MSB first or LSB first\n\n#[derive(Clone, Copy, Debug, PartialEq, Eq)]\n\npub enum DataOrder {\n\n MostSignificantFirst,\n", "file_path": "avr-hal-generic/src/spi.rs", "rank": 84, "score": 20820.306963329687 }, { "content": "\n\n fn write(&mut self, byte: u8) -> nb::Result<(), Self::Error> {\n\n self.p.raw_write(byte)\n\n }\n\n\n\n fn flush(&mut self) -> nb::Result<(), Self::Error> {\n\n self.p.raw_flush()\n\n }\n\n}\n\n\n\nimpl<H, USART: UsartOps<H, RX, TX>, RX, TX, CLOCK> hal::serial::Read<u8>\n\n for UsartReader<H, USART, RX, TX, CLOCK>\n\n{\n\n type Error = void::Void;\n\n\n\n fn read(&mut self) -> nb::Result<u8, Self::Error> {\n\n self.p.raw_read()\n\n }\n\n}\n\n\n", "file_path": "avr-hal-generic/src/usart.rs", "rank": 85, "score": 20820.271036229467 }, { "content": "{\n\n type Error = void::Void;\n\n\n\n fn write(&mut self, byte: u8) -> nb::Result<(), Self::Error> {\n\n self.p.raw_write(byte)\n\n }\n\n\n\n fn flush(&mut self) -> nb::Result<(), Self::Error> {\n\n self.p.raw_flush()\n\n }\n\n}\n\n\n\nimpl<H, USART: UsartOps<H, RX, TX>, RX, TX, CLOCK> hal::serial::Read<u8>\n\n for Usart<H, USART, RX, TX, CLOCK>\n\n{\n\n type Error = void::Void;\n\n\n\n fn read(&mut self) -> nb::Result<u8, Self::Error> {\n\n self.p.raw_read()\n\n }\n", "file_path": "avr-hal-generic/src/usart.rs", "rank": 86, "score": 20820.168792323086 }, { "content": " type Error = core::convert::Infallible;\n\n\n\n fn set_high(&mut self) -> Result<(), Self::Error> {\n\n self.set_high();\n\n Ok(())\n\n }\n\n\n\n fn set_low(&mut self) -> Result<(), Self::Error> {\n\n self.set_low();\n\n Ok(())\n\n }\n\n}\n\n\n\n// Implements InputPin from embedded-hal to make sure external libraries work\n\nimpl<PIN: PinOps, IMODE: mode::InputMode> InputPin for Pin<mode::Input<IMODE>, PIN> {\n\n type Error = core::convert::Infallible;\n\n\n\n fn is_high(&self) -> Result<bool, Self::Error> {\n\n Ok(self.is_high())\n\n }\n", "file_path": "avr-hal-generic/src/port.rs", "rank": 87, "score": 20820.06880640208 }, { "content": " ),)+\n\n }\n\n }\n\n }\n\n\n\n #[repr(u8)]\n\n pub enum DynamicPort {\n\n $($PortName,)+\n\n }\n\n\n\n pub struct Dynamic {\n\n port: DynamicPort,\n\n // We'll store the mask instead of the pin number because this allows much less code to\n\n // be generated for the trait method implementations.\n\n mask: u8,\n\n }\n\n\n\n impl Dynamic {\n\n fn new(port: DynamicPort, pin_num: u8) -> Self {\n\n Self {\n", 
"file_path": "avr-hal-generic/src/port.rs", "rank": 88, "score": 20820.00426237222 }, { "content": "#[allow(unused_imports)]\n\nuse crate::port;\n\npub use avr_hal_generic::spi::*;\n\n\n\n#[cfg(feature = \"attiny88\")]\n\npub type Spi = avr_hal_generic::spi::Spi<\n\n crate::Atmega,\n\n crate::pac::SPI,\n\n port::PB5,\n\n port::PB3,\n\n port::PB4,\n\n port::PB2,\n\n>;\n\n#[cfg(feature = \"attiny88\")]\n\navr_hal_generic::impl_spi! {\n\n hal: crate::Atmega,\n\n peripheral: crate::pac::SPI,\n\n sclk: port::PB5,\n\n mosi: port::PB3,\n\n miso: port::PB4,\n\n cs: port::PB2,\n\n}\n", "file_path": "mcu/attiny-hal/src/spi.rs", "rank": 89, "score": 20819.9311099696 }, { "content": "#[allow(unused_imports)]\n\nuse crate::port;\n\npub use avr_hal_generic::spi::*;\n\n\n\n#[cfg(any(feature = \"atmega2560\", feature = \"atmega32u4\"))]\n\npub type Spi = avr_hal_generic::spi::Spi<\n\n crate::Atmega,\n\n crate::pac::SPI,\n\n port::PB1,\n\n port::PB2,\n\n port::PB3,\n\n port::PB0,\n\n>;\n\n#[cfg(any(feature = \"atmega2560\", feature = \"atmega32u4\"))]\n\navr_hal_generic::impl_spi! {\n\n hal: crate::Atmega,\n\n peripheral: crate::pac::SPI,\n\n sclk: port::PB1,\n\n mosi: port::PB2,\n\n miso: port::PB3,\n", "file_path": "mcu/atmega-hal/src/spi.rs", "rank": 90, "score": 20819.886610501886 }, { "content": " LeastSignificantFirst,\n\n}\n\n\n\n/// Settings to pass to Spi.\n\n///\n\n/// Easiest way to initialize is with\n\n/// `Settings::default()`. Otherwise can be instantiated with alternate\n\n/// settings directly.\n\n#[derive(Clone, PartialEq, Eq)]\n\npub struct Settings {\n\n pub data_order: DataOrder,\n\n pub clock: SerialClockRate,\n\n pub mode: spi::Mode,\n\n}\n\n\n\nimpl Default for Settings {\n\n fn default() -> Self {\n\n Settings {\n\n data_order: DataOrder::MostSignificantFirst,\n\n clock: SerialClockRate::OscfOver4,\n", "file_path": "avr-hal-generic/src/spi.rs", "rank": 91, "score": 20819.754745562637 }, { "content": "#[allow(unused_imports)]\n\nuse crate::port;\n\npub use avr_hal_generic::usart::*;\n\n\n\npub type Usart<USART, RX, TX, CLOCK> =\n\n avr_hal_generic::usart::Usart<crate::Atmega, USART, RX, TX, CLOCK>;\n\npub type UsartWriter<USART, RX, TX, CLOCK> =\n\n avr_hal_generic::usart::UsartWriter<crate::Atmega, USART, RX, TX, CLOCK>;\n\npub type UsartReader<USART, RX, TX, CLOCK> =\n\n avr_hal_generic::usart::UsartReader<crate::Atmega, USART, RX, TX, CLOCK>;\n\n\n\n#[cfg(any(feature = \"atmega168\", feature = \"atmega328p\", feature = \"atmega328pb\"))]\n\npub type Usart0<CLOCK> = Usart<\n\n crate::pac::USART0,\n\n port::Pin<port::mode::Input, port::PD0>,\n\n port::Pin<port::mode::Output, port::PD1>,\n\n CLOCK,\n\n>;\n\n#[cfg(any(feature = \"atmega168\", feature = \"atmega328p\", feature = \"atmega328pb\"))]\n\navr_hal_generic::impl_usart_traditional! {\n", "file_path": "mcu/atmega-hal/src/usart.rs", "rank": 92, "score": 20819.55762273225 }, { "content": "pub use wdt::Wdt;\n\n\n\npub struct Atmega;\n\n\n\n#[cfg(any(feature = \"atmega48p\", feature = \"atmega168\", feature = \"atmega328p\"))]\n\n#[macro_export]\n\nmacro_rules! pins {\n\n ($p:expr) => {\n\n $crate::Pins::new($p.PORTB, $p.PORTC, $p.PORTD)\n\n };\n\n}\n\n#[cfg(feature = \"atmega328pb\")]\n\n#[macro_export]\n\nmacro_rules! 
pins {\n\n ($p:expr) => {\n\n $crate::Pins::new($p.PORTB, $p.PORTC, $p.PORTD, $p.PORTE)\n\n };\n\n}\n\n#[cfg(feature = \"atmega32u4\")]\n\n#[macro_export]\n", "file_path": "mcu/atmega-hal/src/lib.rs", "rank": 93, "score": 20819.497871754702 }, { "content": "#[allow(unused_imports)]\n\nuse crate::port;\n\npub use avr_hal_generic::i2c::*;\n\n\n\n#[cfg(any(feature = \"atmega1280\", feature = \"atmega2560\", feature = \"atmega32u4\"))]\n\npub type I2c<CLOCK> = avr_hal_generic::i2c::I2c<\n\n crate::Atmega,\n\n crate::pac::TWI,\n\n port::Pin<port::mode::Input, port::PD1>,\n\n port::Pin<port::mode::Input, port::PD0>,\n\n CLOCK,\n\n>;\n\n#[cfg(any(feature = \"atmega1280\", feature = \"atmega2560\", feature = \"atmega32u4\"))]\n\navr_hal_generic::impl_i2c_twi! {\n\n hal: crate::Atmega,\n\n peripheral: crate::pac::TWI,\n\n sda: port::PD1,\n\n scl: port::PD0,\n\n}\n\n\n", "file_path": "mcu/atmega-hal/src/i2c.rs", "rank": 94, "score": 20819.464086161508 }, { "content": "/// Check the [`avr_hal_generic::adc::Adc`] documentation.\n\npub type Adc<CLOCK> = avr_hal_generic::adc::Adc<crate::Atmega, crate::pac::ADC, CLOCK>;\n\n\n\n/// Check the [`avr_hal_generic::adc::Channel`] documentation.\n\npub type Channel = avr_hal_generic::adc::Channel<crate::Atmega, crate::pac::ADC>;\n\n\n\n/// Additional channels\n\n///\n\n/// Some channels are not directly connected to pins. This module provides types which can be used\n\n/// to access them.\n\n///\n\n/// # Example\n\n/// ```\n\n/// let dp = atmega_hal::Peripherals::take().unwrap();\n\n/// let mut adc = atmega_hal::Adc::new(dp.ADC, Default::default());\n\n///\n\n/// let value = adc.read_blocking(&channel::Vbg);\n\n/// ```\n\npub mod channel {\n\n #[cfg(all(\n", "file_path": "mcu/atmega-hal/src/adc.rs", "rank": 95, "score": 20819.274010730962 }, { "content": " let $prescaler = prescaler;\n\n $init_block\n\n }\n\n\n\n t\n\n }\n\n }\n\n\n\n $(\n\n impl $port::$PXi<$crate::port::mode::Output> {\n\n pub fn $into_pwm(self, pwm_timer: &mut $TimerPwm)\n\n -> $port::$PXi<$crate::port::mode::Pwm<$TimerPwm>>\n\n {\n\n $port::$PXi { _mode: core::marker::PhantomData }\n\n }\n\n }\n\n\n\n impl $crate::hal::PwmPin for $port::$PXi<$crate::port::mode::Pwm<$TimerPwm>> {\n\n type Duty = u8;\n\n\n", "file_path": "avr-hal-generic/src/pwm.rs", "rank": 96, "score": 20818.995322754563 }, { "content": " /// Illegal start or stop condition\n\n pub const TW_BUS_ERROR: u8 = 0x00 >> 3;\n\n}\n\n\n\n/// I2C Error\n\n#[derive(ufmt::derive::uDebug, Debug, Clone, Copy, Eq, PartialEq)]\n\n#[repr(u8)]\n\npub enum Error {\n\n /// Lost arbitration while trying to acquire bus\n\n ArbitrationLost,\n\n /// No slave answered for this address or a slave replied NACK\n\n AddressNack,\n\n /// Slave replied NACK to sent data\n\n DataNack,\n\n /// A bus-error occured\n\n BusError,\n\n /// An unknown error occured. The bus might be in an unknown state.\n\n Unknown,\n\n}\n\n\n", "file_path": "avr-hal-generic/src/i2c.rs", "rank": 97, "score": 20818.73821777998 }, { "content": " fn write_read(\n\n &mut self,\n\n address: u8,\n\n bytes: &[u8],\n\n buffer: &mut [u8],\n\n ) -> Result<(), Self::Error> {\n\n self.p.raw_start(address, Direction::Write)?;\n\n self.p.raw_write(bytes)?;\n\n self.p.raw_start(address, Direction::Read)?;\n\n self.p.raw_read(buffer)?;\n\n self.p.raw_stop()?;\n\n Ok(())\n\n }\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! 
impl_i2c_twi {\n\n (\n\n hal: $HAL:ty,\n\n peripheral: $I2C:ty,\n", "file_path": "avr-hal-generic/src/i2c.rs", "rank": 98, "score": 20818.628503622982 }, { "content": " /// `SDA`: i2c/twi data\n\n pub d2: atmega_hal::port::PD1 = pd1,\n\n /// `D3` / `SCL`\n\n ///\n\n /// `SCL`: i2c/twi clock\n\n pub d3: atmega_hal::port::PD0 = pd0,\n\n /// `D4`\n\n pub d4: atmega_hal::port::PD4 = pd4,\n\n /// `D5`\n\n pub d5: atmega_hal::port::PC6 = pc6,\n\n /// `D6`\n\n pub d6: atmega_hal::port::PD7 = pd7,\n\n /// `D7`\n\n pub d7: atmega_hal::port::PE6 = pe6,\n\n /// `D8`\n\n pub d8: atmega_hal::port::PB4 = pb4,\n\n /// `D9`\n\n pub d9: atmega_hal::port::PB5 = pb5,\n\n /// `D10`\n\n pub d10: atmega_hal::port::PB6 = pb6,\n", "file_path": "arduino-hal/src/port/promicro.rs", "rank": 99, "score": 20818.602887597386 } ]
Rust
src/main.rs
hardcorebadger/rustchain
e03bbf952726bf4e424f6be0331d8d9a8b16cef2
#[macro_use] extern crate lazy_static; #[macro_use] extern crate mime; #[macro_use] extern crate serde_derive; extern crate iron; extern crate router; extern crate serde; extern crate serde_json; extern crate uuid; extern crate bodyparser; use iron::prelude::*; use iron::status; use router::Router; use std::sync::{Arc, Mutex, RwLock}; use uuid::Uuid; mod blockchain; mod block; mod transaction; use blockchain::Blockchain; use transaction::Transaction; fn main() { lazy_static! { static ref CHAIN: Arc<RwLock<Blockchain>> = { let mut chain: Arc<RwLock<Blockchain>> = Arc::new(RwLock::new(Blockchain::new())); chain }; } println!("Serving on http://localhost:3000..."); let mut router = Router::new(); let node_id: Uuid = Uuid::new_v4(); router.get("/", get_hello, "root"); router.get("/mine", move |r: &mut Request| { get_mine(r, Arc::clone(&CHAIN), node_id.clone()) }, "mine"); router.get("/chain", move |r: &mut Request| { get_chain(r, Arc::clone(&CHAIN)) }, "chain"); router.post("/transactions/new", move |r: &mut Request| { post_transaction(r, Arc::clone(&CHAIN)) }, "transaction"); Iron::new(router).http("localhost:3000").unwrap(); } fn get_hello(_request: &mut Request) -> IronResult<Response> { let mut response = Response::new(); response.set_mut(status::Ok); response.set_mut(mime!(Text/Html; Charset=Utf8)); response.set_mut("Hey! I'm Rustchain, welcome to the future.\n"); Ok(response) } fn get_mine(_request: &mut Request, _chain: Arc<RwLock<Blockchain>>, _node_id: Uuid) -> IronResult<Response> { let mut response = Response::new(); let node_id_str = _node_id.simple().to_string(); let mut ch = _chain.write().unwrap(); let last_proof = ch.last_proof(); let new_proof = ch.proof_of_work(last_proof); let index = ch.new_transaction("0", node_id_str.as_str(), 1); let new_block = ch.new_block(new_proof); assert_eq!(index, new_block.index); response.set_mut(status::Ok); response.set_mut(mime!(Text/Html; Charset=Utf8)); response.set_mut("Click. ** That's the sound of me mining a new block **\n"); Ok(response) } fn get_chain(_request: &mut Request, _chain: Arc<RwLock<Blockchain>>) -> IronResult<Response> { let mut response = Response::new(); let ch = _chain.read().unwrap(); let full_chain = ch.get_chain(); let json = serde_json::to_string(&full_chain); response.set_mut(status::Ok); response.set_mut(mime!(Text/Html; Charset=Utf8)); response.set_mut(json.unwrap()); Ok(response) } fn post_transaction(_request: &mut Request, _chain: Arc<RwLock<Blockchain>>) -> IronResult<Response> { let body = _request.get::<bodyparser::Raw>().unwrap().unwrap(); let transaction: Transaction = serde_json::from_str(&body).unwrap(); print!("before tx lock"); let mut ch = _chain.write().unwrap(); print!("after tx lock"); let index = ch.new_transaction(transaction.sender.as_str(), transaction.recipient.as_str(), transaction.amount); let mut response = Response::new(); response.set_mut(status::Ok); response.set_mut(mime!(Text/Html; Charset=Utf8)); response.set_mut(format!("transaction complete: block index: {}\n", index)); Ok(response) }
#[macro_use] extern crate lazy_static; #[macro_use] extern crate mime; #[macro_use] extern crate serde_derive; extern crate iron; extern crate router; extern crate serde; extern crate serde_json; extern crate uuid; extern crate bodyparser; use iron::prelude::*; use iron::status; use router::Router; use std::sync::{Arc, Mutex, RwLock}; use uuid::Uuid; mod blockchain; mod block; mod transaction; use blockchain::Blockchain; use transaction::Transaction;
fn get_hello(_request: &mut Request) -> IronResult<Response> { let mut response = Response::new(); response.set_mut(status::Ok); response.set_mut(mime!(Text/Html; Charset=Utf8)); response.set_mut("Hey! I'm Rustchain, welcome to the future.\n"); Ok(response) } fn get_mine(_request: &mut Request, _chain: Arc<RwLock<Blockchain>>, _node_id: Uuid) -> IronResult<Response> { let mut response = Response::new(); let node_id_str = _node_id.simple().to_string(); let mut ch = _chain.write().unwrap(); let last_proof = ch.last_proof(); let new_proof = ch.proof_of_work(last_proof); let index = ch.new_transaction("0", node_id_str.as_str(), 1); let new_block = ch.new_block(new_proof); assert_eq!(index, new_block.index); response.set_mut(status::Ok); response.set_mut(mime!(Text/Html; Charset=Utf8)); response.set_mut("Click. ** That's the sound of me mining a new block **\n"); Ok(response) } fn get_chain(_request: &mut Request, _chain: Arc<RwLock<Blockchain>>) -> IronResult<Response> { let mut response = Response::new(); let ch = _chain.read().unwrap(); let full_chain = ch.get_chain(); let json = serde_json::to_string(&full_chain); response.set_mut(status::Ok); response.set_mut(mime!(Text/Html; Charset=Utf8)); response.set_mut(json.unwrap()); Ok(response) } fn post_transaction(_request: &mut Request, _chain: Arc<RwLock<Blockchain>>) -> IronResult<Response> { let body = _request.get::<bodyparser::Raw>().unwrap().unwrap(); let transaction: Transaction = serde_json::from_str(&body).unwrap(); print!("before tx lock"); let mut ch = _chain.write().unwrap(); print!("after tx lock"); let index = ch.new_transaction(transaction.sender.as_str(), transaction.recipient.as_str(), transaction.amount); let mut response = Response::new(); response.set_mut(status::Ok); response.set_mut(mime!(Text/Html; Charset=Utf8)); response.set_mut(format!("transaction complete: block index: {}\n", index)); Ok(response) }
fn main() { lazy_static! { static ref CHAIN: Arc<RwLock<Blockchain>> = { let mut chain: Arc<RwLock<Blockchain>> = Arc::new(RwLock::new(Blockchain::new())); chain }; } println!("Serving on http://localhost:3000..."); let mut router = Router::new(); let node_id: Uuid = Uuid::new_v4(); router.get("/", get_hello, "root"); router.get("/mine", move |r: &mut Request| { get_mine(r, Arc::clone(&CHAIN), node_id.clone()) }, "mine"); router.get("/chain", move |r: &mut Request| { get_chain(r, Arc::clone(&CHAIN)) }, "chain"); router.post("/transactions/new", move |r: &mut Request| { post_transaction(r, Arc::clone(&CHAIN)) }, "transaction"); Iron::new(router).http("localhost:3000").unwrap(); }
function_block-full_function
[ { "content": "use std::fmt;\n\nuse transaction::Transaction;\n\n\n\n#[derive(Clone, Serialize, Deserialize)]\n\npub struct Block {\n\n\tpub index: i64,\n\n\tpub timestamp: i64,\n\n\tpub transactions: Vec<Transaction>,\n\n\tpub proof: i64,\n\n\tpub previous_hash: String\n\n\t//pub previous_hash: GenericArray<u8, Self::OutputSize>,\n\n}\n\n\n\nimpl Block {\n\n fn transaction_string(&self) -> String {\n\n let mut trans_string = String::new();\n\n for t in self.transactions.iter() {\n\n //trans_string.push_str(&t.to_string())\n\n trans_string.push_str(t.to_string().as_str())\n\n }\n", "file_path": "src/block.rs", "rank": 3, "score": 16091.945049043268 }, { "content": "use std::fmt;\n\n\n\n#[derive(Clone, Serialize, Deserialize, Debug)]\n\npub struct Transaction {\n\n\tpub sender: String,\n\n\tpub recipient: String,\n\n\tpub amount: i64\n\n}\n\n\n\n// impl Display, get to_string for free\n\nimpl fmt::Display for Transaction {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"(from: {}, to: {}, amt: {}) \", \n\n self.sender, self.recipient, self.amount)\n\n }\n\n}\n", "file_path": "src/transaction.rs", "rank": 4, "score": 16090.96698913994 }, { "content": " trans_string\n\n }\n\n}\n\n\n\n// impl Display, get to_string for free\n\nimpl fmt::Display for Block {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"index: {}, timestamp: {}, transactions: {}, proof: {}, previous_hash: {}\", self.index, self.timestamp, self.transaction_string(), self.proof, self.previous_hash)\n\n }\n\n}\n", "file_path": "src/block.rs", "rank": 5, "score": 16090.595758149908 }, { "content": "//extern crate sha2;\n\nextern crate crypto;\n\n\n\nuse block::Block;\n\nuse transaction::Transaction;\n\n//use self::sha2::{Sha256, Digest};\n\nuse self::crypto::digest::Digest;\n\nuse self::crypto::sha2::Sha256;\n\nuse std::time;\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct Blockchain {\n\n\tblocks: Vec<Block>,\n\n\tcurrent_transactions: Vec<Transaction>\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct ChainSnapshot {\n\n blocks: Vec<Block>\n\n}\n", "file_path": "src/blockchain.rs", "rank": 6, "score": 12734.384545887744 }, { "content": "\n\nimpl Blockchain {\n\n\n\n\tpub fn new() -> Blockchain {\n\n\t\tlet mut blocks = Vec::new();\n\n\t\tlet current_transactions = Vec::new();\n\n\n\n // Manually create genesis block\n\n let timestamp = match time::SystemTime::now()\n\n .duration_since(time::UNIX_EPOCH) {\n\n Ok(n) => n.as_secs() as i64,\n\n _ => panic!(\"Invalid SystemTime\"),\n\n };\n\n let transactions = current_transactions.clone();\n\n let previous_hash = \"0000000000000000000000000000000000000000000000000000000000000000\".to_string();\n\n\n\n // Add genesis block as first block\n\n blocks.push(Block{index:1, timestamp, transactions, proof:100, previous_hash});\n\n\n\n\t\tBlockchain{blocks,current_transactions}\n", "file_path": "src/blockchain.rs", "rank": 7, "score": 12730.369588372483 }, { "content": "\t}\n\n\n\n\tpub fn new_block(&mut self, new_proof: i64) -> Block {\n\n // Create a new block in the blockchain\n\n let index = (self.blocks.len()+1) as i64;\n\n let timestamp = match time::SystemTime::now()\n\n .duration_since(time::UNIX_EPOCH) {\n\n Ok(n) => n.as_secs() as i64,\n\n _ => panic!(\"Invalid SystemTime\"),\n\n };\n\n let transactions = self.current_transactions.clone();\n\n let proof = new_proof;\n\n let previous_hash = self.hash_last();\n\n\n\n self.current_transactions.clear();\n\n let new_block = 
Block{index,timestamp,transactions,proof,previous_hash};\n\n self.blocks.push( new_block.clone() );\n\n new_block\n\n //self.blocks.push(Block{index,timestamp,transactions,proof,previous_hash});\n\n\t}\n", "file_path": "src/blockchain.rs", "rank": 8, "score": 12729.733039245102 }, { "content": "\n\n\tpub fn new_transaction(&mut self, s:&str, r:&str, amount:i64) -> i64 {\n\n // Create a new transaction to go into the next mined block\n\n\t\tlet sender = s.to_owned();\n\n\t\tlet recipient = r.to_owned();\n\n\t\tself.current_transactions.push(Transaction{sender,recipient,amount});\n\n (self.blocks.len()+1) as i64 // index of next block\n\n\t}\n\n\n\n\tpub fn last_block(&mut self) -> Option<&Block> {\n\n self.blocks.last()\n\n\t}\n\n\n\n pub fn last_proof(&mut self) -> i64 {\n\n self.blocks.last().unwrap().proof\n\n }\n\n\n\n fn hash_last(&mut self) -> String {\n\n self.hash(self.blocks.last())\n\n }\n", "file_path": "src/blockchain.rs", "rank": 9, "score": 12729.054490889297 }, { "content": "\n\n\tpub fn hash(&self, block: Option<&Block>) -> String {\n\n // Creates a SHA-256 hash of a block\n\n match block {\n\n Some(b) => {\n\n let mut hasher = Sha256::new();\n\n hasher.input_str(&b.to_string());\n\n //String::from_utf8_lossy(hasher.result().as_slice()).into_owned()\n\n hasher.result_str()\n\n },\n\n None => panic!(\"Invalid block passed to hash\")\n\n }\n\n\t}\n\n\n\n pub fn proof_of_work(&self, last_proof: i64) -> i64 {\n\n // Simple proof of work algorithm:\n\n // find a number p such that hash(last_proof, p) \n\n // contains 4 leading zeros\n\n let mut p: i64 = 0;\n\n while !self.valid_proof(last_proof, p) { p += 1; }\n", "file_path": "src/blockchain.rs", "rank": 10, "score": 12727.982472126942 }, { "content": " p\n\n }\n\n\n\n fn valid_proof(&self, last_proof: i64, new_proof: i64) -> bool {\n\n // Validate the proof: Does hash(last_proof,new_proof)\n\n // contain 4 leading zeros\n\n let mut input_str = String::new();\n\n input_str.push_str(last_proof.to_string().as_str());\n\n input_str.push_str(new_proof.to_string().as_str());\n\n\n\n let mut hasher = Sha256::new();\n\n hasher.input_str(&input_str);\n\n hasher.result_str().starts_with(\"000\")\n\n //String::from_utf8_lossy(hasher.result().as_slice())\n\n // .into_owned().starts_with(\"000\")\n\n }\n\n\n\n pub fn get_chain(&self) -> ChainSnapshot {\n\n ChainSnapshot{blocks: self.blocks.clone()}\n\n }\n\n}\n", "file_path": "src/blockchain.rs", "rank": 11, "score": 12727.71134399022 }, { "content": "# rustchain\n\nA simple blockchain in rust\n\n\n\n## guide\n\nStartup rustchain by running `cargo run` in this directory. Once it starts up a webserver will open at localhost:3000. There are 3 requests that can be sent to the blockchain:\n\n\n\n### GET /chain\n\nThis returns a json array of the blockchain to date\n\n\n\n### GET /mine\n\nThis mines a new block that contains all pending transactions onto the chain.\n\n\n\n## POST /transactions/new\n\n\n\n{\n\n \"sender\" : \"address\",\n\n \"recipient\" : \"address\",\n\n \"amount\" : 25\n\n}\n\n\n\nSending a request with the body in the form above (in raw text json) will push a new transaction to be put on the chain. It will be mined into the chain the next time you call /mine\n\n\n\nThis program is licensed under the \"MIT License\". Please see the file `LICENSE` in this distribution for license terms.\n", "file_path": "README.md", "rank": 14, "score": 2.3328916747464703 } ]
Rust
src/lib.rs
mov-rax/smart_buffer
1ab3f45f1c2021d0cd5e87f25852a24dc569e40c
#![no_std] #![feature(min_const_generics)] #[macro_use] extern crate alloc; #[doc(hidden)] pub extern crate core as __core; use alloc::alloc::{alloc, dealloc, Layout}; use alloc::vec::Vec; use core::mem::size_of; use crate::iter::SmartBufferIterRef; use alloc::boxed::Box; use crate::__core::fmt::{Debug, Formatter}; pub mod iter; mod index; pub mod into; #[macro_use] #[cfg(test)] mod tests { use crate::SmartBuffer; use alloc::string::String; use crate::buf; use alloc::vec::Vec; #[test] fn it_works() { let mut buf = buf!(0u32, 5, 10); buf.insert_arr(&[4,9,3,2,1,9,3,2,10,19]); let mut buf_clone = buf.clone(); let test = SmartBuffer::from_arr([1u8,4,5,6,7], 5, true); } } #[feature(min_const_generics)] pub struct SmartBuffer<T, const N:usize> where T: Clone { s_buf: [T; N], d_buf: Option<*mut T>, layout: Option<Layout>, size: usize, capacity: usize, default: T, cursor: usize, } impl<T, const N:usize> SmartBuffer<T,N> where T: Clone { pub fn clear(&mut self){ let default = self.default.clone(); for elem in self{ *elem = default.clone(); } } pub fn push(&mut self, other: T){ if self.size < N{ self.s_buf[self.size] = other; self.size += 1; } else if self.size < self.capacity{ unsafe {*((self.d_buf.unwrap() as usize + (self.size - N) * size_of::<T>()) as *mut T) = other}; self.size += 1; } } pub fn set_size(&mut self, size:usize){ if self.size < self.capacity{ self.size = size; } } pub fn insert_slice(&mut self, slice: &[T]){ for elem in slice{ self.push(elem.clone()); } } pub fn insert_slice_at(&mut self, slice: &[T], mut index:usize){ for elem in slice{ self.insert(elem.clone(), index); index += 1; } } pub fn insert_arr<const M: usize>(&mut self, arr: &[T; M]){ for elem in arr{ self.push(elem.clone()); } } pub fn insert(&mut self, other: T, index: usize){ if index < N{ self.s_buf[index] = other; if index > self.size{ self.size = index; } } else if index < self.capacity{ unsafe {*((self.d_buf.unwrap() as usize + (index - N) * size_of::<T>()) as *mut T) = other}; if index > self.size{ self.size = index; } } } pub fn get(&self, index:usize) -> Option<&T> { if index < N { return Some(&(self.s_buf[index])) } else if index < self.capacity { return unsafe { Some(&*((self.d_buf.unwrap() as usize + (index - N) * size_of::<T>()) as *const T)) } } None } pub unsafe fn get_unchecked(&self, index:usize) -> &T{ if index < N{ return &self.s_buf[index]; } &*((self.d_buf.unwrap() as usize + (index - N) * size_of::<T>()) as *const T) } pub unsafe fn get_mut_unchecked(&mut self, index:usize) -> &mut T{ if index < N{ return &mut self.s_buf[index] } &mut *((self.d_buf.unwrap() as usize + (index - N) * size_of::<T>()) as *mut T) } pub fn as_mut_ptr(mut self) -> *mut Self{ &mut self as *mut Self } pub(crate) fn allocate(&mut self, elements:usize){ let layout = Layout::from_size_align(elements*size_of::<T>(), 1); if let Ok(layout) = layout{ let ptr = unsafe {alloc(layout) as *mut T}; self.capacity += layout.size()/size_of::<T>(); self.layout = Some(layout); self.d_buf = Some(ptr); } } pub fn new(value: T, len:usize) -> Self where T: Copy + Clone { let mut buf = Self{ s_buf: [value; N], d_buf: None, layout: None, size: 0, capacity: N, default: value, cursor: 0, }; if N < len{ buf.allocate(len - N); } buf } pub fn from_arr(buf:[T; N], len:usize, set_size:bool) -> Self where T: Clone { let def = buf[0].clone(); let mut buf = Self{ s_buf: buf, d_buf: None, layout: None, size: if set_size { N } else { 0 }, capacity: N, default: def, cursor: 0, }; if N < len{ buf.allocate(len - N); } buf } pub fn get_size(&self) -> usize{ 
self.size } pub fn map<F>(&mut self, mut f: F) where T: Clone + Copy, F: FnMut(T) -> T { for i in 0..self.size{ self[i] = f(self[i]) } } pub fn shl(&mut self, count:usize){ self.s_buf.rotate_left(count); if let Some(ptr) = self.d_buf{ for i in 0..count{ self.s_buf[N - count + i] = unsafe {(*ptr.offset(i as isize)).clone()}; } for i in 0..(self.capacity - N){ if i + count < self.capacity - N{ unsafe { *ptr.offset(i as isize) = (*ptr.offset((i + count) as isize)).clone()}; } } for i in 0..count{ unsafe { (*ptr.offset((self.capacity - N - count + i) as isize)) = self.default.clone()}; } } else { for i in 0..count{ self.s_buf[N-count+i] = self.default.clone(); } } } pub fn shr(&mut self, count:usize){ if let Some(ptr) = self.d_buf{ for i in 0..(self.capacity - N){ if (self.capacity as i32 - N as i32 - i as i32- count as i32 - 1) >= 0{ unsafe{ *ptr.offset((self.capacity - N - i - 1) as isize) = (*ptr.offset((self.capacity - N - i - count - 1) as isize)).clone()}; } } for i in 0..count{ unsafe { (*ptr.offset(i as isize)) = self.s_buf[(N as i32 - count as i32 + i as i32) as usize].clone()}; } } self.s_buf.rotate_right(count); for i in 0..count{ self.s_buf[i] = self.default.clone(); } } } impl<T, const N:usize> SmartBuffer<T,N> where T:Clone + PartialEq { pub fn calc_size(&mut self){ let default = self.default.clone(); let mut size = 0; for elem in &*self{ if *elem == default{ break; } size += 1; } self.set_size(size + 1); } } impl<T, const N:usize> Drop for SmartBuffer<T,N> where T: Clone { fn drop(&mut self) { if let Some(ptr) = self.d_buf{ unsafe {dealloc(ptr as *mut u8, self.layout.unwrap())}; } } } impl<T, const N:usize> Debug for SmartBuffer<T,N> where T: Clone + Debug { fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result { f.debug_list() .entries(self.into_iter()) .finish() } } impl<T, const N:usize> Clone for SmartBuffer<T,N> where T: Clone { fn clone(&self) -> Self { let mut temp_buf = Self::from_arr(self.s_buf.clone(), self.capacity, true); if let Some(ptr) = self.d_buf{ temp_buf.allocate(self.capacity - N); unsafe { core::intrinsics::copy(ptr, temp_buf.d_buf.unwrap(), self.capacity - N); } } temp_buf.default = self.default.clone(); temp_buf } } #[doc(hidden)] #[non_exhaustive] pub struct Token; impl Token { #[doc(hidden)] #[inline] pub const unsafe fn new() -> Self { Token } } #[macro_export] macro_rules! buf { ($data:expr, $s_len:expr, $t_len:expr) => { $crate::SmartBuffer::<_,$s_len>::from_arr($crate::array!(_ => $data; $s_len), $t_len, false) } } #[macro_export] macro_rules! 
array { [$expr:expr; $count:expr] => {{ let value = $expr; $crate::array![_ => $crate::__core::clone::Clone::clone(&value); $count] }}; [$i:pat => $e:expr; $count:expr] => {{ const __COUNT: $crate::__core::primitive::usize = $count; #[repr(transparent)] struct __ArrayVec<T>(__ArrayVecInner<T>); impl<T> $crate::__core::ops::Drop for __ArrayVec<T> { fn drop(&mut self) { for val in &mut self.0.arr[..self.0.len] { unsafe { val.as_mut_ptr().drop_in_place() } } } } struct __ArrayVecInner<T> { arr: [$crate::__core::mem::MaybeUninit<T>; __COUNT], len: $crate::__core::primitive::usize, token: $crate::Token, } #[repr(C)] union __Transmuter<T> { init_uninit_array: $crate::__core::mem::ManuallyDrop<$crate::__core::mem::MaybeUninit<[T; __COUNT]>>, uninit_array: $crate::__core::mem::ManuallyDrop<[$crate::__core::mem::MaybeUninit<T>; __COUNT]>, out: $crate::__core::mem::ManuallyDrop<[T; __COUNT]>, } #[repr(C)] union __ArrayVecTransmuter<T> { vec: $crate::__core::mem::ManuallyDrop<__ArrayVec<T>>, inner: $crate::__core::mem::ManuallyDrop<__ArrayVecInner<T>>, } let mut vec = __ArrayVec(__ArrayVecInner { arr: $crate::__core::mem::ManuallyDrop::into_inner(unsafe { __Transmuter { init_uninit_array: $crate::__core::mem::ManuallyDrop::new($crate::__core::mem::MaybeUninit::uninit()), } .uninit_array }), len: 0, token: unsafe { $crate::Token::new() }, }); while vec.0.len < __COUNT { let $i = vec.0.len; let _please_do_not_use_continue_without_label; let value; struct __PleaseDoNotUseBreakWithoutLabel; loop { _please_do_not_use_continue_without_label = (); value = $e; break __PleaseDoNotUseBreakWithoutLabel; }; vec.0.arr[vec.0.len] = $crate::__core::mem::MaybeUninit::new(value); vec.0.len += 1; } let inner = $crate::__core::mem::ManuallyDrop::into_inner(unsafe { __ArrayVecTransmuter { vec: $crate::__core::mem::ManuallyDrop::new(vec), } .inner }); $crate::__core::mem::ManuallyDrop::into_inner(unsafe { __Transmuter { uninit_array: $crate::__core::mem::ManuallyDrop::new(inner.arr), } .out }) }}; }
#![no_std] #![feature(min_const_generics)] #[macro_use] extern crate alloc; #[doc(hidden)] pub extern crate core as __core; use alloc::alloc::{alloc, dealloc, Layout}; use alloc::vec::Vec; use core::mem::size_of; use crate::iter::SmartBufferIterRef; use alloc::boxed::Box; use crate::__core::fmt::{Debug, Formatter}; pub mod iter; mod index; pub mod into; #[macro_use] #[cfg(test)] mod tests { use crate::SmartBuffer; use alloc::string::String; use crate::buf; use alloc::vec::Vec; #[test] fn it_works() { let mut buf = buf!(0u32, 5, 10); buf.insert_arr(&[4,9,3,2,1,9,3,2,10,19]); let mut buf_clone = buf.clone(); let test = SmartBuffer::from_arr([1u8,4,5,6,7], 5, true); } } #[feature(min_const_generics)] pub struct SmartBuffer<T, const N:usize> where T: Clone { s_buf: [T; N], d_buf: Option<*mut T>, layout: Option<Layout>, size: usize, capacity: usize, default: T, cursor: usize, } impl<T, const N:usize> SmartBuffer<T,N> where T: Clone { pub fn clear(&mut self){ let default = self.default.clone(); for elem in self{ *elem = default.clone(); } } pub fn push(&mut self, other: T){ if self.size < N{ self.s_buf[self.size] = other; self.size += 1; } else if self.size < self.capacity{ unsafe {*((self.d_buf.unwrap() as usize + (self.size - N) * size_of::<T>()) as *mut T) = other}; self.size += 1; } } pub fn set_size(&mut self, size:usize){ if self.size < self.capacity{ self.size = size; } } pub fn insert_slice(&mut self, slice: &[T]){ for elem in slice{ self.push(elem.clone()); } } pub fn insert_slice_at(&mut self, slice: &[T], mut index:usize){ for elem in slice{ self.insert(elem.clone(), index); index += 1; } } pub fn insert_arr<const M: usize>(&mut self, arr: &[T; M]){ for elem in arr{ self.push(elem.clone()); } } pub fn insert(&mut self, other: T, index: usize){ if index < N{ self.s_buf[index] = other; if index > self.size{ self.size = index; } } else if index < self.capacity{ unsafe {*((self.d_buf.unwrap() as usize + (index - N) * size_of::<T>()) as *mut T) = other}; if index > self.size{ self.size = index; } } } pub fn get(&self, index:usize) -> Option<&T> { if index < N { return Some(&(self.s_buf[index])) } else if index < self.capacity { return unsafe { Some(&*((self.d_buf.unwrap() as usize + (index - N) * size_of::<T>()) as *const T)) } } None } pub unsafe fn get_unchecked(&self, index:usize) -> &T{ if index < N{ return &self.s_buf[index]; } &*((self.d_buf.unwrap() as usize + (index - N) * size_of::<T>()) as *const T) } pub unsafe fn get_mut_unchecked(&mut self, index:usize) -> &mut T{ if index < N{ return &mut self.s_buf[index] } &mut *((self.d_buf.unwrap() as usize + (index - N) * size_of::<T>()) as *mut T) } pub fn as_mut_ptr(mut self) -> *mut Self{ &mut self as *mut Self } pub(crate) fn allocate(&mut self, elements:usize){ let layout = Layout::from_size_align(elements*size_of::<T>(), 1);
} pub fn new(value: T, len:usize) -> Self where T: Copy + Clone { let mut buf = Self{ s_buf: [value; N], d_buf: None, layout: None, size: 0, capacity: N, default: value, cursor: 0, }; if N < len{ buf.allocate(len - N); } buf } pub fn from_arr(buf:[T; N], len:usize, set_size:bool) -> Self where T: Clone { let def = buf[0].clone(); let mut buf = Self{ s_buf: buf, d_buf: None, layout: None, size: if set_size { N } else { 0 }, capacity: N, default: def, cursor: 0, }; if N < len{ buf.allocate(len - N); } buf } pub fn get_size(&self) -> usize{ self.size } pub fn map<F>(&mut self, mut f: F) where T: Clone + Copy, F: FnMut(T) -> T { for i in 0..self.size{ self[i] = f(self[i]) } } pub fn shl(&mut self, count:usize){ self.s_buf.rotate_left(count); if let Some(ptr) = self.d_buf{ for i in 0..count{ self.s_buf[N - count + i] = unsafe {(*ptr.offset(i as isize)).clone()}; } for i in 0..(self.capacity - N){ if i + count < self.capacity - N{ unsafe { *ptr.offset(i as isize) = (*ptr.offset((i + count) as isize)).clone()}; } } for i in 0..count{ unsafe { (*ptr.offset((self.capacity - N - count + i) as isize)) = self.default.clone()}; } } else { for i in 0..count{ self.s_buf[N-count+i] = self.default.clone(); } } } pub fn shr(&mut self, count:usize){ if let Some(ptr) = self.d_buf{ for i in 0..(self.capacity - N){ if (self.capacity as i32 - N as i32 - i as i32- count as i32 - 1) >= 0{ unsafe{ *ptr.offset((self.capacity - N - i - 1) as isize) = (*ptr.offset((self.capacity - N - i - count - 1) as isize)).clone()}; } } for i in 0..count{ unsafe { (*ptr.offset(i as isize)) = self.s_buf[(N as i32 - count as i32 + i as i32) as usize].clone()}; } } self.s_buf.rotate_right(count); for i in 0..count{ self.s_buf[i] = self.default.clone(); } } } impl<T, const N:usize> SmartBuffer<T,N> where T:Clone + PartialEq { pub fn calc_size(&mut self){ let default = self.default.clone(); let mut size = 0; for elem in &*self{ if *elem == default{ break; } size += 1; } self.set_size(size + 1); } } impl<T, const N:usize> Drop for SmartBuffer<T,N> where T: Clone { fn drop(&mut self) { if let Some(ptr) = self.d_buf{ unsafe {dealloc(ptr as *mut u8, self.layout.unwrap())}; } } } impl<T, const N:usize> Debug for SmartBuffer<T,N> where T: Clone + Debug { fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result { f.debug_list() .entries(self.into_iter()) .finish() } } impl<T, const N:usize> Clone for SmartBuffer<T,N> where T: Clone { fn clone(&self) -> Self { let mut temp_buf = Self::from_arr(self.s_buf.clone(), self.capacity, true); if let Some(ptr) = self.d_buf{ temp_buf.allocate(self.capacity - N); unsafe { core::intrinsics::copy(ptr, temp_buf.d_buf.unwrap(), self.capacity - N); } } temp_buf.default = self.default.clone(); temp_buf } } #[doc(hidden)] #[non_exhaustive] pub struct Token; impl Token { #[doc(hidden)] #[inline] pub const unsafe fn new() -> Self { Token } } #[macro_export] macro_rules! buf { ($data:expr, $s_len:expr, $t_len:expr) => { $crate::SmartBuffer::<_,$s_len>::from_arr($crate::array!(_ => $data; $s_len), $t_len, false) } } #[macro_export] macro_rules! 
array { [$expr:expr; $count:expr] => {{ let value = $expr; $crate::array![_ => $crate::__core::clone::Clone::clone(&value); $count] }}; [$i:pat => $e:expr; $count:expr] => {{ const __COUNT: $crate::__core::primitive::usize = $count; #[repr(transparent)] struct __ArrayVec<T>(__ArrayVecInner<T>); impl<T> $crate::__core::ops::Drop for __ArrayVec<T> { fn drop(&mut self) { for val in &mut self.0.arr[..self.0.len] { unsafe { val.as_mut_ptr().drop_in_place() } } } } struct __ArrayVecInner<T> { arr: [$crate::__core::mem::MaybeUninit<T>; __COUNT], len: $crate::__core::primitive::usize, token: $crate::Token, } #[repr(C)] union __Transmuter<T> { init_uninit_array: $crate::__core::mem::ManuallyDrop<$crate::__core::mem::MaybeUninit<[T; __COUNT]>>, uninit_array: $crate::__core::mem::ManuallyDrop<[$crate::__core::mem::MaybeUninit<T>; __COUNT]>, out: $crate::__core::mem::ManuallyDrop<[T; __COUNT]>, } #[repr(C)] union __ArrayVecTransmuter<T> { vec: $crate::__core::mem::ManuallyDrop<__ArrayVec<T>>, inner: $crate::__core::mem::ManuallyDrop<__ArrayVecInner<T>>, } let mut vec = __ArrayVec(__ArrayVecInner { arr: $crate::__core::mem::ManuallyDrop::into_inner(unsafe { __Transmuter { init_uninit_array: $crate::__core::mem::ManuallyDrop::new($crate::__core::mem::MaybeUninit::uninit()), } .uninit_array }), len: 0, token: unsafe { $crate::Token::new() }, }); while vec.0.len < __COUNT { let $i = vec.0.len; let _please_do_not_use_continue_without_label; let value; struct __PleaseDoNotUseBreakWithoutLabel; loop { _please_do_not_use_continue_without_label = (); value = $e; break __PleaseDoNotUseBreakWithoutLabel; }; vec.0.arr[vec.0.len] = $crate::__core::mem::MaybeUninit::new(value); vec.0.len += 1; } let inner = $crate::__core::mem::ManuallyDrop::into_inner(unsafe { __ArrayVecTransmuter { vec: $crate::__core::mem::ManuallyDrop::new(vec), } .inner }); $crate::__core::mem::ManuallyDrop::into_inner(unsafe { __Transmuter { uninit_array: $crate::__core::mem::ManuallyDrop::new(inner.arr), } .out }) }}; }
if let Ok(layout) = layout{ let ptr = unsafe {alloc(layout) as *mut T}; self.capacity += layout.size()/size_of::<T>(); self.layout = Some(layout); self.d_buf = Some(ptr); }
if_condition
[ { "content": "use crate::SmartBuffer;\n\nuse core::ops::Index;\n\nuse core::ops::IndexMut;\n\n\n\n\n\nimpl<T, const N:usize> Index<usize> for &SmartBuffer<T,N>\n\n where T: Clone\n\n{\n\n type Output = T;\n\n\n\n fn index(&self, index: usize) -> &Self::Output {\n\n unsafe {self.get_unchecked(index)}\n\n }\n\n}\n\n\n\nimpl<T, const N:usize> Index<usize> for SmartBuffer<T,N>\n\n where T: Clone\n\n{\n\n type Output = T;\n\n\n", "file_path": "src/index.rs", "rank": 0, "score": 17644.764404486257 }, { "content": " fn index(&self, index: usize) -> &Self::Output {\n\n unsafe {self.get_unchecked(index)}\n\n }\n\n}\n\n\n\nimpl<T, const N:usize> IndexMut<usize> for SmartBuffer<T,N>\n\nwhere T: Clone\n\n{\n\n fn index_mut(&mut self, index: usize) -> &mut Self::Output {\n\n unsafe {self.get_mut_unchecked(index)}\n\n }\n\n}", "file_path": "src/index.rs", "rank": 1, "score": 17641.802621752082 }, { "content": "use crate::SmartBuffer;\n\n\n\nuse core::mem::size_of;\n\nuse alloc::alloc::{dealloc};\n\nuse crate::__core::iter::Map;\n\n\n\nimpl<T, const N: usize> IntoIterator for SmartBuffer<T,N>\n\n where T: Clone\n\n{\n\n type Item = T;\n\n type IntoIter = SmartBufferIter<T,N>;\n\n /// Creates a consuming Iterator\n\n fn into_iter(mut self) -> Self::IntoIter {\n\n let stack_ptr = self.s_buf.as_mut_ptr();\n\n let heap_ptr = self.d_buf;\n\n let total_elem = self.size;\n\n\n\n Self::IntoIter {\n\n smart_buffer: self, // Self will be dropped when IntoIter is over\n\n stack_ptr,\n", "file_path": "src/iter.rs", "rank": 2, "score": 16736.388867280166 }, { "content": "}\n\n\n\nimpl<T, const N: usize> Iterator for SmartBufferIter<T,N>\n\n where T: Clone\n\n{\n\n type Item = T;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n if self.count < self.total_elem{\n\n if self.count < N{\n\n self.count += 1;\n\n return unsafe {Some((*((self.stack_ptr as usize + (self.count - 1) * size_of::<T>()) as *mut T)).clone())}\n\n }\n\n self.count += 1;\n\n return unsafe {Some((*((self.heap_ptr.unwrap() as usize + (self.count - N - 1) * size_of::<T>()) as *mut T)).clone())}\n\n }\n\n None\n\n }\n\n\n\n // fn map<F>(self, f: F) -> Map<Self, F>\n", "file_path": "src/iter.rs", "rank": 3, "score": 16730.581909670113 }, { "content": " type Item = &'a T;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n if self.count < self.total_elem{\n\n if self.count < N{\n\n self.count += 1;\n\n return unsafe {Some(&*((self.stack_ptr as usize + (self.count - 1) * size_of::<T>()) as *const T))}\n\n }\n\n self.count += 1;\n\n return unsafe {Some(&*((self.heap_ptr.unwrap() as usize + (self.count - N - 1) * size_of::<T>()) as *const T))}\n\n }\n\n None\n\n }\n\n}\n\n\n\n/// Iterator for SmartBuffer where SmartBuffer is mutably referenced to\n\npub struct SmartBufferIterRefMut<'a, T, const N:usize>\n\n where T: 'a + Clone\n\n{\n\n smart_buffer: &'a mut SmartBuffer<T,N>,\n", "file_path": "src/iter.rs", "rank": 4, "score": 16730.127827557666 }, { "content": " stack_ptr,\n\n heap_ptr,\n\n total_elem,\n\n count: 0\n\n }\n\n }\n\n\n\n\n\n}\n\n\n\nimpl<'a, T, const N: usize> IntoIterator for &'a mut SmartBuffer<T,N>\n\n where T: Clone\n\n{\n\n type Item = &'a mut T;\n\n type IntoIter = SmartBufferIterRefMut<'a,T,N>;\n\n /// Creates a consuming Iterator\n\n fn into_iter(self) -> Self::IntoIter {\n\n let stack_ptr = self.s_buf.as_mut_ptr();\n\n let heap_ptr = self.d_buf.clone();\n\n let total_elem = self.size;\n", "file_path": "src/iter.rs", "rank": 5, "score": 16729.44911359056 }, { "content": " stack_ptr: *mut T,\n\n heap_ptr: Option<*mut T>, // 
will not change values in heap_ptr\n\n total_elem: usize,\n\n count: usize,\n\n}\n\n\n\nimpl<'a, T, const N: usize> Iterator for SmartBufferIterRefMut<'a, T,N>\n\n where T: 'a + Clone\n\n{\n\n type Item = &'a mut T;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n if self.count < self.total_elem{\n\n if self.count < N{\n\n self.count += 1;\n\n return unsafe {Some(&mut *((self.stack_ptr as usize + (self.count - 1) * size_of::<T>()) as *mut T))}\n\n }\n\n self.count += 1;\n\n return unsafe {Some(&mut *((self.heap_ptr.unwrap() as usize + (self.count - N - 1) * size_of::<T>()) as *mut T))}\n\n }\n\n None\n\n }\n\n}\n", "file_path": "src/iter.rs", "rank": 6, "score": 16729.306756738824 }, { "content": " heap_ptr,\n\n total_elem,\n\n count: 0\n\n }\n\n }\n\n}\n\n\n\nimpl<'a, T, const N: usize> IntoIterator for &'a SmartBuffer<T,N>\n\n where T: Clone\n\n{\n\n type Item = &'a T;\n\n type IntoIter = SmartBufferIterRef<'a,T,N>;\n\n /// Creates a consuming Iterator\n\n fn into_iter(self) -> Self::IntoIter {\n\n let stack_ptr = self.s_buf.as_ptr();\n\n let heap_ptr = self.d_buf;\n\n let total_elem = self.size;\n\n\n\n Self::IntoIter {\n\n smart_buffer: self,\n", "file_path": "src/iter.rs", "rank": 7, "score": 16728.14931799229 }, { "content": " // where F: FnMut(Self::Item) -> T\n\n // {\n\n // unimplemented!()\n\n // }\n\n}\n\n\n\n/// Iterator for SmartBuffer where the SmartBuffer is immutably referenced to\n\npub struct SmartBufferIterRef<'a, T, const N:usize>\n\n where T: 'a + Clone\n\n{\n\n smart_buffer: &'a SmartBuffer<T,N>,\n\n stack_ptr: *const T,\n\n heap_ptr: Option<*mut T>, // will not change values in heap_ptr\n\n total_elem: usize,\n\n count: usize,\n\n}\n\n\n\nimpl<'a, T, const N: usize> Iterator for SmartBufferIterRef<'a, T,N>\n\n where T: 'a + Clone\n\n{\n", "file_path": "src/iter.rs", "rank": 8, "score": 16727.835272180047 }, { "content": "\n\n Self::IntoIter {\n\n smart_buffer: self,\n\n stack_ptr,\n\n heap_ptr,\n\n total_elem,\n\n count: 0\n\n }\n\n }\n\n}\n\n\n\n/// Iterator for SmartBuffer where the SmartBuffer is Consumed\n\npub struct SmartBufferIter<T, const N:usize>\n\n where T: Clone\n\n{\n\n smart_buffer: SmartBuffer<T,N>,\n\n stack_ptr: *mut T,\n\n heap_ptr: Option<*mut T>,\n\n total_elem: usize,\n\n count: usize,\n", "file_path": "src/iter.rs", "rank": 9, "score": 16727.219660350744 }, { "content": "use crate::SmartBuffer;\n\nuse alloc::vec::Vec;\n\nuse crate::iter::SmartBufferIter;\n\n\n\nimpl<T, const N: usize> Into<Vec<T>> for SmartBuffer<T,N>\n\n where T: Clone\n\n{\n\n fn into(self) -> Vec<T> {\n\n let mut temp = Vec::new();\n\n for elem in self{\n\n temp.push(elem);\n\n }\n\n temp\n\n }\n\n}\n", "file_path": "src/into.rs", "rank": 23, "score": 14.225135894823184 }, { "content": "# smart_buffer\n\nA stack/heap buffer with const generics. No std needed.\n\n\n\n## THIS CRATE REQUIRES USING NIGHTLY RUST.\n\n\n\n## What is this?\n\n\n\nsmart_buffer provides a datatype that allows the creation of a memory structure that is split between the stack and the heap.\n\n\n\nThe size of the SmartBuffer's memory in the stack is defined at compile time with the usage of const generics. 
The total allowed size of the entire SmartBuffer can be determined\n\nat runtime, where any additional required on runtime will be allocated in the heap.\n\n\n\nAn example of such is shown below:\n\n\n\n```rust\n\nlet mut buf = SmartBuffer::<u8, 3>::new(0, 5); // 3 elements on the stack, 2 on the heap\n\nbuf.push(3); // stack\n\nbuf.push(21); // stack\n\nbuf.push(100); // stack\n\nbuf.push(65); // heap\n\nbuf.push(21); // heap\n\nbuf.push(0); // not pushed, not enough space\n\n\n\nbuf[0] = 128; // modified on the stack\n\nbuf[4] = 40; // modified on the heap\n\n```\n\n\n\nTo offer flexibility while using this crate, it is also possible to iterate through all values as if it was contiguous data structure.\n\n\n\n```rust\n\nlet mut buf = SmartBuffer::<f64,128>::new(0.0, 256); \n\n// code goes here\n\nfor elem in &buf{\n\n println!(\"Whoa: {}\", elem);\n\n}\n\n```\n\n\n\nHowever, using the `new()` function only supports types with traits `Copy` and `Clone`, which limits the types that can\n\nbe used.\n\n\n\nLuckily, there is an included macro named `buf!` which can simply the creation of any SmartBuffer with types that\n\ninclude the `Clone` trait!\n\n\n\nAn example of using the macro is shown below\n\n\n\n```rust\n\n#[macro_use]\n\nfn some_function(){\n\n let mut buffer = buf!(String::new(), 2, 10); // Creates a SmartBuffer\n\n buffer.push(String::from(\"Wow, look at this\")); // stack\n\n buffer.push(String::from(\"This is pretty nice, huh?\")); // stack\n\n buffer.push(String::from(\"This is one nice heap!\")); // heap\n\n buffer[1] = String::from(\"Yes it is!\"); // heap\n\n}\n\n```\n\n\n\nIn the example above, the macro REQUIRES that the length of the stack (2 in the example) is known on compile time. The total\n\nlength of the SmartBuffer can be known at runtime! The length of the stack portion of the SmartBuffer can be a const generic.\n\n\n", "file_path": "README.md", "rank": 26, "score": 11.051860768152855 } ]
Rust
src/sys/component_manager/src/model/events/source.rs
casey/fuchsia
2b965e9a1e8f2ea346db540f3611a5be16bb4d6b
use { crate::{ capability::{CapabilityProvider, CapabilitySource, FrameworkCapability}, model::{ error::ModelError, events::{ event::SyncMode, registry::{EventRegistry, RoutedEvent}, serve::serve_event_source_sync, stream::EventStream, }, hooks::EventType, model::Model, moniker::AbsoluteMoniker, realm::Realm, routing, }, }, async_trait::async_trait, cm_rust::{CapabilityName, UseDecl, UseEventDecl}, fidl::endpoints::ServerEnd, fidl_fuchsia_sys2 as fsys, fuchsia_async as fasync, fuchsia_zircon as zx, futures::lock::Mutex, maplit::hashset, std::{ collections::{HashMap, HashSet}, path::PathBuf, sync::{Arc, Weak}, }, thiserror::Error, }; #[derive(Clone)] pub struct EventSource { model: Weak<Model>, target_moniker: AbsoluteMoniker, registry: Weak<EventRegistry>, resolve_instance_event_stream: Arc<Mutex<Option<EventStream>>>, debug: bool, sync_mode: SyncMode, } #[derive(Debug, Error)] pub enum EventsError { #[error("Registry not found")] RegistryNotFound, #[error("Events not allowed for subscription {:?}", names)] NotAvailable { names: Vec<CapabilityName> }, #[error("Routing failed")] RoutingFailed(#[source] ModelError), } struct RouteEventsResult { mapping: HashMap<CapabilityName, HashSet<AbsoluteMoniker>>, } impl RouteEventsResult { fn new() -> Self { Self { mapping: HashMap::new() } } fn insert(&mut self, source_name: CapabilityName, scope_moniker: AbsoluteMoniker) { self.mapping.entry(source_name).or_insert(HashSet::new()).insert(scope_moniker); } fn len(&self) -> usize { self.mapping.len() } fn contains_event(&self, event_name: &CapabilityName) -> bool { self.mapping.contains_key(event_name) } fn to_vec(self) -> Vec<RoutedEvent> { self.mapping .into_iter() .map(|(source_name, scope_monikers)| RoutedEvent { source_name, scope_monikers }) .collect() } } impl EventSource { pub async fn new( model: Weak<Model>, target_moniker: AbsoluteMoniker, registry: &Arc<EventRegistry>, sync_mode: SyncMode, ) -> Result<Self, ModelError> { let resolve_instance_event_stream = Arc::new(Mutex::new(if sync_mode == SyncMode::Async { None } else { Some( registry .subscribe( &sync_mode, vec![RoutedEvent { source_name: EventType::Resolved.into(), scope_monikers: hashset!(target_moniker.clone()), }], ) .await, ) })); Ok(Self { registry: Arc::downgrade(&registry), model, target_moniker, resolve_instance_event_stream, debug: false, sync_mode, }) } pub async fn new_for_debug( model: Weak<Model>, target_moniker: AbsoluteMoniker, registry: &Arc<EventRegistry>, ) -> Result<Self, ModelError> { let mut event_source = Self::new(model, target_moniker, registry, SyncMode::Sync).await?; event_source.debug = true; Ok(event_source) } pub async fn start_component_tree(&mut self) { let mut resolve_instance_event_stream = self.resolve_instance_event_stream.lock().await; *resolve_instance_event_stream = None; } pub async fn subscribe( &mut self, events: Vec<CapabilityName>, ) -> Result<EventStream, EventsError> { let events = if self.debug { events .into_iter() .map(|event| RoutedEvent { source_name: event.clone(), scope_monikers: hashset!(AbsoluteMoniker::root()), }) .collect() } else { let route_result = self.route_events(&events).await.map_err(|e| EventsError::RoutingFailed(e))?; if route_result.len() != events.len() { let names = events .into_iter() .filter(|event| !route_result.contains_event(&event)) .collect(); return Err(EventsError::NotAvailable { names }); } route_result.to_vec() }; if let Some(registry) = self.registry.upgrade() { return Ok(registry.subscribe(&self.sync_mode, events).await); } 
Err(EventsError::RegistryNotFound) } pub fn serve(self, stream: fsys::BlockingEventSourceRequestStream) { fasync::spawn(async move { serve_event_source_sync(self, stream).await; }); } async fn route_events( &self, events: &Vec<CapabilityName>, ) -> Result<RouteEventsResult, ModelError> { let model = self.model.upgrade().ok_or(ModelError::ModelNotAvailable)?; let realm = model.look_up_realm(&self.target_moniker).await?; let decl = { let state = realm.lock_state().await; state.as_ref().expect("route_events: not registered").decl().clone() }; let mut result = RouteEventsResult::new(); for use_decl in decl.uses { match &use_decl { UseDecl::Event(event_decl) => { if !events.contains(&event_decl.target_name) { continue; } let (source_name, scope_moniker) = self.route_event(event_decl, &realm).await?; result.insert(source_name, scope_moniker); } _ => {} } } Ok(result) } async fn route_event( &self, event_decl: &UseEventDecl, realm: &Arc<Realm>, ) -> Result<(CapabilityName, AbsoluteMoniker), ModelError> { routing::route_use_event_capability(&UseDecl::Event(event_decl.clone()), &realm).await.map( |source| match source { CapabilitySource::Framework { capability: FrameworkCapability::Event(source_name), scope_moniker: Some(scope_moniker), } => (source_name, scope_moniker), _ => unreachable!(), }, ) } } #[async_trait] impl CapabilityProvider for EventSource { async fn open( self: Box<Self>, _flags: u32, _open_mode: u32, _relative_path: PathBuf, server_end: zx::Channel, ) -> Result<(), ModelError> { let stream = ServerEnd::<fsys::BlockingEventSourceMarker>::new(server_end) .into_stream() .expect("could not convert channel into stream"); self.serve(stream); Ok(()) } }
use { crate::{ capability::{CapabilityProvider, CapabilitySource, FrameworkCapability}, model::{ error::ModelError, events::{ event::SyncMode, registry::{EventRegistry, RoutedEvent}, serve::serve_event_source_sync, stream::EventStream, }, hooks::EventType, model::Model, moniker::AbsoluteMoniker, realm::Realm, routing, }, }, async_trait::async_trait, cm_rust::{CapabilityName, UseDecl, UseEventDecl}, fidl::endpoints::ServerEnd, fidl_fuchsia_sys2 as fsys, fuchsia_async as fasync, fuchsia_zircon as zx, futures::lock::Mutex, maplit::hashset, std::{ collections::{HashMap, HashSet}, path::PathBuf, sync::{Arc, Weak}, }, thiserror::Error, }; #[derive(Clone)] pub struct EventSource { model: Weak<Model>, target_moniker: AbsoluteMoniker, registry: Weak<EventRegistry>, resolve_instance_event_stream: Arc<Mutex<Option<EventStream>>>, debug: bool, sync_mode: SyncMode, } #[derive(Debug, Error)] pub enum EventsError { #[error("Registry not found")] RegistryNotFound, #[error("Events not allowed for subscri
.into_iter() .filter(|event| !route_result.contains_event(&event)) .collect(); return Err(EventsError::NotAvailable { names }); } route_result.to_vec() }; if let Some(registry) = self.registry.upgrade() { return Ok(registry.subscribe(&self.sync_mode, events).await); } Err(EventsError::RegistryNotFound) } pub fn serve(self, stream: fsys::BlockingEventSourceRequestStream) { fasync::spawn(async move { serve_event_source_sync(self, stream).await; }); } async fn route_events( &self, events: &Vec<CapabilityName>, ) -> Result<RouteEventsResult, ModelError> { let model = self.model.upgrade().ok_or(ModelError::ModelNotAvailable)?; let realm = model.look_up_realm(&self.target_moniker).await?; let decl = { let state = realm.lock_state().await; state.as_ref().expect("route_events: not registered").decl().clone() }; let mut result = RouteEventsResult::new(); for use_decl in decl.uses { match &use_decl { UseDecl::Event(event_decl) => { if !events.contains(&event_decl.target_name) { continue; } let (source_name, scope_moniker) = self.route_event(event_decl, &realm).await?; result.insert(source_name, scope_moniker); } _ => {} } } Ok(result) } async fn route_event( &self, event_decl: &UseEventDecl, realm: &Arc<Realm>, ) -> Result<(CapabilityName, AbsoluteMoniker), ModelError> { routing::route_use_event_capability(&UseDecl::Event(event_decl.clone()), &realm).await.map( |source| match source { CapabilitySource::Framework { capability: FrameworkCapability::Event(source_name), scope_moniker: Some(scope_moniker), } => (source_name, scope_moniker), _ => unreachable!(), }, ) } } #[async_trait] impl CapabilityProvider for EventSource { async fn open( self: Box<Self>, _flags: u32, _open_mode: u32, _relative_path: PathBuf, server_end: zx::Channel, ) -> Result<(), ModelError> { let stream = ServerEnd::<fsys::BlockingEventSourceMarker>::new(server_end) .into_stream() .expect("could not convert channel into stream"); self.serve(stream); Ok(()) } }
ption {:?}", names)] NotAvailable { names: Vec<CapabilityName> }, #[error("Routing failed")] RoutingFailed(#[source] ModelError), } struct RouteEventsResult { mapping: HashMap<CapabilityName, HashSet<AbsoluteMoniker>>, } impl RouteEventsResult { fn new() -> Self { Self { mapping: HashMap::new() } } fn insert(&mut self, source_name: CapabilityName, scope_moniker: AbsoluteMoniker) { self.mapping.entry(source_name).or_insert(HashSet::new()).insert(scope_moniker); } fn len(&self) -> usize { self.mapping.len() } fn contains_event(&self, event_name: &CapabilityName) -> bool { self.mapping.contains_key(event_name) } fn to_vec(self) -> Vec<RoutedEvent> { self.mapping .into_iter() .map(|(source_name, scope_monikers)| RoutedEvent { source_name, scope_monikers }) .collect() } } impl EventSource { pub async fn new( model: Weak<Model>, target_moniker: AbsoluteMoniker, registry: &Arc<EventRegistry>, sync_mode: SyncMode, ) -> Result<Self, ModelError> { let resolve_instance_event_stream = Arc::new(Mutex::new(if sync_mode == SyncMode::Async { None } else { Some( registry .subscribe( &sync_mode, vec![RoutedEvent { source_name: EventType::Resolved.into(), scope_monikers: hashset!(target_moniker.clone()), }], ) .await, ) })); Ok(Self { registry: Arc::downgrade(&registry), model, target_moniker, resolve_instance_event_stream, debug: false, sync_mode, }) } pub async fn new_for_debug( model: Weak<Model>, target_moniker: AbsoluteMoniker, registry: &Arc<EventRegistry>, ) -> Result<Self, ModelError> { let mut event_source = Self::new(model, target_moniker, registry, SyncMode::Sync).await?; event_source.debug = true; Ok(event_source) } pub async fn start_component_tree(&mut self) { let mut resolve_instance_event_stream = self.resolve_instance_event_stream.lock().await; *resolve_instance_event_stream = None; } pub async fn subscribe( &mut self, events: Vec<CapabilityName>, ) -> Result<EventStream, EventsError> { let events = if self.debug { events .into_iter() .map(|event| RoutedEvent { source_name: event.clone(), scope_monikers: hashset!(AbsoluteMoniker::root()), }) .collect() } else { let route_result = self.route_events(&events).await.map_err(|e| EventsError::RoutingFailed(e))?; if route_result.len() != events.len() { let names = events
random
[]
Rust
src/message.rs
media-cloud-ai/rs_http_worker
0848d1dec046ea13be1b3e75e90be73d698cd855
use amqp_worker::*; use amqp_worker::job::*; use reqwest; use reqwest::StatusCode; use std::fs::File; use std::io::prelude::*; pub fn process(message: &str) -> Result<JobResult, MessageError> { let job = Job::new(message)?; debug!("reveived message: {:?}", job); match job.check_requirements() { Ok(_) => {} Err(message) => { return Err(message); } } let source_path = job.get_string_parameter("source_path"); let destination_path = job.get_string_parameter("destination_path"); if source_path.is_none() { let result = JobResult::new(job.job_id, JobStatus::Error, vec![]) .with_message("missing source path parameter".to_string()); return Err(MessageError::ProcessingError(result)); } if destination_path.is_none() { let result = JobResult::new(job.job_id, JobStatus::Error, vec![]) .with_message("missing destination path parameter".to_string()); return Err(MessageError::ProcessingError(result)); } let url = source_path.unwrap(); let filename = destination_path.unwrap(); let client = reqwest::Client::builder() .build() .map_err(|e| { let result = JobResult::new(job.job_id, JobStatus::Error, vec![]) .with_message(e.to_string()); MessageError::ProcessingError(result) })?; let mut response = client .get(url.as_str()) .send() .map_err(|e| { let result = JobResult::new(job.job_id, JobStatus::Error, vec![]) .with_message(e.to_string()); MessageError::ProcessingError(result) })?; let status = response.status(); if status != StatusCode::OK { println!("ERROR {:?}", response); let result = JobResult::new(job.job_id, JobStatus::Error, vec![]) .with_message("bad response status".to_string()); return Err(MessageError::ProcessingError(result)); } let mut body: Vec<u8> = vec![]; response .copy_to(&mut body) .map_err(|e| { let result = JobResult::new(job.job_id, JobStatus::Error, vec![]) .with_message(e.to_string()); MessageError::ProcessingError(result) })?; let mut file = File::create(filename.as_str()) .map_err(|e| { let result = JobResult::new(job.job_id, JobStatus::Error, vec![]) .with_message(e.to_string()); MessageError::ProcessingError(result) })?; file.write_all(&body) .map_err(|e| { let result = JobResult::new(job.job_id, JobStatus::Error, vec![]) .with_message(e.to_string()); MessageError::ProcessingError(result) })?; Ok(JobResult::new(job.job_id, JobStatus::Completed, vec![])) } #[test] fn ack_message_test() { let msg = r#"{ "parameters": [ { "id": "requirements", "type": "requirements", "value": {"paths": []} }, { "id": "source_path", "type": "string", "value": "https://staticftv-a.akamaihd.net/sous-titres/france4/20180214/172524974-5a843dcd126f8-1518616910.ttml" }, { "id": "source_paths", "type": "array_of_strings", "value": ["https://staticftv-a.akamaihd.net/sous-titres/france4/20180214/172524974-5a843dcd126f8-1518616910.ttml"] }, { "id": "destination_path", "type": "string", "value": "/tmp/172524974-5a843dcd126f8-1518616910.ttml" } ], "job_id":690 }"#; let result = process(msg); assert!(result.is_ok()); } #[test] fn nack_message_test() { let msg = r#"{ "parameters": [ { "id": "requirements", "type": "requirements", "value": {"paths": [ "/tmp/FiLe_ThAt_$h0uld_N0t_3xist$" ]} }, { "id": "source_path", "type": "string", "value": "https://staticftv-a.akamaihd.net/sous-titres/france4/20180214/172524974-5a843dcd126f8-1518616910.ttml" }, { "id": "source_paths", "type": "array_of_strings", "value": ["https://staticftv-a.akamaihd.net/sous-titres/france4/20180214/172524974-5a843dcd126f8-1518616910.ttml"] }, { "id": "destination_path", "type": "string", "value": "/tmp/172524974-5a843dcd126f8-1518616910.ttml" } 
], "job_id":690 }"#; let result = process(msg); assert_eq!( result, Err(MessageError::RequirementsError( "Warning: Required file does not exists: \"/tmp/FiLe_ThAt_$h0uld_N0t_3xist$\"" .to_string() )) ); }
use amqp_worker::*; use amqp_worker::job::*; use reqwest; use reqwest::StatusCode; use std::fs::File; use std::io::prelude::*; pub fn process(message: &str) -> Result<JobResult, MessageError> { let job = Job::new(message)?; debug!("reveived message: {:?}", job);
let source_path = job.get_string_parameter("source_path"); let destination_path = job.get_string_parameter("destination_path"); if source_path.is_none() { let result = JobResult::new(job.job_id, JobStatus::Error, vec![]) .with_message("missing source path parameter".to_string()); return Err(MessageError::ProcessingError(result)); } if destination_path.is_none() { let result = JobResult::new(job.job_id, JobStatus::Error, vec![]) .with_message("missing destination path parameter".to_string()); return Err(MessageError::ProcessingError(result)); } let url = source_path.unwrap(); let filename = destination_path.unwrap(); let client = reqwest::Client::builder() .build() .map_err(|e| { let result = JobResult::new(job.job_id, JobStatus::Error, vec![]) .with_message(e.to_string()); MessageError::ProcessingError(result) })?; let mut response = client .get(url.as_str()) .send() .map_err(|e| { let result = JobResult::new(job.job_id, JobStatus::Error, vec![]) .with_message(e.to_string()); MessageError::ProcessingError(result) })?; let status = response.status(); if status != StatusCode::OK { println!("ERROR {:?}", response); let result = JobResult::new(job.job_id, JobStatus::Error, vec![]) .with_message("bad response status".to_string()); return Err(MessageError::ProcessingError(result)); } let mut body: Vec<u8> = vec![]; response .copy_to(&mut body) .map_err(|e| { let result = JobResult::new(job.job_id, JobStatus::Error, vec![]) .with_message(e.to_string()); MessageError::ProcessingError(result) })?; let mut file = File::create(filename.as_str()) .map_err(|e| { let result = JobResult::new(job.job_id, JobStatus::Error, vec![]) .with_message(e.to_string()); MessageError::ProcessingError(result) })?; file.write_all(&body) .map_err(|e| { let result = JobResult::new(job.job_id, JobStatus::Error, vec![]) .with_message(e.to_string()); MessageError::ProcessingError(result) })?; Ok(JobResult::new(job.job_id, JobStatus::Completed, vec![])) } #[test] fn ack_message_test() { let msg = r#"{ "parameters": [ { "id": "requirements", "type": "requirements", "value": {"paths": []} }, { "id": "source_path", "type": "string", "value": "https://staticftv-a.akamaihd.net/sous-titres/france4/20180214/172524974-5a843dcd126f8-1518616910.ttml" }, { "id": "source_paths", "type": "array_of_strings", "value": ["https://staticftv-a.akamaihd.net/sous-titres/france4/20180214/172524974-5a843dcd126f8-1518616910.ttml"] }, { "id": "destination_path", "type": "string", "value": "/tmp/172524974-5a843dcd126f8-1518616910.ttml" } ], "job_id":690 }"#; let result = process(msg); assert!(result.is_ok()); } #[test] fn nack_message_test() { let msg = r#"{ "parameters": [ { "id": "requirements", "type": "requirements", "value": {"paths": [ "/tmp/FiLe_ThAt_$h0uld_N0t_3xist$" ]} }, { "id": "source_path", "type": "string", "value": "https://staticftv-a.akamaihd.net/sous-titres/france4/20180214/172524974-5a843dcd126f8-1518616910.ttml" }, { "id": "source_paths", "type": "array_of_strings", "value": ["https://staticftv-a.akamaihd.net/sous-titres/france4/20180214/172524974-5a843dcd126f8-1518616910.ttml"] }, { "id": "destination_path", "type": "string", "value": "/tmp/172524974-5a843dcd126f8-1518616910.ttml" } ], "job_id":690 }"#; let result = process(msg); assert_eq!( result, Err(MessageError::RequirementsError( "Warning: Required file does not exists: \"/tmp/FiLe_ThAt_$h0uld_N0t_3xist$\"" .to_string() )) ); }
match job.check_requirements() { Ok(_) => {} Err(message) => { return Err(message); } }
if_condition
[ { "content": "fn main() {\n\n println!(\"HTTP Worker (version {}) started \", env::var(\"VERSION\").expect(\"missing softwareversion\"));\n\n if env::var(\"VERBOSE\").is_ok() {\n\n simple_logger::init_with_level(Level::Debug).unwrap();\n\n } else {\n\n simple_logger::init_with_level(Level::Warn).unwrap();\n\n }\n\n\n\n start_worker(&HTTP_EVENT);\n\n}\n", "file_path": "src/main.rs", "rank": 3, "score": 18059.792073609213 }, { "content": "extern crate amqp_worker;\n\n#[macro_use]\n\nextern crate log;\n\nextern crate reqwest;\n\nextern crate simple_logger;\n\n\n\nuse amqp_worker::*;\n\nuse log::Level;\n\nuse std::env;\n\n\n\nmod message;\n\n\n\n#[derive(Debug)]\n", "file_path": "src/main.rs", "rank": 10, "score": 2.9188095244220595 }, { "content": "# rs_http_worker\n\nHttp Downloader worker\n\n\n\nThis is a deprecated worker, use [Transfer Worker](https://github.com/media-cloud-ai/rs_transfer_worker/) instead.\n", "file_path": "README.md", "rank": 11, "score": 0.8821367701750309 } ]
Rust
code/src/example_catalog.rs
cloudfuse-io/buzz-rust
7199662d40de5da138b53a3c8e62aeaf69545848
use std::sync::Arc; use crate::datasource::{CatalogFile, CatalogTable, StaticCatalogTable}; use arrow::datatypes::{DataType, Field, Schema, TimeUnit}; pub fn nyc_taxi_cloudfuse_sample() -> CatalogTable { CatalogTable::new(Box::new(StaticCatalogTable::new( nyc_taxi_v1_schema(TimeUnit::Microsecond), "us-east-2".to_owned(), "cloudfuse-taxi-data".to_owned(), vec!["month".to_owned()], vec![CatalogFile::new( "raw_small/2009/01/data.parquet", 27301328, vec!["2009/01".to_owned()], )], ))) } pub fn nyc_taxi_cloudfuse() -> CatalogTable { CatalogTable::new(Box::new(StaticCatalogTable::new( nyc_taxi_v1_schema(TimeUnit::Microsecond), "us-east-2".to_owned(), "cloudfuse-taxi-data".to_owned(), vec!["month".to_owned()], vec![ CatalogFile::new( "raw_5M/2009/01/data.parquet", 388070114, vec!["2009/01".to_owned()], ), CatalogFile::new( "raw_5M/2009/02/data.parquet", 368127982, vec!["2009/02".to_owned()], ), CatalogFile::new( "raw_5M/2009/03/data.parquet", 398600815, vec!["2009/03".to_owned()], ), CatalogFile::new( "raw_5M/2009/04/data.parquet", 396353841, vec!["2009/04".to_owned()], ), CatalogFile::new( "raw_5M/2009/05/data.parquet", 410283205, vec!["2009/05".to_owned()], ), ], ))) } pub fn nyc_taxi_ursa() -> CatalogTable { CatalogTable::new(Box::new(StaticCatalogTable::new( nyc_taxi_v1_schema(TimeUnit::Nanosecond), "us-east-2".to_owned(), "ursa-labs-taxi-data".to_owned(), vec!["month".to_owned()], vec![ CatalogFile::new( "2009/01/data.parquet", 461966527, vec!["2009/01".to_owned()], ), CatalogFile::new( "2009/02/data.parquet", 436405669, vec!["2009/02".to_owned()], ), CatalogFile::new( "2009/03/data.parquet", 474795751, vec!["2009/03".to_owned()], ), CatalogFile::new( "2009/04/data.parquet", 470914229, vec!["2009/04".to_owned()], ), CatalogFile::new( "2009/05/data.parquet", 489248585, vec!["2009/05".to_owned()], ), CatalogFile::new( "2009/06/data.parquet", 465578495, vec!["2009/06".to_owned()], ), CatalogFile::new( "2009/07/data.parquet", 448227037, vec!["2009/07".to_owned()], ), CatalogFile::new( "2009/08/data.parquet", 450774566, vec!["2009/08".to_owned()], ), CatalogFile::new( "2009/09/data.parquet", 460835784, vec!["2009/09".to_owned()], ), CatalogFile::new( "2009/10/data.parquet", 517609313, vec!["2009/10".to_owned()], ), CatalogFile::new( "2009/11/data.parquet", 471148697, vec!["2009/11".to_owned()], ), CatalogFile::new( "2009/12/data.parquet", 479899902, vec!["2009/12".to_owned()], ), ], ))) } fn nyc_taxi_v1_schema(time_unit: TimeUnit) -> Arc<Schema> { Arc::new(Schema::new(vec![ Field::new("vendor_id", DataType::Utf8, true), Field::new( "pickup_at", DataType::Timestamp(time_unit.clone(), Option::None), true, ), Field::new( "dropoff_at", DataType::Timestamp(time_unit.clone(), Option::None), true, ), Field::new("passenger_count", DataType::Int8, true), Field::new("trip_distance", DataType::Float32, true), Field::new("pickup_longitude", DataType::Float32, true), Field::new("pickup_latitude", DataType::Float32, true), Field::new("rate_code_id", DataType::Null, true), Field::new("store_and_fwd_flag", DataType::Utf8, true), Field::new("dropoff_longitude", DataType::Float32, true), Field::new("dropoff_latitude", DataType::Float32, true), Field::new("payment_type", DataType::Utf8, true), Field::new("fare_amount", DataType::Float32, true), Field::new("extra", DataType::Float32, true), Field::new("mta_tax", DataType::Float32, true), Field::new("tip_amount", DataType::Float32, true), Field::new("tolls_amount", DataType::Float32, true), Field::new("total_amount", DataType::Float32, true), ])) }
use std::sync::Arc; use crate::datasource::{CatalogFile, CatalogTable, StaticCatalogTable}; use arrow::datatypes::{DataType, Field, Schema, TimeUnit}; pub fn nyc_taxi_cloudfuse_sample() -> CatalogTable { CatalogTable::new(Box::new(StaticCatalogTable::new( nyc_taxi_v1_schema(TimeUnit::Microsecond), "us-east-2".to_owned(), "cloudfuse-taxi-data".to_owned(), vec!["month".to_owned()], vec![CatalogFile::new( "raw_small/2009/01/data.parquet", 27301328, vec!["2009/01".to_owned()], )], ))) } pub fn nyc_taxi_cloudfuse() -> CatalogTable { CatalogTable::new(Box::new(StaticCatalogTable::new( nyc_taxi_v1_schema(TimeUnit::Microsecond), "us-east-2".to_owned(), "cloudfuse-taxi-data".to_owned(), vec!["month".to_owned()], vec![ CatalogFile::new( "raw_5M/2009/01/data.parquet",
474795751, vec!["2009/03".to_owned()], ), CatalogFile::new( "2009/04/data.parquet", 470914229, vec!["2009/04".to_owned()], ), CatalogFile::new( "2009/05/data.parquet", 489248585, vec!["2009/05".to_owned()], ), CatalogFile::new( "2009/06/data.parquet", 465578495, vec!["2009/06".to_owned()], ), CatalogFile::new( "2009/07/data.parquet", 448227037, vec!["2009/07".to_owned()], ), CatalogFile::new( "2009/08/data.parquet", 450774566, vec!["2009/08".to_owned()], ), CatalogFile::new( "2009/09/data.parquet", 460835784, vec!["2009/09".to_owned()], ), CatalogFile::new( "2009/10/data.parquet", 517609313, vec!["2009/10".to_owned()], ), CatalogFile::new( "2009/11/data.parquet", 471148697, vec!["2009/11".to_owned()], ), CatalogFile::new( "2009/12/data.parquet", 479899902, vec!["2009/12".to_owned()], ), ], ))) } fn nyc_taxi_v1_schema(time_unit: TimeUnit) -> Arc<Schema> { Arc::new(Schema::new(vec![ Field::new("vendor_id", DataType::Utf8, true), Field::new( "pickup_at", DataType::Timestamp(time_unit.clone(), Option::None), true, ), Field::new( "dropoff_at", DataType::Timestamp(time_unit.clone(), Option::None), true, ), Field::new("passenger_count", DataType::Int8, true), Field::new("trip_distance", DataType::Float32, true), Field::new("pickup_longitude", DataType::Float32, true), Field::new("pickup_latitude", DataType::Float32, true), Field::new("rate_code_id", DataType::Null, true), Field::new("store_and_fwd_flag", DataType::Utf8, true), Field::new("dropoff_longitude", DataType::Float32, true), Field::new("dropoff_latitude", DataType::Float32, true), Field::new("payment_type", DataType::Utf8, true), Field::new("fare_amount", DataType::Float32, true), Field::new("extra", DataType::Float32, true), Field::new("mta_tax", DataType::Float32, true), Field::new("tip_amount", DataType::Float32, true), Field::new("tolls_amount", DataType::Float32, true), Field::new("total_amount", DataType::Float32, true), ])) }
388070114, vec!["2009/01".to_owned()], ), CatalogFile::new( "raw_5M/2009/02/data.parquet", 368127982, vec!["2009/02".to_owned()], ), CatalogFile::new( "raw_5M/2009/03/data.parquet", 398600815, vec!["2009/03".to_owned()], ), CatalogFile::new( "raw_5M/2009/04/data.parquet", 396353841, vec!["2009/04".to_owned()], ), CatalogFile::new( "raw_5M/2009/05/data.parquet", 410283205, vec!["2009/05".to_owned()], ), ], ))) } pub fn nyc_taxi_ursa() -> CatalogTable { CatalogTable::new(Box::new(StaticCatalogTable::new( nyc_taxi_v1_schema(TimeUnit::Nanosecond), "us-east-2".to_owned(), "ursa-labs-taxi-data".to_owned(), vec!["month".to_owned()], vec![ CatalogFile::new( "2009/01/data.parquet", 461966527, vec!["2009/01".to_owned()], ), CatalogFile::new( "2009/02/data.parquet", 436405669, vec!["2009/02".to_owned()], ), CatalogFile::new( "2009/03/data.parquet",
random
[ { "content": "pub fn catalog_schema(partition_names: &[String]) -> Arc<Schema> {\n\n let mut fields = vec![\n\n Field::new(\"key\", DataType::Utf8, false),\n\n Field::new(\"length\", DataType::UInt64, false),\n\n ];\n\n for col in partition_names {\n\n fields.push(Field::new(col, DataType::Utf8, false));\n\n }\n\n Arc::new(Schema::new(fields))\n\n}\n\n\n\n//// Implems ////\n\n\n\npub mod delta_catalog;\n\npub mod static_catalog;\n\npub(crate) mod test_catalog;\n\n// pub(crate) mod utils;\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "code/src/datasource/catalog/mod.rs", "rank": 0, "score": 128504.77182609186 }, { "content": "/// converts \"A AND (B AND (C OR D))\" => [A, B, C OR D]\n\n/// Copied from DataFusion filter pushdown\n\npub fn split_expr<'a>(predicate: &'a Expr, predicates: &mut Vec<&'a Expr>) {\n\n match predicate {\n\n Expr::BinaryExpr {\n\n right,\n\n op: Operator::And,\n\n left,\n\n } => {\n\n split_expr(&left, predicates);\n\n split_expr(&right, predicates);\n\n }\n\n other => predicates.push(other),\n\n }\n\n}\n\n\n", "file_path": "code/src/plan_utils.rs", "rank": 1, "score": 108746.21459421724 }, { "content": "fn serialize_schema(schema: &Schema) -> EncodedData {\n\n let options = IpcWriteOptions::default();\n\n let data_gen = writer::IpcDataGenerator::default();\n\n data_gen.schema_to_bytes(schema, &options)\n\n}\n\n\n", "file_path": "code/src/serde/to_proto.rs", "rank": 2, "score": 108712.74374054732 }, { "content": "fn refvec(vec: &Vec<String>) -> Vec<Option<&str>> {\n\n vec.iter()\n\n .map(|item| Some(item.as_ref()))\n\n .collect::<Vec<_>>()\n\n}\n", "file_path": "code/src/datasource/catalog/test_catalog.rs", "rank": 4, "score": 102697.9329582674 }, { "content": "pub fn downloader_creator(\n\n region: &str,\n\n) -> (String, Box<dyn Fn() -> Arc<dyn Downloader>>) {\n\n let region_clone = region.to_owned();\n\n let creator: Box<dyn Fn() -> Arc<dyn Downloader>> = Box::new(move || {\n\n Arc::new(S3Downloader {\n\n client: new_client(&region_clone),\n\n })\n\n });\n\n\n\n (format!(\"s3::{}\", region), creator)\n\n}\n\n\n", "file_path": "code/src/clients/s3.rs", "rank": 5, "score": 96433.72108108006 }, { "content": "pub fn serialize_hbee(\n\n hbee_table: &HBeeTableDesc,\n\n sql: String,\n\n source: String,\n\n) -> protobuf::HBeeScanNode {\n\n let schema = serialize_schema(&hbee_table.schema());\n\n let scan = match hbee_table {\n\n HBeeTableDesc::S3Parquet(table) => Some(\n\n protobuf::h_bee_scan_node::Scan::S3Parquet(protobuf::S3ParquetScanNode {\n\n region: table.region().to_owned(),\n\n bucket: table.bucket().to_owned(),\n\n files: table\n\n .files()\n\n .iter()\n\n .map(|sized_file| protobuf::SizedFile {\n\n key: sized_file.key.to_owned(),\n\n length: sized_file.length,\n\n })\n\n .collect(),\n\n }),\n\n ),\n\n };\n\n protobuf::HBeeScanNode {\n\n scan,\n\n sql,\n\n schema: schema.ipc_message,\n\n source,\n\n }\n\n}\n\n\n", "file_path": "code/src/serde/to_proto.rs", "rank": 6, "score": 96433.72108108006 }, { "content": "pub fn serialize_hcomb(\n\n hcomb_table: &HCombTableDesc,\n\n sql: String,\n\n source: String,\n\n) -> protobuf::HCombScanNode {\n\n let schema = serialize_schema(&hcomb_table.schema());\n\n protobuf::HCombScanNode {\n\n query_id: hcomb_table.query_id().to_owned(),\n\n nb_hbee: hcomb_table.nb_hbee() as u32,\n\n schema: schema.ipc_message,\n\n sql,\n\n source,\n\n }\n\n}\n", "file_path": "code/src/serde/to_proto.rs", "rank": 7, "score": 96433.72108108006 }, { "content": "pub fn deserialize_hbee(\n\n message: protobuf::HBeeScanNode,\n\n) -> 
Result<(HBeeTableDesc, String, String)> {\n\n let schema = convert::schema_from_bytes(&message.schema)?;\n\n let scan = message\n\n .scan\n\n .ok_or(internal_err!(\"Scan field cannot be empty\"))?;\n\n let provider = match scan {\n\n protobuf::h_bee_scan_node::Scan::S3Parquet(scan_node) => S3ParquetTable::new(\n\n scan_node.region.to_owned(),\n\n scan_node.bucket.to_owned(),\n\n scan_node\n\n .files\n\n .iter()\n\n .map(|sized_file| SizedFile {\n\n key: sized_file.key.to_owned(),\n\n length: sized_file.length,\n\n })\n\n .collect(),\n\n Arc::new(schema),\n\n ),\n\n };\n\n\n\n Ok((provider, message.sql, message.source))\n\n}\n\n\n", "file_path": "code/src/serde/from_proto.rs", "rank": 8, "score": 96433.72108108006 }, { "content": "pub fn deserialize_hcomb(\n\n message: protobuf::HCombScanNode,\n\n) -> Result<(HCombTableDesc, String, String)> {\n\n let schema = convert::schema_from_bytes(&message.schema)?;\n\n let provider = HCombTableDesc::new(\n\n message.query_id.to_owned(),\n\n message.nb_hbee as usize,\n\n Arc::new(schema),\n\n );\n\n Ok((provider, message.sql, message.source))\n\n}\n", "file_path": "code/src/serde/from_proto.rs", "rank": 9, "score": 96433.72108108006 }, { "content": "pub fn get_fargate_config() -> Result<FargateConfig> {\n\n envy::from_env::<FargateConfig>().map_err(|e| BuzzError::Internal(format!(\"{}\", e)))\n\n}\n\n\n\n#[derive(Deserialize, Debug)]\n\npub struct LambdaConfig {\n\n pub hbee_lambda_name: String,\n\n pub aws_region: String,\n\n}\n\n\n", "file_path": "code/src/models/env.rs", "rank": 13, "score": 84174.4331403364 }, { "content": "pub fn get_lambda_config() -> Result<LambdaConfig> {\n\n envy::from_env::<LambdaConfig>().map_err(|e| BuzzError::Internal(format!(\"{}\", e)))\n\n}\n", "file_path": "code/src/models/env.rs", "rank": 14, "score": 84174.4331403364 }, { "content": "fn test_schema() -> SchemaRef {\n\n Arc::new(Schema::new(vec![Field::new(\n\n \"data_col\",\n\n DataType::Int64,\n\n true,\n\n )]))\n\n}\n\n\n", "file_path": "code/src/datasource/catalog/test_catalog.rs", "rank": 15, "score": 82510.77438114957 }, { "content": "/// converts [A, B, C] => \"(A AND B) AND C\"\n\npub fn merge_expr<'a>(predicates: &[Expr]) -> Expr {\n\n let mut predicates_iter = predicates.iter();\n\n let mut merged_pred = predicates_iter\n\n .next()\n\n .expect(\"Merging requires at least one expr\")\n\n .clone();\n\n while let Some(expr) = predicates_iter.next() {\n\n merged_pred = logical_plan::and(merged_pred, expr.clone());\n\n }\n\n merged_pred\n\n}\n", "file_path": "code/src/plan_utils.rs", "rank": 16, "score": 81684.3651758748 }, { "content": "/// Search a TableProvider of the given type in the plan.\n\n/// Only works with linear plans (only one datasource).\n\npub fn find_table_name<'a, T: TableProvider + 'static>(\n\n plan: &'a LogicalPlan,\n\n) -> Result<&'a str> {\n\n let new_inputs = plan.inputs();\n\n if new_inputs.len() > 1 {\n\n Err(not_impl_err!(\n\n \"Operations with more than one inputs are not supported\",\n\n ))\n\n } else if new_inputs.len() == 1 {\n\n // recurse\n\n find_table_name::<T>(new_inputs[0])\n\n } else {\n\n if let Some(result_table) = as_table_name::<T>(&plan) {\n\n Ok(result_table)\n\n } else {\n\n Err(not_impl_err!(\n\n \"Expected root to be a {}\",\n\n std::any::type_name::<T>()\n\n ))\n\n }\n\n }\n\n}\n\n\n", "file_path": "code/src/services/utils.rs", "rank": 17, "score": 78693.78075612568 }, { "content": "pub fn file_id(bucket: &str, key: &str) -> String {\n\n format!(\"{}/{}\", bucket, key)\n\n}\n\n\n\n//// S3 Client ////\n\n\n", 
"file_path": "code/src/clients/s3.rs", "rank": 18, "score": 77069.03055254504 }, { "content": "fn main() {\n\n prost_build::compile_protos(&[\"proto/buzz.proto\"], &[\"proto\"])\n\n .unwrap_or_else(|e| panic!(\"protobuf compilation failed: {}\", e));\n\n}\n", "file_path": "code/build.rs", "rank": 19, "score": 53203.37903709616 }, { "content": "fn main() {\n\n tokio::runtime::Runtime::new()\n\n .unwrap()\n\n .block_on(async_main());\n\n}\n", "file_path": "code/examples/direct_s3.rs", "rank": 20, "score": 50926.719418773515 }, { "content": "fn descriptor_to_cmd(\n\n descriptor: Option<FlightDescriptor>,\n\n) -> Result<String, Box<dyn Error>> {\n\n let descriptor = descriptor.ok_or(Box::new(internal_err!(\n\n \"Descriptor not found in first flight\"\n\n )))?;\n\n if descriptor.r#type != flight_descriptor::DescriptorType::Cmd as i32 {\n\n Err(Box::new(internal_err!(\"Descriptor type should be cmd\")))\n\n } else {\n\n Ok(String::from_utf8(descriptor.cmd).unwrap())\n\n }\n\n}\n", "file_path": "code/src/flight_utils.rs", "rank": 21, "score": 49896.02325345845 }, { "content": "fn read_file(\n\n file_reader: Arc<SerializedFileReader<CachedFile>>,\n\n projection: Vec<usize>,\n\n batch_size: usize,\n\n response_tx: Sender<ArrowResult<RecordBatch>>,\n\n) -> DataFusionResult<()> {\n\n let mut arrow_reader = ParquetFileArrowReader::new(file_reader.clone());\n\n let mut batch_reader =\n\n arrow_reader.get_record_reader_by_columns(projection.clone(), batch_size)?;\n\n loop {\n\n match batch_reader.next() {\n\n Some(Ok(batch)) => send_result(&response_tx, Ok(batch))?,\n\n None => {\n\n break;\n\n }\n\n Some(Err(e)) => {\n\n let err_msg = format!(\"Error reading batch from file: {}\", e.to_string());\n\n // send error to operator\n\n send_result(\n\n &response_tx,\n\n Err(ArrowError::ParquetError(err_msg.clone())),\n\n )?;\n\n // terminate thread with error\n\n return Err(DataFusionError::Execution(err_msg));\n\n }\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "code/src/execution_plan/parquet.rs", "rank": 22, "score": 48928.31286988888 }, { "content": "fn send_result(\n\n response_tx: &Sender<ArrowResult<RecordBatch>>,\n\n result: ArrowResult<RecordBatch>,\n\n) -> DataFusionResult<()> {\n\n response_tx\n\n .blocking_send(result)\n\n .map_err(|e| DataFusionError::Execution(e.to_string()))?;\n\n Ok(())\n\n}\n\n\n", "file_path": "code/src/execution_plan/parquet.rs", "rank": 23, "score": 48928.31286988888 }, { "content": "#[async_trait]\n\npub trait SplittableTable {\n\n fn split(&self, files: Vec<SizedFile>) -> Vec<HBeeTableDesc>;\n\n /// Get the names of the partitioning columns, in order of evaluation.\n\n fn partition_columns(&self) -> &[String];\n\n /// schema including the partition columns\n\n fn schema(&self) -> SchemaRef;\n\n fn statistics(&self) -> Statistics {\n\n Statistics::default()\n\n }\n\n async fn file_table(&self) -> Arc<dyn TableProvider + Send + Sync>;\n\n}\n\n\n\n/// A generic catalog table that wraps splittable tables\n\npub struct CatalogTable {\n\n source_table: Box<dyn SplittableTable + Send + Sync>,\n\n}\n\n\n\nimpl CatalogTable {\n\n pub fn new(source_table: Box<dyn SplittableTable + Send + Sync>) -> Self {\n\n Self { source_table }\n", "file_path": "code/src/datasource/catalog/mod.rs", "rank": 24, "score": 48316.73348387003 }, { "content": "#[async_trait]\n\npub trait HCombScheduler {\n\n /// Notifies the hcomb that a query is starting and opens a stream of results.\n\n async fn schedule(\n\n &self,\n\n address: &HCombAddress,\n\n hcomb_table: &HCombTableDesc,\n\n sql: 
String,\n\n source: String,\n\n ) -> Result<Pin<Box<dyn Stream<Item = ArrowResult<RecordBatch>>>>>;\n\n}\n\n\n\npub struct HttpHCombScheduler;\n\n\n\n#[async_trait]\n\nimpl HCombScheduler for HttpHCombScheduler {\n\n async fn schedule(\n\n &self,\n\n address: &HCombAddress,\n\n hcomb_table: &HCombTableDesc,\n\n sql: String,\n\n source: String,\n\n ) -> Result<Pin<Box<dyn Stream<Item = ArrowResult<RecordBatch>>>>> {\n\n flight_client::call_do_get(address, hcomb_table, sql, source)\n\n .await\n\n .map_err(|e| internal_err!(\"Could not get result from HComb: {}\", e))\n\n }\n\n}\n", "file_path": "code/src/services/fuse/hcomb_scheduler.rs", "rank": 25, "score": 46639.25452908206 }, { "content": "#[async_trait]\n\npub trait HBeeScheduler {\n\n async fn schedule(\n\n &self,\n\n query_id: String,\n\n address: &HCombAddress,\n\n table: &HBeeTableDesc,\n\n sql: String,\n\n source: String,\n\n ) -> Result<()>;\n\n}\n\n\n\npub struct TestHBeeScheduler {\n\n pub domain: String,\n\n}\n\n\n\n#[async_trait]\n\nimpl HBeeScheduler for TestHBeeScheduler {\n\n async fn schedule(\n\n &self,\n\n query_id: String,\n", "file_path": "code/src/services/fuse/hbee_scheduler.rs", "rank": 26, "score": 46639.25452908206 }, { "content": "#[async_trait]\n\npub trait HCombManager {\n\n /// Search for availaible combs or start new ones if none was found.\n\n async fn find_or_start(&self, capactity: &HCombCapacity)\n\n -> Result<Vec<HCombAddress>>;\n\n}\n\n\n\npub struct TestHCombManager {\n\n pub domain: String,\n\n}\n\n\n\n#[async_trait]\n\nimpl HCombManager for TestHCombManager {\n\n async fn find_or_start(\n\n &self,\n\n capactity: &HCombCapacity,\n\n ) -> Result<Vec<HCombAddress>> {\n\n assert_eq!(capactity.zones, 1, \"Only single zone supported for now\");\n\n Ok(vec![format!(\"http://{}:3333\", self.domain)])\n\n }\n\n}\n", "file_path": "code/src/services/fuse/hcomb_manager.rs", "rank": 27, "score": 46639.25452908206 }, { "content": "#[async_trait]\n\npub trait Downloader: Send + Sync {\n\n async fn download(\n\n &self,\n\n file_id: String,\n\n start: u64,\n\n length: usize,\n\n ) -> Result<Vec<u8>>;\n\n}\n\n\n", "file_path": "code/src/clients/range_cache.rs", "rank": 28, "score": 44530.24122748261 }, { "content": "#[async_trait]\n\npub trait Collector: Send + Sync {\n\n /// send results back to hcomb\n\n async fn send_back(\n\n &self,\n\n query_id: String,\n\n data: Result<Vec<RecordBatch>>,\n\n address: HCombAddress,\n\n ) -> Result<()>;\n\n}\n\n\n\npub struct NoopCollector {}\n\n\n\n#[async_trait]\n\nimpl Collector for NoopCollector {\n\n async fn send_back(\n\n &self,\n\n _query_id: String,\n\n data: Result<Vec<RecordBatch>>,\n\n _address: HCombAddress,\n\n ) -> Result<()> {\n", "file_path": "code/src/services/hbee/collector.rs", "rank": 29, "score": 44530.24122748261 }, { "content": "fn as_table_name<'a, T: TableProvider + 'static>(\n\n plan: &'a LogicalPlan,\n\n) -> Option<&'a str> {\n\n if let LogicalPlan::TableScan {\n\n source, table_name, ..\n\n } = plan\n\n {\n\n source\n\n .as_any()\n\n .downcast_ref::<T>()\n\n .map(|_| table_name.as_ref())\n\n } else {\n\n None\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::sync::Arc;\n\n\n", "file_path": "code/src/services/utils.rs", "rank": 30, "score": 40971.45209923813 }, { "content": "fn cmd_to_descriptor(cmd: &str) -> Option<FlightDescriptor> {\n\n Some(FlightDescriptor {\n\n r#type: flight_descriptor::DescriptorType::Cmd as i32,\n\n cmd: cmd.as_bytes().to_owned(),\n\n path: vec![],\n\n })\n\n}\n\n\n", "file_path": "code/src/flight_utils.rs", "rank": 
31, "score": 40205.67693084196 }, { "content": "fn new_client(region: &str) -> Arc<EcsClient> {\n\n let region = Region::from_str(region).unwrap();\n\n Arc::new(EcsClient::new(region))\n\n}\n", "file_path": "code/src/clients/fargate.rs", "rank": 32, "score": 40205.67693084196 }, { "content": "fn new_client(region: &str) -> Arc<LambdaClient> {\n\n let region = Region::from_str(region).unwrap();\n\n Arc::new(LambdaClient::new(region))\n\n}\n", "file_path": "code/src/clients/lambda.rs", "rank": 33, "score": 40205.67693084196 }, { "content": "fn new_client(region: &str) -> Arc<S3Client> {\n\n let region = Region::from_str(region).unwrap();\n\n Arc::new(S3Client::new(region))\n\n}\n", "file_path": "code/src/clients/s3.rs", "rank": 34, "score": 40205.67693084196 }, { "content": "//! Test Fixtures for Catalog Tables\n\n\n\nuse super::*;\n\nuse crate::datasource::{S3ParquetTable, SplittableTable};\n\nuse crate::models::SizedFile;\n\nuse arrow::datatypes::DataType;\n\nuse arrow::datatypes::{Field, Schema, SchemaRef};\n\nuse arrow::record_batch::RecordBatch;\n\nuse async_trait::async_trait;\n\nuse datafusion::datasource::{MemTable, TableProvider};\n\n\n\n/// `pattern_vec(pattern, len)` creates a vector of String of length `len`\n\nmacro_rules! pattern_vec {\n\n ($x:expr, $y:expr) => {\n\n (1..=$y).map(|i| format!($x, i)).collect::<Vec<String>>()\n\n };\n\n}\n\n\n\n/// A SplittableTable that splits into `nb_split` S3Parquet tables\n\npub struct MockSplittableTable {\n", "file_path": "code/src/datasource/catalog/test_catalog.rs", "rank": 35, "score": 14.481863997203495 }, { "content": "\n\n fn statistics(&self) -> Statistics {\n\n Statistics::default()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use arrow::array::Int32Array;\n\n use arrow::datatypes::{DataType, Field, Schema};\n\n use datafusion::datasource::TableProvider;\n\n\n\n #[tokio::test]\n\n async fn test_not_empty() -> Result<()> {\n\n let schema = Arc::new(Schema::new(vec![Field::new(\"a\", DataType::Int32, false)]));\n\n let hcomb_table_desc =\n\n HCombTableDesc::new(\"mock_query_id\".to_owned(), 1, schema.clone());\n\n let batches = vec![RecordBatch::try_new(\n\n schema.clone(),\n", "file_path": "code/src/datasource/hcomb.rs", "rank": 36, "score": 14.145275044056165 }, { "content": "use datafusion::physical_plan::Partitioning;\n\nuse datafusion::physical_plan::{RecordBatchStream, SendableRecordBatchStream};\n\nuse futures::stream::{Stream, StreamExt};\n\n\n\n/// Execution plan for scanning a Parquet file\n\n#[derive(Debug, Clone)]\n\npub struct ParquetExec {\n\n files: Vec<CachedFile>,\n\n /// Schema before projection is applied\n\n file_schema: SchemaRef,\n\n /// Schema after projection is applied\n\n projected_schema: SchemaRef,\n\n /// Projection for which columns to load\n\n projection: Vec<usize>,\n\n /// Batch size\n\n batch_size: usize,\n\n}\n\n\n\nimpl ParquetExec {\n\n /// Create a new Parquet reader execution plan\n", "file_path": "code/src/execution_plan/parquet.rs", "rank": 37, "score": 12.661762724769257 }, { "content": "use std::sync::Arc;\n\n\n\nuse super::{catalog_schema, SplittableTable};\n\nuse crate::datasource::{HBeeTableDesc, S3ParquetTable};\n\nuse crate::error::Result;\n\nuse crate::models::SizedFile;\n\nuse arrow::array::*;\n\nuse arrow::datatypes::*;\n\nuse arrow::record_batch::RecordBatch;\n\nuse async_trait::async_trait;\n\nuse datafusion::datasource::{MemTable, TableProvider};\n\n\n\npub struct CatalogFile {\n\n sized_file: SizedFile,\n\n partitions: Vec<String>,\n\n}\n\n\n\nimpl 
CatalogFile {\n\n pub fn new(key: &str, length: u64, partitions: Vec<String>) -> Self {\n\n CatalogFile {\n", "file_path": "code/src/datasource/catalog/static_catalog.rs", "rank": 38, "score": 12.18747743844537 }, { "content": "//! These serialization / deserialization methods allow the exchange of DataFusion logical plans between services\n\n\n\nmod from_proto;\n\nmod to_proto;\n\n\n\npub use from_proto::*;\n\npub use to_proto::*;\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::sync::Arc;\n\n\n\n use super::*;\n\n use crate::datasource::{HCombTableDesc, S3ParquetTable};\n\n use crate::models::SizedFile;\n\n use arrow::datatypes::{DataType, Field, Schema, TimeUnit};\n\n\n\n #[test]\n\n fn roundtrip_parquet() {\n\n let parquet_table = S3ParquetTable::new(\n", "file_path": "code/src/serde/mod.rs", "rank": 39, "score": 11.988486544678416 }, { "content": "use std::sync::Arc;\n\n\n\nuse super::HBeeTableDesc;\n\nuse crate::clients::s3;\n\nuse crate::clients::CachedFile;\n\nuse crate::clients::RangeCache;\n\nuse crate::execution_plan::ParquetExec;\n\nuse crate::models::SizedFile;\n\nuse arrow::datatypes::*;\n\nuse datafusion::error::Result;\n\nuse datafusion::logical_plan::Expr;\n\nuse datafusion::physical_plan::ExecutionPlan;\n\n\n\n/// Table-based representation of a `ParquetFile` backed by S3.\n\n#[derive(Debug)]\n\npub struct S3ParquetTable {\n\n region: String,\n\n bucket: String,\n\n files: Vec<SizedFile>,\n\n schema: SchemaRef,\n", "file_path": "code/src/datasource/hbee/s3_parquet.rs", "rank": 40, "score": 11.43700792960215 }, { "content": " }\n\n\n\n pub fn bucket(&self) -> &str {\n\n &self.bucket\n\n }\n\n\n\n pub fn files(&self) -> &[SizedFile] {\n\n &self.files\n\n }\n\n\n\n pub fn schema(&self) -> SchemaRef {\n\n self.schema.clone()\n\n }\n\n\n\n pub fn scan(\n\n &self,\n\n cache: Arc<RangeCache>,\n\n projection: &Option<Vec<usize>>,\n\n batch_size: usize,\n\n _filters: &[Expr],\n", "file_path": "code/src/datasource/hbee/s3_parquet.rs", "rank": 41, "score": 11.1833166104951 }, { "content": " #[tokio::test]\n\n async fn test_small_parquet() {\n\n let schema = Arc::new(Schema::new(vec![Field::new(\"a\", DataType::Int32, false)]));\n\n let rec_batch = RecordBatch::try_new(\n\n schema.clone(),\n\n vec![Arc::new(Int32Array::from(vec![1, 2, 3]))],\n\n )\n\n .unwrap();\n\n\n\n let filename = \"test_small_parquet.parquet\";\n\n let results = write_and_exec(&rec_batch, filename).await;\n\n assert_eq!(results.len(), 1);\n\n assert_eq!(format!(\"{:?}\", results[0]), format!(\"{:?}\", rec_batch));\n\n }\n\n\n\n #[tokio::test]\n\n async fn test_parquet_two_columns() {\n\n let schema = Arc::new(Schema::new(vec![\n\n Field::new(\"a\", DataType::Int32, false),\n\n Field::new(\"b\", DataType::Utf8, false),\n", "file_path": "code/src/execution_plan/parquet.rs", "rank": 42, "score": 11.07452091652798 }, { "content": " fields,\n\n test_schema().metadata().clone(),\n\n ))\n\n }\n\n async fn file_table(&self) -> Arc<dyn TableProvider + Send + Sync> {\n\n let mut fields = vec![\n\n Field::new(\"key\", DataType::Utf8, false),\n\n Field::new(\"length\", DataType::UInt64, false),\n\n ];\n\n for partition in &self.partitions {\n\n fields.push(Field::new(partition, DataType::Utf8, false));\n\n }\n\n\n\n let file_table_schema = Arc::new(Schema::new(fields));\n\n\n\n let keys = pattern_vec!(\"file_{}\", self.nb_split);\n\n let lengths = vec![999999999 as u64; self.nb_split];\n\n let parts =\n\n vec![pattern_vec!(\"part_value_{:03}\", self.nb_split); self.partitions.len()];\n\n\n", "file_path": 
"code/src/datasource/catalog/test_catalog.rs", "rank": 43, "score": 11.026224907784652 }, { "content": "use crate::error::{BuzzError, Result};\n\nuse serde::Deserialize;\n\n\n\n#[derive(Deserialize, Debug)]\n\npub struct FargateConfig {\n\n pub hcomb_cluster_name: String,\n\n pub hcomb_task_sg_id: String,\n\n pub public_subnets: Vec<String>,\n\n pub hcomb_task_def_arn: String,\n\n pub aws_region: String,\n\n}\n\n\n", "file_path": "code/src/models/env.rs", "rank": 44, "score": 10.507223908357838 }, { "content": " assert_eq!(\n\n format!(\"{:?}\", hcomb_table),\n\n format!(\"{:?}\", transfered_table)\n\n );\n\n }\n\n\n\n fn test_schema() -> Schema {\n\n Schema::new(vec![\n\n Field::new(\"id\", DataType::Int32, false),\n\n Field::new(\"name\", DataType::Utf8, false),\n\n Field::new(\"state\", DataType::Utf8, false),\n\n Field::new(\"salary\", DataType::Float64, false),\n\n Field::new(\n\n \"last_login\",\n\n DataType::Timestamp(TimeUnit::Millisecond, None),\n\n false,\n\n ),\n\n ])\n\n }\n\n}\n", "file_path": "code/src/serde/mod.rs", "rank": 45, "score": 10.435362361619164 }, { "content": "}\n\n\n\n/// Convert a vector of RecordBatches and a cmd to a stream of flights\n\n/// If there are no batches (empty vec), a flight with an empty schema is sent\n\npub async fn batch_vec_to_flight(\n\n cmd: &str,\n\n batches: Vec<RecordBatch>,\n\n) -> Result<impl Stream<Item = FlightData>, Box<dyn Error>> {\n\n let schema;\n\n if batches.len() == 0 {\n\n schema = Arc::new(Schema::empty());\n\n } else {\n\n schema = batches[0].schema();\n\n }\n\n // create an initial FlightData message that sends schema\n\n let options = IpcWriteOptions::default();\n\n let mut flight_schema = flight_data_from_arrow_schema(&schema, &options);\n\n flight_schema.flight_descriptor = cmd_to_descriptor(&cmd);\n\n\n\n let mut flight_vec = vec![flight_schema];\n", "file_path": "code/src/flight_utils.rs", "rank": 46, "score": 10.382412025311147 }, { "content": " pub fn new(\n\n files: Vec<CachedFile>,\n\n projection: Option<Vec<usize>>,\n\n batch_size: usize,\n\n schema: SchemaRef,\n\n ) -> Self {\n\n let projection = match projection {\n\n Some(p) => p,\n\n None => (0..schema.fields().len()).collect(),\n\n };\n\n let projected_schema = Schema::new(\n\n projection\n\n .iter()\n\n .map(|col| schema.field(*col).clone())\n\n .collect(),\n\n );\n\n Self {\n\n files,\n\n file_schema: schema,\n\n projected_schema: Arc::new(projected_schema),\n", "file_path": "code/src/execution_plan/parquet.rs", "rank": 47, "score": 10.358823060645578 }, { "content": " }\n\n }\n\n\n\n pub fn new_empty(desc: HCombTableDesc) -> Self {\n\n Self::new(desc, Box::pin(futures::stream::iter(vec![])))\n\n }\n\n\n\n pub fn query_id(&self) -> &str {\n\n &self.desc.query_id\n\n }\n\n}\n\n\n\nimpl TableProvider for HCombTable {\n\n fn as_any(&self) -> &dyn Any {\n\n self\n\n }\n\n\n\n fn schema(&self) -> SchemaRef {\n\n self.desc.schema()\n\n }\n", "file_path": "code/src/datasource/hcomb.rs", "rank": 49, "score": 10.147333983074985 }, { "content": " .into_iter()\n\n .map(|file| {\n\n S3ParquetTable::new(\n\n \"north-pole-1\".to_owned(),\n\n \"santas-bucket\".to_owned(),\n\n vec![file],\n\n test_schema(),\n\n )\n\n })\n\n .collect::<Vec<_>>()\n\n }\n\n fn partition_columns(&self) -> &[String] {\n\n &self.partitions\n\n }\n\n fn schema(&self) -> SchemaRef {\n\n let mut fields = test_schema().fields().clone();\n\n for partition_col in &self.partitions {\n\n fields.push(Field::new(partition_col, DataType::Utf8, false))\n\n }\n\n Arc::new(Schema::new_with_metadata(\n", 
"file_path": "code/src/datasource/catalog/test_catalog.rs", "rank": 50, "score": 10.053311699638556 }, { "content": "}\n\n\n\nimpl S3ParquetTable {\n\n /// Initialize a new `ParquetTable` from a list of s3 files and an expected schema.\n\n pub fn new(\n\n region: String,\n\n bucket: String,\n\n files: Vec<SizedFile>,\n\n schema: SchemaRef,\n\n ) -> HBeeTableDesc {\n\n HBeeTableDesc::S3Parquet(Self {\n\n schema,\n\n region,\n\n bucket,\n\n files,\n\n })\n\n }\n\n\n\n pub fn region(&self) -> &str {\n\n &self.region\n", "file_path": "code/src/datasource/hbee/s3_parquet.rs", "rank": 51, "score": 9.960637958008165 }, { "content": "use std::any::Any;\n\nuse std::pin::Pin;\n\nuse std::sync::{Arc, Mutex};\n\n\n\nuse crate::execution_plan::StreamExec;\n\nuse arrow::datatypes::*;\n\nuse arrow::error::Result as ArrowResult;\n\nuse arrow::record_batch::RecordBatch;\n\nuse datafusion::datasource::datasource::Statistics;\n\nuse datafusion::datasource::TableProvider;\n\nuse datafusion::error::Result;\n\nuse datafusion::logical_plan::Expr;\n\nuse datafusion::physical_plan::ExecutionPlan;\n\nuse futures::Stream;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct HCombTableDesc {\n\n query_id: String,\n\n nb_hbee: usize,\n\n schema: SchemaRef,\n", "file_path": "code/src/datasource/hcomb.rs", "rank": 52, "score": 9.546418173573663 }, { "content": "use std::any::Any;\n\nuse std::fmt;\n\nuse std::pin::Pin;\n\nuse std::sync::{Arc, Mutex};\n\nuse std::task::{Context, Poll};\n\n\n\nuse arrow::datatypes::{Schema, SchemaRef};\n\nuse arrow::error::Result as ArrowResult;\n\nuse arrow::record_batch::RecordBatch;\n\nuse datafusion::error::{DataFusionError, Result};\n\nuse datafusion::physical_plan::ExecutionPlan;\n\nuse datafusion::physical_plan::Partitioning;\n\nuse datafusion::physical_plan::{RecordBatchStream, SendableRecordBatchStream};\n\n\n\nuse async_trait::async_trait;\n\nuse futures::stream::Stream;\n\nuse pin_project::pin_project;\n\n\n\npub struct StreamExec {\n\n stream: Mutex<\n", "file_path": "code/src/execution_plan/stream.rs", "rank": 53, "score": 9.376727856488344 }, { "content": " async fn test_larger_parquet() {\n\n let schema =\n\n Arc::new(Schema::new(vec![Field::new(\"a\", DataType::Float64, false)]));\n\n let rec_batch = RecordBatch::try_new(\n\n schema.clone(),\n\n vec![Arc::new(Float64Array::from(\n\n (0..200_000).map(|val| val as f64 * 0.1).collect::<Vec<_>>(),\n\n ))],\n\n )\n\n .unwrap();\n\n\n\n let filename = \"test_larger_parquet.parquet\";\n\n let results = write_and_exec(&rec_batch, filename).await;\n\n assert_eq!(\n\n results.iter().map(|rb| rb.num_rows()).sum::<usize>(),\n\n 200_000\n\n );\n\n }\n\n\n\n /// Write the given `rec_batch` as a parquet file then make it into an exec plan\n", "file_path": "code/src/execution_plan/parquet.rs", "rank": 54, "score": 9.306271639537 }, { "content": " pub fn new(\n\n schema: SchemaRef,\n\n region: String,\n\n bucket: String,\n\n partition_cols: Vec<String>,\n\n files: Vec<CatalogFile>,\n\n ) -> Self {\n\n Self {\n\n schema,\n\n region,\n\n bucket,\n\n files,\n\n partition_cols,\n\n }\n\n }\n\n\n\n fn to_table(&self) -> Result<Arc<dyn TableProvider + Send + Sync>> {\n\n let mut key_builder = StringBuilder::new(self.files.len());\n\n let mut length_builder = UInt64Builder::new(self.files.len());\n\n let mut partition_builders = self\n", "file_path": "code/src/datasource/catalog/static_catalog.rs", "rank": 55, "score": 9.28179539949359 }, { "content": " self.bucket.clone(),\n\n vec![file],\n\n Arc::clone(&self.schema),\n\n )\n\n })\n\n .collect()\n\n 
}\n\n fn partition_columns(&self) -> &[String] {\n\n &self.partition_cols\n\n }\n\n fn schema(&self) -> SchemaRef {\n\n let mut fields = self.schema.fields().clone();\n\n for partition_col in &self.partition_cols {\n\n fields.push(Field::new(partition_col, DataType::Utf8, false))\n\n }\n\n Arc::new(Schema::new_with_metadata(\n\n fields,\n\n self.schema.metadata().clone(),\n\n ))\n\n }\n\n async fn file_table(&self) -> Arc<dyn TableProvider + Send + Sync> {\n\n self.to_table().unwrap()\n\n }\n\n}\n", "file_path": "code/src/datasource/catalog/static_catalog.rs", "rank": 56, "score": 9.09540505764696 }, { "content": " sized_file: SizedFile {\n\n key: key.to_owned(),\n\n length,\n\n },\n\n partitions,\n\n }\n\n }\n\n}\n\n\n\n/// A catalog table that contains a static list of files.\n\n/// Only supports S3 parquet files for now and simply sends each file into a different hbee.\n\npub struct StaticCatalogTable {\n\n schema: SchemaRef,\n\n region: String,\n\n bucket: String,\n\n files: Vec<CatalogFile>,\n\n partition_cols: Vec<String>,\n\n}\n\n\n\nimpl StaticCatalogTable {\n", "file_path": "code/src/datasource/catalog/static_catalog.rs", "rank": 57, "score": 9.087308256733161 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::env;\n\n use std::fs;\n\n use std::io::SeekFrom;\n\n use std::path::PathBuf;\n\n\n\n use super::*;\n\n use crate::clients::Downloader;\n\n use crate::clients::RangeCache;\n\n use crate::error::Result as BuzzResult;\n\n use arrow::array::*;\n\n use arrow::datatypes::{DataType, Field, Schema};\n\n use arrow_parquet::arrow::ArrowWriter;\n\n use async_trait::async_trait;\n\n use tokio::fs::File as TokioFile;\n\n use tokio::io::{AsyncReadExt, AsyncSeekExt};\n\n\n", "file_path": "code/src/execution_plan/parquet.rs", "rank": 58, "score": 8.99143586622457 }, { "content": " Option<Pin<Box<dyn Stream<Item = ArrowResult<RecordBatch>> + Send + Sync>>>,\n\n >,\n\n schema: SchemaRef,\n\n\n\n projection: Vec<usize>,\n\n batch_size: usize,\n\n}\n\n\n\nimpl fmt::Debug for StreamExec {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.debug_struct(\"StreamExec\")\n\n .field(\"schema\", &self.schema)\n\n .field(\"projection\", &self.projection)\n\n .field(\"batch_size\", &self.batch_size)\n\n .finish()\n\n }\n\n}\n\n\n\nimpl StreamExec {\n\n pub fn new(\n", "file_path": "code/src/execution_plan/stream.rs", "rank": 59, "score": 8.701723501884226 }, { "content": " vec![Arc::new(Int32Array::from(vec![1, 2, 3]))],\n\n )?];\n\n let hcomb_table = HCombTable::new(\n\n hcomb_table_desc,\n\n Box::pin(futures::stream::iter(\n\n batches.clone().into_iter().map(|b| Ok(b)),\n\n )),\n\n );\n\n\n\n let exec_plan = hcomb_table.scan(&None, 1024, &[], None)?;\n\n\n\n let results = datafusion::physical_plan::collect(exec_plan).await?;\n\n assert_eq!(results.len(), 1);\n\n assert_eq!(format!(\"{:?}\", results), format!(\"{:?}\", batches));\n\n Ok(())\n\n }\n\n\n\n #[tokio::test]\n\n async fn test_empty() -> Result<()> {\n\n let schema = Arc::new(Schema::new(vec![Field::new(\"a\", DataType::Int32, false)]));\n", "file_path": "code/src/datasource/hcomb.rs", "rank": 60, "score": 8.69497312207183 }, { "content": "}\n\n\n\nimpl HCombTableDesc {\n\n pub fn new(query_id: String, nb_hbee: usize, schema: SchemaRef) -> Self {\n\n Self {\n\n query_id,\n\n nb_hbee,\n\n schema,\n\n }\n\n }\n\n\n\n pub fn query_id(&self) -> &str {\n\n &self.query_id\n\n }\n\n\n\n pub fn nb_hbee(&self) -> usize {\n\n self.nb_hbee\n\n }\n\n\n\n pub fn schema(&self) -> SchemaRef {\n", "file_path": 
"code/src/datasource/hcomb.rs", "rank": 61, "score": 8.490832126328918 }, { "content": "use std::collections::{BTreeMap, HashMap};\n\nuse std::io::{self, Read};\n\nuse std::sync::atomic::{AtomicUsize, Ordering};\n\nuse std::sync::{Arc, Mutex};\n\nuse std::time::Instant;\n\n\n\nuse crate::error::{BuzzError, Result};\n\nuse crate::{ensure, internal_err};\n\nuse async_trait::async_trait;\n\nuse tokio::sync::mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender};\n\n\n\n/// A reader that points to a cached chunk\n\n/// TODO this cannot read from multiple concatenated chunks\n\npub struct CachedRead {\n\n data: Arc<Vec<u8>>,\n\n position: u64,\n\n remaining: u64,\n\n}\n\n\n\nimpl Read for CachedRead {\n", "file_path": "code/src/clients/range_cache.rs", "rank": 62, "score": 8.369961953605495 }, { "content": "use std::error::Error;\n\nuse std::pin::Pin;\n\n\n\nuse crate::datasource::HCombTableDesc;\n\nuse crate::flight_utils;\n\nuse crate::models::{actions, HCombAddress};\n\nuse crate::serde;\n\nuse arrow::error::Result as ArrowResult;\n\nuse arrow::record_batch::RecordBatch;\n\nuse arrow_flight::flight_service_client::FlightServiceClient;\n\nuse arrow_flight::Ticket;\n\nuse futures::Stream;\n\nuse futures::StreamExt;\n\nuse prost::Message;\n\n\n\n/// Calls the hcomb do_get endpoint, expecting the first message to be the schema\n\npub async fn call_do_get(\n\n address: &HCombAddress,\n\n hcomb_table: &HCombTableDesc,\n\n sql: String,\n", "file_path": "code/src/clients/flight_client.rs", "rank": 63, "score": 8.33907486941704 }, { "content": "//! Datasources are implementations of DataFusion's TableProvider trait\n\n\n\nmod catalog;\n\nmod hbee;\n\nmod hcomb;\n\n\n\npub use catalog::delta_catalog::DeltaCatalogTable;\n\npub use catalog::static_catalog::{CatalogFile, StaticCatalogTable};\n\npub use catalog::test_catalog::MockSplittableTable;\n\npub use catalog::{CatalogTable, SplittableTable};\n\npub use hbee::{s3_parquet::S3ParquetTable, HBeeTable, HBeeTableDesc};\n\npub use hcomb::{HCombTable, HCombTableDesc};\n", "file_path": "code/src/datasource/mod.rs", "rank": 64, "score": 8.278113222192086 }, { "content": "//! 
modules that help connecting to the outside world\n\n\n\nmod cached_file;\n\npub mod fargate;\n\npub mod flight_client;\n\npub mod lambda;\n\nmod range_cache;\n\npub mod s3;\n\n\n\npub use cached_file::CachedFile;\n\npub use range_cache::{Downloader, RangeCache};\n", "file_path": "code/src/clients/mod.rs", "rank": 65, "score": 8.0369625551056 }, { "content": "mod fuse_service;\n\nmod hbee_scheduler;\n\nmod hcomb_manager;\n\nmod hcomb_scheduler;\n\nmod query_planner;\n\n\n\npub use fuse_service::FuseService;\n\npub use hbee_scheduler::{HBeeScheduler, LambdaHBeeScheduler, TestHBeeScheduler};\n\npub use hcomb_manager::{FargateHCombManager, HCombManager, TestHCombManager};\n\npub use hcomb_scheduler::{HCombScheduler, HttpHCombScheduler};\n\npub use query_planner::{HBeePlan, QueryPlanner};\n", "file_path": "code/src/services/fuse/mod.rs", "rank": 66, "score": 7.966310812831107 }, { "content": " let schema = catalog_schema(&delta_metadata.partition_columns);\n\n\n\n let record_batch = RecordBatch::try_new(Arc::clone(&schema), col_arrays)?;\n\n Ok(Arc::new(MemTable::try_new(\n\n schema,\n\n vec![vec![record_batch]],\n\n )?))\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl SplittableTable for DeltaCatalogTable {\n\n fn split(&self, files: Vec<SizedFile>) -> Vec<HBeeTableDesc> {\n\n files\n\n .into_iter()\n\n .map(|file| {\n\n S3ParquetTable::new(\n\n self.region.clone(),\n\n self.bucket.clone(),\n\n vec![file],\n", "file_path": "code/src/datasource/catalog/delta_catalog.rs", "rank": 67, "score": 7.956099721182783 }, { "content": "use crate::datasource::{CatalogTable, HBeeTableDesc, HCombTable, HCombTableDesc};\n\nuse crate::error::{BuzzError, Result};\n\nuse crate::models::query::{BuzzStep, BuzzStepType};\n\nuse crate::not_impl_err;\n\nuse crate::plan_utils;\n\nuse crate::services::utils;\n\nuse datafusion::execution::context::ExecutionContext;\n\nuse datafusion::logical_plan::LogicalPlan;\n\nuse futures::future::{BoxFuture, FutureExt};\n\nuse std::sync::Arc;\n\n\n\npub struct QueryPlanner {\n\n /// This execution context is not meant to run queries but only to plan them.\n\n execution_context: ExecutionContext,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct HBeePlan {\n\n pub sql: String,\n\n pub source: String,\n", "file_path": "code/src/services/fuse/query_planner.rs", "rank": 68, "score": 7.931600482276044 }, { "content": " }\n\n\n\n let schema = catalog_schema(&self.partition_cols);\n\n\n\n let record_batch = RecordBatch::try_new(Arc::clone(&schema), col_arrays)?;\n\n Ok(Arc::new(MemTable::try_new(\n\n schema,\n\n vec![vec![record_batch]],\n\n )?))\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl SplittableTable for StaticCatalogTable {\n\n fn split(&self, files: Vec<SizedFile>) -> Vec<HBeeTableDesc> {\n\n files\n\n .into_iter()\n\n .map(|file| {\n\n S3ParquetTable::new(\n\n self.region.clone(),\n", "file_path": "code/src/datasource/catalog/static_catalog.rs", "rank": 69, "score": 7.907443652613384 }, { "content": "use std::sync::Arc;\n\n\n\nuse super::{catalog_schema, SplittableTable};\n\nuse crate::datasource::{HBeeTableDesc, S3ParquetTable};\n\nuse crate::error::Result;\n\nuse crate::internal_err;\n\nuse crate::models::SizedFile;\n\nuse arrow::array::*;\n\nuse arrow::datatypes::*;\n\nuse arrow::record_batch::RecordBatch;\n\nuse async_trait::async_trait;\n\nuse datafusion::datasource::{MemTable, TableProvider};\n\nuse deltalake::storage::{\n\n file::FileStorageBackend, s3::S3StorageBackend, StorageBackend,\n\n};\n\nuse deltalake::{DeltaTable, Schema as DeltaSchema};\n\nuse rusoto_core::Region;\n\nuse 
rusoto_s3::S3Client;\n\nuse std::convert::{TryFrom, TryInto};\n\nuse std::str::FromStr;\n", "file_path": "code/src/datasource/catalog/delta_catalog.rs", "rank": 70, "score": 7.891781835325056 }, { "content": " projection,\n\n batch_size,\n\n }\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl ExecutionPlan for StreamExec {\n\n fn as_any(&self) -> &dyn Any {\n\n self\n\n }\n\n\n\n fn schema(&self) -> SchemaRef {\n\n self.schema.clone()\n\n }\n\n\n\n fn children(&self) -> Vec<Arc<dyn ExecutionPlan>> {\n\n // this is a leaf node and has no children\n\n vec![]\n\n }\n", "file_path": "code/src/execution_plan/stream.rs", "rank": 71, "score": 7.889583155462567 }, { "content": "}\n\n\n\n#[derive(PartialEq, Deserialize)]\n\npub enum BuzzCatalogType {\n\n DeltaLake,\n\n Static,\n\n}\n\n\n\n#[derive(Deserialize)]\n\npub struct BuzzCatalog {\n\n pub name: String,\n\n pub uri: String,\n\n pub r#type: BuzzCatalogType,\n\n}\n\n\n\n#[derive(Deserialize)]\n\npub struct BuzzQuery {\n\n pub steps: Vec<BuzzStep>,\n\n pub capacity: HCombCapacity,\n\n pub catalogs: Vec<BuzzCatalog>,\n\n}\n", "file_path": "code/src/models/query.rs", "rank": 72, "score": 7.879061909658474 }, { "content": "mod collector;\n\nmod hbee_service;\n\n\n\npub use collector::{Collector, HttpCollector, NoopCollector};\n\npub use hbee_service::HBeeService;\n", "file_path": "code/src/services/hbee/mod.rs", "rank": 73, "score": 7.83659470335551 }, { "content": "//! Execution plans are implementations of DataFusion's ExecutionPlan trait\n\n\n\nmod parquet;\n\nmod stream;\n\n\n\npub use parquet::ParquetExec;\n\npub use stream::StreamExec;\n", "file_path": "code/src/execution_plan/mod.rs", "rank": 74, "score": 7.740314307213557 }, { "content": "mod flight_service;\n\nmod hcomb_service;\n\nmod results_service;\n\n\n\npub use flight_service::FlightServiceImpl;\n\npub use hcomb_service::HCombService;\n", "file_path": "code/src/services/hcomb/mod.rs", "rank": 75, "score": 7.693055868148796 }, { "content": "pub mod s3_parquet;\n\n\n\nuse std::any::Any;\n\nuse std::sync::Arc;\n\n\n\nuse crate::clients::RangeCache;\n\nuse arrow::datatypes::*;\n\nuse datafusion::datasource::datasource::Statistics;\n\nuse datafusion::datasource::TableProvider;\n\nuse datafusion::error::Result;\n\nuse datafusion::logical_plan::Expr;\n\nuse datafusion::physical_plan::ExecutionPlan;\n\nuse s3_parquet::S3ParquetTable;\n\n\n\n/// Implemented as an enum because serialization must be mapped for new implems\n\n#[derive(Debug)]\n\npub enum HBeeTableDesc {\n\n S3Parquet(S3ParquetTable),\n\n}\n\n\n", "file_path": "code/src/datasource/hbee/mod.rs", "rank": 76, "score": 7.563445570012392 }, { "content": "use std::convert::Into;\n\nuse std::str::FromStr;\n\nuse std::sync::Arc;\n\nuse std::time::Duration;\n\n\n\nuse crate::error::{BuzzError, Result};\n\nuse crate::models::env;\n\nuse rusoto_core::Region;\n\nuse rusoto_lambda::{InvocationRequest, Lambda, LambdaClient};\n\nuse tokio::time::timeout;\n\n\n\npub struct LambdaInvokeClient {\n\n client: Arc<LambdaClient>,\n\n lambda_name: String,\n\n}\n\n\n\nimpl LambdaInvokeClient {\n\n pub fn try_new() -> Result<Self> {\n\n let config = env::get_lambda_config()?;\n\n Ok(Self {\n", "file_path": "code/src/clients/lambda.rs", "rank": 77, "score": 7.502236552867642 }, { "content": "/// Convert a stream of RecordBatches and a cmd to a stream of flights\n\npub async fn batch_stream_to_flight(\n\n cmd: &str,\n\n batches: SendableRecordBatchStream,\n\n) -> Result<impl Stream<Item = Result<FlightData, Status>> + Send + Sync, Box<dyn Error>>\n\n{\n\n let 
(sender, result) =\n\n tokio::sync::mpsc::unbounded_channel::<Result<FlightData, Status>>();\n\n\n\n let options = Arc::new(IpcWriteOptions::default());\n\n let mut flight_schema = flight_data_from_arrow_schema(&batches.schema(), &options);\n\n flight_schema.flight_descriptor = cmd_to_descriptor(&cmd);\n\n sender.send(Ok(flight_schema))?;\n\n\n\n // use channels to make stream sync (required by tonic)\n\n // TODO what happens with errors (currently all unwrapped in spawned task)\n\n tokio::spawn(async move {\n\n // then stream the rest\n\n batches\n\n .for_each(|batch_res| async {\n", "file_path": "code/src/flight_utils.rs", "rank": 78, "score": 7.390078197989758 }, { "content": "impl HBeeTableDesc {\n\n pub fn schema(&self) -> SchemaRef {\n\n match self {\n\n HBeeTableDesc::S3Parquet(table) => table.schema(),\n\n }\n\n }\n\n}\n\n\n\n/// A table that can be distributed to hbees\n\npub struct HBeeTable {\n\n desc: Arc<HBeeTableDesc>,\n\n cache: Arc<RangeCache>,\n\n}\n\n\n\nimpl HBeeTable {\n\n pub fn new(desc: Arc<HBeeTableDesc>, cache: Arc<RangeCache>) -> Self {\n\n Self { desc, cache }\n\n }\n\n\n\n pub fn description(&self) -> Arc<HBeeTableDesc> {\n", "file_path": "code/src/datasource/hbee/mod.rs", "rank": 79, "score": 7.3889398961929045 }, { "content": "use fmt::Debug;\n\nuse std::any::Any;\n\nuse std::fmt;\n\nuse std::sync::Arc;\n\nuse std::task::{Context, Poll};\n\nuse tokio::{\n\n sync::mpsc::{channel, Receiver, Sender},\n\n task,\n\n};\n\nuse tokio_stream::wrappers::ReceiverStream;\n\n\n\nuse crate::clients::CachedFile;\n\nuse arrow::datatypes::{Schema, SchemaRef};\n\nuse arrow::error::{ArrowError, Result as ArrowResult};\n\nuse arrow::record_batch::RecordBatch;\n\nuse arrow_parquet::arrow::{ArrowReader, ParquetFileArrowReader};\n\nuse arrow_parquet::file::reader::{FileReader, Length, SerializedFileReader};\n\nuse async_trait::async_trait;\n\nuse datafusion::error::{DataFusionError, Result as DataFusionResult};\n\nuse datafusion::physical_plan::ExecutionPlan;\n", "file_path": "code/src/execution_plan/parquet.rs", "rank": 80, "score": 7.303145010706386 }, { "content": "use std::error;\n\nuse std::fmt::{Display, Formatter};\n\nuse std::io;\n\nuse std::result;\n\n\n\nuse arrow::error::ArrowError;\n\nuse arrow_parquet::errors::ParquetError;\n\nuse datafusion::error::DataFusionError;\n\nuse deltalake::DeltaTableError;\n\n\n\n/// Result type for operations that could result in an [BuzzError]\n\npub type Result<T> = result::Result<T, BuzzError>;\n\n\n\n/// Buzz error\n\n#[derive(Debug)]\n\npub enum BuzzError {\n\n /// Error returned by arrow.\n\n ArrowError(ArrowError),\n\n /// Error returned by DeltaLake.\n\n DeltaTableError(DeltaTableError),\n", "file_path": "code/src/error.rs", "rank": 81, "score": 7.29805696709737 }, { "content": "//! 
Utils to convert flight objects to and from record batches\n\n\n\nuse std::convert::TryFrom;\n\nuse std::error::Error;\n\nuse std::sync::Arc;\n\n\n\nuse crate::internal_err;\n\nuse arrow::datatypes::Schema;\n\nuse arrow::error::{ArrowError, Result as ArrowResult};\n\nuse arrow::ipc::writer::IpcWriteOptions;\n\nuse arrow::record_batch::RecordBatch;\n\nuse arrow_flight::utils::{\n\n flight_data_from_arrow_batch, flight_data_from_arrow_schema,\n\n flight_data_to_arrow_batch,\n\n};\n\nuse arrow_flight::{flight_descriptor, FlightData, FlightDescriptor};\n\nuse datafusion::physical_plan::SendableRecordBatchStream;\n\nuse futures::{Stream, StreamExt};\n\nuse tokio_stream::wrappers::UnboundedReceiverStream;\n\nuse tonic::Status;\n", "file_path": "code/src/flight_utils.rs", "rank": 82, "score": 7.29785620930427 }, { "content": "use crate::datasource::{HBeeTableDesc, HCombTableDesc};\n\nuse crate::protobuf;\n\nuse arrow::datatypes::Schema;\n\nuse arrow::ipc::{writer, writer::EncodedData, writer::IpcWriteOptions};\n\n\n", "file_path": "code/src/serde/to_proto.rs", "rank": 83, "score": 7.277294533433888 }, { "content": "\n\n let mut batches: Vec<FlightData> = batches\n\n .iter()\n\n .flat_map(|batch| {\n\n let (dicts, data) = flight_data_from_arrow_batch(batch, &options);\n\n dicts.into_iter().chain(std::iter::once(data))\n\n })\n\n .collect();\n\n\n\n // append batch vector to schema vector, so that the first message sent is the schema\n\n flight_vec.append(&mut batches);\n\n\n\n Ok(futures::stream::iter(flight_vec))\n\n}\n\n\n", "file_path": "code/src/flight_utils.rs", "rank": 84, "score": 7.178434697082957 }, { "content": "//! Models are entities that are common to services\n\n\n\npub mod actions;\n\npub mod env;\n\nmod hbee_event;\n\npub mod query;\n\n\n\npub use hbee_event::{HBeeEvent, HBeePlanBytes};\n\n\n\npub type HCombAddress = String;\n\n\n\n#[derive(Clone, Debug)]\n\npub struct SizedFile {\n\n pub key: String,\n\n pub length: u64,\n\n}\n", "file_path": "code/src/models/mod.rs", "rank": 85, "score": 7.093407912011705 }, { "content": " stream: Pin<Box<dyn Stream<Item = ArrowResult<RecordBatch>> + Send + Sync>>,\n\n schema: SchemaRef,\n\n projection: Option<Vec<usize>>,\n\n batch_size: usize,\n\n ) -> Self {\n\n let projection = match projection {\n\n Some(p) => p,\n\n None => (0..schema.fields().len()).collect(),\n\n };\n\n\n\n let projected_schema = Schema::new(\n\n projection\n\n .iter()\n\n .map(|i| schema.field(*i).clone())\n\n .collect(),\n\n );\n\n\n\n Self {\n\n stream: Mutex::new(Some(stream)),\n\n schema: Arc::new(projected_schema),\n", "file_path": "code/src/execution_plan/stream.rs", "rank": 86, "score": 6.886048713864216 }, { "content": "use serde::Deserialize;\n\n\n\n#[derive(PartialEq, Deserialize)]\n\npub enum BuzzStepType {\n\n HBee,\n\n HComb,\n\n}\n\n\n\n#[derive(Deserialize)]\n\npub struct BuzzStep {\n\n pub sql: String,\n\n pub name: String,\n\n pub partition_filter: Option<String>,\n\n pub step_type: BuzzStepType,\n\n}\n\n\n\n#[derive(Deserialize)]\n\npub struct HCombCapacity {\n\n /// For now only 1 zone is supported (I know, I know... YAGNI! 
:)\n\n pub zones: i16,\n", "file_path": "code/src/models/query.rs", "rank": 87, "score": 6.8722993633235046 }, { "content": " Arc::clone(&self.desc)\n\n }\n\n}\n\n\n\nimpl TableProvider for HBeeTable {\n\n fn as_any(&self) -> &dyn Any {\n\n self\n\n }\n\n\n\n fn schema(&self) -> SchemaRef {\n\n self.desc.schema()\n\n }\n\n\n\n fn scan(\n\n &self,\n\n projection: &Option<Vec<usize>>,\n\n batch_size: usize,\n\n filters: &[Expr],\n\n limit: Option<usize>,\n\n ) -> Result<Arc<dyn ExecutionPlan>> {\n", "file_path": "code/src/datasource/hbee/mod.rs", "rank": 88, "score": 6.8019950571751195 }, { "content": " use super::*;\n\n use crate::datasource::CatalogTable;\n\n use arrow::datatypes::Schema;\n\n use datafusion::datasource::empty::EmptyTable;\n\n use datafusion::execution::context::ExecutionContext;\n\n use datafusion::logical_plan::{sum, Expr};\n\n use datafusion::scalar::ScalarValue;\n\n\n\n #[test]\n\n fn search_table_df_plan() -> Result<()> {\n\n let mut ctx = ExecutionContext::new();\n\n let empty_table = Arc::new(EmptyTable::new(Arc::new(Schema::empty())));\n\n let scalar_expr = Expr::Literal(ScalarValue::from(10));\n\n\n\n let source_df = ctx.read_table(empty_table.clone())?;\n\n let log_plan = &source_df.to_logical_plan();\n\n find_table_name::<EmptyTable>(log_plan)?;\n\n\n\n let filtered_df =\n\n source_df.filter(scalar_expr.clone().eq(scalar_expr.clone()))?;\n", "file_path": "code/src/services/utils.rs", "rank": 89, "score": 6.801381633250285 }, { "content": " pub table: HBeeTableDesc,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct HCombPlan {\n\n pub sql: String,\n\n pub source: String,\n\n pub table: HCombTableDesc,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct ZonePlan {\n\n pub hbee: Vec<HBeePlan>,\n\n pub hcomb: HCombPlan,\n\n}\n\n\n\n/// The plans to be distributed among hbees and hcombs\n\n/// To transfer them over the wire, these logical plans should be serializable\n\n#[derive(Debug)]\n\npub struct DistributedPlan {\n", "file_path": "code/src/services/fuse/query_planner.rs", "rank": 90, "score": 6.797914811595747 }, { "content": "use std::io::Cursor;\n\n\n\nuse crate::datasource::HBeeTableDesc;\n\nuse crate::error::Result;\n\nuse crate::internal_err;\n\nuse crate::models::HCombAddress;\n\nuse crate::protobuf;\n\nuse crate::serde as proto_serde;\n\nuse base64;\n\nuse prost::Message;\n\nuse serde::{Deserialize, Serialize};\n\n\n\n#[derive(Serialize, Deserialize)]\n\n/// Binary Base64 encoded representation of a logical plan\n\n/// TODO serialize this as json instead of base64 proto\n\npub struct HBeePlanBytes {\n\n #[serde(rename = \"b\")]\n\n bytes: String,\n\n}\n\n\n", "file_path": "code/src/models/hbee_event.rs", "rank": 91, "score": 6.710694366753317 }, { "content": " let mut arrays = vec![\n\n Arc::new(StringArray::from(refvec(&keys))) as ArrayRef,\n\n Arc::new(UInt64Array::from(lengths)) as ArrayRef,\n\n ];\n\n for i in 1..=parts.len() {\n\n arrays.push(Arc::new(StringArray::from(refvec(&parts[i - 1]))) as ArrayRef);\n\n }\n\n\n\n let batches = RecordBatch::try_new(Arc::clone(&file_table_schema), arrays)\n\n .expect(\"Invalid test data\");\n\n\n\n Arc::new(\n\n MemTable::try_new(file_table_schema, vec![vec![batches]])\n\n .expect(\"invalid test table\"),\n\n )\n\n }\n\n}\n\n\n", "file_path": "code/src/datasource/catalog/test_catalog.rs", "rank": 92, "score": 6.707856591047081 }, { "content": " let end_length = 1024 * 1024;\n\n let (end_start, end_length) = match file.len().checked_sub(end_length) {\n\n Some(val) => (val, end_length),\n\n None => (0, file.len()),\n\n };\n\n 
file.prefetch(end_start, end_length as usize);\n\n end_start\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl ExecutionPlan for ParquetExec {\n\n /// Return a reference to Any that can be used for downcasting\n\n fn as_any(&self) -> &dyn Any {\n\n self\n\n }\n\n\n\n fn schema(&self) -> SchemaRef {\n\n self.projected_schema.clone()\n\n }\n", "file_path": "code/src/execution_plan/parquet.rs", "rank": 93, "score": 6.593292797862189 }, { "content": " /// One hcomb/hbee combination of plan for each zone.\n\n pub zones: Vec<ZonePlan>,\n\n pub nb_hbee: usize,\n\n}\n\n\n\nimpl QueryPlanner {\n\n pub fn new() -> Self {\n\n Self {\n\n execution_context: ExecutionContext::new(),\n\n }\n\n }\n\n\n\n pub fn add_catalog(&mut self, name: &str, table: CatalogTable) -> Result<()> {\n\n self.execution_context\n\n .register_table(name, Arc::new(table))?;\n\n Ok(())\n\n }\n\n\n\n pub async fn plan(\n\n &mut self,\n", "file_path": "code/src/services/fuse/query_planner.rs", "rank": 94, "score": 6.555344908512266 }, { "content": "use std::io::Cursor;\n\nuse std::pin::Pin;\n\nuse std::sync::Arc;\n\n\n\nuse super::hcomb_service::HCombService;\n\nuse crate::error::BuzzError;\n\nuse crate::flight_utils;\n\nuse crate::models::actions;\n\nuse crate::protobuf;\n\nuse crate::serde;\n\nuse arrow_flight::flight_service_server::FlightServiceServer;\n\nuse arrow_flight::{\n\n flight_service_server::FlightService, Action, ActionType, Criteria, Empty,\n\n FlightData, FlightDescriptor, FlightInfo, HandshakeRequest, HandshakeResponse,\n\n PutResult, SchemaResult, Ticket,\n\n};\n\nuse futures::Stream;\n\nuse prost::Message;\n\nuse tonic::transport::Server;\n\nuse tonic::{Request, Response, Status, Streaming};\n", "file_path": "code/src/services/hcomb/flight_service.rs", "rank": 95, "score": 6.504769234162138 }, { "content": "use std::iter::IntoIterator;\n\nuse std::str::FromStr;\n\nuse std::sync::Arc;\n\nuse std::time::{Duration, Instant};\n\n\n\nuse crate::error::{BuzzError, Result};\n\nuse crate::models::env::{self, FargateConfig};\n\nuse rusoto_core::Region;\n\nuse rusoto_ecs::{\n\n AwsVpcConfiguration, DescribeTasksRequest, Ecs, EcsClient, ListTasksRequest,\n\n ListTasksResponse, NetworkConfiguration, RunTaskRequest,\n\n};\n\nuse tokio::time::timeout;\n\n\n\npub struct FargateCreationClient {\n\n client: Arc<EcsClient>,\n\n config: Arc<FargateConfig>,\n\n}\n\n\n\nimpl FargateCreationClient {\n", "file_path": "code/src/clients/fargate.rs", "rank": 96, "score": 6.442482512196262 }, { "content": "use std::sync::Arc;\n\nuse std::time::Instant;\n\n\n\nuse super::Collector;\n\nuse crate::clients::RangeCache;\n\nuse crate::datasource::{HBeeTableDesc, HBeeTable};\n\nuse crate::error::Result;\n\nuse crate::internal_err;\n\nuse crate::models::HCombAddress;\n\nuse arrow::record_batch::RecordBatch;\n\nuse datafusion::execution::context::{ExecutionConfig, ExecutionContext};\n\nuse datafusion::physical_plan::{merge::MergeExec, ExecutionPlan};\n\n\n\npub struct HBeeService {\n\n execution_config: ExecutionConfig,\n\n range_cache: Arc<RangeCache>,\n\n collector: Box<dyn Collector>,\n\n}\n\n\n\nimpl HBeeService {\n", "file_path": "code/src/services/hbee/hbee_service.rs", "rank": 97, "score": 6.418673257791744 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\npub enum ActionType {\n\n Fail,\n\n HealthCheck,\n\n Unknown,\n\n}\n\n\n\nimpl ActionType {\n\n pub fn from_string(serialized: String) -> Self {\n\n match serialized.as_str() {\n\n \"F\" => ActionType::Fail,\n\n \"H\" => ActionType::HealthCheck,\n\n _ => ActionType::Unknown,\n\n }\n\n 
}\n\n\n\n pub fn to_string(&self) -> String {\n\n match self {\n\n ActionType::Fail => \"F\".to_owned(),\n", "file_path": "code/src/models/actions.rs", "rank": 98, "score": 6.414024857543628 }, { "content": "\n\n let expected_batches = vec![RecordBatch::try_new(\n\n catalog_schema(&[]),\n\n vec![\n\n Arc::new(StringArray::from(vec![\n\n \"./examples/delta-tbl-overwrite/part-00000-f4a247c9-a3bb-4b1e-adc7-7269808b8d73-c000.snappy.parquet\",\n\n ])),\n\n Arc::new(UInt64Array::from(vec![1006])),\n\n ],\n\n )\n\n .expect(\"Build target RecordBatch\")];\n\n\n\n assert_eq!(\n\n format!(\"{:?}\", catalog_rgs),\n\n format!(\"{:?}\", expected_batches)\n\n );\n\n }\n\n\n\n #[tokio::test]\n\n async fn test_partitioned_delta_catalog() {\n", "file_path": "code/src/datasource/catalog/delta_catalog.rs", "rank": 99, "score": 6.312594783122082 } ]
Rust
src/main.rs
jonathanmorley/rpg-cli
93a816e875287a884b5045c430dffbb4e2a4ce27
use game::Game; mod character; mod game; mod item; mod location; mod log; mod randomizer; use crate::location::Location; use clap::Clap; #[derive(Clap)] struct Opts { destination: Option<String>, #[clap(long)] pwd: bool, #[clap(long)] reset: bool, #[clap(long)] run: bool, #[clap(long)] bribe: bool, #[clap(short, long)] shop: bool, #[clap(short, long)] inventory: bool, } fn main() { let opts: Opts = Opts::parse(); let mut game = Game::load().unwrap_or_else(|_| Game::new()); if opts.pwd { println!("{}", game.location.path_string()); } else if opts.reset { game.reset() } else if opts.shop { shop(&mut game, &opts.destination); } else if opts.inventory { inventory(&mut game, &opts.destination); } else if let Some(dest) = opts.destination { go_to(&mut game, &dest, opts.run, opts.bribe); } else { log::status(&game); } game.save().unwrap() } fn go_to(game: &mut Game, dest: &str, run: bool, bribe: bool) { if let Ok(dest) = Location::from(&dest) { if let Err(game::Error::GameOver) = game.go_to(&dest, run, bribe) { game.reset(); } } else { println!("No such file or directory"); std::process::exit(1); } } fn shop(game: &mut Game, item_name: &Option<String>) { if game.location.is_home() { if let Some(item_name) = item_name { let item_name = sanitize(item_name); match item::shop::buy(game, &item_name) { Err(item::shop::Error::NotEnoughGold) => { println!("Not enough gold.") } Err(item::shop::Error::ItemNotAvailable) => { println!("Item not available.") } Ok(()) => {} } } else { item::shop::list(game); } } else { println!("Shop is only allowed at home.") } } fn inventory(game: &mut Game, item_name: &Option<String>) { if let Some(item_name) = item_name { let item_name = sanitize(item_name); if let Err(game::Error::ItemNotFound) = game.use_item(&item_name) { println!("Item not found."); } } else { println!("{}", log::format_inventory(&game)); } } fn sanitize(name: &str) -> String { let name = name.to_lowercase(); let name = match name.as_str() { "p" | "potion" => "potion", "e" | "escape" => "escape", "sw" | "sword" => "sword", "sh" | "shield" => "shield", n => n, }; name.to_string() }
use game::Game; mod character; mod game; mod item; mod location; mod log; mod randomizer; use crate::location::Location; use clap::Clap; #[derive(Clap)] struct Opts { destination: Option<String>, #[clap(long)] pwd: bool, #[clap(long)] reset: bool, #[clap(long)] run: bool, #[clap(long)] bribe: bool, #[clap(short, long)] shop: bool, #[clap(short, long)] inventory: bool, } fn main() { let opts: Opts = Opts::parse(); let mut game = Game::load().unwrap_or_else(|_| Game::new()); if opts.pwd { println!("{}", game.location.path_string()); } else if opts.reset { game.reset() } else if opts.shop { shop(&mut game, &opts.destination); } else if opts.inventory { inventory(&mut game, &opts.destination); } else if let Some(dest) = opts.destination { go_to(&mut game, &dest, opts.run, opts.bribe); } else { log::status(&game); } game.save().unwrap() } fn go_to(game: &mut Game, dest: &str, run: bool, bribe: bool) { if let Ok(dest) = Location::from(&dest) { if let Err(game::Error::GameOve
n => n, }; name.to_string() }
r) = game.go_to(&dest, run, bribe) { game.reset(); } } else { println!("No such file or directory"); std::process::exit(1); } } fn shop(game: &mut Game, item_name: &Option<String>) { if game.location.is_home() { if let Some(item_name) = item_name { let item_name = sanitize(item_name); match item::shop::buy(game, &item_name) { Err(item::shop::Error::NotEnoughGold) => { println!("Not enough gold.") } Err(item::shop::Error::ItemNotAvailable) => { println!("Item not available.") } Ok(()) => {} } } else { item::shop::list(game); } } else { println!("Shop is only allowed at home.") } } fn inventory(game: &mut Game, item_name: &Option<String>) { if let Some(item_name) = item_name { let item_name = sanitize(item_name); if let Err(game::Error::ItemNotFound) = game.use_item(&item_name) { println!("Item not found."); } } else { println!("{}", log::format_inventory(&game)); } } fn sanitize(name: &str) -> String { let name = name.to_lowercase(); let name = match name.as_str() { "p" | "potion" => "potion", "e" | "escape" => "escape", "sw" | "sword" => "sword", "sh" | "shield" => "shield",
random
[]
Rust
crates/apollo-smith/src/input_value.rs
isabella232/apollo-rs
963b3552deaf7ba3a7eb5a698a90f20d0fc08242
use crate::{description::Description, directive::Directive, name::Name, ty::Ty, DocumentBuilder}; use arbitrary::Result; #[derive(Debug, Clone, PartialEq)] pub enum InputValue { Variable(Name), Int(i64), Float(f64), String(String), Boolean(bool), Null, Enum(Name), List(Vec<InputValue>), Object(Vec<(Name, InputValue)>), } impl From<InputValue> for apollo_encoder::Value { fn from(input_value: InputValue) -> Self { match input_value { InputValue::Variable(v) => Self::Variable(v.into()), InputValue::Int(i) => Self::Int(i), InputValue::Float(f) => Self::Float(f), InputValue::String(s) => Self::String(s), InputValue::Boolean(b) => Self::Boolean(b), InputValue::Null => Self::Null, InputValue::Enum(enm) => Self::Enum(enm.into()), InputValue::List(l) => Self::List(l.into_iter().map(Into::into).collect()), InputValue::Object(o) => { Self::Object(o.into_iter().map(|(n, i)| (n.into(), i.into())).collect()) } } } } impl From<InputValue> for String { fn from(input_val: InputValue) -> Self { match input_val { InputValue::Variable(v) => format!("${}", String::from(v)), InputValue::Int(i) => format!("{i}"), InputValue::Float(f) => format!("{f}"), InputValue::String(s) => s, InputValue::Boolean(b) => format!("{b}"), InputValue::Null => String::from("null"), InputValue::Enum(val) => val.into(), InputValue::List(list) => format!( "[{}]", list.into_iter() .map(String::from) .collect::<Vec<String>>() .join(", ") ), InputValue::Object(obj) => format!( "{{ {} }}", obj.into_iter() .map(|(k, v)| format!("{}: {}", String::from(k), String::from(v))) .collect::<Vec<String>>() .join(", ") ), } } } #[derive(Debug, Clone, PartialEq)] pub struct InputValueDef { pub(crate) description: Option<Description>, pub(crate) name: Name, pub(crate) ty: Ty, pub(crate) default_value: Option<InputValue>, pub(crate) directives: Vec<Directive>, } impl From<InputValueDef> for apollo_encoder::InputValueDefinition { fn from(input_val: InputValueDef) -> Self { let mut new_input_val = Self::new(input_val.name.into(), input_val.ty.into()); new_input_val.description(input_val.description.map(String::from)); new_input_val.default(input_val.default_value.map(String::from)); input_val .directives .into_iter() .for_each(|directive| new_input_val.directive(directive.into())); new_input_val } } impl From<InputValueDef> for apollo_encoder::InputField { fn from(input_val: InputValueDef) -> Self { let mut new_input_val = Self::new(input_val.name.into(), input_val.ty.into()); new_input_val.description(input_val.description.map(String::from)); new_input_val.default(input_val.default_value.map(String::from)); input_val .directives .into_iter() .for_each(|directive| new_input_val.directive(directive.into())); new_input_val } } impl<'a> DocumentBuilder<'a> { pub fn input_value(&mut self) -> Result<InputValue> { let val = match self.u.int_in_range(0..=8usize)? { 0 => InputValue::Int(self.u.arbitrary()?), 1 => InputValue::Float(self.u.arbitrary()?), 2 => InputValue::String(self.limited_string(40)?), 3 => InputValue::Boolean(self.u.arbitrary()?), 4 => InputValue::Null, 5 => { if !self.enum_type_defs.is_empty() { let enum_choosed = self.choose_enum()?.clone(); InputValue::Enum(self.arbitrary_variant(&enum_choosed)?.clone()) } else { self.input_value()? } } 6 => { InputValue::List( (0..self.u.int_in_range(2..=4usize)?) .map(|_| self.input_value()) .collect::<Result<Vec<_>>>()?, ) } 7 => InputValue::Object( (0..self.u.int_in_range(2..=4usize)?) 
.map(|_| Ok((self.name()?, self.input_value()?))) .collect::<Result<Vec<_>>>()?, ), 8 => InputValue::Variable(self.name()?), _ => unreachable!(), }; Ok(val) } pub fn input_values_def(&mut self) -> Result<Vec<InputValueDef>> { let arbitrary_iv_num = self.u.int_in_range(2..=5usize)?; let mut input_values = Vec::with_capacity(arbitrary_iv_num - 1); for i in 0..arbitrary_iv_num { let description = self .u .arbitrary() .unwrap_or(false) .then(|| self.description()) .transpose()?; let name = self.name_with_index(i)?; let ty = self.choose_ty(&self.list_existing_types())?; let directives = self.directives()?; let default_value = self .u .arbitrary() .unwrap_or(false) .then(|| self.input_value()) .transpose()?; input_values.push(InputValueDef { description, name, ty, default_value, directives, }); } Ok(input_values) } pub fn input_value_def(&mut self) -> Result<InputValueDef> { let description = self .u .arbitrary() .unwrap_or(false) .then(|| self.description()) .transpose()?; let name = self.name()?; let ty = self.choose_ty(&self.list_existing_types())?; let directives = self.directives()?; let default_value = self .u .arbitrary() .unwrap_or(false) .then(|| self.input_value()) .transpose()?; Ok(InputValueDef { description, name, ty, default_value, directives, }) } }
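Editorial note, not part of the record: the `input_value` builder in the file above drives every choice through `arbitrary::Unstructured`, picking a variant index with `int_in_range` and recursing for list and object values. The standalone sketch below mirrors that dispatch pattern under stated assumptions — the `Val` enum, the `val` function, and the input byte array are all invented for illustration, and the only dependency assumed is the `arbitrary` crate.

```rust
// Sketch only: mimics the 0..=8 dispatch in DocumentBuilder::input_value
// on a reduced, made-up value type.
use arbitrary::{Result, Unstructured};

#[derive(Debug)]
enum Val {
    Int(i64),
    Boolean(bool),
    Null,
    List(Vec<Val>),
}

fn val(u: &mut Unstructured<'_>) -> Result<Val> {
    // Pick a variant index from the fuzzer-provided bytes, then recurse
    // for lists, just like the match in `input_value`.
    Ok(match u.int_in_range(0..=3usize)? {
        0 => Val::Int(u.arbitrary()?),
        1 => Val::Boolean(u.arbitrary()?),
        2 => Val::Null,
        3 => {
            // Lists get an arbitrary length in 2..=4, same idea as the record.
            let len = u.int_in_range(2..=4usize)?;
            let mut items = Vec::with_capacity(len);
            for _ in 0..len {
                items.push(val(u)?);
            }
            Val::List(items)
        }
        _ => unreachable!(),
    })
}

fn main() -> Result<()> {
    // Arbitrary example bytes; a fuzzer would supply these.
    let bytes = [7u8, 42, 1, 0, 3, 9, 250, 11, 2, 8];
    let mut u = Unstructured::new(&bytes);
    println!("{:?}", val(&mut u)?);
    Ok(())
}
```

When the byte stream runs out, `Unstructured` falls back to the low end of each requested range, so the recursion in the sketch (and in the real builder) bottoms out instead of looping forever.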
use crate::{description::Description, directive::Directive, name::Name, ty::Ty, DocumentBuilder}; use arbitrary::Result; #[derive(Debug, Clone, PartialEq)] pub enum InputValue { Variable(Name), Int(i64), Float(f64), String(String), Boolean(bool), Null, Enum(Name), List(Vec<InputValue>), Object(Vec<(Name, InputValue)>), } impl From<InputValue> for apollo_encoder::Value {
} impl From<InputValue> for String { fn from(input_val: InputValue) -> Self { match input_val { InputValue::Variable(v) => format!("${}", String::from(v)), InputValue::Int(i) => format!("{i}"), InputValue::Float(f) => format!("{f}"), InputValue::String(s) => s, InputValue::Boolean(b) => format!("{b}"), InputValue::Null => String::from("null"), InputValue::Enum(val) => val.into(), InputValue::List(list) => format!( "[{}]", list.into_iter() .map(String::from) .collect::<Vec<String>>() .join(", ") ), InputValue::Object(obj) => format!( "{{ {} }}", obj.into_iter() .map(|(k, v)| format!("{}: {}", String::from(k), String::from(v))) .collect::<Vec<String>>() .join(", ") ), } } } #[derive(Debug, Clone, PartialEq)] pub struct InputValueDef { pub(crate) description: Option<Description>, pub(crate) name: Name, pub(crate) ty: Ty, pub(crate) default_value: Option<InputValue>, pub(crate) directives: Vec<Directive>, } impl From<InputValueDef> for apollo_encoder::InputValueDefinition { fn from(input_val: InputValueDef) -> Self { let mut new_input_val = Self::new(input_val.name.into(), input_val.ty.into()); new_input_val.description(input_val.description.map(String::from)); new_input_val.default(input_val.default_value.map(String::from)); input_val .directives .into_iter() .for_each(|directive| new_input_val.directive(directive.into())); new_input_val } } impl From<InputValueDef> for apollo_encoder::InputField { fn from(input_val: InputValueDef) -> Self { let mut new_input_val = Self::new(input_val.name.into(), input_val.ty.into()); new_input_val.description(input_val.description.map(String::from)); new_input_val.default(input_val.default_value.map(String::from)); input_val .directives .into_iter() .for_each(|directive| new_input_val.directive(directive.into())); new_input_val } } impl<'a> DocumentBuilder<'a> { pub fn input_value(&mut self) -> Result<InputValue> { let val = match self.u.int_in_range(0..=8usize)? { 0 => InputValue::Int(self.u.arbitrary()?), 1 => InputValue::Float(self.u.arbitrary()?), 2 => InputValue::String(self.limited_string(40)?), 3 => InputValue::Boolean(self.u.arbitrary()?), 4 => InputValue::Null, 5 => { if !self.enum_type_defs.is_empty() { let enum_choosed = self.choose_enum()?.clone(); InputValue::Enum(self.arbitrary_variant(&enum_choosed)?.clone()) } else { self.input_value()? } } 6 => { InputValue::List( (0..self.u.int_in_range(2..=4usize)?) .map(|_| self.input_value()) .collect::<Result<Vec<_>>>()?, ) } 7 => InputValue::Object( (0..self.u.int_in_range(2..=4usize)?) 
.map(|_| Ok((self.name()?, self.input_value()?))) .collect::<Result<Vec<_>>>()?, ), 8 => InputValue::Variable(self.name()?), _ => unreachable!(), }; Ok(val) } pub fn input_values_def(&mut self) -> Result<Vec<InputValueDef>> { let arbitrary_iv_num = self.u.int_in_range(2..=5usize)?; let mut input_values = Vec::with_capacity(arbitrary_iv_num - 1); for i in 0..arbitrary_iv_num { let description = self .u .arbitrary() .unwrap_or(false) .then(|| self.description()) .transpose()?; let name = self.name_with_index(i)?; let ty = self.choose_ty(&self.list_existing_types())?; let directives = self.directives()?; let default_value = self .u .arbitrary() .unwrap_or(false) .then(|| self.input_value()) .transpose()?; input_values.push(InputValueDef { description, name, ty, default_value, directives, }); } Ok(input_values) } pub fn input_value_def(&mut self) -> Result<InputValueDef> { let description = self .u .arbitrary() .unwrap_or(false) .then(|| self.description()) .transpose()?; let name = self.name()?; let ty = self.choose_ty(&self.list_existing_types())?; let directives = self.directives()?; let default_value = self .u .arbitrary() .unwrap_or(false) .then(|| self.input_value()) .transpose()?; Ok(InputValueDef { description, name, ty, default_value, directives, }) } }
fn from(input_value: InputValue) -> Self { match input_value { InputValue::Variable(v) => Self::Variable(v.into()), InputValue::Int(i) => Self::Int(i), InputValue::Float(f) => Self::Float(f), InputValue::String(s) => Self::String(s), InputValue::Boolean(b) => Self::Boolean(b), InputValue::Null => Self::Null, InputValue::Enum(enm) => Self::Enum(enm.into()), InputValue::List(l) => Self::List(l.into_iter().map(Into::into).collect()), InputValue::Object(o) => { Self::Object(o.into_iter().map(|(n, i)| (n.into(), i.into())).collect()) } } }
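Editorial note, not part of the record: the `middle` field above is a variant-by-variant mapping, and the same file's `From<InputValue> for String` impl applies the matching recursive-render idea. The sketch below restates that rendering pattern on a reduced, invented `Lit` enum with a hypothetical `render` helper; it uses only the standard library and is meant as an illustration, not the crate's actual API.

```rust
// Sketch only: recursive GraphQL-literal rendering, reduced from the
// List/Object arms of `From<InputValue> for String`.
#[derive(Debug, Clone)]
enum Lit {
    Int(i64),
    Boolean(bool),
    Null,
    List(Vec<Lit>),
    Object(Vec<(String, Lit)>),
}

fn render(lit: &Lit) -> String {
    match lit {
        Lit::Int(i) => i.to_string(),
        Lit::Boolean(b) => b.to_string(),
        Lit::Null => "null".to_string(),
        // Lists and objects recurse into their elements and join with ", ",
        // mirroring the record's List/Object arms.
        Lit::List(items) => format!(
            "[{}]",
            items.iter().map(render).collect::<Vec<_>>().join(", ")
        ),
        Lit::Object(fields) => format!(
            "{{ {} }}",
            fields
                .iter()
                .map(|(k, v)| format!("{}: {}", k, render(v)))
                .collect::<Vec<_>>()
                .join(", ")
        ),
    }
}

fn main() {
    let value = Lit::Object(vec![
        ("count".to_string(), Lit::Int(3)),
        ("tags".to_string(), Lit::List(vec![Lit::Boolean(true), Lit::Null])),
    ]);
    assert_eq!(render(&value), "{ count: 3, tags: [true, null] }");
    println!("{}", render(&value));
}
```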
function_block-full_function
[ { "content": "enum NapSpots @testDirective(first: \"one\") {\n\n \"Top bunk of a cat tree.\"\n\n CAT_TREE\n\n BED\n\n CARDBOARD_BOX @deprecated(reason: \"Box was recycled.\")\n\n}\n\n\"#\n\n );\n\n }\n\n\n\n #[test]\n\n fn it_encodes_enum_extension() {\n\n let mut enum_ty_1 = EnumValue::new(\"CAT_TREE\".to_string());\n\n enum_ty_1.description(Some(\"Top bunk of a cat tree.\".to_string()));\n\n let enum_ty_2 = EnumValue::new(\"BED\".to_string());\n\n let mut enum_ty_3 = EnumValue::new(\"CARDBOARD_BOX\".to_string());\n\n let mut deprecated_directive = Directive::new(String::from(\"deprecated\"));\n\n deprecated_directive.arg(Argument::new(\n\n String::from(\"reason\"),\n\n Value::String(String::from(\"Box was recycled.\")),\n", "file_path": "crates/apollo-encoder/src/enum_def.rs", "rank": 0, "score": 71746.42646470584 }, { "content": "#[derive(Debug)]\n\nenum TokenTy {\n\n List {\n\n nullable: Option<Token>,\n\n open_token: Token,\n\n close_token: Option<Token>,\n\n inner: Box<TokenTy>,\n\n comma: Option<Token>,\n\n trailing_ws: Option<Token>,\n\n },\n\n Named {\n\n nullable: Option<Token>,\n\n token: Token,\n\n comma: Option<Token>,\n\n trailing_ws: Option<Token>,\n\n },\n\n}\n\n\n", "file_path": "crates/apollo-parser/src/parser/grammar/ty.rs", "rank": 1, "score": 68852.10320530162 }, { "content": "enum join__Graph {\n\n ACCOUNTS @join__graph(name: \"accounts\")\n\n}\n\n \"#;\n\n let parser = crate::Parser::new(schema);\n\n let ast = parser.parse();\n\n\n\n assert!(ast.errors.is_empty());\n\n\n\n let document = ast.document();\n\n for definition in document.definitions() {\n\n if let ast::Definition::EnumTypeDefinition(enum_type) = definition {\n\n let enum_name = enum_type\n\n .name()\n\n .expect(\"Could not get Enum Type Definition's Name\");\n\n\n\n assert_eq!(\"join__Graph\", enum_name.text().as_ref());\n\n\n\n if enum_name.text().as_ref() == \"join__Graph\" {\n\n if let Some(enums) = enum_type.enum_values_definition() {\n", "file_path": "crates/apollo-parser/src/parser/grammar/document.rs", "rank": 2, "score": 68852.10320530162 }, { "content": "/// Like `AstNode`, but wraps tokens rather than interior nodes.\n\npub trait AstToken {\n\n fn can_cast(token: SyntaxKind) -> bool\n\n where\n\n Self: Sized;\n\n\n\n fn cast(syntax: SyntaxToken) -> Option<Self>\n\n where\n\n Self: Sized;\n\n\n\n fn syntax(&self) -> &SyntaxToken;\n\n\n\n fn text(&self) -> &str {\n\n self.syntax().text()\n\n }\n\n}\n\n\n\n/// An iterator over `SyntaxNode` children of a particular AST type.\n\n#[derive(Debug, Clone)]\n\npub struct AstChildren<N> {\n\n inner: SyntaxNodeChildren,\n", "file_path": "crates/apollo-parser/src/ast/mod.rs", "rank": 3, "score": 67441.17654753379 }, { "content": "/// The main trait to go from untyped `SyntaxNode` to a typed ast. 
The\n\n/// conversion itself has zero runtime cost: ast and syntax nodes have exactly\n\n/// the same representation: a pointer to the tree root and a pointer to the\n\n/// node itself.\n\npub trait AstNode {\n\n fn can_cast(kind: SyntaxKind) -> bool\n\n where\n\n Self: Sized;\n\n\n\n fn cast(syntax: SyntaxNode) -> Option<Self>\n\n where\n\n Self: Sized;\n\n\n\n fn syntax(&self) -> &SyntaxNode;\n\n\n\n fn clone_for_update(&self) -> Self\n\n where\n\n Self: Sized,\n\n {\n\n Self::cast(self.syntax().clone_for_update()).unwrap()\n\n }\n\n\n\n fn clone_subtree(&self) -> Self\n\n where\n\n Self: Sized,\n\n {\n\n Self::cast(self.syntax().clone_subtree()).unwrap()\n\n }\n\n}\n\n\n", "file_path": "crates/apollo-parser/src/ast/mod.rs", "rank": 4, "score": 67441.17654753379 }, { "content": "enum Test @dir__one(int_value: -10) {\n\n INVENTORY\n\n} \"#;\n\n let parser = Parser::new(schema);\n\n let ast = parser.parse();\n\n\n\n assert!(ast.errors.is_empty());\n\n\n\n let document = ast.document();\n\n for definition in document.definitions() {\n\n if let ast::Definition::EnumTypeDefinition(enum_) = definition {\n\n for directive in enum_.directives().unwrap().directives() {\n\n for argument in directive.arguments().unwrap().arguments() {\n\n if let ast::Value::IntValue(val) =\n\n argument.value().expect(\"Cannot get argument value.\")\n\n {\n\n let i: i64 = val.into();\n\n assert_eq!(i, -10);\n\n }\n\n }\n", "file_path": "crates/apollo-parser/src/parser/grammar/value.rs", "rank": 5, "score": 61685.63211316687 }, { "content": "enum Test @dir__one(bool_value: false) { \n\n INVENTORY\n\n} \"#;\n\n let parser = Parser::new(schema);\n\n let ast = parser.parse();\n\n\n\n assert!(ast.errors.is_empty());\n\n\n\n let document = ast.document();\n\n for definition in document.definitions() {\n\n if let ast::Definition::EnumTypeDefinition(enum_) = definition {\n\n for directive in enum_.directives().unwrap().directives() {\n\n for argument in directive.arguments().unwrap().arguments() {\n\n if let ast::Value::BooleanValue(val) =\n\n argument.value().expect(\"Cannot get argument value.\")\n\n {\n\n let b: bool = val.into();\n\n assert!(!b);\n\n }\n\n }\n", "file_path": "crates/apollo-parser/src/parser/grammar/value.rs", "rank": 6, "score": 61052.627101347905 }, { "content": "enum Test @dir__one(float_value: -1.123E4) { \n\n INVENTORY\n\n} \"#;\n\n let parser = Parser::new(schema);\n\n let ast = parser.parse();\n\n\n\n assert!(ast.errors.is_empty());\n\n\n\n let document = ast.document();\n\n for definition in document.definitions() {\n\n if let ast::Definition::EnumTypeDefinition(enum_) = definition {\n\n for directive in enum_.directives().unwrap().directives() {\n\n for argument in directive.arguments().unwrap().arguments() {\n\n if let ast::Value::FloatValue(val) =\n\n argument.value().expect(\"Cannot get argument value.\")\n\n {\n\n let f: f64 = val.into();\n\n assert_eq!(f, -1.123E4);\n\n }\n\n }\n\n }\n\n }\n\n }\n\n }\n\n\n\n #[test]\n\n fn it_returns_bool_for_boolean_values() {\n\n let schema = r#\"\n", "file_path": "crates/apollo-parser/src/parser/grammar/value.rs", "rank": 7, "score": 61052.627101347905 }, { "content": "/// This generate an arbitrary valid GraphQL document\n\npub fn generate_valid_document(input: &[u8]) -> Result<String> {\n\n drop(env_logger::try_init());\n\n\n\n let mut u = Unstructured::new(input);\n\n let gql_doc = DocumentBuilder::new(&mut u)?;\n\n let document = gql_doc.finish();\n\n\n\n Ok(document.into())\n\n}\n\n\n", "file_path": "fuzz/src/lib.rs", "rank": 8, "score": 
60344.65154238968 }, { "content": "/// Log the error and the document generated for these errors\n\n/// Save it into files\n\npub fn log_gql_doc(gql_doc: &str, errors: &str) {\n\n log::debug!(\"writing test case to test.graphql ...\");\n\n std::fs::write(\"test_case.graphql\", gql_doc).unwrap();\n\n std::fs::write(\"test_case_error.log\", errors).unwrap();\n\n}\n", "file_path": "fuzz/src/lib.rs", "rank": 9, "score": 59686.276079007075 }, { "content": "enum Test @dir__one(string: \"string value\", int_value: -10, float_value: -1.123e+4, bool: false) {\n\n INVENTORY\n\n} \"#;\n\n let parser = Parser::new(schema);\n\n let ast = parser.parse();\n\n\n\n assert!(ast.errors.is_empty());\n\n\n\n let document = ast.document();\n\n for definition in document.definitions() {\n\n if let ast::Definition::EnumTypeDefinition(enum_) = definition {\n\n for directive in enum_.directives().unwrap().directives() {\n\n for argument in directive.arguments().unwrap().arguments() {\n\n if let ast::Value::StringValue(val) =\n\n argument.value().expect(\"Cannot get argument value.\")\n\n {\n\n let s: String = val.into();\n\n assert_eq!(s, \"string value\".to_string());\n\n }\n\n }\n\n }\n\n }\n\n }\n\n }\n\n\n\n #[test]\n\n fn it_returns_i64_for_int_values() {\n\n let schema = r#\"\n", "file_path": "crates/apollo-parser/src/parser/grammar/value.rs", "rank": 10, "score": 50228.01538961344 }, { "content": "use std::{collections::HashSet, hash::Hash};\n\n\n\nuse apollo_encoder::{EnumDefinition, EnumValue};\n\nuse arbitrary::Result;\n\n\n\nuse crate::{description::Description, directive::Directive, name::Name, DocumentBuilder};\n\n\n\n/// Enums are special scalars that can only have a defined set of values.\n\n///\n\n/// *EnumTypeDefinition*:\n\n/// Description? **enum** Name Directives? EnumValuesDefinition?\n\n///\n\n/// Detailed documentation can be found in [GraphQL spec](https://spec.graphql.org/October2021/#sec-Enums).\n\n#[derive(Debug, Clone)]\n\npub struct EnumTypeDef {\n\n pub(crate) description: Option<Description>,\n\n pub(crate) name: Name,\n\n pub(crate) directives: Vec<Directive>,\n\n pub(crate) enum_values_def: HashSet<EnumValueDefinition>,\n\n pub(crate) extend: bool,\n", "file_path": "crates/apollo-smith/src/enum_.rs", "rank": 11, "score": 44557.37082101929 }, { "content": "}\n\n\n\n/// The __EnumValue type represents one of possible values of an enum.\n\n///\n\n/// *EnumValueDefinition*:\n\n/// Description? 
EnumValue Directives?\n\n///\n\n/// Detailed documentation can be found in [GraphQL spec](https://spec.graphql.org/October2021/#sec-The-__EnumValue-Type).\n\n#[derive(Debug, Clone)]\n\npub struct EnumValueDefinition {\n\n pub(crate) description: Option<Description>,\n\n pub(crate) value: Name,\n\n pub(crate) directives: Vec<Directive>,\n\n}\n\n\n\nimpl From<EnumValueDefinition> for EnumValue {\n\n fn from(enum_val: EnumValueDefinition) -> Self {\n\n let mut new_enum_val = Self::new(enum_val.value.into());\n\n new_enum_val.description(enum_val.description.map(String::from));\n\n enum_val\n", "file_path": "crates/apollo-smith/src/enum_.rs", "rank": 12, "score": 44555.61581817411 }, { "content": "}\n\n\n\nimpl<'a> DocumentBuilder<'a> {\n\n /// Create an arbitrary `EnumTypeDef`\n\n pub fn enum_type_definition(&mut self) -> Result<EnumTypeDef> {\n\n let description = self\n\n .u\n\n .arbitrary()\n\n .unwrap_or(false)\n\n .then(|| self.description())\n\n .transpose()?;\n\n let name = self.type_name()?;\n\n let enum_values_def = self.enum_values_definition()?;\n\n let directives = self.directives()?;\n\n\n\n Ok(EnumTypeDef {\n\n description,\n\n name,\n\n enum_values_def,\n\n directives,\n", "file_path": "crates/apollo-smith/src/enum_.rs", "rank": 13, "score": 44551.49615836989 }, { "content": " .directives\n\n .into_iter()\n\n .for_each(|directive| new_enum_val.directive(directive.into()));\n\n\n\n new_enum_val\n\n }\n\n}\n\n\n\nimpl PartialEq for EnumValueDefinition {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.value == other.value\n\n }\n\n}\n\n\n\nimpl Eq for EnumValueDefinition {}\n\n\n\nimpl Hash for EnumValueDefinition {\n\n fn hash<H: std::hash::Hasher>(&self, state: &mut H) {\n\n self.value.hash(state);\n\n }\n", "file_path": "crates/apollo-smith/src/enum_.rs", "rank": 14, "score": 44549.675738338075 }, { "content": "}\n\n\n\nimpl From<EnumTypeDef> for EnumDefinition {\n\n fn from(enum_: EnumTypeDef) -> Self {\n\n let mut new_enum = EnumDefinition::new(enum_.name.into());\n\n new_enum.description(enum_.description.map(String::from));\n\n enum_\n\n .enum_values_def\n\n .into_iter()\n\n .for_each(|val| new_enum.value(val.into()));\n\n enum_\n\n .directives\n\n .into_iter()\n\n .for_each(|directive| new_enum.directive(directive.into()));\n\n if enum_.extend {\n\n new_enum.extend();\n\n }\n\n\n\n new_enum\n\n }\n", "file_path": "crates/apollo-smith/src/enum_.rs", "rank": 15, "score": 44548.61033015946 }, { "content": " extend: self.u.arbitrary().unwrap_or(false),\n\n })\n\n }\n\n\n\n /// Choose an arbitrary `EnumTypeDef` in existings (already created) enum definitions\n\n pub fn choose_enum(&mut self) -> Result<&EnumTypeDef> {\n\n self.u.choose(&self.enum_type_defs)\n\n }\n\n\n\n /// Create an arbitrary variant `Name` given an enum\n\n pub fn arbitrary_variant<'b>(&mut self, enum_: &'b EnumTypeDef) -> Result<&'b Name> {\n\n let arbitrary_idx = self.u.int_in_range(0..=(enum_.enum_values_def.len() - 1))?;\n\n Ok(enum_\n\n .enum_values_def\n\n .iter()\n\n .nth(arbitrary_idx)\n\n .map(|e| &e.value)\n\n .expect(\"cannot get variant\"))\n\n }\n\n\n", "file_path": "crates/apollo-smith/src/enum_.rs", "rank": 16, "score": 44548.546445473956 }, { "content": " /// Create an arbitrary `EnumValueDefinition`\n\n pub fn enum_values_definition(&mut self) -> Result<HashSet<EnumValueDefinition>> {\n\n let mut enum_values_def = HashSet::with_capacity(self.u.int_in_range(2..=10usize)?);\n\n for i in 0..self.u.int_in_range(2..=10usize)? 
{\n\n let description = self\n\n .u\n\n .arbitrary()\n\n .unwrap_or(false)\n\n .then(|| self.description())\n\n .transpose()?;\n\n let value = self.name_with_index(i)?;\n\n let directives = self.directives()?;\n\n\n\n enum_values_def.insert(EnumValueDefinition {\n\n description,\n\n value,\n\n directives,\n\n });\n\n }\n\n\n\n Ok(enum_values_def)\n\n }\n\n}\n", "file_path": "crates/apollo-smith/src/enum_.rs", "rank": 17, "score": 44547.46644436564 }, { "content": "/// ```\n\n#[derive(Debug, PartialEq, Clone)]\n\npub struct EnumDefinition {\n\n // Name must return a String.\n\n name: String,\n\n // Description may return a String or null.\n\n description: StringValue,\n\n // A vector of EnumValue. There must be at least one and they must have\n\n // unique names.\n\n values: Vec<EnumValue>,\n\n /// The vector of directives\n\n directives: Vec<Directive>,\n\n extend: bool,\n\n}\n\n\n\nimpl EnumDefinition {\n\n /// Create a new instance of Enum Definition.\n\n pub fn new(name: String) -> Self {\n\n Self {\n\n name,\n", "file_path": "crates/apollo-encoder/src/enum_def.rs", "rank": 18, "score": 43754.04901097617 }, { "content": "/// Value::String(String::from(\n\n/// \"Box was recycled.\",\n\n/// )),\n\n/// ));\n\n/// enum_ty.directive(deprecated_directive);\n\n///\n\n/// assert_eq!(\n\n/// enum_ty.to_string(),\n\n/// r#\" \"Box nap spot.\"\n\n/// CARDBOARD_BOX @deprecated(reason: \"Box was recycled.\")\"#\n\n/// );\n\n/// ```\n\n#[derive(Debug, PartialEq, Clone)]\n\npub struct EnumValue {\n\n // Name must return a String.\n\n name: String,\n\n // Description may return a String or null.\n\n description: StringValue,\n\n /// The vector of directives\n\n directives: Vec<Directive>,\n", "file_path": "crates/apollo-encoder/src/enum_value.rs", "rank": 19, "score": 43748.31535537426 }, { "content": "}\n\n\n\nimpl EnumValue {\n\n /// Create a new instance of EnumValue.\n\n pub fn new(name: String) -> Self {\n\n Self {\n\n name,\n\n description: StringValue::Field { source: None },\n\n directives: Vec::new(),\n\n }\n\n }\n\n\n\n /// Set the Enum Value's description.\n\n pub fn description(&mut self, description: Option<String>) {\n\n self.description = StringValue::Field {\n\n source: description,\n\n };\n\n }\n\n\n\n /// Add a directive.\n", "file_path": "crates/apollo-encoder/src/enum_value.rs", "rank": 20, "score": 43747.308865670595 }, { "content": " pub fn value(&mut self, value: EnumValue) {\n\n self.values.push(value)\n\n }\n\n\n\n /// Add a directive.\n\n pub fn directive(&mut self, directive: Directive) {\n\n self.directives.push(directive)\n\n }\n\n}\n\n\n\nimpl fmt::Display for EnumDefinition {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n if self.extend {\n\n write!(f, \"extend \")?;\n\n } else {\n\n // No description when it's a extension\n\n write!(f, \"{}\", self.description)?;\n\n }\n\n\n\n write!(f, \"enum {}\", self.name)?;\n", "file_path": "crates/apollo-encoder/src/enum_def.rs", "rank": 21, "score": 43747.01288135655 }, { "content": " pub fn directive(&mut self, directive: Directive) {\n\n self.directives.push(directive)\n\n }\n\n}\n\n\n\nimpl fmt::Display for EnumValue {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{}\", self.description)?;\n\n write!(f, \" {}\", self.name)?;\n\n\n\n for directive in &self.directives {\n\n write!(f, \" {}\", directive)?;\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "crates/apollo-encoder/src/enum_value.rs", "rank": 22, "score": 43745.57762244684 }, { 
"content": " use crate::{Argument, Value};\n\n\n\n use super::*;\n\n use pretty_assertions::assert_eq;\n\n\n\n #[test]\n\n fn it_encodes_an_enum_value() {\n\n let enum_ty = EnumValue::new(\"CAT_TREE\".to_string());\n\n assert_eq!(enum_ty.to_string(), \" CAT_TREE\");\n\n }\n\n\n\n #[test]\n\n fn it_encodes_an_enum_value_with_desciption() {\n\n let mut enum_ty = EnumValue::new(\"CAT_TREE\".to_string());\n\n enum_ty.description(Some(\"Top bunk of a cat tree.\".to_string()));\n\n assert_eq!(\n\n enum_ty.to_string(),\n\n r#\" \"Top bunk of a cat tree.\"\n\n CAT_TREE\"#\n\n );\n", "file_path": "crates/apollo-encoder/src/enum_value.rs", "rank": 23, "score": 43744.230900773626 }, { "content": "use std::fmt;\n\n\n\nuse crate::{Directive, EnumValue, StringValue};\n\n\n\n/// Enums are special scalars that can only have a defined set of values.\n\n///\n\n/// *EnumTypeDefinition*:\n\n/// Description? **enum** Name Directives? EnumValuesDefinition?\n\n///\n\n/// Detailed documentation can be found in [GraphQL spec](https://spec.graphql.org/October2021/#sec-Enums).\n\n///\n\n/// ### Example\n\n/// ```rust\n\n/// use apollo_encoder::{Argument, Directive, EnumValue, EnumDefinition, Value};\n\n///\n\n/// let mut enum_ty_1 = EnumValue::new(\"CAT_TREE\".to_string());\n\n/// enum_ty_1.description(Some(\"Top bunk of a cat tree.\".to_string()));\n\n/// let enum_ty_2 = EnumValue::new(\"BED\".to_string());\n\n/// let mut deprecated_directive = Directive::new(String::from(\"deprecated\"));\n\n/// deprecated_directive.arg(Argument::new(String::from(\"reason\"), Value::String(String::from(\"Box was recycled.\"))));\n", "file_path": "crates/apollo-encoder/src/enum_def.rs", "rank": 24, "score": 43744.22150771963 }, { "content": "use std::fmt;\n\n\n\nuse crate::{Directive, StringValue};\n\n\n\n/// The EnumValue type represents one of possible values of an enum.\n\n///\n\n/// *EnumValueDefinition*:\n\n/// Description? 
EnumValue Directives?\n\n///\n\n/// Detailed documentation can be found in [GraphQL spec](https://spec.graphql.org/October2021/#sec-The-__EnumValue-Type).\n\n///\n\n/// ### Example\n\n/// ```rust\n\n/// use apollo_encoder::{Argument, Directive, EnumValue, Value};\n\n///\n\n/// let mut enum_ty = EnumValue::new(\"CARDBOARD_BOX\".to_string());\n\n/// enum_ty.description(Some(\"Box nap spot.\".to_string()));\n\n/// let mut deprecated_directive = Directive::new(String::from(\"deprecated\"));\n\n/// deprecated_directive.arg(Argument::new(\n\n/// String::from(\"reason\"),\n", "file_path": "crates/apollo-encoder/src/enum_value.rs", "rank": 25, "score": 43744.22205614881 }, { "content": " description: StringValue::Top { source: None },\n\n values: Vec::new(),\n\n directives: Vec::new(),\n\n extend: false,\n\n }\n\n }\n\n\n\n /// Set the enum type as an extension\n\n pub fn extend(&mut self) {\n\n self.extend = true;\n\n }\n\n\n\n /// Set the Enum Definition's description.\n\n pub fn description(&mut self, description: Option<String>) {\n\n self.description = StringValue::Top {\n\n source: description,\n\n };\n\n }\n\n\n\n /// Set the Enum Definitions's values.\n", "file_path": "crates/apollo-encoder/src/enum_def.rs", "rank": 26, "score": 43743.299201606045 }, { "content": " fn it_encodes_a_simple_enum() {\n\n let enum_ty_1 = EnumValue::new(\"CAT_TREE\".to_string());\n\n let enum_ty_2 = EnumValue::new(\"BED\".to_string());\n\n let enum_ty_3 = EnumValue::new(\"CARDBOARD_BOX\".to_string());\n\n\n\n let mut enum_ = EnumDefinition::new(\"NapSpots\".to_string());\n\n enum_.value(enum_ty_1);\n\n enum_.value(enum_ty_2);\n\n enum_.value(enum_ty_3);\n\n\n\n assert_eq!(\n\n enum_.to_string(),\n\n r#\"enum NapSpots {\n\n CAT_TREE\n\n BED\n\n CARDBOARD_BOX\n\n}\n\n\"#\n\n );\n\n }\n", "file_path": "crates/apollo-encoder/src/enum_def.rs", "rank": 27, "score": 43739.69391771536 }, { "content": " for directive in &self.directives {\n\n write!(f, \" {}\", directive)?;\n\n }\n\n\n\n write!(f, \" {{\")?;\n\n for value in &self.values {\n\n write!(f, \"\\n{}\", value)?;\n\n }\n\n writeln!(f, \"\\n}}\")\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::{Argument, Value};\n\n\n\n use super::*;\n\n use pretty_assertions::assert_eq;\n\n\n\n #[test]\n", "file_path": "crates/apollo-encoder/src/enum_def.rs", "rank": 28, "score": 43739.42612369946 }, { "content": " #[test]\n\n fn it_encodes_enum_with_descriptions() {\n\n let mut enum_ty_1 = EnumValue::new(\"CAT_TREE\".to_string());\n\n enum_ty_1.description(Some(\"Top bunk of a cat tree.\".to_string()));\n\n let enum_ty_2 = EnumValue::new(\"BED\".to_string());\n\n let mut enum_ty_3 = EnumValue::new(\"CARDBOARD_BOX\".to_string());\n\n let mut deprecated_directive = Directive::new(String::from(\"deprecated\"));\n\n deprecated_directive.arg(Argument::new(\n\n String::from(\"reason\"),\n\n Value::String(String::from(\"Box was recycled.\")),\n\n ));\n\n enum_ty_3.directive(deprecated_directive);\n\n let mut directive = Directive::new(String::from(\"testDirective\"));\n\n directive.arg(Argument::new(\n\n String::from(\"first\"),\n\n Value::String(\"one\".to_string()),\n\n ));\n\n\n\n let mut enum_ = EnumDefinition::new(\"NapSpots\".to_string());\n\n enum_.description(Some(\"Favourite cat nap spots.\".to_string()));\n\n enum_.value(enum_ty_1);\n\n enum_.value(enum_ty_2);\n\n enum_.value(enum_ty_3);\n\n enum_.directive(directive);\n\n\n\n assert_eq!(\n\n enum_.to_string(),\n\n r#\"\"Favourite cat nap spots.\"\n", "file_path": "crates/apollo-encoder/src/enum_def.rs", 
"rank": 29, "score": 43739.17883953597 }, { "content": "/// let mut enum_ty_3 = EnumValue::new(\"CARDBOARD_BOX\".to_string());\n\n/// enum_ty_3.directive(deprecated_directive);\n\n///\n\n/// let mut enum_ = EnumDefinition::new(\"NapSpots\".to_string());\n\n/// enum_.description(Some(\"Favourite cat nap spots.\".to_string()));\n\n/// enum_.value(enum_ty_1);\n\n/// enum_.value(enum_ty_2);\n\n/// enum_.value(enum_ty_3);\n\n///\n\n/// assert_eq!(\n\n/// enum_.to_string(),\n\n/// r#\"\"Favourite cat nap spots.\"\n\n/// enum NapSpots {\n\n/// \"Top bunk of a cat tree.\"\n\n/// CAT_TREE\n\n/// BED\n\n/// CARDBOARD_BOX @deprecated(reason: \"Box was recycled.\")\n\n/// }\n\n/// \"#\n\n/// );\n", "file_path": "crates/apollo-encoder/src/enum_def.rs", "rank": 30, "score": 43738.62134393371 }, { "content": " }\n\n\n\n #[test]\n\n fn it_encodes_an_enum_value_with_directive() {\n\n let mut enum_ty = EnumValue::new(\"CARDBOARD_BOX\".to_string());\n\n let mut directive = Directive::new(String::from(\"testDirective\"));\n\n directive.arg(Argument::new(\n\n String::from(\"first\"),\n\n Value::List(vec![Value::Int(1), Value::Int(2)]),\n\n ));\n\n enum_ty.description(Some(\"Box nap\\nspot.\".to_string()));\n\n enum_ty.directive(directive);\n\n\n\n assert_eq!(\n\n enum_ty.to_string(),\n\n r#\" \"\"\"\n\n Box nap\n\n spot.\n\n \"\"\"\n\n CARDBOARD_BOX @testDirective(first: [1, 2])\"#\n", "file_path": "crates/apollo-encoder/src/enum_value.rs", "rank": 31, "score": 43738.60455913843 }, { "content": " );\n\n }\n\n\n\n #[test]\n\n fn it_encodes_an_enum_value_with_deprecated_block_string_value() {\n\n let mut enum_ty = EnumValue::new(\"CARDBOARD_BOX\".to_string());\n\n enum_ty.description(Some(\"Box nap\\nspot.\".to_string()));\n\n let mut deprecated_directive = Directive::new(String::from(\"deprecated\"));\n\n deprecated_directive.arg(Argument::new(\n\n String::from(\"reason\"),\n\n Value::String(String::from(r#\"Box was \"recycled\".\"#)),\n\n ));\n\n enum_ty.directive(deprecated_directive);\n\n\n\n assert_eq!(\n\n enum_ty.to_string(),\n\n r#\" \"\"\"\n\n Box nap\n\n spot.\n\n \"\"\"\n\n CARDBOARD_BOX @deprecated(reason: \"\"\"Box was \"recycled\".\"\"\")\"#\n\n );\n\n }\n\n}\n", "file_path": "crates/apollo-encoder/src/enum_value.rs", "rank": 32, "score": 43738.53864770192 }, { "content": " ));\n\n enum_ty_3.directive(deprecated_directive);\n\n let mut directive = Directive::new(String::from(\"testDirective\"));\n\n directive.arg(Argument::new(\n\n String::from(\"first\"),\n\n Value::String(\"one\".to_string()),\n\n ));\n\n\n\n let mut enum_ = EnumDefinition::new(\"NapSpots\".to_string());\n\n enum_.description(Some(\"Favourite cat nap spots.\".to_string()));\n\n enum_.value(enum_ty_1);\n\n enum_.value(enum_ty_2);\n\n enum_.value(enum_ty_3);\n\n enum_.directive(directive);\n\n enum_.extend();\n\n\n\n assert_eq!(\n\n enum_.to_string(),\n\n r#\"extend enum NapSpots @testDirective(first: \"one\") {\n\n \"Top bunk of a cat tree.\"\n\n CAT_TREE\n\n BED\n\n CARDBOARD_BOX @deprecated(reason: \"Box was recycled.\")\n\n}\n\n\"#\n\n );\n\n }\n\n}\n", "file_path": "crates/apollo-encoder/src/enum_def.rs", "rank": 33, "score": 43737.33160652064 }, { "content": "#![allow(clippy::needless_return)]\n\n\n\nuse crate::{\n\n parser::grammar::{description, directive, name, value},\n\n Parser, SyntaxKind, TokenKind, S, T,\n\n};\n\n\n\n/// See: https://spec.graphql.org/October2021/#EnumTypeDefinition\n\n///\n\n/// *EnumTypeDefinition*:\n\n/// Description? **enum** Name Directives? 
EnumValuesDefinition?\n\npub(crate) fn enum_type_definition(p: &mut Parser) {\n\n let _g = p.start_node(SyntaxKind::ENUM_TYPE_DEFINITION);\n\n\n\n if let Some(TokenKind::StringValue) = p.peek() {\n\n description::description(p);\n\n }\n\n\n\n if let Some(\"enum\") = p.peek_data().as_deref() {\n\n p.bump(SyntaxKind::enum_KW);\n", "file_path": "crates/apollo-parser/src/parser/grammar/enum_.rs", "rank": 34, "score": 42969.203299750436 }, { "content": " enum_values_definition(p);\n\n }\n\n\n\n if !meets_requirements {\n\n p.err(\"expected Directive or Enum Values Definition\");\n\n }\n\n}\n\n\n\n/// See: https://spec.graphql.org/October2021/#EnumValuesDefinition\n\n///\n\n/// *EnumValuesDefinition*:\n\n/// **{** EnumValueDefinition* **}**\n\npub(crate) fn enum_values_definition(p: &mut Parser) {\n\n let _g = p.start_node(SyntaxKind::ENUM_VALUES_DEFINITION);\n\n p.bump(S!['{']);\n\n\n\n match p.peek() {\n\n Some(TokenKind::Name | TokenKind::StringValue) => enum_value_definition(p),\n\n _ => p.err(\"expected Enum Value Definition\"),\n\n }\n", "file_path": "crates/apollo-parser/src/parser/grammar/enum_.rs", "rank": 35, "score": 42966.57410649957 }, { "content": "\n\n p.expect(T!['}'], S!['}']);\n\n}\n\n\n\n/// See: https://spec.graphql.org/October2021/#EnumValueDefinition\n\n///\n\n/// *EnumValueDefinition*:\n\n/// Description? EnumValue Directives?\n\npub(crate) fn enum_value_definition(p: &mut Parser) {\n\n if let Some(TokenKind::Name | TokenKind::StringValue) = p.peek() {\n\n let guard = p.start_node(SyntaxKind::ENUM_VALUE_DEFINITION);\n\n\n\n if let Some(TokenKind::StringValue) = p.peek() {\n\n description::description(p);\n\n }\n\n\n\n value::enum_value(p);\n\n\n\n if let Some(T![@]) = p.peek() {\n\n directive::directives(p);\n", "file_path": "crates/apollo-parser/src/parser/grammar/enum_.rs", "rank": 36, "score": 42966.28950099166 }, { "content": "/// **extend** **enum** Name Directives?\n\npub(crate) fn enum_type_extension(p: &mut Parser) {\n\n let _g = p.start_node(SyntaxKind::ENUM_TYPE_EXTENSION);\n\n p.bump(SyntaxKind::extend_KW);\n\n p.bump(SyntaxKind::enum_KW);\n\n\n\n let mut meets_requirements = false;\n\n\n\n match p.peek() {\n\n Some(TokenKind::Name) => name::name(p),\n\n _ => p.err(\"expected a Name\"),\n\n }\n\n\n\n if let Some(T![@]) = p.peek() {\n\n meets_requirements = true;\n\n directive::directives(p);\n\n }\n\n\n\n if let Some(T!['{']) = p.peek() {\n\n meets_requirements = true;\n", "file_path": "crates/apollo-parser/src/parser/grammar/enum_.rs", "rank": 37, "score": 42964.543948546 }, { "content": " }\n\n\n\n match p.peek() {\n\n Some(TokenKind::Name) => name::name(p),\n\n _ => p.err(\"expected a Name\"),\n\n }\n\n\n\n if let Some(T![@]) = p.peek() {\n\n directive::directives(p);\n\n }\n\n\n\n if let Some(T!['{']) = p.peek() {\n\n enum_values_definition(p);\n\n }\n\n}\n\n\n\n/// See: https://spec.graphql.org/October2021/#EnumTypeExtension\n\n///\n\n// *EnumTypeExtension*:\n\n/// **extend** **enum** Name Directives? 
EnumValuesDefinition\n", "file_path": "crates/apollo-parser/src/parser/grammar/enum_.rs", "rank": 38, "score": 42962.992648302374 }, { "content": " }\n\n if p.peek().is_some() {\n\n guard.finish_node();\n\n return enum_value_definition(p);\n\n }\n\n }\n\n\n\n if let Some(T!['}']) = p.peek() {\n\n return;\n\n }\n\n}\n", "file_path": "crates/apollo-parser/src/parser/grammar/enum_.rs", "rank": 39, "score": 42957.71624400635 }, { "content": "fn extract_enums(ast: &mut AstSrc) {\n\n for node in &mut ast.nodes {\n\n for enm in &ast.enums {\n\n let mut to_remove = Vec::new();\n\n for (i, field) in node.fields.iter().enumerate() {\n\n let ty = field.ty().to_string();\n\n if enm.variants.iter().any(|it| it == &ty) {\n\n to_remove.push(i);\n\n }\n\n }\n\n if to_remove.len() == enm.variants.len() {\n\n node.remove_field(to_remove);\n\n let ty = enm.name.clone();\n\n let name = to_lower_snake_case(&ty);\n\n node.fields.push(Field::Node {\n\n name,\n\n ty,\n\n cardinality: Cardinality::Optional,\n\n });\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "xtask/src/codegen/mod.rs", "rank": 40, "score": 40785.59619918273 }, { "content": "fn extract_enum_traits(ast: &mut AstSrc) {\n\n let enums = ast.enums.clone();\n\n for enm in &mut ast.enums {\n\n if enm.name == \"Stmt\" {\n\n continue;\n\n }\n\n let nodes = &ast.nodes;\n\n\n\n let mut variant_traits = enm.variants.iter().map(|var| {\n\n nodes\n\n .iter()\n\n .find_map(|node| {\n\n if &node.name != var {\n\n return None;\n\n }\n\n Some(node.traits.iter().cloned().collect::<BTreeSet<_>>())\n\n })\n\n .unwrap_or_else(|| {\n\n enums\n\n .iter()\n", "file_path": "xtask/src/codegen/mod.rs", "rank": 41, "score": 40109.56102545059 }, { "content": "fn lower_enum(grammar: &Grammar, rule: &Rule) -> Option<Vec<String>> {\n\n let alternatives = match rule {\n\n Rule::Alt(it) => it,\n\n _ => return None,\n\n };\n\n let mut variants = Vec::new();\n\n for alternative in alternatives {\n\n match alternative {\n\n Rule::Node(it) => variants.push(grammar[*it].name.clone()),\n\n Rule::Token(it) if grammar[*it].name == \";\" => (),\n\n _ => return None,\n\n }\n\n }\n\n Some(variants)\n\n}\n\n\n", "file_path": "xtask/src/codegen/mod.rs", "rank": 42, "score": 38822.56645368697 }, { "content": "pub struct NullValue {\n\n pub(crate) syntax: SyntaxNode,\n\n}\n\nimpl NullValue {\n\n pub fn null_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S![null]) }\n\n}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct EnumValue {\n\n pub(crate) syntax: SyntaxNode,\n\n}\n\nimpl EnumValue {\n\n pub fn name(&self) -> Option<Name> { support::child(&self.syntax) }\n\n}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct ListValue {\n\n pub(crate) syntax: SyntaxNode,\n\n}\n\nimpl ListValue {\n\n pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S!['[']) }\n\n pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S![']']) }\n", "file_path": "crates/apollo-parser/src/ast/generated/nodes.rs", "rank": 43, "score": 23.016711713854452 }, { "content": "}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct EnumValueDefinition {\n\n pub(crate) syntax: SyntaxNode,\n\n}\n\nimpl EnumValueDefinition {\n\n pub fn description(&self) -> Option<Description> { support::child(&self.syntax) }\n\n pub fn enum_value(&self) -> Option<EnumValue> { support::child(&self.syntax) }\n\n pub fn directives(&self) -> Option<Directives> { support::child(&self.syntax) }\n\n}\n\n#[derive(Debug, Clone, PartialEq, Eq, 
Hash)]\n\npub struct InputFieldsDefinition {\n\n pub(crate) syntax: SyntaxNode,\n\n}\n\nimpl InputFieldsDefinition {\n\n pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S!['{']) }\n\n pub fn input_value_definitions(&self) -> AstChildren<InputValueDefinition> {\n\n support::children(&self.syntax)\n\n }\n\n pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S!['}']) }\n", "file_path": "crates/apollo-parser/src/ast/generated/nodes.rs", "rank": 45, "score": 17.632573163548535 }, { "content": "pub enum Selection {\n\n Field(Field),\n\n FragmentSpread(FragmentSpread),\n\n InlineFragment(InlineFragment),\n\n}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub enum Value {\n\n Variable(Variable),\n\n StringValue(StringValue),\n\n FloatValue(FloatValue),\n\n IntValue(IntValue),\n\n BooleanValue(BooleanValue),\n\n NullValue(NullValue),\n\n EnumValue(EnumValue),\n\n ListValue(ListValue),\n\n ObjectValue(ObjectValue),\n\n}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub enum Type {\n\n NamedType(NamedType),\n", "file_path": "crates/apollo-parser/src/ast/generated/nodes.rs", "rank": 46, "score": 17.444672261589965 }, { "content": "}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct UnionMemberTypes {\n\n pub(crate) syntax: SyntaxNode,\n\n}\n\nimpl UnionMemberTypes {\n\n pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S![=]) }\n\n pub fn pipe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S![|]) }\n\n pub fn named_types(&self) -> AstChildren<NamedType> { support::children(&self.syntax) }\n\n}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct EnumValuesDefinition {\n\n pub(crate) syntax: SyntaxNode,\n\n}\n\nimpl EnumValuesDefinition {\n\n pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S!['{']) }\n\n pub fn enum_value_definitions(&self) -> AstChildren<EnumValueDefinition> {\n\n support::children(&self.syntax)\n\n }\n\n pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S!['}']) }\n", "file_path": "crates/apollo-parser/src/ast/generated/nodes.rs", "rank": 47, "score": 17.34945138755258 }, { "content": "use std::fmt;\n\n\n\nuse crate::TokenKind;\n\n\n\n/// A token generated by the lexer.\n\n#[derive(Clone)]\n\npub struct Token {\n\n pub(crate) kind: TokenKind,\n\n pub(crate) data: String,\n\n pub(crate) index: usize,\n\n}\n\n\n\nimpl Token {\n\n pub(crate) fn new(kind: TokenKind, data: String) -> Self {\n\n Self {\n\n kind,\n\n data,\n\n index: 0,\n\n }\n\n }\n", "file_path": "crates/apollo-parser/src/lexer/token.rs", "rank": 48, "score": 17.221366233921337 }, { "content": "#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct EnumTypeExtension {\n\n pub(crate) syntax: SyntaxNode,\n\n}\n\nimpl EnumTypeExtension {\n\n pub fn extend_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S![extend]) }\n\n pub fn enum_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S![enum]) }\n\n pub fn name(&self) -> Option<Name> { support::child(&self.syntax) }\n\n pub fn directives(&self) -> Option<Directives> { support::child(&self.syntax) }\n\n pub fn enum_values_definition(&self) -> Option<EnumValuesDefinition> {\n\n support::child(&self.syntax)\n\n }\n\n}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct InputObjectTypeExtension {\n\n pub(crate) syntax: SyntaxNode,\n\n}\n\nimpl InputObjectTypeExtension {\n\n pub fn extend_token(&self) -> Option<SyntaxToken> { 
support::token(&self.syntax, S![extend]) }\n\n pub fn input_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S![input]) }\n", "file_path": "crates/apollo-parser/src/ast/generated/nodes.rs", "rank": 49, "score": 17.139047493251113 }, { "content": "pub struct ListType {\n\n pub(crate) syntax: SyntaxNode,\n\n}\n\nimpl ListType {\n\n pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S!['[']) }\n\n pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }\n\n pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S![']']) }\n\n}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct NonNullType {\n\n pub(crate) syntax: SyntaxNode,\n\n}\n\nimpl NonNullType {\n\n pub fn named_type(&self) -> Option<NamedType> { support::child(&self.syntax) }\n\n pub fn excl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S![!]) }\n\n pub fn list_type(&self) -> Option<ListType> { support::child(&self.syntax) }\n\n}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct Directive {\n\n pub(crate) syntax: SyntaxNode,\n", "file_path": "crates/apollo-parser/src/ast/generated/nodes.rs", "rank": 50, "score": 16.93298218425587 }, { "content": "//! This is a generated file, please do not edit manually. Changes can be\n\n//! made in codegeneration that lives in `xtask` top-level dir.\n\n\n\nuse crate::{\n\n ast::{support, AstChildren, AstNode},\n\n SyntaxKind::{self, *},\n\n SyntaxNode, SyntaxToken, S,\n\n};\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct Name {\n\n pub(crate) syntax: SyntaxNode,\n\n}\n\nimpl Name {\n\n pub fn ident_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S![ident]) }\n\n}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct Document {\n\n pub(crate) syntax: SyntaxNode,\n\n}\n\nimpl Document {\n", "file_path": "crates/apollo-parser/src/ast/generated/nodes.rs", "rank": 51, "score": 16.404523334911968 }, { "content": " pub(crate) syntax: SyntaxNode,\n\n}\n\nimpl UnionTypeDefinition {\n\n pub fn description(&self) -> Option<Description> { support::child(&self.syntax) }\n\n pub fn union_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S![union]) }\n\n pub fn name(&self) -> Option<Name> { support::child(&self.syntax) }\n\n pub fn directives(&self) -> Option<Directives> { support::child(&self.syntax) }\n\n pub fn union_member_types(&self) -> Option<UnionMemberTypes> { support::child(&self.syntax) }\n\n}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct EnumTypeDefinition {\n\n pub(crate) syntax: SyntaxNode,\n\n}\n\nimpl EnumTypeDefinition {\n\n pub fn description(&self) -> Option<Description> { support::child(&self.syntax) }\n\n pub fn enum_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S![enum]) }\n\n pub fn name(&self) -> Option<Name> { support::child(&self.syntax) }\n\n pub fn directives(&self) -> Option<Directives> { support::child(&self.syntax) }\n\n pub fn enum_values_definition(&self) -> Option<EnumValuesDefinition> {\n\n support::child(&self.syntax)\n", "file_path": "crates/apollo-parser/src/ast/generated/nodes.rs", "rank": 52, "score": 15.979064305111633 }, { "content": "use std::fmt;\n\n\n\n/// The Value type represents available values you could give as an input.\n\n///\n\n/// *Value*:\n\n/// Variable | IntValue | FloatValue | StringValue | BooleanValue |\n\n/// NullValue | EnumValue | ListValue | ObjectValue\n\n///\n\n/// Detailed documentation can be found in [GraphQL 
spec](https://spec.graphql.org/October2021/#Value).\n\n#[derive(Debug, PartialEq, Clone)]\n\npub enum Value {\n\n /// Name of a variable example: `varName`\n\n Variable(String),\n\n /// Int value example: `7`\n\n Int(i64),\n\n /// Float value example: `25.4`\n\n Float(f64),\n\n /// String value example: `\"My string\"`\n\n String(String),\n\n /// Boolean value example: `false`\n", "file_path": "crates/apollo-encoder/src/value.rs", "rank": 53, "score": 15.373481866357801 }, { "content": "mod scalar_def;\n\nmod schema_def;\n\nmod selection_set;\n\nmod string_value;\n\nmod union_def;\n\nmod value;\n\nmod variable;\n\n\n\npub use argument::Argument;\n\npub use argument::ArgumentsDefinition;\n\npub use directive::Directive;\n\npub use directive_def::DirectiveDefinition;\n\npub use document::Document;\n\npub use enum_def::EnumDefinition;\n\npub use enum_value::EnumValue;\n\npub use field::{Field, FieldDefinition};\n\npub use field_value::Type_;\n\npub use fragment::{FragmentDefinition, FragmentSpread, InlineFragment, TypeCondition};\n\npub use input_field::InputField;\n\npub use input_object_def::InputObjectDefinition;\n", "file_path": "crates/apollo-encoder/src/lib.rs", "rank": 54, "score": 15.299774781436085 }, { "content": "#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub enum Ty {\n\n /// The Non-Null field type.\n\n Named(Name),\n\n /// The List field type.\n\n List(Box<Ty>),\n\n /// The Named field type.\n\n NonNull(Box<Ty>),\n\n}\n\n\n\nimpl From<Ty> for Type_ {\n\n fn from(val: Ty) -> Self {\n\n match val {\n\n Ty::Named(name) => Type_::NamedType { name: name.into() },\n\n Ty::List(ty) => Type_::List {\n\n ty: Box::new((*ty).into()),\n\n },\n\n Ty::NonNull(ty) => Type_::NonNull {\n\n ty: Box::new((*ty).into()),\n\n },\n", "file_path": "crates/apollo-smith/src/ty.rs", "rank": 55, "score": 14.941482477469297 }, { "content": "use self::SyntaxKind::*;\n\nimpl SyntaxKind {\n\n pub fn is_keyword(self) -> bool {\n\n matches!(\n\n self,\n\n query_KW\n\n | mutation_KW\n\n | repeatable_KW\n\n | subscription_KW\n\n | fragment_KW\n\n | on_KW\n\n | null_KW\n\n | extend_KW\n\n | schema_KW\n\n | scalar_KW\n\n | implements_KW\n\n | interface_KW\n\n | union_KW\n\n | enum_KW\n\n | input_KW\n", "file_path": "crates/apollo-parser/src/parser/generated/syntax_kind.rs", "rank": 56, "score": 14.900793087344677 }, { "content": "/// scalar.to_string(),\n\n/// r#\"\"Int representing number of treats received.\"\n\n/// scalar NumberOfTreatsPerDay\n\n/// \"#\n\n/// );\n\n/// ```\n\n#[derive(Debug, PartialEq, Clone)]\n\npub struct ScalarDefinition {\n\n // Name must return a String.\n\n name: String,\n\n // Description may return a String or null.\n\n description: StringValue,\n\n directives: Vec<Directive>,\n\n extend: bool,\n\n}\n\n\n\nimpl ScalarDefinition {\n\n /// Create a new instance of Scalar Definition.\n\n pub fn new(name: String) -> Self {\n\n Self {\n", "file_path": "crates/apollo-encoder/src/scalar_def.rs", "rank": 57, "score": 14.746419991941377 }, { "content": "}\n\nimpl NamedType {\n\n pub fn name(&self) -> Option<Name> { support::child(&self.syntax) }\n\n}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct Variable {\n\n pub(crate) syntax: SyntaxNode,\n\n}\n\nimpl Variable {\n\n pub fn dollar_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S![$]) }\n\n pub fn name(&self) -> Option<Name> { support::child(&self.syntax) }\n\n}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct StringValue {\n\n pub(crate) syntax: SyntaxNode,\n\n}\n\nimpl StringValue 
{}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct FloatValue {\n\n pub(crate) syntax: SyntaxNode,\n", "file_path": "crates/apollo-parser/src/ast/generated/nodes.rs", "rank": 58, "score": 14.730470908330696 }, { "content": "use arbitrary::{Arbitrary, Result, Unstructured};\n\n\n\nuse crate::DocumentBuilder;\n\n\n\nconst CHARSET: &[u8] =\n\n b\"0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz_\\n\\r\\t/$#!.-+='\";\n\n\n\n/// The `__Description` type represents a description\n\n///\n\n/// *Description*:\n\n/// \"string\"\n\n///\n\n/// Detailed documentation can be found in [GraphQL spec](https://spec.graphql.org/October2021/#sec-Descriptions).\n\n///\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash, Arbitrary)]\n\npub struct Description(StringValue);\n\n\n\nimpl From<Description> for String {\n\n fn from(desc: Description) -> Self {\n\n desc.0.into()\n", "file_path": "crates/apollo-smith/src/description.rs", "rank": 59, "score": 14.682735818669467 }, { "content": " fn from(node: StringValue) -> Value { Value::StringValue(node) }\n\n}\n\nimpl From<FloatValue> for Value {\n\n fn from(node: FloatValue) -> Value { Value::FloatValue(node) }\n\n}\n\nimpl From<IntValue> for Value {\n\n fn from(node: IntValue) -> Value { Value::IntValue(node) }\n\n}\n\nimpl From<BooleanValue> for Value {\n\n fn from(node: BooleanValue) -> Value { Value::BooleanValue(node) }\n\n}\n\nimpl From<NullValue> for Value {\n\n fn from(node: NullValue) -> Value { Value::NullValue(node) }\n\n}\n\nimpl From<EnumValue> for Value {\n\n fn from(node: EnumValue) -> Value { Value::EnumValue(node) }\n\n}\n\nimpl From<ListValue> for Value {\n\n fn from(node: ListValue) -> Value { Value::ListValue(node) }\n\n}\n", "file_path": "crates/apollo-parser/src/ast/generated/nodes.rs", "rank": 60, "score": 14.64520223297549 }, { "content": "\n\nimpl<'a> DocumentBuilder<'a> {\n\n /// Create an arbitrary `Ty`\n\n pub fn ty(&mut self) -> Result<Ty> {\n\n self.generate_ty(true)\n\n }\n\n\n\n /// Choose an arbitrary existing `Ty` given a slice of existing types\n\n pub fn choose_ty(&mut self, existing_types: &[Ty]) -> Result<Ty> {\n\n self.choose_ty_given_nullable(existing_types, true)\n\n }\n\n\n\n /// Choose an arbitrary existing named `Ty` given a slice of existing types\n\n pub fn choose_named_ty(&mut self, existing_types: &[Ty]) -> Result<Ty> {\n\n let used_type_names: Vec<&Ty> = existing_types\n\n .iter()\n\n .chain(BUILTIN_SCALAR_NAMES.iter())\n\n .collect();\n\n\n\n Ok(self.u.choose(&used_type_names)?.to_owned().clone())\n", "file_path": "crates/apollo-smith/src/ty.rs", "rank": 61, "score": 14.57356775496303 }, { "content": "pub(crate) mod variable;\n\n\n\nuse arbitrary::Unstructured;\n\n\n\npub use arbitrary::Result;\n\npub use directive::DirectiveDef;\n\npub use document::Document;\n\npub use enum_::EnumTypeDef;\n\npub use fragment::FragmentDef;\n\npub use input_object::InputObjectTypeDef;\n\npub use interface::InterfaceTypeDef;\n\npub use object::ObjectTypeDef;\n\npub use operation::OperationDef;\n\npub use scalar::ScalarTypeDef;\n\npub use schema::SchemaDef;\n\npub use union::UnionTypeDef;\n\n\n\n/// DocumentBuilder is a struct to build an arbitrary valid GraphQL document\n\n///\n\n/// ```compile_fail\n", "file_path": "crates/apollo-smith/src/lib.rs", "rank": 62, "score": 14.458022018872954 }, { "content": "/// \"Best playime spots, e.g. 
tree, bed.\"\n\n/// playSpot: FavouriteSpots\n\n/// }\n\n/// \"#}\n\n/// );\n\n/// ```\n\n#[derive(Debug, Clone)]\n\npub struct InputObjectDefinition {\n\n // Name must return a String.\n\n name: String,\n\n // Description may return a String or null.\n\n description: StringValue,\n\n // A vector of fields\n\n fields: Vec<InputField>,\n\n /// Contains all directives.\n\n directives: Vec<Directive>,\n\n extend: bool,\n\n}\n\n\n\nimpl InputObjectDefinition {\n", "file_path": "crates/apollo-encoder/src/input_object_def.rs", "rank": 63, "score": 14.257528469875375 }, { "content": "}\n\nimpl FloatValue {\n\n pub fn float_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S![float]) }\n\n}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct IntValue {\n\n pub(crate) syntax: SyntaxNode,\n\n}\n\nimpl IntValue {\n\n pub fn int_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S![int]) }\n\n}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct BooleanValue {\n\n pub(crate) syntax: SyntaxNode,\n\n}\n\nimpl BooleanValue {\n\n pub fn true_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S![true]) }\n\n pub fn false_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S![false]) }\n\n}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n", "file_path": "crates/apollo-parser/src/ast/generated/nodes.rs", "rank": 64, "score": 14.244227650193332 }, { "content": "use crate::{\n\n parser::grammar::{name, variable},\n\n Parser, SyntaxKind, TokenKind, S, T,\n\n};\n\n\n\n/// See: https://spec.graphql.org/October2021/#Value\n\n///\n\n/// *Value*\n\n/// Variable\n\n/// IntValue\n\n/// FloatValue\n\n/// StringValue\n\n/// BooleanValue\n\n/// NullValue\n\n/// EnumValue\n\n/// ListValue\n\n/// ObjectValue\n\npub(crate) fn value(p: &mut Parser) {\n\n match p.peek() {\n\n Some(T![$]) => variable::variable(p),\n", "file_path": "crates/apollo-parser/src/parser/grammar/value.rs", "rank": 65, "score": 14.06221005124545 }, { "content": " pub fn value(&self) -> Option<Value> { support::child(&self.syntax) }\n\n}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct FragmentName {\n\n pub(crate) syntax: SyntaxNode,\n\n}\n\nimpl FragmentName {\n\n pub fn name(&self) -> Option<Name> { support::child(&self.syntax) }\n\n}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct TypeCondition {\n\n pub(crate) syntax: SyntaxNode,\n\n}\n\nimpl TypeCondition {\n\n pub fn on_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S![on]) }\n\n pub fn named_type(&self) -> Option<NamedType> { support::child(&self.syntax) }\n\n}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct NamedType {\n\n pub(crate) syntax: SyntaxNode,\n", "file_path": "crates/apollo-parser/src/ast/generated/nodes.rs", "rank": 66, "score": 14.03252980396781 }, { "content": "//! A version of owned string built on top of rowan's `GreenNode`. This is used\n\n//! 
when displaying text of a given node.\n\n\n\nuse std::{cmp::Ordering, fmt, ops};\n\n\n\npub struct TokenText(pub(crate) rowan::GreenToken);\n\n\n\nimpl TokenText {\n\n pub fn as_str(&self) -> &str {\n\n self.0.text()\n\n }\n\n}\n\n\n\nimpl ops::Deref for TokenText {\n\n type Target = str;\n\n\n\n fn deref(&self) -> &str {\n\n self.as_str()\n\n }\n\n}\n", "file_path": "crates/apollo-parser/src/parser/token_text.rs", "rank": 67, "score": 13.965417309414146 }, { "content": "}\n\nimpl Directive {\n\n pub fn at_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S![@]) }\n\n pub fn name(&self) -> Option<Name> { support::child(&self.syntax) }\n\n pub fn arguments(&self) -> Option<Arguments> { support::child(&self.syntax) }\n\n}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct RootOperationTypeDefinition {\n\n pub(crate) syntax: SyntaxNode,\n\n}\n\nimpl RootOperationTypeDefinition {\n\n pub fn operation_type(&self) -> Option<OperationType> { support::child(&self.syntax) }\n\n pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S![:]) }\n\n pub fn named_type(&self) -> Option<NamedType> { support::child(&self.syntax) }\n\n}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct Description {\n\n pub(crate) syntax: SyntaxNode,\n\n}\n\nimpl Description {\n", "file_path": "crates/apollo-parser/src/ast/generated/nodes.rs", "rank": 68, "score": 13.774896863387529 }, { "content": "impl Alias {\n\n pub fn name(&self) -> Option<Name> { support::child(&self.syntax) }\n\n pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S![:]) }\n\n}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct Arguments {\n\n pub(crate) syntax: SyntaxNode,\n\n}\n\nimpl Arguments {\n\n pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S!['(']) }\n\n pub fn arguments(&self) -> AstChildren<Argument> { support::children(&self.syntax) }\n\n pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S![')']) }\n\n}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct Argument {\n\n pub(crate) syntax: SyntaxNode,\n\n}\n\nimpl Argument {\n\n pub fn name(&self) -> Option<Name> { support::child(&self.syntax) }\n\n pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S![:]) }\n", "file_path": "crates/apollo-parser/src/ast/generated/nodes.rs", "rank": 69, "score": 13.689582402654812 }, { "content": "}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct DirectiveLocations {\n\n pub(crate) syntax: SyntaxNode,\n\n}\n\nimpl DirectiveLocations {\n\n pub fn directive_locations(&self) -> AstChildren<DirectiveLocation> {\n\n support::children(&self.syntax)\n\n }\n\n}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct DirectiveLocation {\n\n pub(crate) syntax: SyntaxNode,\n\n}\n\nimpl DirectiveLocation {\n\n pub fn query_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S![QUERY]) }\n\n pub fn mutation_token(&self) -> Option<SyntaxToken> {\n\n support::token(&self.syntax, S![MUTATION])\n\n }\n\n pub fn subscription_token(&self) -> Option<SyntaxToken> {\n", "file_path": "crates/apollo-parser/src/ast/generated/nodes.rs", "rank": 70, "score": 13.619866887224433 }, { "content": "#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct VariableDefinition {\n\n pub(crate) syntax: SyntaxNode,\n\n}\n\nimpl VariableDefinition {\n\n pub fn variable(&self) -> Option<Variable> { support::child(&self.syntax) }\n\n pub fn 
colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S![:]) }\n\n pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }\n\n pub fn default_value(&self) -> Option<DefaultValue> { support::child(&self.syntax) }\n\n pub fn directives(&self) -> Option<Directives> { support::child(&self.syntax) }\n\n}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct DefaultValue {\n\n pub(crate) syntax: SyntaxNode,\n\n}\n\nimpl DefaultValue {\n\n pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S![=]) }\n\n pub fn value(&self) -> Option<Value> { support::child(&self.syntax) }\n\n}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n", "file_path": "crates/apollo-parser/src/ast/generated/nodes.rs", "rank": 71, "score": 13.600314146802495 }, { "content": "pub struct VariableDefinitions {\n\n pub(crate) syntax: SyntaxNode,\n\n}\n\nimpl VariableDefinitions {\n\n pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S!['(']) }\n\n pub fn variable_definitions(&self) -> AstChildren<VariableDefinition> {\n\n support::children(&self.syntax)\n\n }\n\n pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S![')']) }\n\n}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct Directives {\n\n pub(crate) syntax: SyntaxNode,\n\n}\n\nimpl Directives {\n\n pub fn directives(&self) -> AstChildren<Directive> { support::children(&self.syntax) }\n\n}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct SelectionSet {\n\n pub(crate) syntax: SyntaxNode,\n", "file_path": "crates/apollo-parser/src/ast/generated/nodes.rs", "rank": 72, "score": 13.553084269140342 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl Ty {\n\n pub(crate) fn name(&self) -> &Name {\n\n match self {\n\n Ty::Named(name) => name,\n\n Ty::List(list) => list.name(),\n\n Ty::NonNull(non_null) => non_null.name(),\n\n }\n\n }\n\n\n\n /// Returns `true` if the ty is [`Named`].\n\n ///\n\n /// [`Named`]: Ty::Named\n\n pub fn is_named(&self) -> bool {\n\n matches!(self, Self::Named(..))\n\n }\n\n}\n", "file_path": "crates/apollo-smith/src/ty.rs", "rank": 73, "score": 13.534908821146987 }, { "content": " }\n\n }\n\n }\n\n });\n\n (\n\n quote! {\n\n #[pretty_doc_comment_placeholder_workaround]\n\n #[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\n pub struct #name {\n\n pub(crate) syntax: SyntaxNode,\n\n }\n\n\n\n #(#traits)*\n\n\n\n impl #name {\n\n #(#methods)*\n\n }\n\n },\n\n quote! 
{\n\n impl AstNode for #name {\n", "file_path": "xtask/src/codegen/gen_syntax_nodes.rs", "rank": 74, "score": 13.501275286174323 }, { "content": "use std::str::Chars;\n\n\n\nuse crate::Error;\n\n/// Peekable iterator over a char sequence.\n\npub(crate) struct Cursor<'a> {\n\n chars: Chars<'a>,\n\n pub(crate) err: Option<Error>,\n\n}\n\n\n\nimpl<'a> Cursor<'a> {\n\n pub(crate) fn new(input: &'a str) -> Cursor<'a> {\n\n Cursor {\n\n chars: input.chars(),\n\n err: None,\n\n }\n\n }\n\n}\n\n\n\npub(crate) const EOF_CHAR: char = '\\0';\n\n\n", "file_path": "crates/apollo-parser/src/lexer/cursor.rs", "rank": 75, "score": 13.451153226583457 }, { "content": " pub fn definitions(&self) -> AstChildren<Definition> { support::children(&self.syntax) }\n\n}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct OperationDefinition {\n\n pub(crate) syntax: SyntaxNode,\n\n}\n\nimpl OperationDefinition {\n\n pub fn operation_type(&self) -> Option<OperationType> { support::child(&self.syntax) }\n\n pub fn name(&self) -> Option<Name> { support::child(&self.syntax) }\n\n pub fn variable_definitions(&self) -> Option<VariableDefinitions> {\n\n support::child(&self.syntax)\n\n }\n\n pub fn directives(&self) -> Option<Directives> { support::child(&self.syntax) }\n\n pub fn selection_set(&self) -> Option<SelectionSet> { support::child(&self.syntax) }\n\n}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct FragmentDefinition {\n\n pub(crate) syntax: SyntaxNode,\n\n}\n\nimpl FragmentDefinition {\n", "file_path": "crates/apollo-parser/src/ast/generated/nodes.rs", "rank": 76, "score": 13.392634215511912 }, { "content": " }\n\n}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct InputObjectTypeDefinition {\n\n pub(crate) syntax: SyntaxNode,\n\n}\n\nimpl InputObjectTypeDefinition {\n\n pub fn description(&self) -> Option<Description> { support::child(&self.syntax) }\n\n pub fn input_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S![input]) }\n\n pub fn name(&self) -> Option<Name> { support::child(&self.syntax) }\n\n pub fn directives(&self) -> Option<Directives> { support::child(&self.syntax) }\n\n pub fn input_fields_definition(&self) -> Option<InputFieldsDefinition> {\n\n support::child(&self.syntax)\n\n }\n\n}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct SchemaExtension {\n\n pub(crate) syntax: SyntaxNode,\n\n}\n\nimpl SchemaExtension {\n", "file_path": "crates/apollo-parser/src/ast/generated/nodes.rs", "rank": 77, "score": 13.392600683371956 }, { "content": "/// let mut value = InputValueDefinition::new(\"cat\".to_string(), ty_2);\n\n/// value.description(Some(\"Very good cats\".to_string()));\n\n///\n\n/// assert_eq!(\n\n/// value.to_string(),\n\n/// r#\"\"Very good cats\" cat: [SpaceProgram]\"#\n\n/// );\n\n/// ```\n\n#[derive(Debug, PartialEq, Clone)]\n\npub struct InputValueDefinition {\n\n // Name must return a String.\n\n name: String,\n\n // Description may return a String.\n\n description: StringValue,\n\n // Type must return a __Type that represents the type this input value expects.\n\n type_: Type_,\n\n // Default may return a String encoding (using the GraphQL language) of\n\n // the default value used by this input value in the condition a value is\n\n // not provided at runtime. 
If this input value has no default value,\n\n // returns null.\n", "file_path": "crates/apollo-encoder/src/input_value.rs", "rank": 78, "score": 13.328015721816238 }, { "content": " let _g = p.start_node(SyntaxKind::BOOLEAN_VALUE);\n\n p.bump(SyntaxKind::false_KW);\n\n }\n\n \"null\" => {\n\n let _g = p.start_node(SyntaxKind::NULL_VALUE);\n\n p.bump(SyntaxKind::null_KW)\n\n }\n\n _ => enum_value(p),\n\n }\n\n }\n\n Some(T!['[']) => list_value(p),\n\n Some(T!['{']) => object_value(p),\n\n _ => p.err(\"expected a valid Value\"),\n\n }\n\n}\n\n/// See: https://spec.graphql.org/October2021/#EnumValue\n\n///\n\n/// *EnumValue*:\n\n/// Name *but not* **true** *or* **false** *or* **null**\n\npub(crate) fn enum_value(p: &mut Parser) {\n", "file_path": "crates/apollo-parser/src/parser/grammar/value.rs", "rank": 79, "score": 13.223531428626735 }, { "content": "\n\n#[derive(Debug, Eq, PartialEq)]\n\npub(crate) enum Cardinality {\n\n Optional,\n\n Many,\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub(crate) struct AstEnumSrc {\n\n pub(crate) doc: Vec<String>,\n\n pub(crate) name: String,\n\n pub(crate) traits: Vec<String>,\n\n pub(crate) variants: Vec<String>,\n\n}\n", "file_path": "xtask/src/ast_src.rs", "rank": 80, "score": 13.182149090289208 }, { "content": " \"mutation\",\n\n \"subscription\",\n\n \"schema\",\n\n \"interface\",\n\n];\n\n\n\n/// Name is useful to name different elements.\n\n///\n\n/// GraphQL Documents are full of named things: operations, fields, arguments, types, directives, fragments, and variables.\n\n/// All names must follow the same grammatical form.\n\n/// Names in GraphQL are case-sensitive. That is to say name, Name, and NAME all refer to different names.\n\n/// Underscores are significant, which means other_name and othername are two different names\n\n///\n\n/// Detailed documentation can be found in [GraphQL spec](https://spec.graphql.org/October2021/#Name).\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct Name {\n\n pub(crate) name: String,\n\n}\n\n\n\nimpl From<Name> for String {\n", "file_path": "crates/apollo-smith/src/name.rs", "rank": 81, "score": 13.1477616972425 }, { "content": " pub fn string_value(&self) -> Option<StringValue> { support::child(&self.syntax) }\n\n}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct ImplementsInterfaces {\n\n pub(crate) syntax: SyntaxNode,\n\n}\n\nimpl ImplementsInterfaces {\n\n pub fn implements_token(&self) -> Option<SyntaxToken> {\n\n support::token(&self.syntax, S![implements])\n\n }\n\n pub fn amp_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S![&]) }\n\n pub fn named_types(&self) -> AstChildren<NamedType> { support::children(&self.syntax) }\n\n}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct FieldsDefinition {\n\n pub(crate) syntax: SyntaxNode,\n\n}\n\nimpl FieldsDefinition {\n\n pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S!['{']) }\n\n pub fn field_definitions(&self) -> AstChildren<FieldDefinition> {\n", "file_path": "crates/apollo-parser/src/ast/generated/nodes.rs", "rank": 82, "score": 13.145487528016947 }, { "content": "}\n\nimpl FragmentSpread {\n\n pub fn dotdotdot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S![...]) }\n\n pub fn fragment_name(&self) -> Option<FragmentName> { support::child(&self.syntax) }\n\n pub fn directives(&self) -> Option<Directives> { support::child(&self.syntax) }\n\n}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct InlineFragment {\n\n pub(crate) syntax: 
SyntaxNode,\n\n}\n\nimpl InlineFragment {\n\n pub fn dotdotdot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S![...]) }\n\n pub fn type_condition(&self) -> Option<TypeCondition> { support::child(&self.syntax) }\n\n pub fn directives(&self) -> Option<Directives> { support::child(&self.syntax) }\n\n pub fn selection_set(&self) -> Option<SelectionSet> { support::child(&self.syntax) }\n\n}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct Alias {\n\n pub(crate) syntax: SyntaxNode,\n\n}\n", "file_path": "crates/apollo-parser/src/ast/generated/nodes.rs", "rank": 83, "score": 13.143126806275045 }, { "content": "/// *FragmentSpread*:\n\n/// ... FragmentName Directives?\n\n///\n\n/// Detailed documentation can be found in [GraphQL spec](https://spec.graphql.org/October2021/#FragmentSpread).\n\n///\n\n/// ### Example\n\n/// ```rust\n\n/// use apollo_encoder::FragmentSpread;\n\n///\n\n/// let fragment = FragmentSpread::new(String::from(\"myFragment\"));\n\n/// assert_eq!(fragment.to_string(), r#\"...myFragment\"#);\n\n/// ```\n\n#[derive(Debug, Clone, PartialEq)]\n\npub struct FragmentSpread {\n\n name: String,\n\n directives: Vec<Directive>,\n\n}\n\n\n\nimpl FragmentSpread {\n\n /// Create a new instance of FragmentSpread\n", "file_path": "crates/apollo-encoder/src/fragment.rs", "rank": 84, "score": 13.128286573017746 }, { "content": "use crate::SyntaxKind;\n\n\n\n/// A node in the immutable tree. It has other nodes and tokens as children.\n\npub type SyntaxNode = rowan::SyntaxNode<GraphQLLanguage>;\n\n/// A leaf node in the AST.\n\npub type SyntaxToken = rowan::SyntaxToken<GraphQLLanguage>;\n\n/// A `SyntaxNode` or a `SyntaxToken`.\n\npub type SyntaxElement = rowan::SyntaxElement<GraphQLLanguage>;\n\n/// Children of a `SyntaxNode`.\n\npub type SyntaxNodeChildren = rowan::SyntaxNodeChildren<GraphQLLanguage>;\n\n\n\n/// A language implementation for use in `Rowan`.\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\npub enum GraphQLLanguage {}\n\n\n\nimpl rowan::Language for GraphQLLanguage {\n\n type Kind = SyntaxKind;\n\n fn kind_from_raw(raw: rowan::SyntaxKind) -> Self::Kind {\n\n assert!(raw.0 <= (SyntaxKind::__LAST as u16));\n\n unsafe { std::mem::transmute::<u16, SyntaxKind>(raw.0) }\n\n }\n\n fn kind_to_raw(kind: Self::Kind) -> rowan::SyntaxKind {\n\n rowan::SyntaxKind(kind.into())\n\n }\n\n}\n", "file_path": "crates/apollo-parser/src/parser/language.rs", "rank": 85, "score": 13.124445283539266 }, { "content": " pub fn values(&self) -> AstChildren<Value> { support::children(&self.syntax) }\n\n}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct ObjectValue {\n\n pub(crate) syntax: SyntaxNode,\n\n}\n\nimpl ObjectValue {\n\n pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S!['{']) }\n\n pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S!['}']) }\n\n pub fn object_fields(&self) -> AstChildren<ObjectField> { support::children(&self.syntax) }\n\n}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct ObjectField {\n\n pub(crate) syntax: SyntaxNode,\n\n}\n\nimpl ObjectField {\n\n pub fn name(&self) -> Option<Name> { support::child(&self.syntax) }\n\n pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S![:]) }\n\n pub fn value(&self) -> Option<Value> { support::child(&self.syntax) }\n\n}\n", "file_path": "crates/apollo-parser/src/ast/generated/nodes.rs", "rank": 86, "score": 13.084497169587149 }, { "content": "}\n\nimpl SelectionSet 
{\n\n pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S!['{']) }\n\n pub fn selections(&self) -> AstChildren<Selection> { support::children(&self.syntax) }\n\n pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S!['}']) }\n\n}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct Field {\n\n pub(crate) syntax: SyntaxNode,\n\n}\n\nimpl Field {\n\n pub fn alias(&self) -> Option<Alias> { support::child(&self.syntax) }\n\n pub fn name(&self) -> Option<Name> { support::child(&self.syntax) }\n\n pub fn arguments(&self) -> Option<Arguments> { support::child(&self.syntax) }\n\n pub fn directives(&self) -> Option<Directives> { support::child(&self.syntax) }\n\n pub fn selection_set(&self) -> Option<SelectionSet> { support::child(&self.syntax) }\n\n}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct FragmentSpread {\n\n pub(crate) syntax: SyntaxNode,\n", "file_path": "crates/apollo-parser/src/ast/generated/nodes.rs", "rank": 87, "score": 13.076789708840991 }, { "content": " Value::NullValue(it) => it.syntax(),\n\n Value::EnumValue(it) => it.syntax(),\n\n Value::ListValue(it) => it.syntax(),\n\n Value::ObjectValue(it) => it.syntax(),\n\n }\n\n }\n\n}\n\nimpl From<NamedType> for Type {\n\n fn from(node: NamedType) -> Type { Type::NamedType(node) }\n\n}\n\nimpl From<ListType> for Type {\n\n fn from(node: ListType) -> Type { Type::ListType(node) }\n\n}\n\nimpl From<NonNullType> for Type {\n\n fn from(node: NonNullType) -> Type { Type::NonNullType(node) }\n\n}\n\nimpl AstNode for Type {\n\n fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, NAMED_TYPE | LIST_TYPE | NON_NULL_TYPE) }\n\n fn cast(syntax: SyntaxNode) -> Option<Self> {\n\n let res = match syntax.kind() {\n", "file_path": "crates/apollo-parser/src/ast/generated/nodes.rs", "rank": 88, "score": 13.038061807745112 }, { "content": "///\n\n/// assert_eq!(non_null.to_string(), \"[String]!\");\n\n/// ```\n\n#[derive(Debug, PartialEq, Clone)]\n\npub enum Type_ {\n\n /// The Non-Null field type.\n\n NonNull {\n\n /// Null inner type.\n\n ty: Box<Type_>,\n\n },\n\n /// The List field type.\n\n List {\n\n /// List inner type.\n\n ty: Box<Type_>,\n\n },\n\n /// The Named field type.\n\n NamedType {\n\n /// NamedType type.\n\n name: String,\n\n },\n", "file_path": "crates/apollo-encoder/src/field_value.rs", "rank": 89, "score": 12.884857985715907 }, { "content": " Boolean(bool),\n\n /// Null value example: `null`\n\n Null,\n\n /// Enum value example: `\"VARIANT_EXAMPLE\"`\n\n Enum(String),\n\n /// List value example: `[1, 2, 3]`\n\n List(Vec<Value>),\n\n /// Object value example: `{ first: 1, second: 2 }`\n\n Object(Vec<(String, Value)>),\n\n}\n\n\n\nimpl fmt::Display for Value {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self {\n\n Self::Variable(v) => write!(f, \"${v}\"),\n\n Self::Int(i) => write!(f, \"{i}\"),\n\n Self::Float(fl) => write!(f, \"{fl}\"),\n\n Self::String(s) => {\n\n if s.contains('\"') | s.contains('\\n') | s.contains('\\r') {\n\n write!(f, r#\"\"\"\"{s}\"\"\"\"#)\n", "file_path": "crates/apollo-encoder/src/value.rs", "rank": 90, "score": 12.75108715501701 }, { "content": " \"FIELD_DEFINITION\",\n\n \"ARGUMENTS_DEFINITION\",\n\n \"UNION_MEMBER_TYPES\",\n\n \"ENUM_VALUES_DEFINITION\",\n\n \"ENUM_VALUE_DEFINITION\",\n\n \"INPUT_FIELDS_DEFINITION\",\n\n \"INPUT_VALUE_DEFINITION\",\n\n \"DIRECTIVE_LOCATIONS\",\n\n \"DIRECTIVE_LOCATION\",\n\n \"EXECUTABLE_DIRECTIVE_LOCATION\",\n\n \"TYPE_SYSTEM_DIRECTIVE_LOCATION\",\n\n 
],\n\n};\n\n\n\n// pub(crate) tokens is actually used once the code is generated.\n\n#[allow(dead_code)]\n\n#[derive(Default, Debug)]\n\npub(crate) struct AstSrc {\n\n pub(crate) tokens: Vec<String>,\n\n pub(crate) nodes: Vec<AstNodeSrc>,\n", "file_path": "xtask/src/ast_src.rs", "rank": 91, "score": 12.675504314282843 }, { "content": " __LAST,\n\n }\n\n use self::SyntaxKind::*;\n\n\n\n impl SyntaxKind {\n\n pub fn is_keyword(self) -> bool {\n\n matches!(self, #(#all_keywords)|*)\n\n }\n\n\n\n pub fn is_punct(self) -> bool {\n\n matches!(self, #(#punctuation)|*)\n\n }\n\n\n\n pub fn is_literal(self) -> bool {\n\n matches!(self, #(#literals)|*)\n\n }\n\n\n\n pub fn from_keyword(ident: &str) -> Option<SyntaxKind> {\n\n let kw = match ident {\n\n #(#full_keywords_values => #full_keywords,)*\n", "file_path": "xtask/src/codegen/gen_syntax_kinds.rs", "rank": 92, "score": 12.643665122579353 }, { "content": "pub mod syntax_kind;\n\nuse syntax_kind::SyntaxKind;\n\n\n\nimpl From<u16> for SyntaxKind {\n\n #[inline]\n\n fn from(d: u16) -> SyntaxKind {\n\n assert!(d <= (SyntaxKind::__LAST as u16));\n\n unsafe { std::mem::transmute::<u16, SyntaxKind>(d) }\n\n }\n\n}\n\n\n\nimpl From<SyntaxKind> for u16 {\n\n #[inline]\n\n fn from(k: SyntaxKind) -> u16 {\n\n k as u16\n\n }\n\n}\n\n\n\nimpl SyntaxKind {\n\n #[inline]\n\n pub fn is_trivia(self) -> bool {\n\n matches!(self, SyntaxKind::WHITESPACE | SyntaxKind::COMMENT)\n\n }\n\n}\n", "file_path": "crates/apollo-parser/src/parser/generated/mod.rs", "rank": 93, "score": 12.57035155526907 }, { "content": "impl SelectionSet {\n\n /// Create an instance of SelectionSet\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n\n\n /// Create an instance of SelectionSet given its selections\n\n pub fn with_selections(selections: Vec<Selection>) -> Self {\n\n Self { selections }\n\n }\n\n\n\n /// Add a selection in the SelectionSet\n\n pub fn selection(&mut self, selection: Selection) {\n\n self.selections.push(selection);\n\n }\n\n\n\n /// Should be used everywhere in this crate isntead of the Display implementation\n\n /// Display implementation is only useful as a public api\n\n pub(crate) fn format_with_indent(&self, mut indent_level: usize) -> String {\n\n let mut text = String::from(\"{\\n\");\n", "file_path": "crates/apollo-encoder/src/selection_set.rs", "rank": 94, "score": 12.491054997345191 }, { "content": "\n\n/// A wrapper around the SyntaxTreeBuilder used to self-close nodes.\n\n///\n\n/// When the NodeGuard goes out of scope, it automatically runs `finish_node()`\n\n/// on the SyntaxTreeBuilder. 
This ensures that nodes are not forgotten to be\n\n/// closed.\n\n#[must_use]\n\npub(crate) struct NodeGuard {\n\n builder: Rc<RefCell<SyntaxTreeBuilder>>,\n\n}\n\n\n\nimpl NodeGuard {\n\n fn new(builder: Rc<RefCell<SyntaxTreeBuilder>>) -> Self {\n\n Self { builder }\n\n }\n\n\n\n pub(crate) fn finish_node(self) {\n\n drop(self);\n\n }\n\n}\n\n\n\nimpl Drop for NodeGuard {\n\n fn drop(&mut self) {\n\n self.builder.borrow_mut().finish_node();\n\n }\n\n}\n", "file_path": "crates/apollo-parser/src/parser/mod.rs", "rank": 95, "score": 12.458205963665565 }, { "content": " pub fn new(name: String) -> Self {\n\n Self { name }\n\n }\n\n}\n\n\n\nimpl fmt::Display for TypeCondition {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"on {}\", self.name)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use indoc::indoc;\n\n\n\n use crate::{field::Field, Argument, Selection, Value};\n\n\n\n use super::*;\n\n\n\n #[test]\n", "file_path": "crates/apollo-encoder/src/fragment.rs", "rank": 96, "score": 12.451762260410886 }, { "content": "/// assert_eq!(argument.to_string(), r#\"argName: \"value\"\"#);\n\n/// ```\n\n#[derive(Debug, PartialEq, Clone)]\n\npub struct Argument {\n\n name: String,\n\n value: Value,\n\n}\n\n\n\nimpl Argument {\n\n /// Create a new instance of Argument.\n\n pub fn new(name: String, value: Value) -> Self {\n\n Self { name, value }\n\n }\n\n}\n\n\n\nimpl fmt::Display for Argument {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{}: {}\", self.name, self.value)\n\n }\n\n}\n", "file_path": "crates/apollo-encoder/src/argument.rs", "rank": 97, "score": 12.421953000534964 }, { "content": "#[derive(Debug, PartialEq, Clone)]\n\npub struct Directive {\n\n name: String,\n\n args: Vec<Argument>,\n\n}\n\n\n\nimpl Directive {\n\n /// Create an instance of Directive\n\n pub fn new(name: String) -> Self {\n\n Self {\n\n name,\n\n args: Vec::new(),\n\n }\n\n }\n\n\n\n /// Add an argument to the directive\n\n pub fn arg(&mut self, arg: Argument) {\n\n self.args.push(arg);\n\n }\n\n}\n", "file_path": "crates/apollo-encoder/src/directive.rs", "rank": 98, "score": 12.412591181478698 }, { "content": " pub fn name(&self) -> Option<Name> { support::child(&self.syntax) }\n\n pub fn directives(&self) -> Option<Directives> { support::child(&self.syntax) }\n\n pub fn input_fields_definition(&self) -> Option<InputFieldsDefinition> {\n\n support::child(&self.syntax)\n\n }\n\n}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct OperationType {\n\n pub(crate) syntax: SyntaxNode,\n\n}\n\nimpl OperationType {\n\n pub fn query_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, S![query]) }\n\n pub fn mutation_token(&self) -> Option<SyntaxToken> {\n\n support::token(&self.syntax, S![mutation])\n\n }\n\n pub fn subscription_token(&self) -> Option<SyntaxToken> {\n\n support::token(&self.syntax, S![subscription])\n\n }\n\n}\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n", "file_path": "crates/apollo-parser/src/ast/generated/nodes.rs", "rank": 99, "score": 12.250780920325806 } ]
Rust
crates/store/src/state/mem_pool_state_db.rs
driftluo/godwoken
667a4bc435a9894b131cd804daf6403fd5cd4026
use crate::mem_pool_store::{ Value, MEM_POOL_COL_DATA, MEM_POOL_COL_SCRIPT, MEM_POOL_COL_SCRIPT_PREFIX, }; use crate::smt::mem_pool_smt_store::MemPoolSMTStore; use crate::{traits::KVStore, transaction::StoreTransaction}; use anyhow::Result; use gw_common::{error::Error as StateError, smt::SMT, state::State, H256}; use gw_db::error::Error; use gw_db::schema::{COLUMN_DATA, COLUMN_SCRIPT, COLUMN_SCRIPT_PREFIX}; use gw_traits::CodeStore; use gw_types::{ bytes::Bytes, packed::{self, AccountMerkleState}, prelude::*, }; use super::state_tracker::StateTracker; pub struct MemPoolStateTree<'a> { tree: SMT<MemPoolSMTStore<'a>>, account_count: u32, tracker: StateTracker, } impl<'a> MemPoolStateTree<'a> { pub fn new(tree: SMT<MemPoolSMTStore<'a>>, account_count: u32) -> Self { MemPoolStateTree { tree, account_count, tracker: Default::default(), } } pub fn tracker_mut(&mut self) -> &mut StateTracker { &mut self.tracker } pub fn get_merkle_state(&self) -> AccountMerkleState { AccountMerkleState::new_builder() .merkle_root(self.tree.root().pack()) .count(self.account_count.pack()) .build() } pub fn submit_tree_to_mem_block(&self) -> Result<(), Error> { self.db() .set_mem_block_account_smt_root(*self.tree.root()) .expect("set smt root"); self.db() .set_mem_block_account_count(self.account_count) .expect("set smt root"); Ok(()) } fn db(&self) -> &StoreTransaction { self.tree.store().inner_store() } } impl<'a> State for MemPoolStateTree<'a> { fn get_raw(&self, key: &H256) -> Result<H256, StateError> { self.tracker.touch_key(key); let v = self.tree.get(key)?; Ok(v) } fn update_raw(&mut self, key: H256, value: H256) -> Result<(), StateError> { self.tracker.touch_key(&key); self.tree.update(key, value)?; Ok(()) } fn get_account_count(&self) -> Result<u32, StateError> { Ok(self.account_count) } fn set_account_count(&mut self, count: u32) -> Result<(), StateError> { self.account_count = count; Ok(()) } fn calculate_root(&self) -> Result<H256, StateError> { let root = self.tree.root(); Ok(*root) } } impl<'a> CodeStore for MemPoolStateTree<'a> { fn insert_script(&mut self, script_hash: H256, script: packed::Script) { let mem_pool_store = self.db().mem_pool.load(); mem_pool_store.insert( MEM_POOL_COL_SCRIPT, script_hash.as_slice().to_vec().into(), Value::Exist(script.as_slice().to_vec().into()), ); mem_pool_store.insert( MEM_POOL_COL_SCRIPT_PREFIX, script_hash.as_slice()[..20].to_vec().into(), Value::Exist(script_hash.as_slice().to_vec().into()), ); } fn get_script(&self, script_hash: &H256) -> Option<packed::Script> { let mem_pool_store = self.db().mem_pool.load(); mem_pool_store .get(MEM_POOL_COL_SCRIPT, script_hash.as_slice()) .and_then(|v| v.to_opt()) .or_else(|| { self.db() .get(COLUMN_SCRIPT, script_hash.as_slice()) .map(Into::into) }) .map(|slice| packed::ScriptReader::from_slice_should_be_ok(slice.as_ref()).to_entity()) } fn get_script_hash_by_short_address(&self, script_hash_prefix: &[u8]) -> Option<H256> { let mem_pool_store = self.db().mem_pool.load(); match mem_pool_store .get(MEM_POOL_COL_SCRIPT_PREFIX, script_hash_prefix) .and_then(|v| v.to_opt()) .or_else(|| { self.db() .get(COLUMN_SCRIPT_PREFIX, script_hash_prefix) .map(Into::into) }) { Some(slice) => { let mut hash = [0u8; 32]; hash.copy_from_slice(slice.as_ref()); Some(hash.into()) } None => None, } } fn insert_data(&mut self, data_hash: H256, code: Bytes) { let mem_pool_store = self.db().mem_pool.load(); mem_pool_store.insert( MEM_POOL_COL_DATA, data_hash.as_slice().to_vec().into(), Value::Exist(code), ); } fn get_data(&self, data_hash: &H256) -> 
Option<Bytes> { let mem_pool_store = self.db().mem_pool.load(); mem_pool_store .get(MEM_POOL_COL_DATA, data_hash.as_slice()) .and_then(|v| v.to_opt()) .or_else(|| { self.db() .get(COLUMN_DATA, data_hash.as_slice()) .map(Into::into) }) .map(|slice| Bytes::from(slice.to_vec())) } }
use crate::mem_pool_store::{ Value, MEM_POOL_COL_DATA, MEM_POOL_COL_SCRIPT, MEM_POOL_COL_SCRIPT_PREFIX, }; use crate::smt::mem_pool_smt_store::MemPoolSMTStore; use crate::{traits::KVStore, transaction::StoreTransaction}; use anyhow::Result; use gw_common::{error::Error as StateError, smt::SMT, state::State, H256}; use gw_db::error::Error; use gw_db::schema::{COLUMN_DATA, COLUMN_SCRIPT, COLUMN_SCRIPT_PREFIX}; use gw_traits::CodeStore; use gw_types::{ bytes::Bytes, packed::{self, AccountMerkleState}, prelude::*, }; use super::state_tracker::StateTracker; pub struct MemPoolStateTree<'a> { tree: SMT<MemPoolSMTStore<'a>>, account_count: u32, tracker: StateTracker, } impl<'a> MemPoolStateTree<'a> { pub fn new(tree: SMT<MemPoolSMTStore<'a>>, account_count: u32) -> Self { MemPoolStateTree { tree, account_count, tracker: Default::default(), } } pub fn tracker_mut(&mut self) -> &mut StateTracker { &mut self.tracker } pub fn get_merkle_state(&self) -> AccountMerkleState { AccountMerkleState::new_builder() .merkle_root(self.tree.root().pack()) .count(self.account_count.pack()) .build() } pub fn submit_tree_to_mem_block(&self) -> Result<(), Error> { self.db() .set_mem_block_account_smt_root(*self.tree.root()) .expect("set smt root"); self.db() .set_mem_block_account_count(self.account_count) .expect("set smt root"); Ok(()) } fn db(&self) -> &StoreTransaction { self.tree.store().inner_store() } } impl<'a> State for MemPoolStateTree<'a> { fn get_raw(&self, key: &H256) -> Result<H256, StateError> { self.tracker.touch_key(key); let v = self.tree.get(key)?; Ok(v) } fn update_raw(&mut self, key: H256, value: H256) -> Result<(), StateError> { self.tracker.touch_key(&key); self.tree.update(key, value)?; Ok(()) } fn get_account_count(&self) -> Result<u32, StateError> { Ok(self.account_count) } fn set_account_count(&mut self, count: u32) -> Result<(), StateError> { self.account_count = count; Ok(()) } fn calculate_root(&self) -> Result<H256, StateError> { let root = self.tree.root(); Ok(*root) } } impl<'a> CodeStore for MemPoolStateTree<'a> { fn insert_script(&mut self, script_hash: H256, script: packed::Script) { let mem_pool_store = self.db().mem_pool.load(); mem_pool_store.insert( MEM_POOL_COL_SCRIPT, script_hash.as_slice().to_vec().into(), Value::Exist(script.as_slice().to_vec().into()), ); mem_pool_store.insert( MEM_POOL_COL_SCRIPT_PREFIX, script_hash.as_slice()[..20].to_vec().into(), Value::Exist(script_hash.as_slice().to_vec().into()), ); }
fn get_script_hash_by_short_address(&self, script_hash_prefix: &[u8]) -> Option<H256> { let mem_pool_store = self.db().mem_pool.load(); match mem_pool_store .get(MEM_POOL_COL_SCRIPT_PREFIX, script_hash_prefix) .and_then(|v| v.to_opt()) .or_else(|| { self.db() .get(COLUMN_SCRIPT_PREFIX, script_hash_prefix) .map(Into::into) }) { Some(slice) => { let mut hash = [0u8; 32]; hash.copy_from_slice(slice.as_ref()); Some(hash.into()) } None => None, } } fn insert_data(&mut self, data_hash: H256, code: Bytes) { let mem_pool_store = self.db().mem_pool.load(); mem_pool_store.insert( MEM_POOL_COL_DATA, data_hash.as_slice().to_vec().into(), Value::Exist(code), ); } fn get_data(&self, data_hash: &H256) -> Option<Bytes> { let mem_pool_store = self.db().mem_pool.load(); mem_pool_store .get(MEM_POOL_COL_DATA, data_hash.as_slice()) .and_then(|v| v.to_opt()) .or_else(|| { self.db() .get(COLUMN_DATA, data_hash.as_slice()) .map(Into::into) }) .map(|slice| Bytes::from(slice.to_vec())) } }
fn get_script(&self, script_hash: &H256) -> Option<packed::Script> {
    let mem_pool_store = self.db().mem_pool.load();
    mem_pool_store
        .get(MEM_POOL_COL_SCRIPT, script_hash.as_slice())
        .and_then(|v| v.to_opt())
        .or_else(|| {
            self.db()
                .get(COLUMN_SCRIPT, script_hash.as_slice())
                .map(Into::into)
        })
        .map(|slice| packed::ScriptReader::from_slice_should_be_ok(slice.as_ref()).to_entity())
}
function_block-full_function
[ { "content": "// Calculate compacted account root\n\npub fn calculate_state_checkpoint(root: &H256, count: u32) -> H256 {\n\n let mut hash = [0u8; 32];\n\n let mut hasher = new_blake2b();\n\n hasher.update(root.as_slice());\n\n hasher.update(&count.to_le_bytes());\n\n hasher.finalize(&mut hash);\n\n hash.into()\n\n}\n\n\n", "file_path": "crates/common/src/merkle_utils.rs", "rank": 0, "score": 485035.1151312861 }, { "content": "pub fn build_account_key(id: u32, key: &[u8]) -> H256 {\n\n let mut raw_key = [0u8; 32];\n\n let mut hasher = new_blake2b();\n\n hasher.update(&id.to_le_bytes());\n\n hasher.update(&[GW_ACCOUNT_KV_TYPE]);\n\n hasher.update(key);\n\n hasher.finalize(&mut raw_key);\n\n raw_key.into()\n\n}\n\n\n", "file_path": "crates/common/src/state.rs", "rank": 1, "score": 453369.5525780851 }, { "content": "pub fn build_script_hash_to_account_id_key(script_hash: &[u8]) -> H256 {\n\n let mut key: [u8; 32] = H256::zero().into();\n\n let mut hasher = new_blake2b();\n\n hasher.update(&GW_NON_ACCOUNT_PLACEHOLDER);\n\n hasher.update(&[GW_SCRIPT_HASH_TO_ID_TYPE]);\n\n hasher.update(script_hash);\n\n hasher.finalize(&mut key);\n\n key.into()\n\n}\n\n\n", "file_path": "crates/common/src/state.rs", "rank": 2, "score": 429969.20202992077 }, { "content": "pub fn build_account_field_key(id: u32, type_: u8) -> H256 {\n\n let mut key: [u8; 32] = H256::zero().into();\n\n key[..size_of::<u32>()].copy_from_slice(&id.to_le_bytes());\n\n key[size_of::<u32>()] = type_;\n\n key.into()\n\n}\n\n\n", "file_path": "crates/common/src/state.rs", "rank": 3, "score": 407835.52968074795 }, { "content": "/// Compute merkle root from vectors\n\npub fn calculate_merkle_root(leaves: Vec<H256>) -> Result<H256, Error> {\n\n if leaves.is_empty() {\n\n return Ok(H256::zero());\n\n }\n\n let mut tree = SMT::<DefaultStore<H256>>::default();\n\n for (i, leaf) in leaves.into_iter().enumerate() {\n\n tree.update(H256::from_u32(i as u32), leaf)?;\n\n }\n\n Ok(*tree.root())\n\n}\n\n\n\npub struct MergeH256;\n\n\n\nimpl Merge for MergeH256 {\n\n type Item = H256;\n\n fn merge(left: &Self::Item, right: &Self::Item) -> Self::Item {\n\n let mut hash = [0u8; 32];\n\n let mut blake2b = new_blake2b();\n\n\n\n blake2b.update(left.as_slice());\n\n blake2b.update(right.as_slice());\n\n blake2b.finalize(&mut hash);\n\n hash.into()\n\n }\n\n}\n\n\n\npub type CBMT = ExCBMT<H256, MergeH256>;\n\npub type CBMTMerkleProof = ExMerkleProof<H256, MergeH256>;\n\n\n", "file_path": "crates/common/src/merkle_utils.rs", "rank": 4, "score": 402468.53314964275 }, { "content": "pub fn calculate_ckb_merkle_root(leaves: Vec<H256>) -> Result<H256, Error> {\n\n let tree = CBMT::build_merkle_tree(&leaves);\n\n Ok(tree.root())\n\n}\n\n\n", "file_path": "crates/common/src/merkle_utils.rs", "rank": 5, "score": 397741.5684583661 }, { "content": "fn get_script(state: &MemStateTree<'_>, account_id: u32) -> Result<Script> {\n\n let script_hash = state.get_script_hash(account_id)?;\n\n state\n\n .get_script(&script_hash)\n\n .ok_or_else(|| anyhow!(\"tx script not found\"))\n\n}\n\n\n", "file_path": "crates/challenge/src/offchain/mock_block.rs", "rank": 6, "score": 396298.097652869 }, { "content": "pub fn build_short_script_hash_to_script_hash_key(short_script_hash: &[u8]) -> H256 {\n\n let mut key: [u8; 32] = H256::zero().into();\n\n let mut hasher = new_blake2b();\n\n hasher.update(&GW_NON_ACCOUNT_PLACEHOLDER);\n\n hasher.update(&[GW_SHORT_SCRIPT_HASH_TO_SCRIPT_HASH_TYPE]);\n\n let len = short_script_hash.len() as u32;\n\n hasher.update(&len.to_le_bytes());\n\n 
hasher.update(short_script_hash);\n\n hasher.finalize(&mut key);\n\n key.into()\n\n}\n\n\n", "file_path": "crates/common/src/state.rs", "rank": 7, "score": 396208.7319019262 }, { "content": "pub fn build_data_hash_key(data_hash: &[u8]) -> H256 {\n\n let mut key: [u8; 32] = H256::zero().into();\n\n let mut hasher = new_blake2b();\n\n hasher.update(&GW_NON_ACCOUNT_PLACEHOLDER);\n\n hasher.update(&[GW_DATA_HASH_TYPE]);\n\n hasher.update(data_hash);\n\n hasher.finalize(&mut key);\n\n key.into()\n\n}\n\n\n", "file_path": "crates/common/src/state.rs", "rank": 8, "score": 353954.16571081313 }, { "content": "/// NOTE: the length `20` is a hard-coded value, may be `16` for some LockAlgorithm.\n\npub fn to_short_address(script_hash: &H256) -> &[u8] {\n\n &script_hash.as_slice()[0..20]\n\n}\n\n\n\npub struct PrepareWithdrawalRecord {\n\n pub withdrawal_lock_hash: H256,\n\n pub amount: u128,\n\n pub block_number: u64,\n\n}\n\n\n", "file_path": "crates/common/src/state.rs", "rank": 9, "score": 348316.8686994063 }, { "content": "pub fn build_sudt_key(key_flag: u32, short_address: &[u8]) -> Vec<u8> {\n\n let mut key = Vec::with_capacity(short_address.len() + 8);\n\n key.extend(&key_flag.to_le_bytes());\n\n key.extend(&(short_address.len() as u32).to_le_bytes());\n\n key.extend(short_address);\n\n key\n\n}\n\n\n", "file_path": "crates/common/src/state.rs", "rank": 10, "score": 337564.84370561654 }, { "content": "pub fn build_l2_sudt_script(rollup_context: &RollupContext, l1_sudt_script_hash: &H256) -> Script {\n\n let args = {\n\n let mut args = Vec::with_capacity(64);\n\n args.extend(rollup_context.rollup_script_hash.as_slice());\n\n args.extend(l1_sudt_script_hash.as_slice());\n\n Bytes::from(args)\n\n };\n\n Script::new_builder()\n\n .args(args.pack())\n\n .code_hash(\n\n rollup_context\n\n .rollup_config\n\n .l2_sudt_validator_script_type_hash(),\n\n )\n\n .hash_type(ScriptHashType::Type.into())\n\n .build()\n\n}\n", "file_path": "crates/generator/src/sudt.rs", "rank": 11, "score": 321973.4643358217 }, { "content": "fn build_merkle_proof(leaves: &[H256], indices: &[u32]) -> Result<CKBMerkleProof> {\n\n let proof = CBMT::build_merkle_proof(leaves, indices)\n\n .ok_or_else(|| anyhow!(\"Build merkle proof failed.\"))?;\n\n let proof = CKBMerkleProof::new_builder()\n\n .lemmas(proof.lemmas().pack())\n\n .indices(proof.indices().pack())\n\n .build();\n\n Ok(proof)\n\n}\n\n\n", "file_path": "crates/challenge/src/context.rs", "rank": 12, "score": 316674.09586659283 }, { "content": "fn build_cbmt_merkle_proof(leaves: &[H256], leaf_indices: &[u32]) -> Result<CKBMerkleProof> {\n\n let proof = CBMT::build_merkle_proof(leaves, leaf_indices)\n\n .ok_or_else(|| anyhow!(\"build cbmt proof fail\"))?;\n\n\n\n Ok(CKBMerkleProof::new_builder()\n\n .lemmas(proof.lemmas().pack())\n\n .indices(proof.indices().pack())\n\n .build())\n\n}\n", "file_path": "crates/challenge/src/offchain/mock_block.rs", "rank": 13, "score": 301450.1915512554 }, { "content": "pub fn l2_script_hash_to_short_address(script_hash: &H256) -> GwBytes {\n\n let short_address = &script_hash.as_bytes()[..20];\n\n\n\n GwBytes::from(short_address.to_vec())\n\n}\n\n\n", "file_path": "crates/tools/src/account.rs", "rank": 14, "score": 301328.42150877044 }, { "content": "pub fn global_state_from_slice(slice: &[u8]) -> Result<GlobalState, VerificationError> {\n\n match GlobalState::from_slice(slice) {\n\n Ok(state) => Ok(state),\n\n Err(_) => GlobalStateV0::from_slice(slice).map(Into::into),\n\n }\n\n}\n\n\n\nimpl From<MemBlock> for CompactMemBlock {\n\n fn 
from(block: MemBlock) -> Self {\n\n CompactMemBlock::new_builder()\n\n .txs(block.txs())\n\n .withdrawals(block.withdrawals())\n\n .deposits(block.deposits())\n\n .build()\n\n }\n\n}\n\n\n\nimpl CompactMemBlock {\n\n pub fn from_full_compatible_slice(slice: &[u8]) -> Result<CompactMemBlock, VerificationError> {\n\n match CompactMemBlock::from_slice(slice) {\n\n Ok(block) => Ok(block),\n\n Err(_) => MemBlock::from_slice(slice).map(Into::into),\n\n }\n\n }\n\n}\n", "file_path": "crates/types/src/offchain/extension.rs", "rank": 15, "score": 299558.4403817676 }, { "content": "/// blake2b(index(u32) | item_hash)\n\npub fn ckb_merkle_leaf_hash(index: u32, item_hash: &H256) -> H256 {\n\n let mut hasher = new_blake2b();\n\n hasher.update(&index.to_le_bytes());\n\n hasher.update(item_hash.as_slice());\n\n let mut buf = [0u8; 32];\n\n hasher.finalize(&mut buf);\n\n buf.into()\n\n}\n\n\n\nmod tests {\n\n\n\n #[test]\n\n fn merkle_proof_test() {\n\n let leaves: Vec<crate::smt::H256> = vec![\n\n [0u8; 32].into(),\n\n [1u8; 32].into(),\n\n [2u8; 32].into(),\n\n [3u8; 32].into(),\n\n [4u8; 32].into(),\n\n ];\n", "file_path": "crates/common/src/merkle_utils.rs", "rank": 16, "score": 295249.68005087366 }, { "content": "fn load_data_h256<Mac: SupportMachine>(machine: &mut Mac, addr: u64) -> Result<H256, VMError> {\n\n let mut data = [0u8; 32];\n\n for (i, c) in data.iter_mut().enumerate() {\n\n *c = machine\n\n .memory_mut()\n\n .load8(&Mac::REG::from_u64(addr).overflowing_add(&Mac::REG::from_u64(i as u64)))?\n\n .to_u8();\n\n }\n\n Ok(H256::from(data))\n\n}\n\n\n", "file_path": "crates/generator/src/syscalls/mod.rs", "rank": 17, "score": 291639.8329903508 }, { "content": "pub fn eth_sign(msg: &H256, privkey: H256) -> Result<[u8; 65], String> {\n\n let mut signature = sign_message(msg, privkey)?;\n\n let v = &mut signature[64];\n\n if *v >= 27 {\n\n *v -= 27;\n\n }\n\n Ok(signature)\n\n}\n\n\n", "file_path": "crates/tools/src/account.rs", "rank": 18, "score": 284997.1132239585 }, { "content": "pub fn read_privkey(privkey_path: &Path) -> Result<H256, String> {\n\n let privkey_string = fs::read_to_string(privkey_path)\n\n .map_err(|err| err.to_string())?\n\n .split_whitespace()\n\n .next()\n\n .map(ToOwned::to_owned)\n\n .ok_or_else(|| \"Privkey file is empty\".to_string())?;\n\n let privkey = H256::from_str(privkey_string.trim().trim_start_matches(\"0x\"))\n\n .map_err(|err| err.to_string())?;\n\n Ok(privkey)\n\n}\n", "file_path": "crates/tools/src/account.rs", "rank": 19, "score": 279866.8180628284 }, { "content": "/// Build genesis block\n\npub fn build_genesis(config: &GenesisConfig, secp_data: Bytes) -> Result<GenesisWithGlobalState> {\n\n let store = Store::open_tmp()?;\n\n let db = store.begin_transaction();\n\n build_genesis_from_store(db, config, secp_data)\n\n .map(|(_db, genesis_with_state)| genesis_with_state)\n\n}\n\n\n\npub struct GenesisWithGlobalState {\n\n pub genesis: L2Block,\n\n pub global_state: GlobalState,\n\n}\n\n\n", "file_path": "crates/generator/src/genesis.rs", "rank": 20, "score": 277606.475497161 }, { "content": "pub fn privkey_to_eth_address(privkey: &H256) -> Result<CKBBytes, String> {\n\n let privkey = secp256k1::SecretKey::from_slice(privkey.as_bytes())\n\n .map_err(|err| format!(\"Invalid secp256k1 secret key format, error: {}\", err))?;\n\n let pubkey = secp256k1::PublicKey::from_secret_key(&SECP256K1, &privkey);\n\n let pubkey_hash = {\n\n let mut hasher = Keccak256::new();\n\n hasher.update(&pubkey.serialize_uncompressed()[1..]);\n\n let buf = hasher.finalize();\n\n let 
mut pubkey_hash = [0u8; 20];\n\n pubkey_hash.copy_from_slice(&buf[12..]);\n\n pubkey_hash\n\n };\n\n let s = CKBBytes::from(pubkey_hash.to_vec());\n\n Ok(s)\n\n}\n\n\n", "file_path": "crates/tools/src/account.rs", "rank": 21, "score": 276396.2855375266 }, { "content": "pub fn branch_key_to_vec(key: &BranchKey) -> Vec<u8> {\n\n let mut ret = Vec::with_capacity(33);\n\n ret.extend_from_slice(key.node_key.as_slice());\n\n ret.extend_from_slice(&[key.height]);\n\n ret\n\n}\n\n\n", "file_path": "crates/store/src/smt/serde.rs", "rank": 22, "score": 275807.1630050981 }, { "content": "pub fn store_data<Mac: SupportMachine>(machine: &mut Mac, data: &[u8]) -> Result<u64, VMError> {\n\n let addr = machine.registers()[A0].to_u64();\n\n let size_addr = machine.registers()[A1].clone();\n\n let data_len = data.len() as u64;\n\n let offset = cmp::min(data_len, machine.registers()[A2].to_u64());\n\n\n\n let size = machine.memory_mut().load64(&size_addr)?.to_u64();\n\n let full_size = data_len - offset;\n\n let real_size = cmp::min(size, full_size);\n\n machine\n\n .memory_mut()\n\n .store64(&size_addr, &Mac::REG::from_u64(full_size))?;\n\n machine\n\n .memory_mut()\n\n .store_bytes(addr, &data[offset as usize..(offset + real_size) as usize])?;\n\n Ok(real_size)\n\n}\n\n\n\nimpl<'a, S: State, C: ChainView, Mac: SupportMachine> Syscalls<Mac> for L2Syscalls<'a, S, C> {\n\n fn initialize(&mut self, _machine: &mut Mac) -> Result<(), VMError> {\n", "file_path": "crates/generator/src/syscalls/mod.rs", "rank": 23, "score": 273301.0571175882 }, { "content": "pub fn bench_ckb_transfer(c: &mut Criterion) {\n\n let config = StoreConfig {\n\n path: \"./smt_data/db\".parse().unwrap(),\n\n options_file: Some(\"./smt_data/db.toml\".parse().unwrap()),\n\n cache_size: Some(1073741824),\n\n ..Default::default()\n\n };\n\n let store = Store::new(RocksDB::open(&config, COLUMNS));\n\n let ee = BenchExecutionEnvironment::new_with_accounts(store, 7000);\n\n\n\n let mut group = c.benchmark_group(\"ckb_transfer\");\n\n for txs in (500..=5000).step_by(500) {\n\n group.sample_size(10);\n\n group.throughput(Throughput::Elements(txs));\n\n group.bench_with_input(BenchmarkId::from_parameter(txs), &txs, |b, txs| {\n\n b.iter(|| {\n\n ee.accounts_transfer(7000, *txs as usize);\n\n });\n\n });\n\n }\n\n group.finish();\n\n}\n\n\n", "file_path": "crates/benches/benches/benchmarks/smt.rs", "rank": 24, "score": 272313.6375328945 }, { "content": "pub fn account_script_hash_to_eth_address(account_script_hash: H256) -> [u8; 20] {\n\n let mut data = [0u8; 20];\n\n data.copy_from_slice(&account_script_hash.as_slice()[0..20]);\n\n data\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub enum GwLog {\n\n SudtTransfer {\n\n sudt_id: u32,\n\n from_address: [u8; 20],\n\n to_address: [u8; 20],\n\n amount: u128,\n\n },\n\n SudtPayFee {\n\n sudt_id: u32,\n\n from_address: [u8; 20],\n\n block_producer_address: [u8; 20],\n\n amount: u128,\n\n },\n", "file_path": "crates/web3-indexer/src/helper.rs", "rank": 25, "score": 262706.76519917394 }, { "content": "fn build_block(state: &impl State, block_number: u64, prev_txs_state_checkpoint: H256) -> L2Block {\n\n let post_state = AccountMerkleState::new_builder()\n\n .merkle_root(state.calculate_root().unwrap().pack())\n\n .count(state.get_account_count().unwrap().pack())\n\n .build();\n\n L2Block::new_builder()\n\n .raw(\n\n RawL2Block::new_builder()\n\n .number(block_number.pack())\n\n .post_account(post_state)\n\n .submit_transactions(\n\n SubmitTransactions::new_builder()\n\n 
.prev_state_checkpoint(prev_txs_state_checkpoint.pack())\n\n .build(),\n\n )\n\n .build(),\n\n )\n\n .build()\n\n}\n\n\n", "file_path": "crates/store/src/tests/state_db.rs", "rank": 26, "score": 258816.46524483553 }, { "content": "pub fn build_finalized_custodian_lock(rollup_context: &RollupContext) -> Script {\n\n let rollup_type_hash = rollup_context.rollup_script_hash.as_slice().iter();\n\n let custodian_lock_args = CustodianLockArgs::default();\n\n\n\n let args: Bytes = rollup_type_hash\n\n .chain(custodian_lock_args.as_slice().iter())\n\n .cloned()\n\n .collect();\n\n\n\n Script::new_builder()\n\n .code_hash(rollup_context.rollup_config.custodian_script_type_hash())\n\n .hash_type(ScriptHashType::Type.into())\n\n .args(args.pack())\n\n .build()\n\n}\n\n\n", "file_path": "crates/mem-pool/src/custodian.rs", "rank": 27, "score": 258081.33841956046 }, { "content": "pub fn to_h256(input: &str) -> Result<[u8; 32]> {\n\n let input = hex::decode(input.trim_start_matches(\"0x\"))?;\n\n if input.len() != 32 {\n\n bail!(\"invalid input len: {}\", input.len());\n\n }\n\n let mut buf = [0u8; 32];\n\n buf.copy_from_slice(&input);\n\n Ok(buf)\n\n}\n", "file_path": "crates/tools/src/utils/cli_args.rs", "rank": 28, "score": 257511.15330599967 }, { "content": "// Get max mature block number\n\npub fn get_max_mature_number(rpc_client: &mut HttpRpcClient) -> Result<u64, String> {\n\n let tip_epoch = rpc_client\n\n .get_tip_header()\n\n .map(|header| EpochNumberWithFraction::from_full_value(header.inner.epoch.0))?;\n\n let tip_epoch_number = tip_epoch.number();\n\n if tip_epoch_number < 4 {\n\n // No cellbase live cell is mature\n\n Ok(0)\n\n } else {\n\n let max_mature_epoch = rpc_client\n\n .get_epoch_by_number(tip_epoch_number - 4)?\n\n .ok_or_else(|| \"Can not get epoch less than current epoch number\".to_string())?;\n\n let start_number = max_mature_epoch.start_number;\n\n let length = max_mature_epoch.length;\n\n Ok(calc_max_mature_number(\n\n tip_epoch,\n\n Some((start_number, length)),\n\n CELLBASE_MATURITY,\n\n ))\n\n }\n\n}\n\n\n", "file_path": "crates/tools/src/deploy_genesis.rs", "rank": 29, "score": 255088.07889839628 }, { "content": "pub fn get_network_type(rpc_client: &mut HttpRpcClient) -> Result<NetworkType, String> {\n\n let chain_info = rpc_client.get_blockchain_info()?;\n\n NetworkType::from_raw_str(chain_info.chain.as_str())\n\n .ok_or_else(|| format!(\"Unexpected network type: {}\", chain_info.chain))\n\n}\n\n\n", "file_path": "crates/tools/src/utils/transaction.rs", "rank": 30, "score": 255088.07889839628 }, { "content": "pub fn get_balance(godwoken_rpc_url: &str, account: &str, sudt_id: u32) -> Result<(), String> {\n\n let mut godwoken_rpc_client = GodwokenRpcClient::new(godwoken_rpc_url);\n\n let short_address = parse_account_short_address(&mut godwoken_rpc_client, account)?;\n\n let addr = JsonBytes::from_bytes(short_address);\n\n let balance = godwoken_rpc_client.get_balance(addr, sudt_id)?;\n\n log::info!(\"Balance: {}\", balance);\n\n\n\n Ok(())\n\n}\n", "file_path": "crates/tools/src/get_balance.rs", "rank": 31, "score": 245059.70190225082 }, { "content": "fn random_always_success_script(rollup_script_hash: &H256) -> Script {\n\n let random_bytes: [u8; 32] = rand::random();\n\n Script::new_builder()\n\n .code_hash(ALWAYS_SUCCESS_CODE_HASH.clone().pack())\n\n .hash_type(ScriptHashType::Type.into())\n\n .args({\n\n let mut args = rollup_script_hash.as_slice().to_vec();\n\n args.extend_from_slice(&random_bytes);\n\n args.pack()\n\n })\n\n .build()\n\n}\n\n\n", "file_path": 
"crates/tests/src/tests/restore_mem_block.rs", "rank": 32, "score": 240553.38189798692 }, { "content": "fn random_always_success_script(rollup_script_hash: &H256) -> Script {\n\n let random_bytes: [u8; 32] = rand::random();\n\n Script::new_builder()\n\n .code_hash(ALWAYS_SUCCESS_CODE_HASH.clone().pack())\n\n .hash_type(ScriptHashType::Type.into())\n\n .args({\n\n let mut args = rollup_script_hash.as_slice().to_vec();\n\n args.extend_from_slice(&random_bytes);\n\n args.pack()\n\n })\n\n .build()\n\n}\n\n\n", "file_path": "crates/tests/src/tests/mem_block_repackage.rs", "rank": 33, "score": 240553.38189798692 }, { "content": "fn privkey_to_lock_hash(privkey: &H256) -> Result<H256, String> {\n\n let privkey = secp256k1::SecretKey::from_slice(privkey.as_bytes())\n\n .map_err(|err| format!(\"Invalid secp256k1 secret key format, error: {}\", err))?;\n\n let pubkey = secp256k1::PublicKey::from_secret_key(&SECP256K1, &privkey);\n\n let address_payload = AddressPayload::from_pubkey(&pubkey);\n\n\n\n let lock_hash: H256 = CKBScript::from(&address_payload)\n\n .calc_script_hash()\n\n .unpack();\n\n Ok(lock_hash)\n\n}\n\n\n", "file_path": "crates/tools/src/deposit_ckb.rs", "rank": 34, "score": 239737.156020371 }, { "content": "fn h256_to_byte32(hash: &H256) -> Result<Byte32, String> {\n\n let value = Byte32::from_slice(hash.as_bytes()).map_err(|err| err.to_string())?;\n\n Ok(value)\n\n}\n\n\n", "file_path": "crates/tools/src/withdraw.rs", "rank": 35, "score": 233497.18911979004 }, { "content": "pub fn deploy_scripts(\n\n privkey_path: &Path,\n\n ckb_rpc_url: &str,\n\n scripts_result: &BuildScriptsResult,\n\n) -> Result<ScriptsDeploymentResult, String> {\n\n if let Err(err) = run_cmd(vec![\"--version\"]) {\n\n return Err(format!(\n\n \"Please install ckb-cli (cargo install ckb-cli) first: {}\",\n\n err\n\n ));\n\n }\n\n\n\n let mut rpc_client = HttpRpcClient::new(ckb_rpc_url.to_string());\n\n let network_type = get_network_type(&mut rpc_client)?;\n\n let target_lock = packed::Script::from(scripts_result.lock.clone());\n\n let address_payload = AddressPayload::from(target_lock.clone());\n\n let target_address = Address::new(network_type, address_payload);\n\n\n\n let mut total_file_size = 0;\n\n for path in &[\n", "file_path": "crates/tools/src/deploy_scripts.rs", "rank": 36, "score": 233029.63693299546 }, { "content": "pub fn prepare_scripts(\n\n mode: ScriptsBuildMode,\n\n scripts_lock: ckb_jsonrpc_types::Script,\n\n build_config_path: &Path,\n\n build_dir: &Path,\n\n scripts_output_dir: &Path,\n\n) -> Result<BuildScriptsResult> {\n\n let scripts_build_config = read_script_build_config(build_config_path);\n\n match mode {\n\n ScriptsBuildMode::Build => {\n\n prepare_scripts_in_build_mode(&scripts_build_config, build_dir, scripts_output_dir);\n\n }\n\n ScriptsBuildMode::Copy => {\n\n prepare_scripts_in_copy_mode(scripts_build_config.prebuild_image, scripts_output_dir);\n\n }\n\n }\n\n check_scripts(scripts_output_dir, &scripts_build_config.scripts);\n\n generate_script_deploy_config(\n\n scripts_output_dir,\n\n scripts_lock,\n\n &scripts_build_config.scripts,\n\n )\n\n}\n\n\n", "file_path": "crates/tools/src/prepare_scripts.rs", "rank": 37, "score": 233029.63693299546 }, { "content": "pub fn bench(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"throughput\");\n\n group.throughput(Throughput::Elements(1u64));\n\n group.bench_function(\"sudt\", move |b| {\n\n b.iter_batched(\n\n || {\n\n let mut tree = DummyState::default();\n\n\n\n let rollup_config = RollupConfig::new_builder()\n\n 
.l2_sudt_validator_script_type_hash(\n\n DUMMY_SUDT_VALIDATOR_SCRIPT_TYPE_HASH.pack(),\n\n )\n\n .build();\n\n\n\n let init_a_balance: u128 = 10000;\n\n\n\n // init accounts\n\n let sudt_id = tree\n\n .create_account_from_script(\n\n Script::new_builder()\n", "file_path": "crates/benches/benches/benchmarks/sudt.rs", "rank": 38, "score": 231880.9579390171 }, { "content": "// Build proof with ckb merkle tree.\n\nfn build_tx_proof(block: &L2Block, tx_index: u32) -> Result<(L2Transaction, CKBMerkleProof)> {\n\n let mut target_tx = None;\n\n let leaves: Vec<H256> = block\n\n .transactions()\n\n .into_iter()\n\n .enumerate()\n\n .map(|(idx, tx)| {\n\n let hash: H256 = tx.witness_hash().into();\n\n if idx == tx_index as usize {\n\n target_tx = Some(tx);\n\n }\n\n ckb_merkle_leaf_hash(idx as u32, &hash)\n\n })\n\n .collect();\n\n let tx = target_tx.ok_or_else(|| anyhow!(\"tx not found in block\"))?;\n\n let proof = build_merkle_proof(&leaves, &[tx_index])?;\n\n Ok((tx, proof))\n\n}\n\n\n", "file_path": "crates/challenge/src/context.rs", "rank": 39, "score": 231195.23488765146 }, { "content": "pub fn setup_chain(rollup_type_script: Script) -> Chain {\n\n let mut account_lock_manage = AccountLockManage::default();\n\n let rollup_config = RollupConfig::new_builder()\n\n .allowed_eoa_type_hashes(vec![*ALWAYS_SUCCESS_CODE_HASH].pack())\n\n .finality_blocks(DEFAULT_FINALITY_BLOCKS.pack())\n\n .build();\n\n account_lock_manage\n\n .register_lock_algorithm((*ALWAYS_SUCCESS_CODE_HASH).into(), Box::new(AlwaysSuccess));\n\n let mut chain = setup_chain_with_account_lock_manage(\n\n rollup_type_script,\n\n rollup_config,\n\n account_lock_manage,\n\n None,\n\n None,\n\n None,\n\n );\n\n chain.complete_initial_syncing().unwrap();\n\n chain\n\n}\n\n\n", "file_path": "crates/tests/src/testing_tool/chain.rs", "rank": 40, "score": 231158.22014698124 }, { "content": "pub fn bench(c: &mut Criterion) {\n\n c.bench_function(\"db init\", |b| b.iter(|| Store::open_tmp().unwrap()));\n\n}\n\n\n\ncriterion_group! 
{\n\n name = init_db;\n\n config = Criterion::default().sample_size(10);\n\n targets = bench\n\n}\n", "file_path": "crates/benches/benches/benchmarks/init_db.rs", "rank": 41, "score": 228814.79276562613 }, { "content": "fn sign_message(msg: &H256, privkey_data: H256) -> Result<[u8; 65], String> {\n\n let privkey = Privkey::from(privkey_data);\n\n let signature = privkey\n\n .sign_recoverable(msg)\n\n .map_err(|err| err.to_string())?;\n\n let mut inner = [0u8; 65];\n\n inner.copy_from_slice(&signature.serialize());\n\n Ok(inner)\n\n}\n\n\n", "file_path": "crates/tools/src/account.rs", "rank": 42, "score": 223583.3773978409 }, { "content": "pub fn detach_chain(ctx: ChainContext) -> Result<()> {\n\n let ChainContext {\n\n chain: _,\n\n from_store: _,\n\n local_store,\n\n } = ctx;\n\n let tip = local_store.get_tip_block()?;\n\n let mut number = tip.raw().number().unpack();\n\n let hash: Byte32 = tip.hash().pack();\n\n println!(\"Detach from block: #{} {}\", number, hash);\n\n\n\n // query next block\n\n while number > 0 {\n\n let db = local_store.begin_transaction();\n\n let detach_block = {\n\n let block_hash = db.get_block_hash_by_number(number)?.unwrap();\n\n db.get_block(&block_hash)?.unwrap()\n\n };\n\n let hash: Byte32 = detach_block.hash().pack();\n\n number = detach_block.raw().number().unpack();\n", "file_path": "crates/replay-chain/src/replay.rs", "rank": 43, "score": 222725.87653946073 }, { "content": "pub fn replay_chain(ctx: ChainContext) -> Result<()> {\n\n let ChainContext {\n\n mut chain,\n\n from_store,\n\n local_store,\n\n } = ctx;\n\n let tip = local_store.get_tip_block()?;\n\n let number = {\n\n let block_hash = from_store.get_block_hash_by_number(tip.raw().number().unpack())?;\n\n assert_eq!(H256::from(tip.hash()), block_hash.unwrap());\n\n tip.raw().number().unpack()\n\n };\n\n\n\n let hash: Byte32 = tip.hash().pack();\n\n println!(\"Replay from block: #{} {}\", number, hash);\n\n\n\n // query next block\n\n let mut replay_number = number + 1;\n\n\n\n loop {\n", "file_path": "crates/replay-chain/src/replay.rs", "rank": 44, "score": 222725.87653946073 }, { "content": "/// build genesis from store\n\n/// This function initialize db to genesis state\n\npub fn build_genesis_from_store(\n\n db: StoreTransaction,\n\n config: &GenesisConfig,\n\n secp_data: Bytes,\n\n) -> Result<(StoreTransaction, GenesisWithGlobalState)> {\n\n let rollup_context = RollupContext {\n\n rollup_script_hash: {\n\n let rollup_script_hash: [u8; 32] = config.rollup_type_hash.clone().into();\n\n rollup_script_hash.into()\n\n },\n\n rollup_config: config.rollup_config.clone().into(),\n\n };\n\n // initialize store\n\n db.set_block_smt_root(H256::zero())?;\n\n db.set_reverted_block_smt_root(H256::zero())?;\n\n\n\n let mut tree = {\n\n let smt = db.account_smt_with_merkle_state(AccountMerkleState::default())?;\n\n StateTree::new(smt, 0, StateContext::AttachBlock(0))\n\n };\n", "file_path": "crates/generator/src/genesis.rs", "rank": 45, "score": 220651.00520655286 }, { "content": "/// NOTE: Caller should rollback db, only update reverted_block_smt in L1ActionContext::Revert\n\npub fn build_revert_context(\n\n db: &StoreTransaction,\n\n reverted_blocks: &[L2Block],\n\n) -> Result<RevertContext> {\n\n // Build main chain block proof\n\n let reverted_blocks = reverted_blocks.iter();\n\n let reverted_raw_blocks: Vec<RawL2Block> = reverted_blocks.map(|rb| rb.raw()).collect();\n\n let (_, block_proof) = build_block_proof(db, &reverted_raw_blocks)?;\n\n log::debug!(\"build main chain block proof\");\n\n\n\n // 
Build reverted block proof\n\n let (post_reverted_block_root, reverted_block_proof) = {\n\n let mut smt = db.reverted_block_smt()?;\n\n let to_key = |b: &RawL2Block| H256::from(b.hash());\n\n let to_leave = |b: &RawL2Block| (to_key(b), H256::one());\n\n\n\n let keys: Vec<H256> = reverted_raw_blocks.iter().map(to_key).collect();\n\n for key in keys.iter() {\n\n smt.update(key.to_owned(), H256::one())?;\n\n }\n", "file_path": "crates/challenge/src/context.rs", "rank": 46, "score": 220645.11753314408 }, { "content": "pub fn build_challenge_context(\n\n db: &StoreTransaction,\n\n target: ChallengeTarget,\n\n) -> Result<ChallengeContext> {\n\n let block_hash: H256 = target.block_hash().unpack();\n\n let block = {\n\n let opt_ = db.get_block(&block_hash)?;\n\n opt_.ok_or_else(|| anyhow!(\"bad block {} not found\", hex::encode(block_hash.as_slice())))?\n\n };\n\n\n\n let block_smt = db.block_smt()?;\n\n let block_proof = block_smt\n\n .merkle_proof(vec![block.smt_key().into()])?\n\n .compile(vec![(block.smt_key().into(), block.hash().into())])?;\n\n\n\n let witness = ChallengeWitness::new_builder()\n\n .raw_l2block(block.raw())\n\n .block_proof(block_proof.0.pack())\n\n .build();\n\n\n\n Ok(ChallengeContext { target, witness })\n\n}\n\n\n", "file_path": "crates/challenge/src/context.rs", "rank": 47, "score": 220639.3526784457 }, { "content": "pub fn build_verify_context(\n\n generator: Arc<Generator>,\n\n db: &StoreTransaction,\n\n target: &ChallengeTarget,\n\n) -> Result<VerifyContext> {\n\n let challenge_type = target.target_type().try_into();\n\n let block_hash: [u8; 32] = target.block_hash().unpack();\n\n let target_index = target.target_index().unpack();\n\n\n\n match challenge_type.map_err(|_| anyhow!(\"invalid challenge type\"))? {\n\n ChallengeTargetType::TxExecution => {\n\n build_verify_transaction_witness(generator, db, block_hash.into(), target_index)\n\n }\n\n ChallengeTargetType::TxSignature => {\n\n build_verify_transaction_signature_witness(db, block_hash.into(), target_index)\n\n }\n\n ChallengeTargetType::Withdrawal => {\n\n build_verify_withdrawal_witness(db, block_hash.into(), target_index)\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/challenge/src/context.rs", "rank": 48, "score": 220639.3526784457 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn build_output(\n\n rollup_context: &RollupContext,\n\n prev_global_state: GlobalState,\n\n challenge_cell: &CellInfo,\n\n burn_lock: Script,\n\n owner_lock: Script,\n\n context: VerifyContext,\n\n builtin_load_data: &HashMap<H256, CellDep>,\n\n load_data_strategy: Option<LoadDataStrategy>,\n\n) -> Result<CancelChallengeOutput> {\n\n match context.verify_witness {\n\n VerifyWitness::Withdrawal(witness) => {\n\n let verifier_lock = context.sender_script;\n\n\n\n let verifier_witness = {\n\n let signature = witness.withdrawal_request().signature();\n\n WitnessArgs::new_builder()\n\n .lock(Some(signature).pack())\n\n .build()\n\n };\n", "file_path": "crates/challenge/src/cancel_challenge.rs", "rank": 49, "score": 220639.3526784457 }, { "content": "pub fn deploy_program(\n\n privkey_path: &Path,\n\n rpc_client: &mut HttpRpcClient,\n\n binary_path: &Path,\n\n target_lock: &packed::Script,\n\n target_address: &Address,\n\n) -> Result<DeployItem, String> {\n\n log::info!(\"deploy binary {:?}\", binary_path);\n\n let file_size = fs::metadata(binary_path)\n\n .map_err(|err| err.to_string())?\n\n .len();\n\n let min_output_capacity = {\n\n let data_capacity = Capacity::bytes(file_size as usize).map_err(|err| 
err.to_string())?;\n\n let type_script = packed::Script::new_builder()\n\n .code_hash(TYPE_ID_CODE_HASH.pack())\n\n .hash_type(ScriptHashType::Type.into())\n\n .args(Bytes::from(vec![0u8; 32]).pack())\n\n .build();\n\n let output = packed::CellOutput::new_builder()\n\n .lock(target_lock.clone())\n", "file_path": "crates/tools/src/deploy_scripts.rs", "rank": 50, "score": 220626.96162313188 }, { "content": "pub fn check_script(\n\n script_config: &ContractTypeScriptConfig,\n\n rollup_config: &RollupConfig,\n\n rollup_type_script: &Script,\n\n) -> Result<()> {\n\n if script_config.state_validator.hash() != rollup_type_script.code_hash {\n\n bail!(\"state validator hash not match\");\n\n }\n\n if script_config.deposit_lock.hash().pack() != rollup_config.deposit_script_type_hash() {\n\n bail!(\"deposit lock hash not match one in rollup config\");\n\n }\n\n if script_config.stake_lock.hash().pack() != rollup_config.stake_script_type_hash() {\n\n bail!(\"stake lock hash not match one in rollup config\");\n\n }\n\n if script_config.custodian_lock.hash().pack() != rollup_config.custodian_script_type_hash() {\n\n bail!(\"custodian lock hash not match one in rollup config\");\n\n }\n\n if script_config.withdrawal_lock.hash().pack() != rollup_config.withdrawal_script_type_hash() {\n\n bail!(\"withdrawal lock hash not match one in rollup config\");\n\n }\n", "file_path": "crates/rpc-client/src/contract.rs", "rank": 51, "score": 220626.96162313188 }, { "content": "pub fn hex(raw: &[u8]) -> Result<String> {\n\n Ok(format!(\"0x{}\", faster_hex::hex_string(raw)?))\n\n}\n", "file_path": "crates/web3-indexer/src/helper.rs", "rank": 52, "score": 219831.7500418799 }, { "content": "fn load_data_u128<Mac: SupportMachine>(machine: &mut Mac, addr: u64) -> Result<u128, VMError> {\n\n let mut data = [0u8; 16];\n\n for (i, c) in data.iter_mut().enumerate() {\n\n *c = machine\n\n .memory_mut()\n\n .load8(&Mac::REG::from_u64(addr).overflowing_add(&Mac::REG::from_u64(i as u64)))?\n\n .to_u8();\n\n }\n\n Ok(u128::from_le_bytes(data))\n\n}\n\n\n", "file_path": "crates/generator/src/syscalls/mod.rs", "rank": 53, "score": 217693.54258556492 }, { "content": "pub fn privkey_to_l2_script_hash(\n\n privkey: &H256,\n\n rollup_type_hash: &H256,\n\n scripts_deployment: &ScriptsDeploymentResult,\n\n) -> Result<H256, String> {\n\n let eth_address = privkey_to_eth_address(privkey)?;\n\n\n\n let code_hash = Byte32::from_slice(\n\n scripts_deployment\n\n .eth_account_lock\n\n .script_type_hash\n\n .as_bytes(),\n\n )\n\n .map_err(|err| err.to_string())?;\n\n\n\n let mut args_vec = rollup_type_hash.as_bytes().to_vec();\n\n args_vec.append(&mut eth_address.to_vec());\n\n let args = GwPack::pack(&GwBytes::from(args_vec));\n\n\n\n let script = Script::new_builder()\n\n .code_hash(code_hash)\n\n .hash_type(ScriptHashType::Type.into())\n\n .args(args)\n\n .build();\n\n\n\n let script_hash = CkbHasher::new().update(script.as_slice()).finalize();\n\n\n\n Ok(script_hash)\n\n}\n\n\n", "file_path": "crates/tools/src/account.rs", "rank": 54, "score": 217426.72437474487 }, { "content": "pub fn setup(args: SetupArgs) -> Result<Context> {\n\n let SetupArgs {\n\n from_db_store,\n\n to_db_store,\n\n config,\n\n from_db_columns,\n\n } = args;\n\n\n\n let store_config = StoreConfig {\n\n path: to_db_store,\n\n options: config.store.options.clone(),\n\n options_file: config.store.options_file.clone(),\n\n cache_size: config.store.cache_size,\n\n };\n\n let local_store = Store::new(RocksDB::open(&store_config, COLUMNS));\n\n let rollup_type_script = {\n\n 
let script: gw_types::packed::Script = config.chain.rollup_type_script.clone().into();\n\n script\n\n };\n\n let rollup_config: RollupConfig = config.genesis.rollup_config.clone().into();\n", "file_path": "crates/replay-chain/src/setup.rs", "rank": 55, "score": 217013.45275898345 }, { "content": "pub fn build_sync_tx(\n\n rollup_cell: CellOutput,\n\n produce_block_result: ProduceBlockResult,\n\n) -> Transaction {\n\n let ProduceBlockResult {\n\n block,\n\n global_state,\n\n } = produce_block_result;\n\n let rollup_action = {\n\n let submit_block = RollupSubmitBlock::new_builder().block(block).build();\n\n RollupAction::new_builder()\n\n .set(RollupActionUnion::RollupSubmitBlock(submit_block))\n\n .build()\n\n };\n\n let witness = WitnessArgs::new_builder()\n\n .output_type(Pack::<_>::pack(&Some(rollup_action.as_bytes())))\n\n .build();\n\n let raw = RawTransaction::new_builder()\n\n .outputs(vec![rollup_cell].pack())\n\n .outputs_data(vec![global_state.as_bytes()].pack())\n\n .build();\n\n Transaction::new_builder()\n\n .raw(raw)\n\n .witnesses(vec![witness.as_bytes()].pack())\n\n .build()\n\n}\n\n\n", "file_path": "crates/tests/src/testing_tool/chain.rs", "rank": 56, "score": 214373.7674447057 }, { "content": "pub fn apply_block_result(\n\n chain: &mut Chain,\n\n rollup_cell: CellOutput,\n\n block_result: ProduceBlockResult,\n\n deposit_requests: Vec<DepositRequest>,\n\n deposit_asset_scripts: HashSet<Script>,\n\n) {\n\n let l2block = block_result.block.clone();\n\n let transaction = build_sync_tx(rollup_cell, block_result);\n\n let l2block_committed_info = L2BlockCommittedInfo::default();\n\n\n\n let update = L1Action {\n\n context: L1ActionContext::SubmitBlock {\n\n l2block,\n\n deposit_requests,\n\n deposit_asset_scripts,\n\n },\n\n transaction,\n\n l2block_committed_info,\n\n };\n\n let param = SyncParam {\n\n updates: vec![update],\n\n reverts: Default::default(),\n\n };\n\n chain.sync(param).unwrap();\n\n assert!(chain.last_sync_event().is_success());\n\n}\n\n\n", "file_path": "crates/tests/src/testing_tool/chain.rs", "rank": 57, "score": 214348.52196588216 }, { "content": "pub fn parse_log(item: &LogItem) -> Result<GwLog> {\n\n let service_flag: u8 = item.service_flag().into();\n\n let raw_data = item.data().raw_data();\n\n let data = raw_data.as_ref();\n\n match service_flag {\n\n GW_LOG_SUDT_TRANSFER => {\n\n let sudt_id: u32 = item.account_id().unpack();\n\n if data.len() != (1 + 20 + 20 + 16) {\n\n return Err(anyhow!(\"Invalid data length: {}\", data.len()));\n\n }\n\n let (from_address, to_address, amount) = parse_sudt_log_data(data);\n\n Ok(GwLog::SudtTransfer {\n\n sudt_id,\n\n from_address,\n\n to_address,\n\n amount,\n\n })\n\n }\n\n GW_LOG_SUDT_PAY_FEE => {\n\n let sudt_id: u32 = item.account_id().unpack();\n", "file_path": "crates/web3-indexer/src/helper.rs", "rank": 58, "score": 211706.70143042458 }, { "content": "fn to_h256(v: JsonH256) -> H256 {\n\n let h: [u8; 32] = v.into();\n\n h.into()\n\n}\n\n\n", "file_path": "crates/rpc-server/src/registry.rs", "rank": 59, "score": 209215.44299925456 }, { "content": "pub fn chain_generator(chain: &Chain, rollup_type_script: Script) -> Arc<Generator> {\n\n let rollup_config = chain.generator().rollup_context().rollup_config.to_owned();\n\n let mut account_lock_manage = AccountLockManage::default();\n\n account_lock_manage\n\n .register_lock_algorithm((*ALWAYS_SUCCESS_CODE_HASH).into(), Box::new(AlwaysSuccess));\n\n let backend_manage = build_backend_manage(&rollup_config);\n\n let rollup_context = RollupContext {\n\n 
rollup_script_hash: rollup_type_script.hash().into(),\n\n rollup_config,\n\n };\n\n Arc::new(Generator::new(\n\n backend_manage,\n\n account_lock_manage,\n\n rollup_context,\n\n Default::default(),\n\n ))\n\n}\n\n\n", "file_path": "crates/tests/src/testing_tool/chain.rs", "rank": 60, "score": 209051.34200922895 }, { "content": "pub fn deploy_rollup_cell(args: DeployRollupCellArgs) -> Result<RollupDeploymentResult, String> {\n\n let DeployRollupCellArgs {\n\n privkey_path,\n\n ckb_rpc_url,\n\n scripts_result,\n\n user_rollup_config,\n\n poa_config,\n\n timestamp,\n\n skip_config_check,\n\n } = args;\n\n\n\n let poa_setup = poa_config.poa_setup.clone();\n\n\n\n let burn_lock_hash: [u8; 32] = {\n\n let lock: ckb_types::packed::Script = user_rollup_config.burn_lock.clone().into();\n\n lock.calc_script_hash().unpack()\n\n };\n\n // check config\n\n if !skip_config_check {\n\n let expected_burn_lock_script = ckb_packed::Script::new_builder()\n", "file_path": "crates/tools/src/deploy_genesis.rs", "rank": 61, "score": 208995.01861509902 }, { "content": "pub fn generate_node_config(args: GenerateNodeConfigArgs) -> Result<Config> {\n\n let GenerateNodeConfigArgs {\n\n rollup_result,\n\n scripts_deployment,\n\n privkey_path,\n\n ckb_url,\n\n indexer_url,\n\n database_url,\n\n build_scripts_result,\n\n server_url,\n\n user_rollup_config,\n\n node_mode,\n\n } = args;\n\n\n\n let mut rpc_client = HttpRpcClient::new(ckb_url.to_string());\n\n let tx_with_status = rpc_client\n\n .get_transaction(rollup_result.tx_hash.clone())\n\n .map_err(|err| anyhow!(\"get transaction error: {}\", err))?\n\n .ok_or_else(|| anyhow!(\"can't find genesis block transaction\"))?;\n\n let block_hash = tx_with_status.tx_status.block_hash.ok_or_else(|| {\n", "file_path": "crates/tools/src/generate_config.rs", "rank": 62, "score": 206798.4586556895 }, { "content": "pub fn run(config: Config, skip_config_check: bool) -> Result<()> {\n\n // Set up sentry.\n\n let _guard = match &config.sentry_dsn.as_ref() {\n\n Some(sentry_dsn) => sentry::init((\n\n sentry_dsn.as_str(),\n\n sentry::ClientOptions {\n\n release: sentry::release_name!(),\n\n ..Default::default()\n\n },\n\n )),\n\n None => sentry::init(()),\n\n };\n\n // Enable smol threads before smol::spawn\n\n let runtime_threads = match std::env::var(SMOL_THREADS_ENV_VAR) {\n\n Ok(s) => s.parse()?,\n\n Err(_) => {\n\n let threads = DEFAULT_RUNTIME_THREADS;\n\n std::env::set_var(SMOL_THREADS_ENV_VAR, format!(\"{}\", threads));\n\n threads\n\n }\n", "file_path": "crates/block-producer/src/runner.rs", "rank": 63, "score": 206700.51677158484 }, { "content": "pub fn run<I, S>(bin: &str, args: I) -> Result<()>\n\nwhere\n\n I: IntoIterator<Item = S> + std::fmt::Debug,\n\n S: AsRef<OsStr>,\n\n{\n\n log::debug!(\"[Execute]: {} {:?}\", bin, args);\n\n let status = Command::new(bin.to_owned())\n\n .env(\"RUST_BACKTRACE\", \"full\")\n\n .args(args)\n\n .status()\n\n .expect(\"run command\");\n\n if !status.success() {\n\n Err(anyhow::anyhow!(\n\n \"Exited with status code: {:?}\",\n\n status.code()\n\n ))\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "crates/tools/src/utils/transaction.rs", "rank": 64, "score": 203647.37540109907 }, { "content": "fn to_jsonh256(v: H256) -> JsonH256 {\n\n let h: [u8; 32] = v.into();\n\n h.into()\n\n}\n\n\n\npub struct RegistryArgs<T> {\n\n pub store: Store,\n\n pub mem_pool: MemPool,\n\n pub generator: Arc<Generator>,\n\n pub tests_rpc_impl: Option<Box<T>>,\n\n pub rollup_config: RollupConfig,\n\n pub mem_pool_config: MemPoolConfig,\n\n pub node_mode: 
NodeMode,\n\n pub rpc_client: RPCClient,\n\n pub send_tx_rate_limit: Option<RPCRateLimit>,\n\n pub server_config: RPCServerConfig,\n\n pub fee_config: FeeConfig,\n\n pub last_submitted_tx_hash: Option<Arc<smol::lock::RwLock<H256>>>,\n\n}\n\n\n", "file_path": "crates/rpc-server/src/registry.rs", "rank": 65, "score": 203400.02129311225 }, { "content": "#[derive(Clone, Serialize, Deserialize, PartialEq, Eq, Debug)]\n\nstruct ScriptsBuildConfig {\n\n prebuild_image: PathBuf,\n\n repos: ReposUrl,\n\n\n\n #[serde(default)]\n\n scripts: HashMap<String, ScriptsInfo>,\n\n}\n\n\n\nimpl Default for ScriptsBuildConfig {\n\n fn default() -> Self {\n\n ScriptsBuildConfig {\n\n prebuild_image: PathBuf::from(\"nervos/godwoken-prebuilds:v0.6.7-rc1\"),\n\n repos: ReposUrl {\n\n godwoken_scripts: Url::parse(\n\n \"https://github.com/nervosnetwork/godwoken-scripts#v0.8.4-rc1\",\n\n )\n\n .expect(\"url parse\"),\n\n godwoken_polyjuice: Url::parse(\n\n \"https://github.com/nervosnetwork/godwoken-polyjuice#v0.8.8\",\n\n )\n", "file_path": "crates/tools/src/prepare_scripts.rs", "rank": 66, "score": 202191.55400697968 }, { "content": "pub fn run_cmd<I, S>(args: I) -> Result<String, String>\n\nwhere\n\n I: IntoIterator<Item = S> + std::fmt::Debug,\n\n S: AsRef<OsStr>,\n\n{\n\n let bin = \"ckb-cli\";\n\n log::debug!(\"[Execute]: {} {:?}\", bin, args);\n\n let init_output = Command::new(bin.to_owned())\n\n .env(\"RUST_BACKTRACE\", \"full\")\n\n .args(args)\n\n .output()\n\n .expect(\"Run command failed\");\n\n\n\n if !init_output.status.success() {\n\n Err(format!(\n\n \"{}\",\n\n String::from_utf8_lossy(init_output.stderr.as_slice())\n\n ))\n\n } else {\n\n let stdout = String::from_utf8_lossy(init_output.stdout.as_slice()).to_string();\n\n log::debug!(\"stdout: {}\", stdout);\n\n Ok(stdout)\n\n }\n\n}\n\n\n", "file_path": "crates/tools/src/utils/transaction.rs", "rank": 67, "score": 200941.21317701213 }, { "content": "fn produce_empty_block(chain: &mut Chain, rollup_cell: CellOutput) -> Result<()> {\n\n let block_result = {\n\n let mem_pool = chain.mem_pool().as_ref().unwrap();\n\n let mut mem_pool = smol::block_on(mem_pool.lock());\n\n construct_block(chain, &mut mem_pool, Default::default())?\n\n };\n\n let asset_scripts = HashSet::new();\n\n\n\n // deposit\n\n apply_block_result(chain, rollup_cell, block_result, vec![], asset_scripts);\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/tests/src/tests/deposit_withdrawal.rs", "rank": 68, "score": 200207.16145201918 }, { "content": "fn prepare_scripts_in_build_mode(\n\n scripts_build_config: &ScriptsBuildConfig,\n\n repos_dir: &Path,\n\n target_dir: &Path,\n\n) {\n\n log::info!(\"Build scripts...\");\n\n run_pull_code(\n\n scripts_build_config.repos.godwoken_scripts.clone(),\n\n true,\n\n repos_dir,\n\n GODWOKEN_SCRIPTS,\n\n );\n\n run_pull_code(\n\n scripts_build_config.repos.godwoken_polyjuice.clone(),\n\n true,\n\n repos_dir,\n\n GODWOKEN_POLYJUICE,\n\n );\n\n run_pull_code(\n\n scripts_build_config.repos.clerkb.clone(),\n\n true,\n\n repos_dir,\n\n CLERKB,\n\n );\n\n build_godwoken_scripts(repos_dir, GODWOKEN_SCRIPTS);\n\n build_godwoken_polyjuice(repos_dir, GODWOKEN_POLYJUICE);\n\n build_clerkb(repos_dir, CLERKB);\n\n collect_scripts_to_target(repos_dir, target_dir, &scripts_build_config.scripts);\n\n}\n\n\n", "file_path": "crates/tools/src/prepare_scripts.rs", "rank": 69, "score": 196801.13611693642 }, { "content": "fn run_contract_get_result<S: State + CodeStore>(\n\n rollup_config: &RollupConfig,\n\n tree: &mut S,\n\n from_id: u32,\n\n to_id: u32,\n\n args: 
Bytes,\n\n block_info: &BlockInfo,\n\n) -> Result<RunResult, TransactionError> {\n\n let raw_tx = RawL2Transaction::new_builder()\n\n .from_id(from_id.pack())\n\n .to_id(to_id.pack())\n\n .args(args.pack())\n\n .build();\n\n let backend_manage = build_backend_manage(rollup_config);\n\n let account_lock_manage = AccountLockManage::default();\n\n let rollup_ctx = RollupContext {\n\n rollup_config: rollup_config.clone(),\n\n rollup_script_hash: [42u8; 32].into(),\n\n };\n\n let generator = Generator::new(\n", "file_path": "crates/benches/benches/benchmarks/sudt.rs", "rank": 70, "score": 195680.3130304589 }, { "content": "fn package_bad_blocks(db: &StoreTransaction, start_block_hash: &H256) -> Result<Vec<L2Block>> {\n\n let tip_block = db.get_tip_block()?;\n\n if tip_block.hash() == start_block_hash.as_slice() {\n\n return Ok(vec![tip_block]);\n\n }\n\n\n\n let tip_block_number = tip_block.raw().number().unpack();\n\n let start_block_number = {\n\n let number = db.get_block_number(start_block_hash)?;\n\n number.ok_or_else(|| anyhow!(\"challenge block number not found\"))?\n\n };\n\n assert!(start_block_number < tip_block_number);\n\n\n\n let to_block = |number: u64| {\n\n let hash = db.get_block_hash_by_number(number)?;\n\n let block = hash.map(|h| db.get_block(&h)).transpose()?.flatten();\n\n block.ok_or_else(|| anyhow!(\"block {} not found\", number))\n\n };\n\n\n\n (start_block_number..=tip_block_number)\n\n .map(to_block)\n\n .collect()\n\n}\n", "file_path": "crates/chain/src/chain.rs", "rank": 71, "score": 193382.90203046874 }, { "content": "pub fn slice_to_branch_node(slice: &[u8]) -> BranchNode {\n\n match slice[0] {\n\n 0 => {\n\n let left: [u8; 32] = slice[1..33].try_into().expect(\"checked slice\");\n\n let right: [u8; 32] = slice[33..65].try_into().expect(\"checked slice\");\n\n BranchNode {\n\n left: MergeValue::Value(left.into()),\n\n right: MergeValue::Value(right.into()),\n\n }\n\n }\n\n 1 => {\n\n let left: [u8; 32] = slice[1..33].try_into().expect(\"checked slice\");\n\n let base_node: [u8; 32] = slice[33..65].try_into().expect(\"checked slice\");\n\n let zero_bits: [u8; 32] = slice[65..97].try_into().expect(\"checked slice\");\n\n let zero_count = slice[97];\n\n BranchNode {\n\n left: MergeValue::Value(left.into()),\n\n right: MergeValue::MergeWithZero {\n\n base_node: base_node.into(),\n\n zero_bits: zero_bits.into(),\n", "file_path": "crates/store/src/smt/serde.rs", "rank": 72, "score": 190556.29421264966 }, { "content": "pub trait State {\n\n // KV interface\n\n fn get_raw(&self, key: &H256) -> Result<H256, Error>;\n\n fn update_raw(&mut self, key: H256, value: H256) -> Result<(), Error>;\n\n fn get_account_count(&self) -> Result<u32, Error>;\n\n fn set_account_count(&mut self, count: u32) -> Result<(), Error>;\n\n fn calculate_root(&self) -> Result<H256, Error>;\n\n\n\n // implementations\n\n fn get_value(&self, id: u32, key: &H256) -> Result<H256, Error> {\n\n let raw_key = build_account_key(id, key.as_slice());\n\n self.get_raw(&raw_key)\n\n }\n\n fn update_value(&mut self, id: u32, key: &H256, value: H256) -> Result<(), Error> {\n\n let raw_key = build_account_key(id, key.as_slice());\n\n self.update_raw(raw_key, value)?;\n\n Ok(())\n\n }\n\n /// Create a new account\n\n fn create_account(&mut self, script_hash: H256) -> Result<u32, Error> {\n", "file_path": "crates/common/src/state.rs", "rank": 73, "score": 189922.10260452263 }, { "content": "// Read config.toml\n\npub fn read_config<P: AsRef<Path>>(path: P) -> Result<Config, String> {\n\n let content = 
fs::read(&path).map_err(|err| err.to_string())?;\n\n let config = toml::from_slice(&content).map_err(|err| err.to_string())?;\n\n Ok(config)\n\n}\n\n\n", "file_path": "crates/tools/src/utils/transaction.rs", "rank": 74, "score": 189906.33404823265 }, { "content": "fn parse_global_state(tx: &Transaction, rollup_id: &[u8; 32]) -> Result<GlobalState> {\n\n // find rollup state cell from outputs\n\n let (i, _) = tx\n\n .raw()\n\n .outputs()\n\n .into_iter()\n\n .enumerate()\n\n .find(|(_i, output)| {\n\n output.type_().to_opt().map(|type_| type_.hash()).as_ref() == Some(rollup_id)\n\n })\n\n .ok_or_else(|| anyhow!(\"no rollup cell found\"))?;\n\n\n\n let output_data: Bytes = tx\n\n .raw()\n\n .outputs_data()\n\n .get(i)\n\n .ok_or_else(|| anyhow!(\"no output data\"))?\n\n .unpack();\n\n\n\n global_state_from_slice(&output_data).map_err(|_| anyhow!(\"global state unpacking error\"))\n\n}\n\n\n", "file_path": "crates/chain/src/chain.rs", "rank": 75, "score": 189388.0790789979 }, { "content": "pub fn run_in_dir<I, S>(bin: &str, args: I, target_dir: &str) -> Result<()>\n\nwhere\n\n I: IntoIterator<Item = S> + std::fmt::Debug,\n\n S: AsRef<OsStr>,\n\n{\n\n let working_dir = env::current_dir().expect(\"get working dir\");\n\n env::set_current_dir(&target_dir).expect(\"set target dir\");\n\n let result = run(bin, args);\n\n env::set_current_dir(&working_dir).expect(\"set working dir\");\n\n result\n\n}\n\n\n", "file_path": "crates/tools/src/utils/transaction.rs", "rank": 76, "score": 186729.79792233443 }, { "content": "fn run_git_checkout(repo_dir: &str, commit: &str) -> Result<()> {\n\n utils::transaction::run(\"git\", vec![\"-C\", repo_dir, \"fetch\"])?;\n\n utils::transaction::run(\"git\", vec![\"-C\", repo_dir, \"checkout\", commit])?;\n\n utils::transaction::run(\n\n \"git\",\n\n vec![\"-C\", repo_dir, \"submodule\", \"update\", \"--recursive\"],\n\n )\n\n}\n", "file_path": "crates/tools/src/prepare_scripts.rs", "rank": 77, "score": 184643.8142801151 }, { "content": "pub fn branch_node_to_vec(node: &BranchNode) -> Vec<u8> {\n\n match (&node.left, &node.right) {\n\n (MergeValue::Value(left), MergeValue::Value(right)) => {\n\n let mut ret = Vec::with_capacity(33);\n\n ret.extend_from_slice(&[0]);\n\n ret.extend_from_slice(left.as_slice());\n\n ret.extend_from_slice(right.as_slice());\n\n ret\n\n }\n\n (\n\n MergeValue::Value(left),\n\n MergeValue::MergeWithZero {\n\n base_node,\n\n zero_bits,\n\n zero_count,\n\n },\n\n ) => {\n\n let mut ret = Vec::with_capacity(98);\n\n ret.extend_from_slice(&[1]);\n\n ret.extend_from_slice(left.as_slice());\n", "file_path": "crates/store/src/smt/serde.rs", "rank": 78, "score": 184185.76432070028 }, { "content": "pub fn build_backend_manage(rollup_config: &RollupConfig) -> BackendManage {\n\n let sudt_validator_script_type_hash: [u8; 32] =\n\n rollup_config.l2_sudt_validator_script_type_hash().unpack();\n\n let configs = vec![\n\n BackendConfig {\n\n validator_path: META_VALIDATOR_PATH.into(),\n\n generator_path: META_GENERATOR_PATH.into(),\n\n validator_script_type_hash: META_VALIDATOR_SCRIPT_TYPE_HASH.into(),\n\n backend_type: gw_config::BackendType::Meta,\n\n },\n\n BackendConfig {\n\n validator_path: SUDT_VALIDATOR_PATH.into(),\n\n generator_path: SUDT_GENERATOR_PATH.into(),\n\n validator_script_type_hash: sudt_validator_script_type_hash.into(),\n\n backend_type: gw_config::BackendType::Sudt,\n\n },\n\n ];\n\n BackendManage::from_config(configs).expect(\"default backend\")\n\n}\n\n\n", "file_path": "crates/tests/src/testing_tool/chain.rs", "rank": 79, 
"score": 183044.7320919828 }, { "content": "pub fn verify(config: Config, from_block: Option<u64>, to_block: Option<u64>) -> Result<()> {\n\n if config.store.path.as_os_str().is_empty() {\n\n bail!(\"empty store path, no db block to verify\");\n\n }\n\n if config.block_producer.is_none() {\n\n bail!(\"db block validator require block producer config\");\n\n }\n\n\n\n let validator = build_validator(config)?;\n\n validator.verify_db(from_block, to_block)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/block-producer/src/db_block_validator.rs", "rank": 80, "score": 182176.799379104 }, { "content": "fn build_cell(data: Bytes, lock: Script) -> (CellOutput, Bytes) {\n\n let dummy_output = CellOutput::new_builder()\n\n .capacity(100_000_000u64.pack())\n\n .lock(lock)\n\n .build();\n\n\n\n let capacity = dummy_output\n\n .occupied_capacity(data.len())\n\n .expect(\"impossible cancel challenge verify cell overflow\");\n\n\n\n let output = dummy_output.as_builder().capacity(capacity.pack()).build();\n\n\n\n (output, data)\n\n}\n\n\n", "file_path": "crates/challenge/src/cancel_challenge.rs", "rank": 81, "score": 181575.3117148314 }, { "content": "pub fn run_in_output_mode<I, S>(bin: &str, args: I) -> Result<(String, String), String>\n\nwhere\n\n I: IntoIterator<Item = S> + std::fmt::Debug,\n\n S: AsRef<OsStr>,\n\n{\n\n log::debug!(\"[Execute]: {} {:?}\", bin, args);\n\n let init_output = Command::new(bin.to_owned())\n\n .env(\"RUST_BACKTRACE\", \"full\")\n\n .args(args)\n\n .output()\n\n .expect(\"Run command failed\");\n\n\n\n if !init_output.status.success() {\n\n Err(format!(\n\n \"{}\",\n\n String::from_utf8_lossy(init_output.stderr.as_slice())\n\n ))\n\n } else {\n\n let stdout = String::from_utf8_lossy(init_output.stdout.as_slice()).to_string();\n\n let stderr = String::from_utf8_lossy(init_output.stderr.as_slice()).to_string();\n\n log::debug!(\"stdout: {}\", stdout);\n\n log::debug!(\"stderr: {}\", stderr);\n\n Ok((stdout, stderr))\n\n }\n\n}\n\n\n", "file_path": "crates/tools/src/utils/transaction.rs", "rank": 82, "score": 181534.81021450434 }, { "content": "fn build_validator(config: Config) -> Result<DBBlockCancelChallengeValidator> {\n\n let base = BaseInitComponents::init(&config, true)?;\n\n let block_producer_config = config.block_producer.expect(\"block producer config\");\n\n\n\n let wallet =\n\n Wallet::from_config(&block_producer_config.wallet_config).with_context(|| \"init wallet\")?;\n\n let poa = base.init_poa(&wallet, &block_producer_config);\n\n let mut offchain_mock_context = smol::block_on(async {\n\n let poa = poa.lock().await;\n\n base.init_offchain_mock_context(&poa, &block_producer_config)\n\n .await\n\n })?;\n\n\n\n let validator_config = config.db_block_validator.as_ref();\n\n if let Some(Some(scripts)) = validator_config.map(|c| c.replace_scripts.as_ref()) {\n\n offchain_mock_context = offchain_mock_context.replace_scripts(scripts)?;\n\n }\n\n let validator = DBBlockCancelChallengeValidator::new(\n\n base.generator,\n\n base.store,\n\n offchain_mock_context,\n\n config.debug,\n\n config.db_block_validator.unwrap_or_default(),\n\n );\n\n\n\n Ok(validator)\n\n}\n\n\n", "file_path": "crates/block-producer/src/db_block_validator.rs", "rank": 83, "score": 181218.43830761663 }, { "content": "fn read_script_build_config<P: AsRef<Path>>(input_path: P) -> ScriptsBuildConfig {\n\n let input = fs::read_to_string(input_path).expect(\"read config file\");\n\n let mut scripts_build_config: ScriptsBuildConfig =\n\n serde_json::from_str(&input).expect(\"parse scripts build 
config\");\n\n let default_build_config: ScriptsBuildConfig = ScriptsBuildConfig::default();\n\n default_build_config\n\n .scripts\n\n .iter()\n\n .for_each(\n\n |(key, default_value)| match scripts_build_config.scripts.get(key) {\n\n Some(value) => {\n\n if PathBuf::default() == value.source {\n\n let mut new = value.to_owned();\n\n new.source.clone_from(&default_value.source);\n\n scripts_build_config.scripts.insert(key.to_owned(), new);\n\n }\n\n }\n\n None => {\n\n scripts_build_config\n\n .scripts\n\n .insert(key.to_owned(), default_value.to_owned());\n\n }\n\n },\n\n );\n\n scripts_build_config\n\n}\n\n\n", "file_path": "crates/tools/src/prepare_scripts.rs", "rank": 84, "score": 180710.43120355398 }, { "content": "fn build_post_global_state(prev_global_state: GlobalState) -> GlobalState {\n\n let running_status: u8 = Status::Running.into();\n\n\n\n prev_global_state\n\n .as_builder()\n\n .status(running_status.into())\n\n .build()\n\n}\n\n\n", "file_path": "crates/challenge/src/cancel_challenge.rs", "rank": 85, "score": 180624.14388385005 }, { "content": "/// An alias of `from_slice(..)` to mark where we are really have confidence to do unwrap on the result of `from_slice(..)`.\n\npub trait FromSliceShouldBeOk<'r>: Reader<'r> {\n\n /// Unwraps the result of `from_slice(..)` with confidence and we assume that it's impossible to fail.\n\n fn from_slice_should_be_ok(slice: &'r [u8]) -> Self;\n\n}\n\n\n\nimpl<'r, R> FromSliceShouldBeOk<'r> for R\n\nwhere\n\n R: Reader<'r>,\n\n{\n\n fn from_slice_should_be_ok(slice: &'r [u8]) -> Self {\n\n match Self::from_slice(slice) {\n\n Ok(ret) => ret,\n\n Err(_err) => panic!(\"invalid molecule structure\"),\n\n }\n\n }\n\n}\n", "file_path": "crates/types/src/prelude.rs", "rank": 86, "score": 177630.94716184327 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn transfer(\n\n godwoken_rpc_url: &str,\n\n privkey_path: &Path,\n\n to: &str,\n\n sudt_id: u32,\n\n amount: &str,\n\n fee: &str,\n\n config_path: &Path,\n\n scripts_deployment_path: &Path,\n\n) -> Result<(), String> {\n\n let amount: u128 = amount.parse().expect(\"sUDT amount format error\");\n\n let fee: u128 = fee.parse().expect(\"fee format error\");\n\n\n\n let scripts_deployment_content =\n\n std::fs::read_to_string(scripts_deployment_path).map_err(|err| err.to_string())?;\n\n let scripts_deployment: ScriptsDeploymentResult =\n\n serde_json::from_str(&scripts_deployment_content).map_err(|err| err.to_string())?;\n\n\n\n let mut godwoken_rpc_client = GodwokenRpcClient::new(godwoken_rpc_url);\n\n\n", "file_path": "crates/tools/src/transfer.rs", "rank": 87, "score": 175681.30975032935 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn deploy(\n\n godwoken_rpc_url: &str,\n\n config_path: &Path,\n\n scripts_deployment_path: &Path,\n\n privkey_path: &Path,\n\n creator_account_id: u32,\n\n gas_limit: u64,\n\n gas_price: u128,\n\n data: &str,\n\n value: u128,\n\n) -> Result<(), String> {\n\n let data = GwBytes::from(\n\n hex::decode(data.trim_start_matches(\"0x\").as_bytes()).map_err(|err| err.to_string())?,\n\n );\n\n\n\n let scripts_deployment_string =\n\n std::fs::read_to_string(scripts_deployment_path).map_err(|err| err.to_string())?;\n\n let scripts_deployment: ScriptsDeploymentResult =\n\n serde_json::from_str(&scripts_deployment_string).map_err(|err| err.to_string())?;\n\n\n", "file_path": "crates/tools/src/polyjuice.rs", "rank": 88, "score": 175681.30975032935 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn withdraw(\n\n godwoken_rpc_url: &str,\n\n 
privkey_path: &Path,\n\n capacity: &str,\n\n amount: &str,\n\n sudt_script_hash: &str,\n\n owner_ckb_address: &str,\n\n config_path: &Path,\n\n scripts_deployment_path: &Path,\n\n) -> Result<(), String> {\n\n let sudt_script_hash = H256::from_str(sudt_script_hash.trim().trim_start_matches(\"0x\"))\n\n .map_err(|err| err.to_string())?;\n\n let capacity = parse_capacity(capacity)?;\n\n let amount: u128 = amount.parse().expect(\"sUDT amount format error\");\n\n\n\n let scripts_deployment_content =\n\n fs::read_to_string(scripts_deployment_path).map_err(|err| err.to_string())?;\n\n let scripts_deployment: ScriptsDeploymentResult =\n\n serde_json::from_str(&scripts_deployment_content).map_err(|err| err.to_string())?;\n\n\n", "file_path": "crates/tools/src/withdraw.rs", "rank": 89, "score": 175681.30975032935 }, { "content": "fn convert_signature_to_byte65(signature: &[u8]) -> Result<[u8; 65], LockAlgorithmError> {\n\n if signature.len() != 65 {\n\n return Err(LockAlgorithmError::InvalidSignature);\n\n }\n\n\n\n let mut buf = [0u8; 65];\n\n buf.copy_from_slice(signature);\n\n Ok(buf)\n\n}\n\n\n\n#[derive(Debug, Default)]\n\npub struct Secp256k1;\n\n\n\n/// Usage\n\n/// register an algorithm to AccountLockManage\n\n///\n\n/// manage.register_lock_algorithm(code_hash, Box::new(AlwaysSuccess::default()));\n\nimpl LockAlgorithm for Secp256k1 {\n\n fn recover(&self, message: H256, signature: &[u8]) -> Result<Bytes, LockAlgorithmError> {\n\n let signature: RecoverableSignature = {\n", "file_path": "crates/generator/src/account_lock_manage/secp256k1.rs", "rank": 90, "score": 174098.71476808895 }, { "content": "fn run_cli() -> Result<()> {\n\n let arg_privkey_path = Arg::with_name(\"privkey-path\")\n\n .long(\"privkey-path\")\n\n .short(\"k\")\n\n .takes_value(true)\n\n .required(true)\n\n .help(\"The private key file path\");\n\n let arg_ckb_rpc = Arg::with_name(\"ckb-rpc-url\")\n\n .long(\"ckb-rpc\")\n\n .takes_value(true)\n\n .default_value(\"http://127.0.0.1:8114\")\n\n .help(\"CKB jsonrpc rpc sever URL\");\n\n let arg_indexer_rpc = Arg::with_name(\"indexer-rpc-url\")\n\n .long(\"ckb-indexer-rpc\")\n\n .takes_value(true)\n\n .default_value(\"http://127.0.0.1:8116\")\n\n .required(true)\n\n .help(\"The URL of ckb indexer\");\n\n let arg_deployment_results_path = Arg::with_name(\"scripts-deployment-path\")\n\n .long(\"scripts-deployment-path\")\n", "file_path": "crates/tools/src/main.rs", "rank": 91, "score": 173973.11170958285 }, { "content": "fn run_git_clone(repo_url: Url, is_recursive: bool, path: &str) -> Result<()> {\n\n let mut args = vec![\"clone\", repo_url.as_str(), path];\n\n if is_recursive {\n\n args.push(\"--recursive\");\n\n }\n\n utils::transaction::run(\"git\", args)\n\n}\n\n\n", "file_path": "crates/tools/src/prepare_scripts.rs", "rank": 92, "score": 173464.9999639201 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn send_transaction(\n\n godwoken_rpc_url: &str,\n\n config_path: &Path,\n\n scripts_deployment_path: &Path,\n\n privkey_path: &Path,\n\n creator_account_id: u32,\n\n gas_limit: u64,\n\n gas_price: u128,\n\n data: &str,\n\n value: u128,\n\n to_address: &str,\n\n) -> Result<(), String> {\n\n let data = GwBytes::from(\n\n hex::decode(data.trim_start_matches(\"0x\").as_bytes()).map_err(|err| err.to_string())?,\n\n );\n\n\n\n let scripts_deployment_string =\n\n std::fs::read_to_string(scripts_deployment_path).map_err(|err| err.to_string())?;\n\n let scripts_deployment: ScriptsDeploymentResult =\n\n serde_json::from_str(&scripts_deployment_string).map_err(|err| 
err.to_string())?;\n", "file_path": "crates/tools/src/polyjuice.rs", "rank": 93, "score": 173351.83270151383 }, { "content": "pub fn init_genesis(\n\n store: &Store,\n\n config: &GenesisConfig,\n\n genesis_committed_info: L2BlockCommittedInfo,\n\n secp_data: Bytes,\n\n) -> Result<()> {\n\n let rollup_script_hash: H256 = {\n\n let rollup_script_hash: [u8; 32] = config.rollup_type_hash.clone().into();\n\n rollup_script_hash.into()\n\n };\n\n if store.has_genesis()? {\n\n let chain_id = store.get_chain_id()?;\n\n if chain_id == rollup_script_hash {\n\n return Ok(());\n\n } else {\n\n panic!(\n\n \"The store is already initialized by rollup_type_hash: 0x{}!\",\n\n hex::encode(chain_id.as_slice())\n\n );\n\n }\n", "file_path": "crates/generator/src/genesis.rs", "rank": 94, "score": 173351.83270151383 }, { "content": "pub fn revert(\n\n rollup_context: &RollupContext,\n\n contracts_dep: &ContractsCellDep,\n\n withdrawal_cells: Vec<CellInfo>,\n\n) -> Result<Option<RevertedWithdrawals>> {\n\n if withdrawal_cells.is_empty() {\n\n return Ok(None);\n\n }\n\n\n\n let mut withdrawal_inputs = vec![];\n\n let mut withdrawal_witness = vec![];\n\n let mut custodian_outputs = vec![];\n\n\n\n let timestamp = SystemTime::now()\n\n .duration_since(UNIX_EPOCH)\n\n .expect(\"unexpected timestamp\")\n\n .as_millis() as u64;\n\n\n\n // We use timestamp plus idx and rollup_type_hash to create different custodian lock\n\n // hash for every reverted withdrawal input. Withdrawal lock use custodian lock hash to\n", "file_path": "crates/block-producer/src/withdrawal.rs", "rank": 95, "score": 173351.83270151383 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn polyjuice_call(\n\n godwoken_rpc_url: &str,\n\n gas_limit: u64,\n\n gas_price: u128,\n\n data: &str,\n\n value: u128,\n\n to_address: &str,\n\n from: &str,\n\n) -> Result<(), String> {\n\n let data = GwBytes::from(\n\n hex::decode(data.trim_start_matches(\"0x\").as_bytes()).map_err(|err| err.to_string())?,\n\n );\n\n\n\n let mut godwoken_rpc_client = GodwokenRpcClient::new(godwoken_rpc_url);\n\n\n\n let to_address_str = to_address;\n\n let to_address = GwBytes::from(\n\n hex::decode(to_address_str.trim_start_matches(\"0x\").as_bytes())\n\n .map_err(|err| err.to_string())?,\n\n );\n", "file_path": "crates/tools/src/polyjuice.rs", "rank": 96, "score": 173351.83270151383 }, { "content": "pub fn revert(\n\n rollup_context: &RollupContext,\n\n contracts_dep: &ContractsCellDep,\n\n custodian_cells: Vec<CellInfo>,\n\n) -> Result<Option<RevertedDeposits>> {\n\n if custodian_cells.is_empty() {\n\n return Ok(None);\n\n }\n\n\n\n let mut custodian_inputs = vec![];\n\n let mut custodian_witness = vec![];\n\n let mut deposit_outputs = vec![];\n\n\n\n let rollup_type_hash = rollup_context.rollup_script_hash.as_slice().iter();\n\n for revert_custodian in custodian_cells.into_iter() {\n\n let deposit_lock = {\n\n let args: Bytes = revert_custodian.output.lock().args().unpack();\n\n let custodian_lock_args = CustodianLockArgs::from_slice(&args.slice(32..))?;\n\n\n\n let deposit_lock_args = custodian_lock_args.deposit_lock_args();\n", "file_path": "crates/block-producer/src/deposit.rs", "rank": 97, "score": 173351.83270151383 }, { "content": "// Note: custodian lock search rollup cell in inputs\n\npub fn generate(\n\n rollup_context: &RollupContext,\n\n finalized_custodians: CollectedCustodianCells,\n\n block: &L2Block,\n\n contracts_dep: &ContractsCellDep,\n\n) -> Result<Option<GeneratedWithdrawals>> {\n\n if block.withdrawals().is_empty() && 
finalized_custodians.cells_info.is_empty() {\n\n return Ok(None);\n\n }\n\n log::debug!(\"custodian inputs {:?}\", finalized_custodians);\n\n\n\n let total_withdrawal_amount = sum_withdrawals(block.withdrawals().into_iter());\n\n let mut generator = Generator::new(rollup_context, (&finalized_custodians).into());\n\n for req in block.withdrawals().into_iter() {\n\n generator\n\n .include_and_verify(&req, block)\n\n .map_err(|err| anyhow!(\"unexpected withdrawal err {}\", err))?\n\n }\n\n log::debug!(\"included withdrawals {}\", generator.withdrawals().len());\n\n\n", "file_path": "crates/block-producer/src/withdrawal.rs", "rank": 98, "score": 173351.83270151383 }, { "content": "fn build_godwoken_scripts(repos_dir: &Path, repo_name: &str) {\n\n let repo_dir = repos_dir.join(repo_name).display().to_string();\n\n let target_dir = format!(\"{}/c\", repo_dir);\n\n utils::transaction::run(\"make\", vec![\"-C\", &target_dir]).expect(\"run make\");\n\n utils::transaction::run_in_dir(\n\n \"capsule\",\n\n vec![\"build\", \"--release\", \"--debug-output\"],\n\n &repo_dir,\n\n )\n\n .expect(\"run capsule build\");\n\n}\n\n\n", "file_path": "crates/tools/src/prepare_scripts.rs", "rank": 99, "score": 172433.24367618852 } ]
Rust
src/lib.rs
SilverBzH/wasm-sorter
303cb7c16e764025d5b3d1eedeef219365892830
mod utils;

use wasm_bindgen::prelude::*;

#[cfg(feature = "wee_alloc")]
#[global_allocator]
static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT;

#[wasm_bindgen]
#[repr(u8)]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum SortType {
    Bubble,
    BubbleOptimizied,
    QuickSort,
}

#[wasm_bindgen]
pub struct Sorter {
    data: Vec<u32>,
    swapped_index: Vec<u32>,
}

#[wasm_bindgen]
impl Sorter {
    pub fn new(data: Vec<u32>) -> Sorter {
        let swapped_index = Vec::new();
        Sorter {
            data,
            swapped_index,
        }
    }

    pub fn run(&mut self, sort_type: SortType) {
        self.swapped_index.clear();
        match sort_type {
            SortType::Bubble => {
                println!("BUBBLE SORT");
                Sorter::bubble_sort(self);
            },
            SortType::BubbleOptimizied => {
                println!("BUBBLE SORT OPTIMIZED");
                Sorter::bubble_sort_optimized(self);
            },
            SortType::QuickSort => {
                println!("QUICK SORT");
                let first_index = 0;
                let last_index = self.data.len()-1;
                Sorter::quick_sort(self, first_index, last_index);
            }
        }
    }

    pub fn get_data(&mut self) -> Vec<u32> {
        let data = self.data.clone();
        data
    }

    pub fn get_swapped_indexes(&mut self) -> Vec<u32> {
        let indexes = self.swapped_index.clone();
        self.swapped_index.clear();
        indexes
    }

    pub fn update_data(&mut self, data: Vec<u32>) {
        self.data = data;
    }
}

impl Sorter {
    fn bubble_sort(&mut self) {
        let length = self.data.len();
        for _ in 0..length {
            for j in 0..length-1 {
                if self.data[j+1] < self.data[j] {
                    self.data.swap(j, j+1);
                    self.swapped_index.push(j as u32);
                    self.swapped_index.push((j+1) as u32);
                }
            }
        }
    }

    fn bubble_sort_optimized(&mut self) {
        let mut is_sorted;
        let length = self.data.len();
        for _ in 0..length {
            is_sorted = true;
            for i in 0..length-1 {
                if self.data[i+1] < self.data[i] {
                    self.data.swap(i, i+1);
                    self.swapped_index.push(i as u32);
                    self.swapped_index.push((i+1) as u32);
                    is_sorted = false;
                }
            }
            if is_sorted {
                return;
            }
        }
    }

    fn partition(&mut self, first_index: usize, last_index: usize) -> usize {
        let pivot = self.data[last_index];
        let mut i = first_index;
        for j in first_index..last_index {
            if self.data[j] < pivot {
                self.data.swap(i, j);
                self.swapped_index.push(i as u32);
                self.swapped_index.push(j as u32);
                i += 1;
            }
        }
        self.data.swap(i, last_index);
        self.swapped_index.push(i as u32);
        self.swapped_index.push(last_index as u32);
        i
    }

    fn quick_sort(&mut self, first_index: usize, last_index: usize) {
        if first_index < last_index {
            let pivot = Sorter::partition(self, first_index.clone(), last_index.clone());
            let pivot_low = if pivot == 0 { 0 } else { pivot-1 };
            let pivot_high = if pivot >= last_index { last_index } else { pivot+1 };
            Sorter::quick_sort(self, first_index, pivot_low);
            Sorter::quick_sort(self, pivot_high, last_index);
        }
    }
}

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn bubble() {
        let mut data = vec![0,5,2,3,6,9,4,2,5,7,8,1,5,6];
        let mut sorter = Sorter::new(data.clone());
        sorter.run(SortType::Bubble);
        data.sort();
        let sucess = if data == sorter.get_data() {true} else {false};
        assert_eq!(sucess, true);
    }

    #[test]
    fn bubble_optimized() {
        let mut data = vec![0,5,2,3,6,9,4,2,5,7,8,1,5,6];
        let mut sorter = Sorter::new(data.clone());
        sorter.run(SortType::BubbleOptimizied);
        data.sort();
        let sucess = if data == sorter.get_data() {true} else {false};
        assert_eq!(sucess, true);
    }

    #[test]
    fn quick_sort() {
        let mut data = vec![0,5,2,3,6,9,4,2,5,7,8,1,5,6];
        let mut sorter = Sorter::new(data.clone());
        sorter.run(SortType::QuickSort);
        println!("data: {:?}", data);
        data.sort();
        let sucess = if data == sorter.get_data() {true} else {false};
        assert_eq!(sucess, true);
    }
}
mod utils;

use wasm_bindgen::prelude::*;

#[cfg(feature = "wee_alloc")]
#[global_allocator]
static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT;

#[wasm_bindgen]
#[repr(u8)]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum SortType {
    Bubble,
    BubbleOptimizied,
    QuickSort,
}

#[wasm_bindgen]
pub struct Sorter {
    data: Vec<u32>,
    swapped_index: Vec<u32>,
}

#[wasm_bindgen]
impl Sorter {
    pub fn new(data: Vec<u32>) -> Sorter {
        let swapped_index = Vec::new();
        Sorter {
            data,
            swapped_index,
        }
    }

    pub fn run(&mut self, sort_type: SortType) {
        self.swapped_index.clear();
        match sort_type {
            SortType::Bubble => {
                println!("BUBBLE SORT");
                Sorter::bubble_sort(self);
            },
            SortType::BubbleOptimizied => {
                println!("BUBBLE SORT OPTIMIZED");
                Sorter::bubble_sort_optimized(self);
            },
            SortType::QuickSort => {
                println!("QUICK SORT");
                let first_index = 0;
                let last_index = self.data.len()-1;
                Sorter::quick_sort(self, first_index, last_index);
            }
        }
    }

    pub fn get_data(&mut self) -> Vec<u32> {
        let data = self.data.clone();
        data
    }

    pub fn get_swapped_indexes(&mut self) -> Vec<u32> {
        let indexes = self.swapped_index.clone();
        self.swapped_index.clear();
        indexes
    }

    pub fn update_data(&mut self, data: Vec<u32>) {
        self.data = data;
    }
}

impl Sorter {
    fn bubble_sort(&mut self) {
        let length = self.data.len();
        for _ in 0..length {
            for j in 0..length-1 {
                if self.data[j+1] < self.data[j] {
                    self.data.swap(j, j+1);
                    self.swapped_index.push(j as u32);
                    self.swapped_index.push((j+1) as u32);
                }
            }
        }
    }

    fn bubble_sort_optimized(&mut self) {
        let mut is_sorted;
        let length = self.data.len();
        for _ in 0..length {
            is_sorted = true;
            for i in 0..length-1 {
                if self.data[i+1] < self.data[i] {
                    self.data.swap(i, i+1);
                    self.swapped_index.push(i as u32);
                    self.swapped_index.push((i+1) as u32);
                    is_sorted = false;
                }
            }
            if is_sorted {
                return;
            }
        }
    }

    fn partition(&mut self, first_index: usize, last_index: usize) -> usize {
        let pivot = self.data[last_index];
        let mut i = first_index;
        for j in first_index..last_index {
            if self.data[j] < pivot {
                self.data.swap(i, j);
                self.swapped_index.push(i as u32);
                self.swapped_index.push(j as u32);
                i += 1;
            }
        }
        self.data.swap(i, last_index);
        self.swapped_index.push(i as u32);
        self.swapped_index.push(last_index as u32);
        i
    }

    fn quick_sort(&mut self, first_index: usize, last_index: usize) {
        if first_index < last_index {
            let pivot = Sorter::partition(self, first_index.clone(), last_index.clone());
            let pivot_low = if pivot == 0 { 0 } else { pivot-1 };
            let pivot_high = if pivot >= last_index { last_index } else { pivot+1 };
            Sorter::quick_sort(self, first_index, pivot_low);
            Sorter::quick_sort(self, pivot_high, last_index);
        }
    }
}

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn bubble() {
        let mut data = vec![0,5,2,3,6,9,4,2,5,7,8,1,5,6];
    #[test]
    fn bubble_optimized() {
        let mut data = vec![0,5,2,3,6,9,4,2,5,7,8,1,5,6];
        let mut sorter = Sorter::new(data.clone());
        sorter.run(SortType::BubbleOptimizied);
        data.sort();
        let sucess = if data == sorter.get_data() {true} else {false};
        assert_eq!(sucess, true);
    }

    #[test]
    fn quick_sort() {
        let mut data = vec![0,5,2,3,6,9,4,2,5,7,8,1,5,6];
        let mut sorter = Sorter::new(data.clone());
        sorter.run(SortType::QuickSort);
        println!("data: {:?}", data);
        data.sort();
        let sucess = if data == sorter.get_data() {true} else {false};
        assert_eq!(sucess, true);
    }
}
        let mut sorter = Sorter::new(data.clone());
        sorter.run(SortType::Bubble);
        data.sort();
        let sucess = if data == sorter.get_data() {true} else {false};
        assert_eq!(sucess, true);
    }
function_block-function_prefix_line
[ { "content": "pub fn set_panic_hook() {\n\n // When the `console_error_panic_hook` feature is enabled, we can call the\n\n // `set_panic_hook` function at least once during initialization, and then\n\n // we will get better error messages if our code ever panics.\n\n //\n\n // For more details see\n\n // https://github.com/rustwasm/console_error_panic_hook#readme\n\n #[cfg(feature = \"console_error_panic_hook\")]\n\n console_error_panic_hook::set_once();\n\n}\n", "file_path": "src/utils.rs", "rank": 0, "score": 78020.61361954943 }, { "content": "#[wasm_bindgen_test]\n\nfn pass() {\n\n assert_eq!(1 + 1, 2);\n\n}\n", "file_path": "tests/web.rs", "rank": 1, "score": 55826.995611441045 }, { "content": "let data = new Uint32Array();\n", "file_path": "www/src/ts/main.ts", "rank": 2, "score": 45460.45607274036 }, { "content": "let sorter = Sorter.new(data);\n", "file_path": "www/src/ts/main.ts", "rank": 3, "score": 32409.305443530968 }, { "content": "const CopyWebpackPlugin = require(\"copy-webpack-plugin\");\n", "file_path": "www/webpack.config.js", "rank": 4, "score": 29418.29166767292 }, { "content": " public setSortType(type: SortType) {\n\n this.sortType = type;\n", "file_path": "www/src/ts/printer.ts", "rank": 5, "score": 27869.10354000073 }, { "content": "import(\"./src/js_auto_gen/main.js\")\n\n.catch(e => console.error(\"Error importing `main.js` which is auto generated by TypeScript:\", e));\n", "file_path": "www/index.js", "rank": 6, "score": 26474.91568169378 }, { "content": "export namespace Utils {\n\n export function printBars(data: Uint32Array, max_value: number) {\n\n //Clear the canvas first\n\n var canvas = document.getElementById(\"canvas\");\n\n canvas.innerHTML = \"\";\n\n \n\n //print the new canvas\n\n for (var i=0 ; i<data.length ; i++) {\n\n var mother_div = document.createElement(\"div\");\n\n mother_div.classList.add(\"progress\");\n\n \n\n var child_div = document.createElement(\"div\");\n\n child_div.classList.add(\"progress-bar\");\n\n child_div.setAttribute(\"style\", \"width: \" + data[i]*100/max_value + \"%\");\n\n mother_div.appendChild(child_div);\n\n document.querySelector(\".canvas-progress-bar\").appendChild(mother_div);\n\n }\n\n }\n\n\n\n export function printBarsColoredIndex(data: Uint32Array, max_value: number, index_a: number, index_b: number) {\n\n //Clear the canvas first\n\n var canvas = document.getElementById(\"canvas\");\n\n canvas.innerHTML = \"\";\n\n //print the new canvas\n\n for (var i=0 ; i<data.length ; i++) {\n\n var mother_div = document.createElement(\"div\");\n\n mother_div.classList.add(\"progress\");\n\n \n\n var child_div = document.createElement(\"div\");\n\n child_div.classList.add(\"progress-bar\");\n\n child_div.setAttribute(\"style\", \"width: \" + data[i]*100/max_value + \"%\");\n\n child_div.id = i.toString();\n\n mother_div.appendChild(child_div);\n\n document.querySelector(\".canvas-progress-bar\").appendChild(mother_div);\n\n\n\n if (i == index_a || i == index_b) {\n\n $(\"#\" + i.toString()).addClass(\"progress-bar-red\");\n\n }\n\n }\n\n }\n\n\n\n export function generateRandomData(nb_samples: number, max_value: number) : Uint32Array {\n\n let data = new Array();\n\n for (var i=0 ; i<nb_samples ; i++) {\n\n let d = Math.floor(Math.random() * Math.floor(max_value));\n\n data.push(d); \n\n }\n\n return Uint32Array.from(data);\n\n }\n", "file_path": "www/src/ts/utils.ts", "rank": 7, "score": 25089.41743033952 }, { "content": "//! 
Test suite for the Web and headless browsers.\n\n\n\n#![cfg(target_arch = \"wasm32\")]\n\n\n\nextern crate wasm_bindgen_test;\n\nuse wasm_bindgen_test::*;\n\n\n\nwasm_bindgen_test_configure!(run_in_browser);\n\n\n\n#[wasm_bindgen_test]\n", "file_path": "tests/web.rs", "rank": 8, "score": 23360.689997721693 }, { "content": "const path = require('path');\n", "file_path": "www/webpack.config.js", "rank": 9, "score": 13138.394862438356 }, { "content": "let printer = new Printer(sorter, maxValue, data);\n", "file_path": "www/src/ts/main.ts", "rank": 10, "score": 12762.786561511844 }, { "content": "export class Printer {\n\n\n\n readonly NB_SAMPLE_MAX = 100;\n\n readonly MAX_VALUE_MAX = 1000*1000;\n\n readonly DELAY = 10; // ms\n\n\n\n sorter: Sorter;\n\n sortType: SortType;\n\n maxValue: number;\n\n data: Uint32Array;\n\n isSorted: boolean;\n\n\n\n constructor(sorter: Sorter, maxValue: number, data: Uint32Array) {\n\n this.sorter = sorter;\n\n this.maxValue = maxValue;\n\n this.sortType = SortType.Bubble;\n\n this.initAlgoList();\n\n this.initEventListener();\n\n this.data = data;\n\n this.isSorted = false;\n\n }\n\n\n\n public setSortType(type: SortType) {\n\n this.sortType = type;\n\n }\n\n\n\n private initAlgoList() {\n\n for (let algo in SortType) {\n\n if (isNaN(Number(algo))) {\n\n var item = document.createElement(\"a\");\n\n item.classList.add(\"dropdown-item\");\n\n item.href = \"#\";\n\n item.innerText = algo;\n\n document.querySelector(\".dropdown-menu\").appendChild(item);\n\n }\n\n }\n\n }\n\n\n\n private initEventListener() {\n\n //Add event listener on start button\n\n var startListener = document.getElementById(\"start_button\");\n\n startListener.addEventListener('click', () => {\n\n if (!this.isSorted) {\n\n console.log(\"Algo used: \" + this.sortType);\n\n this.updateAndPrintBars();\n\n this.isSorted = true;\n\n }\n\n });\n\n\n\n var randomListener = document.getElementById(\"random_button\");\n\n randomListener.addEventListener('click', () => {\n\n this.maxValue = Math.floor(Math.random() * Math.floor(this.MAX_VALUE_MAX));\n\n let data = Utils.generateRandomData(this.NB_SAMPLE_MAX, this.maxValue);\n\n this.data = data;\n\n this.sorter.update_data(data);\n\n Utils.printBars(data, this.maxValue);\n\n this.isSorted = false;\n\n });\n\n }\n\n \n\n private async updateAndPrintBars() {\n\n Utils.printBars(this.data, this.maxValue);\n\n this.sorter.run(this.sortType);\n\n let indexes: Uint32Array = this.sorter.get_swapped_indexes();\n\n let nb_swap: number = indexes.length/2;\n\n var j: number = 0;\n\n for (var i=0 ; i<nb_swap ; i++) {\n\n await this.delayedPrint(indexes[j], indexes[j+1]);\n\n j+=2;\n\n }\n\n }\n\n\n\n private async delayedPrint(index_a: number, index_b: number) {\n\n await this.delay();\n\n var temp = this.data[index_a];\n\n this.data[index_a] = this.data[index_b];\n\n this.data[index_b] = temp;\n\n Utils.printBarsColoredIndex(this.data, this.maxValue, index_a, index_b);\n\n }\n\n\n\n private async delay() {\n\n return new Promise(resolve => setTimeout(resolve, this.DELAY));\n\n }\n", "file_path": "www/src/ts/printer.ts", "rank": 11, "score": 12762.786561511844 }, { "content": " private async delay() {\n\n return new Promise(resolve => setTimeout(resolve, this.DELAY));\n", "file_path": "www/src/ts/printer.ts", "rank": 12, "score": 12408.057580209499 }, { "content": "let maxValue = 500;\n", "file_path": "www/src/ts/main.ts", "rank": 13, "score": 12408.057580209499 }, { "content": "let nb_samples = 100;\n", "file_path": "www/src/ts/main.ts", "rank": 14, "score": 
12408.057580209499 }, { "content": " constructor(sorter: Sorter, maxValue: number, data: Uint32Array) {\n\n this.sorter = sorter;\n\n this.maxValue = maxValue;\n\n this.sortType = SortType.Bubble;\n\n this.initAlgoList();\n\n this.initEventListener();\n\n this.data = data;\n\n this.isSorted = false;\n", "file_path": "www/src/ts/printer.ts", "rank": 15, "score": 12408.057580209499 }, { "content": " private async delayedPrint(index_a: number, index_b: number) {\n\n await this.delay();\n\n var temp = this.data[index_a];\n\n this.data[index_a] = this.data[index_b];\n\n this.data[index_b] = temp;\n\n Utils.printBarsColoredIndex(this.data, this.maxValue, index_a, index_b);\n", "file_path": "www/src/ts/printer.ts", "rank": 16, "score": 12072.514038370147 }, { "content": " private async updateAndPrintBars() {\n\n Utils.printBars(this.data, this.maxValue);\n\n this.sorter.run(this.sortType);\n\n let indexes: Uint32Array = this.sorter.get_swapped_indexes();\n\n let nb_swap: number = indexes.length/2;\n\n var j: number = 0;\n\n for (var i=0 ; i<nb_swap ; i++) {\n\n await this.delayedPrint(indexes[j], indexes[j+1]);\n\n j+=2;\n\n }\n", "file_path": "www/src/ts/printer.ts", "rank": 17, "score": 11754.640457696687 }, { "content": " private initAlgoList() {\n\n for (let algo in SortType) {\n\n if (isNaN(Number(algo))) {\n\n var item = document.createElement(\"a\");\n\n item.classList.add(\"dropdown-item\");\n\n item.href = \"#\";\n\n item.innerText = algo;\n\n document.querySelector(\".dropdown-menu\").appendChild(item);\n\n }\n\n }\n", "file_path": "www/src/ts/printer.ts", "rank": 18, "score": 11754.640457696687 }, { "content": " private initEventListener() {\n\n //Add event listener on start button\n\n var startListener = document.getElementById(\"start_button\");\n\n startListener.addEventListener('click', () => {\n\n if (!this.isSorted) {\n\n console.log(\"Algo used: \" + this.sortType);\n\n this.updateAndPrintBars();\n\n this.isSorted = true;\n\n }\n\n });\n\n\n\n var randomListener = document.getElementById(\"random_button\");\n\n randomListener.addEventListener('click', () => {\n\n this.maxValue = Math.floor(Math.random() * Math.floor(this.MAX_VALUE_MAX));\n\n let data = Utils.generateRandomData(this.NB_SAMPLE_MAX, this.maxValue);\n\n this.data = data;\n\n this.sorter.update_data(data);\n\n Utils.printBars(data, this.maxValue);\n\n this.isSorted = false;\n\n });\n", "file_path": "www/src/ts/printer.ts", "rank": 19, "score": 11754.640457696687 }, { "content": "### Introduction\n\nSort algorihm implement in rust, used in Typescript, print with html/css. 
\n\nYou will need rust, wasm, npm and typescript installed on your machine.\n\n\n\n### 🦀 Rust and 🕸 Webassembly \n\n* Install [Rust](https://www.rust-lang.org/tools/install).\n\n* Setup [Rust and Webassembly](https://rustwasm.github.io/docs/book/game-of-life/setup.html).\n\n* Learn [Rust](https://www.rust-lang.org/)\n\n* Learn [Rust + Webassembly](https://rustwasm.github.io/docs/book/introduction.html)\n\n\n\n### 🛠️ Build\n\nLinux:\n\n```\n\n./build.sh\n\n```\n\n\n\nWindows: \n\n```\n\n./build.bat\n\n```\n\n\n\n### 🚀 Launch\n\n```\n\ncd pkg/\n\nnpm init\n\ncd ../www/\n\nnpm run start\n\n```\n\nThen launch a browser at http://localhost:8080/.\n", "file_path": "README.md", "rank": 20, "score": 11454.260609524566 }, { "content": "## 🔋 Batteries Included\n\n\n\n- `.gitignore`: ignores `node_modules`\n\n- `LICENSE-APACHE` and `LICENSE-MIT`: most Rust projects are licensed this way, so these are included for you\n\n- `README.md`: the file you are reading now!\n\n- `index.html`: a bare bones html document that includes the webpack bundle\n\n- `index.js`: example js file with a comment showing how to import and use a wasm pkg\n\n- `package.json` and `package-lock.json`:\n\n - pulls in devDependencies for using webpack:\n\n - [`webpack`](https://www.npmjs.com/package/webpack)\n\n - [`webpack-cli`](https://www.npmjs.com/package/webpack-cli)\n\n - [`webpack-dev-server`](https://www.npmjs.com/package/webpack-dev-server)\n\n - defines a `start` script to run `webpack-dev-server`\n\n- `webpack.config.js`: configuration file for bundling your js with webpack\n\n\n\n## License\n\n\n\nLicensed under either of\n\n\n\n* Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)\n\n* MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)\n\n\n\nat your option.\n\n\n\n### Contribution\n\n\n\nUnless you explicitly state otherwise, any contribution intentionally\n\nsubmitted for inclusion in the work by you, as defined in the Apache-2.0\n\nlicense, shall be dual licensed as above, without any additional terms or\n\nconditions.\n", "file_path": "www/README.md", "rank": 21, "score": 11167.978933687147 }, { "content": "<div align=\"center\">\n\n\n\n <h1><code>create-wasm-app</code></h1>\n\n\n\n <strong>An <code>npm init</code> template for kick starting a project that uses NPM packages containing Rust-generated WebAssembly and bundles them with Webpack.</strong>\n\n\n\n <p>\n\n <a href=\"https://travis-ci.org/rustwasm/create-wasm-app\"><img src=\"https://img.shields.io/travis/rustwasm/create-wasm-app.svg?style=flat-square\" alt=\"Build Status\" /></a>\n\n </p>\n\n\n\n <h3>\n\n <a href=\"#usage\">Usage</a>\n\n <span> | </span>\n\n <a href=\"https://discordapp.com/channels/442252698964721669/443151097398296587\">Chat</a>\n\n </h3>\n\n\n\n <sub>Built with 🦀🕸 by <a href=\"https://rustwasm.github.io/\">The Rust and WebAssembly Working Group</a></sub>\n\n</div>\n\n\n\n## About\n\n\n\nThis template is designed for depending on NPM packages that contain\n\nRust-generated WebAssembly and using them to create a Website.\n\n\n\n* Want to create an NPM package with Rust and WebAssembly? [Check out\n\n `wasm-pack-template`.](https://github.com/rustwasm/wasm-pack-template)\n\n* Want to make a monorepo-style Website without publishing to NPM? 
Check out\n\n [`rust-webpack-template`](https://github.com/rustwasm/rust-webpack-template)\n\n and/or\n\n [`rust-parcel-template`](https://github.com/rustwasm/rust-parcel-template).\n\n\n\n## 🚴 Usage\n\n\n\n```\n\nnpm init wasm-app\n\n```\n\n\n", "file_path": "www/README.md", "rank": 22, "score": 11167.267853686613 }, { "content": "// A dependency graph that contains any wasm must all be imported\n\n// asynchronously. This `bootstrap.js` file does the single async import, so\n\n// that no one else needs to worry about it again.\n\nimport(\"./index.js\")\n\n .catch(e => console.error(\"Error importing `index.js`:\", e));\n", "file_path": "www/bootstrap.js", "rank": 23, "score": 11166.599404228196 }, { "content": "const CopyWebpackPlugin = require(\"copy-webpack-plugin\");\n\nconst path = require('path');\n\n\n\nmodule.exports = {\n\n entry: \"./bootstrap.js\",\n\n output: {\n\n path: path.resolve(__dirname, \"dist\"),\n\n filename: \"bootstrap.js\",\n\n },\n\n mode: \"development\",\n\n plugins: [\n\n new CopyWebpackPlugin(['index.html'])\n\n ],\n\n};\n", "file_path": "www/webpack.config.js", "rank": 24, "score": 10894.1036104712 }, { "content": "import { Sorter, SortType } from \"wasm-sorter\"\n\nimport { Utils } from \"./utils\"\n\nimport { Printer } from \"./printer\"\n\n\n\n//Init Data\n\nlet data = new Uint32Array();\n\n\n\n//default Init values, can be changed by the user later\n\nlet nb_samples = 100;\n\nlet maxValue = 500;\n\ndata = Utils.generateRandomData(nb_samples, maxValue);\n\nUtils.printBars(data, maxValue);\n\n\n\n//Init Sorter and Printer\n\nlet sorter = Sorter.new(data);\n\nlet printer = new Printer(sorter, maxValue, data);\n\n\n\n//Updating algorithm via dropDowm menu button\n\n$(document).ready(function() {\n\n $(\"#dropDown-sort a\").click(function() {\n\n let sortName: string = $(this).text();\n\n switch(sortName) {\n\n case \"Bubble\":\n\n printer.setSortType(SortType.Bubble);\n\n break;\n\n case \"BubbleOptimizied\":\n\n printer.setSortType(SortType.BubbleOptimizied);\n\n break;\n\n case \"QuickSort\":\n\n printer.setSortType(SortType.QuickSort);\n\n break;\n\n default:\n\n console.log(\"Unknown algorithm\");\n\n break;\n\n }\n\n document.getElementById(\"dropdownMenuButton\").innerHTML = sortName;\n\n });\n\n});\n", "file_path": "www/src/ts/main.ts", "rank": 25, "score": 10634.59030463481 }, { "content": "import {SortType, Sorter} from \"wasm-sorter\";\n\nimport { Utils } from \"./utils\"\n\n\n\n// var dropDownListener = document.getElementById(\"dropdownMenuButton\");\n\n// dropDownListener.addEventListener('click', function() {updateAlgoList()});\n\n\n\nexport class Printer {\n\n\n\n readonly NB_SAMPLE_MAX = 100;\n\n readonly MAX_VALUE_MAX = 1000*1000;\n\n readonly DELAY = 10; // ms\n\n\n\n sorter: Sorter;\n\n sortType: SortType;\n\n maxValue: number;\n\n data: Uint32Array;\n\n isSorted: boolean;\n\n\n\n constructor(sorter: Sorter, maxValue: number, data: Uint32Array) {\n\n this.sorter = sorter;\n\n this.maxValue = maxValue;\n\n this.sortType = SortType.Bubble;\n\n this.initAlgoList();\n\n this.initEventListener();\n\n this.data = data;\n\n this.isSorted = false;\n\n }\n\n\n\n public setSortType(type: SortType) {\n\n this.sortType = type;\n\n }\n\n\n\n private initAlgoList() {\n\n for (let algo in SortType) {\n\n if (isNaN(Number(algo))) {\n\n var item = document.createElement(\"a\");\n\n item.classList.add(\"dropdown-item\");\n\n item.href = \"#\";\n\n item.innerText = algo;\n\n document.querySelector(\".dropdown-menu\").appendChild(item);\n\n }\n\n }\n\n 
}\n\n\n\n private initEventListener() {\n\n //Add event listener on start button\n\n var startListener = document.getElementById(\"start_button\");\n\n startListener.addEventListener('click', () => {\n\n if (!this.isSorted) {\n\n console.log(\"Algo used: \" + this.sortType);\n\n this.updateAndPrintBars();\n\n this.isSorted = true;\n\n }\n\n });\n\n\n\n var randomListener = document.getElementById(\"random_button\");\n\n randomListener.addEventListener('click', () => {\n\n this.maxValue = Math.floor(Math.random() * Math.floor(this.MAX_VALUE_MAX));\n\n let data = Utils.generateRandomData(this.NB_SAMPLE_MAX, this.maxValue);\n\n this.data = data;\n\n this.sorter.update_data(data);\n\n Utils.printBars(data, this.maxValue);\n\n this.isSorted = false;\n\n });\n\n }\n\n \n\n private async updateAndPrintBars() {\n\n Utils.printBars(this.data, this.maxValue);\n\n this.sorter.run(this.sortType);\n\n let indexes: Uint32Array = this.sorter.get_swapped_indexes();\n\n let nb_swap: number = indexes.length/2;\n\n var j: number = 0;\n\n for (var i=0 ; i<nb_swap ; i++) {\n\n await this.delayedPrint(indexes[j], indexes[j+1]);\n\n j+=2;\n\n }\n\n }\n\n\n\n private async delayedPrint(index_a: number, index_b: number) {\n\n await this.delay();\n\n var temp = this.data[index_a];\n\n this.data[index_a] = this.data[index_b];\n\n this.data[index_b] = temp;\n\n Utils.printBarsColoredIndex(this.data, this.maxValue, index_a, index_b);\n\n }\n\n\n\n private async delay() {\n\n return new Promise(resolve => setTimeout(resolve, this.DELAY));\n\n }\n", "file_path": "www/src/ts/printer.ts", "rank": 26, "score": 10634.59030463481 } ]
Rust
tests/test_casbin.rs
smrpn/actix-casbin
c4e91e518e3414ae9c3542f47ba04c358e38e1b1
use actix_casbin::{CasbinActor, CasbinCmd, CasbinResult}; use casbin::prelude::*; #[actix_rt::test] async fn test_enforcer() { let m = DefaultModel::from_file("examples/rbac_model.conf") .await .unwrap(); let a = FileAdapter::new("examples/rbac_policy.csv"); let addr = CasbinActor::<Enforcer>::new(m, a).await.unwrap(); if let CasbinResult::Enforce(test_enforce) = addr .send(CasbinCmd::Enforce( vec!["alice", "data1", "read"] .iter() .map(|s| s.to_string()) .collect(), )) .await .unwrap() .unwrap() { assert_eq!(true, test_enforce); } } #[actix_rt::test] async fn test_enforcer_threads() { let m = DefaultModel::from_file("examples/rbac_model.conf") .await .unwrap(); let a = FileAdapter::new("examples/rbac_policy.csv"); let addr = CasbinActor::<Enforcer>::new(m, a).await.unwrap(); for _ in 0..8 { let clone_addr = addr.clone(); tokio::spawn(async move { if let CasbinResult::Enforce(test_enforce) = clone_addr .send(CasbinCmd::Enforce( vec!["alice", "data1", "read"] .iter() .map(|s| s.to_string()) .collect(), )) .await .unwrap() .unwrap() { assert_eq!(true, test_enforce); } }); } } #[actix_rt::test] async fn test_policy_command() { let m = DefaultModel::from_file("examples/rbac_model.conf") .await .unwrap(); let a = FileAdapter::new("examples/rbac_policy.csv"); let addr = CasbinActor::<Enforcer>::new(m, a).await.unwrap(); if let CasbinResult::RemovePolicy(remove_policy) = addr .send(CasbinCmd::RemovePolicy( vec!["alice", "data1", "read"] .iter() .map(|s| s.to_string()) .collect(), )) .await .unwrap() .unwrap() { assert_eq!(true, remove_policy); } if let CasbinResult::RemoveFilteredNamedPolicy(remove_filtered_policy) = addr .send(CasbinCmd::RemoveFilteredNamedPolicy( "p".to_string(), 1, vec!["data2"].iter().map(|s| s.to_string()).collect(), )) .await .unwrap() .unwrap() { assert_eq!(true, remove_filtered_policy); } if let CasbinResult::AddPolicy(add_policy) = addr .send(CasbinCmd::AddPolicy( vec!["eve", "data3", "read"] .iter() .map(|s| s.to_string()) .collect(), )) .await .unwrap() .unwrap() { assert_eq!(true, add_policy); } if let CasbinResult::AddPolicy(add_policies) = addr .send(CasbinCmd::AddPolicies(vec![ vec!["lucy", "data3", "write"] .iter() .map(|s| s.to_string()) .collect(), vec!["jack", "data4", "read"] .iter() .map(|s| s.to_string()) .collect(), ])) .await .unwrap() .unwrap() { assert_eq!(true, add_policies); } if let CasbinResult::RemovePolicies(remove_policies) = addr .send(CasbinCmd::RemovePolicies(vec![ vec!["lucy", "data3", "write"] .iter() .map(|s| s.to_string()) .collect(), vec!["jack", "data4", "read"] .iter() .map(|s| s.to_string()) .collect(), ])) .await .unwrap() .unwrap() { assert_eq!(true, remove_policies); } } #[actix_rt::test] async fn test_roles_command() { let m = DefaultModel::from_file("examples/rbac_model.conf") .await .unwrap(); let a = FileAdapter::new("examples/rbac_policy.csv"); let addr = CasbinActor::<Enforcer>::new(m, a).await.unwrap(); if let CasbinResult::AddRoleForUser(add_role_for_user) = addr .send(CasbinCmd::AddRoleForUser( "alice".to_string(), "data1_admin".to_string(), None, )) .await .unwrap() .unwrap() { assert_eq!(true, add_role_for_user); } if let CasbinResult::AddRolesForUser(add_roles_for_user) = addr .send(CasbinCmd::AddRolesForUser( "bob".to_string(), vec!["data1_admin", "data2_admin"] .iter() .map(|s| s.to_string()) .collect(), None, )) .await .unwrap() .unwrap() { assert_eq!(true, add_roles_for_user); } if let CasbinResult::DeleteRoleForUser(delete_role_for_user) = addr .send(CasbinCmd::DeleteRoleForUser( "alice".to_string(), 
"data1_admin".to_string(), None, )) .await .unwrap() .unwrap() { assert_eq!(true, delete_role_for_user); } if let CasbinResult::DeleteRolesForUser(delete_roles_for_user) = addr .send(CasbinCmd::DeleteRolesForUser("bob".to_string(), None)) .await .unwrap() .unwrap() { assert_eq!(true, delete_roles_for_user); } } #[actix_rt::test] async fn test_implicit_roles_command() { let m = DefaultModel::from_file("examples/rbac_model.conf") .await .unwrap(); let a = FileAdapter::new("examples/rbac_with_hierarchy_policy.csv"); let addr = CasbinActor::<Enforcer>::new(m, a).await.unwrap(); if let CasbinResult::GetImplicitRolesForUser(implicit_roles_alice) = addr .send(CasbinCmd::GetImplicitRolesForUser( "alice".to_string(), None, )) .await .unwrap() .unwrap() { assert_eq!( vec!["admin", "data1_admin", "data2_admin"], sort_unstable(implicit_roles_alice) ); } if let CasbinResult::GetImplicitRolesForUser(implicit_roles_bob) = addr .send(CasbinCmd::GetImplicitRolesForUser("bob".to_string(), None)) .await .unwrap() .unwrap() { assert_eq!(vec![String::new(); 0], implicit_roles_bob); } } #[actix_rt::test] async fn test_implicit_permissions_command() { let m = DefaultModel::from_file("examples/rbac_model.conf") .await .unwrap(); let a = FileAdapter::new("examples/rbac_with_hierarchy_policy.csv"); let addr = CasbinActor::<Enforcer>::new(m, a).await.unwrap(); if let CasbinResult::GetImplicitPermissionsForUser(implicit_permissions_alice) = addr .send(CasbinCmd::GetImplicitPermissionsForUser( "alice".to_string(), None, )) .await .unwrap() .unwrap() { assert_eq!( vec![ vec!["alice", "data1", "read"], vec!["data1_admin", "data1", "read"], vec!["data1_admin", "data1", "write"], vec!["data2_admin", "data2", "read"], vec!["data2_admin", "data2", "write"], ], sort_unstable(implicit_permissions_alice) ); } if let CasbinResult::GetImplicitPermissionsForUser(implicit_permissions_bob) = addr .send(CasbinCmd::GetImplicitPermissionsForUser( "bob".to_string(), None, )) .await .unwrap() .unwrap() { assert_eq!( vec![vec!["bob", "data2", "write"]], implicit_permissions_bob ); } } fn sort_unstable<T: Ord>(mut v: Vec<T>) -> Vec<T> { v.sort_unstable(); v }
use actix_casbin::{CasbinActor, CasbinCmd, CasbinResult}; use casbin::prelude::*; #[actix_rt::test] async fn test_enforcer() { let m = DefaultModel::from_file("
, vec!["data1_admin", "data2_admin"] .iter() .map(|s| s.to_string()) .collect(), None, )) .await .unwrap() .unwrap() { assert_eq!(true, add_roles_for_user); } if let CasbinResult::DeleteRoleForUser(delete_role_for_user) = addr .send(CasbinCmd::DeleteRoleForUser( "alice".to_string(), "data1_admin".to_string(), None, )) .await .unwrap() .unwrap() { assert_eq!(true, delete_role_for_user); } if let CasbinResult::DeleteRolesForUser(delete_roles_for_user) = addr .send(CasbinCmd::DeleteRolesForUser("bob".to_string(), None)) .await .unwrap() .unwrap() { assert_eq!(true, delete_roles_for_user); } } #[actix_rt::test] async fn test_implicit_roles_command() { let m = DefaultModel::from_file("examples/rbac_model.conf") .await .unwrap(); let a = FileAdapter::new("examples/rbac_with_hierarchy_policy.csv"); let addr = CasbinActor::<Enforcer>::new(m, a).await.unwrap(); if let CasbinResult::GetImplicitRolesForUser(implicit_roles_alice) = addr .send(CasbinCmd::GetImplicitRolesForUser( "alice".to_string(), None, )) .await .unwrap() .unwrap() { assert_eq!( vec!["admin", "data1_admin", "data2_admin"], sort_unstable(implicit_roles_alice) ); } if let CasbinResult::GetImplicitRolesForUser(implicit_roles_bob) = addr .send(CasbinCmd::GetImplicitRolesForUser("bob".to_string(), None)) .await .unwrap() .unwrap() { assert_eq!(vec![String::new(); 0], implicit_roles_bob); } } #[actix_rt::test] async fn test_implicit_permissions_command() { let m = DefaultModel::from_file("examples/rbac_model.conf") .await .unwrap(); let a = FileAdapter::new("examples/rbac_with_hierarchy_policy.csv"); let addr = CasbinActor::<Enforcer>::new(m, a).await.unwrap(); if let CasbinResult::GetImplicitPermissionsForUser(implicit_permissions_alice) = addr .send(CasbinCmd::GetImplicitPermissionsForUser( "alice".to_string(), None, )) .await .unwrap() .unwrap() { assert_eq!( vec![ vec!["alice", "data1", "read"], vec!["data1_admin", "data1", "read"], vec!["data1_admin", "data1", "write"], vec!["data2_admin", "data2", "read"], vec!["data2_admin", "data2", "write"], ], sort_unstable(implicit_permissions_alice) ); } if let CasbinResult::GetImplicitPermissionsForUser(implicit_permissions_bob) = addr .send(CasbinCmd::GetImplicitPermissionsForUser( "bob".to_string(), None, )) .await .unwrap() .unwrap() { assert_eq!( vec![vec!["bob", "data2", "write"]], implicit_permissions_bob ); } } fn sort_unstable<T: Ord>(mut v: Vec<T>) -> Vec<T> { v.sort_unstable(); v }
examples/rbac_model.conf") .await .unwrap(); let a = FileAdapter::new("examples/rbac_policy.csv"); let addr = CasbinActor::<Enforcer>::new(m, a).await.unwrap(); if let CasbinResult::Enforce(test_enforce) = addr .send(CasbinCmd::Enforce( vec!["alice", "data1", "read"] .iter() .map(|s| s.to_string()) .collect(), )) .await .unwrap() .unwrap() { assert_eq!(true, test_enforce); } } #[actix_rt::test] async fn test_enforcer_threads() { let m = DefaultModel::from_file("examples/rbac_model.conf") .await .unwrap(); let a = FileAdapter::new("examples/rbac_policy.csv"); let addr = CasbinActor::<Enforcer>::new(m, a).await.unwrap(); for _ in 0..8 { let clone_addr = addr.clone(); tokio::spawn(async move { if let CasbinResult::Enforce(test_enforce) = clone_addr .send(CasbinCmd::Enforce( vec!["alice", "data1", "read"] .iter() .map(|s| s.to_string()) .collect(), )) .await .unwrap() .unwrap() { assert_eq!(true, test_enforce); } }); } } #[actix_rt::test] async fn test_policy_command() { let m = DefaultModel::from_file("examples/rbac_model.conf") .await .unwrap(); let a = FileAdapter::new("examples/rbac_policy.csv"); let addr = CasbinActor::<Enforcer>::new(m, a).await.unwrap(); if let CasbinResult::RemovePolicy(remove_policy) = addr .send(CasbinCmd::RemovePolicy( vec!["alice", "data1", "read"] .iter() .map(|s| s.to_string()) .collect(), )) .await .unwrap() .unwrap() { assert_eq!(true, remove_policy); } if let CasbinResult::RemoveFilteredNamedPolicy(remove_filtered_policy) = addr .send(CasbinCmd::RemoveFilteredNamedPolicy( "p".to_string(), 1, vec!["data2"].iter().map(|s| s.to_string()).collect(), )) .await .unwrap() .unwrap() { assert_eq!(true, remove_filtered_policy); } if let CasbinResult::AddPolicy(add_policy) = addr .send(CasbinCmd::AddPolicy( vec!["eve", "data3", "read"] .iter() .map(|s| s.to_string()) .collect(), )) .await .unwrap() .unwrap() { assert_eq!(true, add_policy); } if let CasbinResult::AddPolicy(add_policies) = addr .send(CasbinCmd::AddPolicies(vec![ vec!["lucy", "data3", "write"] .iter() .map(|s| s.to_string()) .collect(), vec!["jack", "data4", "read"] .iter() .map(|s| s.to_string()) .collect(), ])) .await .unwrap() .unwrap() { assert_eq!(true, add_policies); } if let CasbinResult::RemovePolicies(remove_policies) = addr .send(CasbinCmd::RemovePolicies(vec![ vec!["lucy", "data3", "write"] .iter() .map(|s| s.to_string()) .collect(), vec!["jack", "data4", "read"] .iter() .map(|s| s.to_string()) .collect(), ])) .await .unwrap() .unwrap() { assert_eq!(true, remove_policies); } } #[actix_rt::test] async fn test_roles_command() { let m = DefaultModel::from_file("examples/rbac_model.conf") .await .unwrap(); let a = FileAdapter::new("examples/rbac_policy.csv"); let addr = CasbinActor::<Enforcer>::new(m, a).await.unwrap(); if let CasbinResult::AddRoleForUser(add_role_for_user) = addr .send(CasbinCmd::AddRoleForUser( "alice".to_string(), "data1_admin".to_string(), None, )) .await .unwrap() .unwrap() { assert_eq!(true, add_role_for_user); } if let CasbinResult::AddRolesForUser(add_roles_for_user) = addr .send(CasbinCmd::AddRolesForUser( "bob".to_string()
random
[ { "content": "use actix_casbin_auth::CasbinService;\n\n\n\n#[actix_rt::main]\n\nasync fn main() -> Result<()> {\n\n let m = DefaultModel::from_file(\"examples/rbac_model.conf\")\n\n .await?;\n\n let a = FileAdapter::new(\"examples/rbac_policy.csv\");\n\n\n\n let mut casbin_middleware = CasbinService::new(m, a).await;\n\n let enforcer = casbin_middleware.get_enforcer();\n\n\n\n let addr = CasbinActor::<CachedEnforcer>::set_enforcer(enforcer)?;\n\n if let CasbinResult::Enforce(test_enforce) = addr\n\n .send(CasbinCmd::Enforce(\n\n vec![\"alice\", \"data1\", \"read\"]\n\n .iter()\n\n .map(|s| s.to_string())\n\n .collect(),\n\n ))\n\n .await;\n\n let test_enforce = match res {\n\n Ok(Ok(CasbinResult::Enforce(result))) => result,\n\n _ => panic!(\"Actor Error\"),\n\n };\n\n if test_enforce {\n\n println!(\"Enforce Pass\");\n\n } else {\n\n println!(\"Enforce Fail\");\n\n }\n\n\n\n Ok(())\n\n}\n\n```\n\n\n", "file_path": "README.md", "rank": 1, "score": 4.4608475289129 }, { "content": "# Actix Casbin\n\n\n\n[![Crates.io](https://meritbadge.herokuapp.com/actix-casbin)](https://crates.io/crates/actix-casbin)\n\n[![Docs](https://docs.rs/actix-casbin/badge.svg)](https://docs.rs/actix-casbin)\n\n[![Auto Build](https://github.com/casbin-rs/actix-casbin/workflows/Auto%20Build/badge.svg)](https://github.com/casbin-rs/actix-casbin/actions/)\n\n[![codecov](https://codecov.io/gh/casbin-rs/actix-casbin/branch/master/graph/badge.svg)](https://codecov.io/gh/casbin-rs/actix-casbin)\n\n\n\n[Casbin](https://github.com/casbin/casbin-rs) intergration for [actix](https://github.com/actix/actix) framework\n\n\n\n## Install\n\n\n\nAdd it to `Cargo.toml`\n\n\n\n```rust\n\nactix-casbin = \"0.4.2\"\n\nactix-rt = \"1.1.1\"\n\n```\n\n\n\n\n\n## Example\n\n\n\n1. Using actix-casbin as actor alone\n\n\n\n```rust\n\nuse actix_casbin::casbin::{DefaultModel, FileAdapter, Result, Enforcer};\n\nuse actix_casbin::{CasbinActor, CasbinCmd, CasbinResult};\n\n\n\n#[actix_rt::main]\n\nasync fn main() -> Result<()> {\n\n let m = DefaultModel::from_file(\"examples/rbac_model.conf\").await?;\n\n\n\n let a = FileAdapter::new(\"examples/rbac_policy.csv\");\n\n\n\n let addr = CasbinActor::<Enforcer>::new(m, a).await?;\n\n\n\n let res = addr\n\n .send(CasbinCmd::Enforce(\n\n vec![\"alice\", \"data1\", \"read\"]\n\n .iter()\n\n .map(|s| (*s).to_string())\n\n .collect(),\n\n ))\n\n .await;\n\n\n\n let test_enforce = match res {\n\n Ok(Ok(CasbinResult::Enforce(result))) => result,\n\n _ => panic!(\"Actor Error\"),\n\n };\n\n if test_enforce {\n\n println!(\"Enforce Pass\");\n\n } else {\n\n println!(\"Enforce Fail\");\n\n }\n\n Ok(())\n\n}\n\n```\n\n2. 
Use actix-casbin with casbin actix middleware [actix-casbin-auth](https://github.com/casbin-rs/actix-casbin-auth)\n\n```rust\n\nuse actix_casbin::casbin::{DefaultModel, FileAdapter, Result, CachedEnforcer};\n\nuse actix_casbin::{CasbinActor, CasbinCmd, CasbinResult};\n", "file_path": "README.md", "rank": 2, "score": 4.360627995679122 }, { "content": "use actix::prelude::*;\n\nuse casbin::prelude::*;\n\nuse casbin::{Error as CasbinError, IEnforcer, Result};\n\nuse std::io::{Error, ErrorKind};\n\nuse std::sync::Arc;\n\n\n\n#[cfg(feature = \"runtime-tokio\")]\n\nuse tokio::sync::RwLock;\n\n\n\n#[cfg(feature = \"runtime-async-std\")]\n\nuse async_std::sync::RwLock;\n\n\n\npub enum CasbinCmd {\n\n Enforce(Vec<String>),\n\n AddPolicy(Vec<String>),\n\n AddPolicies(Vec<Vec<String>>),\n\n AddNamedPolicy(String, Vec<String>),\n\n AddNamedPolicies(String, Vec<Vec<String>>),\n\n AddGroupingPolicy(Vec<String>),\n\n AddGroupingPolicies(Vec<Vec<String>>),\n", "file_path": "src/casbin_actor.rs", "rank": 3, "score": 4.017639456663049 }, { "content": "use actix::Actor;\n\nuse actix_casbin::{CasbinActor, CasbinCmd, CasbinResult};\n\nuse actix_casbin_auth::CasbinService;\n\nuse casbin::prelude::*;\n\n\n\n#[actix_rt::test]\n\nasync fn test_set_enforcer() {\n\n let m = DefaultModel::from_file(\"examples/rbac_model.conf\")\n\n .await\n\n .unwrap();\n\n let a = FileAdapter::new(\"examples/rbac_policy.csv\");\n\n\n\n let mut casbin_middleware = CasbinService::new(m, a).await.unwrap();\n\n let enforcer = casbin_middleware.get_enforcer();\n\n\n\n let addr = CasbinActor::<CachedEnforcer>::set_enforcer(enforcer)\n\n .unwrap()\n\n .start();\n\n if let CasbinResult::Enforce(test_enforce) = addr\n\n .send(CasbinCmd::Enforce(\n", "file_path": "tests/test_set_enforcer.rs", "rank": 4, "score": 3.198564030552982 }, { "content": "pub use casbin;\n\n\n\nmod casbin_actor;\n\npub use casbin_actor::{CasbinActor, CasbinCmd, CasbinResult};\n", "file_path": "src/lib.rs", "rank": 6, "score": 2.5182087611892765 }, { "content": " AddRoleForUser(bool),\n\n AddRolesForUser(bool),\n\n DeleteRoleForUser(bool),\n\n DeleteRolesForUser(bool),\n\n GetImplicitRolesForUser(Vec<String>),\n\n GetImplicitPermissionsForUser(Vec<Vec<String>>),\n\n}\n\n\n\nimpl Message for CasbinCmd {\n\n type Result = Result<CasbinResult>;\n\n}\n\n\n\npub struct CasbinActor<T: IEnforcer + 'static> {\n\n pub enforcer: Option<Arc<RwLock<T>>>,\n\n}\n\n\n\nimpl<T: IEnforcer + 'static> CasbinActor<T> {\n\n pub async fn new<M: TryIntoModel, A: TryIntoAdapter>(\n\n m: M,\n\n a: A,\n", "file_path": "src/casbin_actor.rs", "rank": 11, "score": 1.0835102595313306 }, { "content": " Box::pin(\n\n async move {\n\n let mut lock = cloned_enforcer.write().await;\n\n let result = match msg {\n\n CasbinCmd::Enforce(policy) => lock.enforce(policy).map(CasbinResult::Enforce),\n\n CasbinCmd::AddPolicy(policy) => {\n\n lock.add_policy(policy).await.map(CasbinResult::AddPolicy)\n\n }\n\n CasbinCmd::AddPolicies(policy) => lock\n\n .add_policies(policy)\n\n .await\n\n .map(CasbinResult::AddPolicies),\n\n CasbinCmd::AddNamedPolicy(ptype, policy) => lock\n\n .add_named_policy(&ptype, policy)\n\n .await\n\n .map(CasbinResult::AddNamedPolicy),\n\n CasbinCmd::AddNamedPolicies(ptype, policy) => lock\n\n .add_named_policies(&ptype, policy)\n\n .await\n\n .map(CasbinResult::AddNamedPolicies),\n", "file_path": "src/casbin_actor.rs", "rank": 13, "score": 0.8801641080506024 } ]
Rust
examples/iam/src/bin/iam-getting-started.rs
eduardomourar/aws-sdk-rust
58569c863afbe7bc442da8254df6c3970111de38
/* * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. Purpose Shows how to use the AWS SDK for PHP (v3) to get started using AWS Identity and Access Management (IAM). Create an IAM user, assume a role, and perform AWS actions. 1. Create a user that has no permissions. 2. Create a role and policy that grant s3:ListAllMyBuckets permission. 3. Grant the user permission to assume the role. 4. Create an S3 client object as the user and try to list buckets (this should fail). 5. Get temporary credentials by assuming the role. 6. Create an S3 client object with the temporary credentials and list the buckets (this should succeed). 7. Delete all the resources. To run the bin file directly, use the following command: cargo --bin iam-getting-started To run the service class tests run: cargo test */ use aws_config::meta::region::RegionProviderChain; use aws_sdk_iam::Error as iamError; use aws_sdk_iam::{Client as iamClient, Credentials as iamCredentials}; use aws_sdk_s3::Client as s3Client; use aws_sdk_sts::Client as stsClient; use aws_types::region::Region; use std::borrow::Borrow; use tokio::time::{sleep, Duration}; use uuid::Uuid; #[tokio::main] async fn main() -> Result<(), iamError> { let (client, uuid, list_all_buckets_policy_document, inline_policy_document) = initialize_variables().await; if let Err(e) = run_iam_operations( client, uuid, list_all_buckets_policy_document, inline_policy_document, ) .await { println!("{:?}", e); }; Ok(()) } async fn initialize_variables() -> (iamClient, String, String, String) { let region_provider = RegionProviderChain::first_try(Region::new("us-west-2")); let shared_config = aws_config::from_env().region(region_provider).load().await; let client = iamClient::new(&shared_config); let uuid = Uuid::new_v4().to_string(); let list_all_buckets_policy_document = "{ \"Version\": \"2012-10-17\", \"Statement\": [{ \"Effect\": \"Allow\", \"Action\": \"s3:ListAllMyBuckets\", \"Resource\": \"arn:aws:s3:::*\"}] }" .to_string(); let inline_policy_document = "{ \"Version\": \"2012-10-17\", \"Statement\": [{ \"Effect\": \"Allow\", \"Action\": \"sts:AssumeRole\", \"Resource\": \"{}\"}] }" .to_string(); ( client, uuid, list_all_buckets_policy_document, inline_policy_document, ) } async fn run_iam_operations( client: iamClient, uuid: String, list_all_buckets_policy_document: String, inline_policy_document: String, ) -> Result<(), iamError> { let user = iam_service::create_user(&client, &format!("{}{}", "iam_demo_user_", uuid)).await?; println!( "Created the user with the name: {}", user.user_name.as_ref().unwrap() ); let key = iam_service::create_access_key(&client, user.user_name.as_ref().unwrap()).await?; let assume_role_policy_document = "{ \"Version\": \"2012-10-17\", \"Statement\": [{ \"Effect\": \"Allow\", \"Principal\": {\"AWS\": \"{}\"}, \"Action\": \"sts:AssumeRole\" }] }" .to_string() .replace("{}", user.arn.as_ref().unwrap()); let assume_role_role = iam_service::create_role( &client, &format!("{}{}", "iam_demo_role_", uuid), &assume_role_policy_document, ) .await?; println!( "Created the role with the ARN: {}", assume_role_role.arn.as_ref().unwrap() ); let list_all_buckets_policy = iam_service::create_policy( &client, &format!("{}{}", "iam_demo_policy_", uuid), &list_all_buckets_policy_document, ) .await?; println!( "Created policy: {}", list_all_buckets_policy.policy_name.as_ref().unwrap() ); let attach_role_policy_result = iam_service::attach_role_policy(&client, &assume_role_role, &list_all_buckets_policy) 
.await?; println!( "Attached the policy to the role: {:?}", attach_role_policy_result ); let inline_policy_name = &format!("{}{}", "iam_demo_inline_policy_", uuid); let inline_policy_document = inline_policy_document.replace("{}", assume_role_role.arn.as_ref().unwrap()); iam_service::create_user_policy(&client, &user, &inline_policy_name, &inline_policy_document) .await?; println!("Created inline policy."); let creds = iamCredentials::from_keys( key.access_key_id.as_ref().unwrap(), key.secret_access_key.as_ref().unwrap(), None, ); let fail_config = aws_config::from_env() .credentials_provider(creds.clone()) .load() .await; println!("Fail config: {:?}", fail_config); let fail_client: s3Client = s3Client::new(&fail_config); match fail_client.list_buckets().send().await { Ok(e) => { println!("This should not run. {:?}", e); } Err(e) => { println!("Successfully failed with error: {:?}", e) } } let sts_config = aws_config::from_env() .credentials_provider(creds.clone()) .load() .await; let sts_client: stsClient = stsClient::new(&sts_config); sleep(Duration::from_secs(10)).await; let assumed_role = sts_client .assume_role() .role_arn(assume_role_role.arn.as_ref().unwrap()) .role_session_name(&format!("{}{}", "iam_demo_assumerole_session_", uuid)) .send() .await; println!("Assumed role: {:?}", assumed_role); sleep(Duration::from_secs(10)).await; let assumed_credentials = iamCredentials::from_keys( assumed_role .as_ref() .unwrap() .credentials .as_ref() .unwrap() .access_key_id .as_ref() .unwrap(), assumed_role .as_ref() .unwrap() .credentials .as_ref() .unwrap() .secret_access_key .as_ref() .unwrap(), assumed_role .as_ref() .unwrap() .credentials .as_ref() .unwrap() .session_token .borrow() .clone(), ); let succeed_config = aws_config::from_env() .credentials_provider(assumed_credentials) .load() .await; println!("succeed config: {:?}", succeed_config); let succeed_client: s3Client = s3Client::new(&succeed_config); sleep(Duration::from_secs(10)).await; match succeed_client.list_buckets().send().await { Ok(_) => { println!("This should now run successfully.") } Err(e) => { println!("This should not run. {:?}", e); panic!() } } iam_service::detach_role_policy( &client, assume_role_role.role_name.as_ref().unwrap(), list_all_buckets_policy.arn.as_ref().unwrap(), ) .await?; iam_service::delete_policy(&client, list_all_buckets_policy).await?; iam_service::delete_role(&client, &assume_role_role).await?; println!( "Deleted role {}", assume_role_role.role_name.as_ref().unwrap() ); iam_service::delete_access_key(&client, &user, &key).await?; println!("Deleted key for {}", key.user_name.as_ref().unwrap()); iam_service::delete_user_policy(&client, &user, &inline_policy_name).await?; println!("Deleted inline user policy: {}", inline_policy_name); iam_service::delete_user(&client, &user).await?; println!("Deleted user {}", user.user_name.as_ref().unwrap()); Ok(()) }
/* * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. Purpose Shows how to use the AWS SDK for PHP (v3) to get started using AWS Identity and Access Management (IAM). Create an IAM user, assume a role, and perform AWS actions. 1. Create a user that has no permissions. 2. Create a role and policy that grant s3:ListAllMyBuckets permission. 3. Grant the user permission to assume the role. 4. Create an S3 client object as the user and try to list buckets (this should fail). 5. Get temporary credentials by assuming the role. 6. Create an S3 client object with the temporary credentials and list the buckets (this should succeed). 7. Delete all the resources. To run the bin file directly, use the following command: cargo --bin iam-getting-started To run the service class tests run: cargo test */ use aws_config::meta::region::RegionProviderChain; use aws_sdk_iam::Error as iamError; use aws_sdk_iam::{Client as iamClient, Credentials as iamCredentials}; use aws_sdk_s3::Client as s3Client; use aws_sdk_sts::Client as stsClient; use aws_types::region::Region; use std::borrow::Borrow; use tokio::time::{sleep, Duration}; use uuid::Uuid; #[tokio::main] async fn main() -> Result<(), iamError> { let (client, uuid, list_all_buckets_policy_document, inline_policy_document) = initialize_variables().await; if let Err(e) = run_iam_operations( client, uuid, list_all_buckets_policy_document, inline_policy_document, ) .await { println!("{:?}", e); }; Ok(()) } async fn initialize_variables() -> (iamClient, String, String, String) { let region_provider = RegionProviderChain::first_try(Region::new("us-west-2")); let shared_config = aws_config::from_env().region(region_provider).load().await; let client = iamClient::new(&shared_config); let uuid = Uuid::new_v4().to_string(); let list_all_buckets_policy_document = "{ \"Version\": \"2012-10-17\", \"Statement\": [{ \"Effect\": \"Allow\", \"Action\": \"s3:ListAllMyBuckets\", \"Resource\": \"arn:aws:s3:::*\"}] }" .to_string(); let inline_policy_document = "{ \"Version\": \"2012-10-17\", \"Statement\": [{ \"Effect\": \"Allow\", \"Action\": \"sts:AssumeRole\", \"Resource\": \"{}\"}] }" .to_string(); ( client, uuid, list_all_buckets_policy_document, inline_policy_document, ) } async fn run_iam_operations( client: iamClient, uuid: String, list_all_buckets_policy_document: String, inline_policy_document: String, ) -> Result<(), iamError> { let user = iam_service::create_user(&client, &format!("{}{}", "iam_demo_user_", uuid)).await?; println!( "Created the user with the name: {}", user.user_name.as_ref().unwrap() ); let key = iam_service::create_access_key(&client, user.user_name.as_ref().unwrap()).await?; let assume_role_policy_document = "{ \"Version\": \"2012-10-17\", \"Statement\": [{ \"Effect\": \"Allow\", \"Principal\": {\"AWS\": \"{}\"}, \"Action\": \"sts:AssumeRole\" }] }" .to_string() .replace("{}", user.arn.as_ref().unwrap()); let assume_role_role = iam_service::create_role( &client, &format!("{}{}", "iam_demo_role_", uuid), &assume_role_policy_document, ) .await?; println!( "Created the role with the ARN: {}", assume_role_role.arn.as_ref().unwrap() ); let list_all_buckets_policy = iam_service::create_policy( &client, &format!("{}{}", "iam_demo_policy_", uuid), &list_all_buckets_policy_document, ) .await?; println!( "Created p
y).await?; iam_service::delete_role(&client, &assume_role_role).await?; println!( "Deleted role {}", assume_role_role.role_name.as_ref().unwrap() ); iam_service::delete_access_key(&client, &user, &key).await?; println!("Deleted key for {}", key.user_name.as_ref().unwrap()); iam_service::delete_user_policy(&client, &user, &inline_policy_name).await?; println!("Deleted inline user policy: {}", inline_policy_name); iam_service::delete_user(&client, &user).await?; println!("Deleted user {}", user.user_name.as_ref().unwrap()); Ok(()) }
olicy: {}", list_all_buckets_policy.policy_name.as_ref().unwrap() ); let attach_role_policy_result = iam_service::attach_role_policy(&client, &assume_role_role, &list_all_buckets_policy) .await?; println!( "Attached the policy to the role: {:?}", attach_role_policy_result ); let inline_policy_name = &format!("{}{}", "iam_demo_inline_policy_", uuid); let inline_policy_document = inline_policy_document.replace("{}", assume_role_role.arn.as_ref().unwrap()); iam_service::create_user_policy(&client, &user, &inline_policy_name, &inline_policy_document) .await?; println!("Created inline policy."); let creds = iamCredentials::from_keys( key.access_key_id.as_ref().unwrap(), key.secret_access_key.as_ref().unwrap(), None, ); let fail_config = aws_config::from_env() .credentials_provider(creds.clone()) .load() .await; println!("Fail config: {:?}", fail_config); let fail_client: s3Client = s3Client::new(&fail_config); match fail_client.list_buckets().send().await { Ok(e) => { println!("This should not run. {:?}", e); } Err(e) => { println!("Successfully failed with error: {:?}", e) } } let sts_config = aws_config::from_env() .credentials_provider(creds.clone()) .load() .await; let sts_client: stsClient = stsClient::new(&sts_config); sleep(Duration::from_secs(10)).await; let assumed_role = sts_client .assume_role() .role_arn(assume_role_role.arn.as_ref().unwrap()) .role_session_name(&format!("{}{}", "iam_demo_assumerole_session_", uuid)) .send() .await; println!("Assumed role: {:?}", assumed_role); sleep(Duration::from_secs(10)).await; let assumed_credentials = iamCredentials::from_keys( assumed_role .as_ref() .unwrap() .credentials .as_ref() .unwrap() .access_key_id .as_ref() .unwrap(), assumed_role .as_ref() .unwrap() .credentials .as_ref() .unwrap() .secret_access_key .as_ref() .unwrap(), assumed_role .as_ref() .unwrap() .credentials .as_ref() .unwrap() .session_token .borrow() .clone(), ); let succeed_config = aws_config::from_env() .credentials_provider(assumed_credentials) .load() .await; println!("succeed config: {:?}", succeed_config); let succeed_client: s3Client = s3Client::new(&succeed_config); sleep(Duration::from_secs(10)).await; match succeed_client.list_buckets().send().await { Ok(_) => { println!("This should now run successfully.") } Err(e) => { println!("This should not run. {:?}", e); panic!() } } iam_service::detach_role_policy( &client, assume_role_role.role_name.as_ref().unwrap(), list_all_buckets_policy.arn.as_ref().unwrap(), ) .await?; iam_service::delete_policy(&client, list_all_buckets_polic
random
[]
Rust
src/mdbook/fenced_blocks.rs
FifthTry/ft-cli
155c0765bc2e74d5d6dada52b95f8c4422d0e0a3
pub(crate) fn fenced_to_code(content: &str, img_src: &std::path::Path) -> String { #[derive(PartialEq)] enum ParsingState { WaitingForBackTick, WaitingForEndBackTick, } struct State { state: ParsingState, sections: Vec<String>, } let mut state = State { state: ParsingState::WaitingForBackTick, sections: vec![], }; fn parse_lang(line: &str) -> String { let line = line.replace("```", ""); let line = line.trim().split(',').collect::<Vec<_>>(); (match line.get(0) { Some(&"rust") => "rs", Some(&"console") => "sh", Some(&"cmd") => "sh", Some(&"toml") => "toml", Some(&"java") => "java", Some(&"python") => "py", _ => "txt", }) .to_string() } fn finalize(state: State) -> String { state.sections.join("\n") } let mut buffer: String = "".to_string(); let mut is_markdown = false; let mut filename = Option::<String>::None; for line in content.split('\n') { if line.trim().starts_with("<span class=\"filename\"") && line.trim().ends_with("</span>") { let dom = html_parser::Dom::parse(line.trim()).unwrap(); if let Some(html_parser::Node::Element(e)) = dom.children.get(0) { if let Some(html_parser::Node::Text(text)) = e.children.get(0) { let text = if text.contains(':') { match text.split(':').collect::<Vec<_>>().last() { Some(s) => s.to_string(), None => text.to_string(), } } else { text.to_string() }; filename = Some(text); } } } else if line.trim().starts_with("```") && state.state == ParsingState::WaitingForBackTick { let lang = parse_lang(line); if !buffer.is_empty() { let content = buffer.drain(..).collect::<String>(); if !content.trim().is_empty() { let section = if is_markdown { ftd::Markdown::from_body(&content).to_p1().to_string() } else { content }; state.sections.push(section); } } state.state = ParsingState::WaitingForEndBackTick; buffer = format!( "-- code:\nlang: {}{}\n\n", lang, filename .take() .map(|x| format!("\nfilename: {}", x)) .unwrap_or_else(|| "".to_string()) ); is_markdown = false; } else if line.trim().starts_with("```") && state.state == ParsingState::WaitingForEndBackTick { state.sections.push(buffer.drain(..).collect()); state.state = ParsingState::WaitingForBackTick; is_markdown = true; } else { buffer.push_str(line); buffer.push('\n'); } } if !buffer.is_empty() { let content = buffer.drain(..).collect::<String>(); if !content.trim().is_empty() { let section = if is_markdown { ftd::Markdown::from_body(&content).to_p1().to_string() } else { content }; state.sections.push(section); } } let content = finalize(state); img_to_code(content.as_str(), img_src) } pub(crate) fn img_to_code(content: &str, img_src: &std::path::Path) -> String { let mut sections = vec![]; let mut is_markdown = false; let mut buffer: String = "".to_string(); for line in content.lines() { if line.starts_with("<img") && line.ends_with("/>") { if !buffer.is_empty() { let sec = if is_markdown { ftd::Markdown::from_body(&buffer.drain(..).collect::<String>()) .to_p1() .to_string() } else { buffer.drain(..).collect::<String>() }; sections.push(sec); } is_markdown = true; let dom = html_parser::Dom::parse(line) .unwrap_or_else(|_| panic!("unable to parse: {}", line)); if let Some(html_parser::Node::Element(element)) = dom.children.get(0) { if let Some(Some(src)) = element.attributes.get("src") { let cap = if let Some(Some(alt)) = element.attributes.get("alt") { alt.as_str() } else { "" }; let src = img_src.join(src); let sec = ftd::Image::default() .with_src(&src.to_string_lossy()) .with_caption(cap) .with_width(500) .with_alt(cap) .to_p1() .to_string(); sections.push(sec); } } } else { buffer.push_str(line); 
buffer.push('\n'); } } if !buffer.is_empty() { let sec = if is_markdown { ftd::Markdown::from_body(&buffer.drain(..).collect::<String>()) .to_p1() .to_string() } else { buffer.drain(..).collect::<String>() }; sections.push(sec); } sections.join("\n\n") }
pub(crate) fn fenced_to_code(content: &str, img_src: &std::path::Path) -> String { #[derive(PartialEq)] enum ParsingState { WaitingForBackTick, WaitingForEndBackTick, } struct State { state: ParsingState, sections: Vec<String>, } let mut state = State { state: ParsingState::WaitingForBackTick, sections: vec![], }; fn parse_lang(line: &str) -> String { let line = line.replace("```", ""); let line = line.trim().split(',').collect::<Vec<_>>(); (match line.get(0) { Some(&"rust") => "rs", Some(&"console") => "sh", Some(&"cmd") => "sh", Some(&"toml") => "toml", Some(&"java") => "java", Some(&"python") => "py", _ => "txt", }) .to_string() } fn finalize(state: State) -> String { state.sections.join("\n") } let mut buffer: String = "".to_string(); let mut is_markdown = false; let mut filename = Option::<String>::None; for line in content.split('\n') { if line.trim().starts_with("<span class=\"filename\"") && line.trim().ends_with("</span>") { let dom = html_parser::Dom::parse(line.trim()).unwrap();
ftd::Markdown::from_body(&content).to_p1().to_string() } else { content }; state.sections.push(section); } } state.state = ParsingState::WaitingForEndBackTick; buffer = format!( "-- code:\nlang: {}{}\n\n", lang, filename .take() .map(|x| format!("\nfilename: {}", x)) .unwrap_or_else(|| "".to_string()) ); is_markdown = false; } else if line.trim().starts_with("```") && state.state == ParsingState::WaitingForEndBackTick { state.sections.push(buffer.drain(..).collect()); state.state = ParsingState::WaitingForBackTick; is_markdown = true; } else { buffer.push_str(line); buffer.push('\n'); } } if !buffer.is_empty() { let content = buffer.drain(..).collect::<String>(); if !content.trim().is_empty() { let section = if is_markdown { ftd::Markdown::from_body(&content).to_p1().to_string() } else { content }; state.sections.push(section); } } let content = finalize(state); img_to_code(content.as_str(), img_src) } pub(crate) fn img_to_code(content: &str, img_src: &std::path::Path) -> String { let mut sections = vec![]; let mut is_markdown = false; let mut buffer: String = "".to_string(); for line in content.lines() { if line.starts_with("<img") && line.ends_with("/>") { if !buffer.is_empty() { let sec = if is_markdown { ftd::Markdown::from_body(&buffer.drain(..).collect::<String>()) .to_p1() .to_string() } else { buffer.drain(..).collect::<String>() }; sections.push(sec); } is_markdown = true; let dom = html_parser::Dom::parse(line) .unwrap_or_else(|_| panic!("unable to parse: {}", line)); if let Some(html_parser::Node::Element(element)) = dom.children.get(0) { if let Some(Some(src)) = element.attributes.get("src") { let cap = if let Some(Some(alt)) = element.attributes.get("alt") { alt.as_str() } else { "" }; let src = img_src.join(src); let sec = ftd::Image::default() .with_src(&src.to_string_lossy()) .with_caption(cap) .with_width(500) .with_alt(cap) .to_p1() .to_string(); sections.push(sec); } } } else { buffer.push_str(line); buffer.push('\n'); } } if !buffer.is_empty() { let sec = if is_markdown { ftd::Markdown::from_body(&buffer.drain(..).collect::<String>()) .to_p1() .to_string() } else { buffer.drain(..).collect::<String>() }; sections.push(sec); } sections.join("\n\n") }
if let Some(html_parser::Node::Element(e)) = dom.children.get(0) { if let Some(html_parser::Node::Text(text)) = e.children.get(0) { let text = if text.contains(':') { match text.split(':').collect::<Vec<_>>().last() { Some(s) => s.to_string(), None => text.to_string(), } } else { text.to_string() }; filename = Some(text); } } } else if line.trim().starts_with("```") && state.state == ParsingState::WaitingForBackTick { let lang = parse_lang(line); if !buffer.is_empty() { let content = buffer.drain(..).collect::<String>(); if !content.trim().is_empty() { let section = if is_markdown {
random
[ { "content": "fn content_with_extract_title(content: &str) -> (String, Option<String>) {\n\n let lines = content.lines().into_iter().collect::<Vec<_>>();\n\n let mut title_line = None;\n\n for line in lines.iter() {\n\n if line.trim().starts_with('#') {\n\n title_line = Some(line.to_string());\n\n break;\n\n }\n\n }\n\n\n\n let lines = match title_line.as_ref() {\n\n Some(line) => lines\n\n .into_iter()\n\n .filter(|l| !l.trim().eq(line.trim()))\n\n .collect::<Vec<_>>(),\n\n None => lines,\n\n };\n\n\n\n (\n\n lines.join(\"\\n\"),\n\n title_line.map(|x| {\n\n x.trim()\n\n .trim_start_matches('#')\n\n .trim_matches(' ')\n\n .to_string()\n\n }),\n\n )\n\n}\n", "file_path": "src/mdbook/handle.rs", "rank": 0, "score": 150538.24697692532 }, { "content": "pub fn to_markdown(node: &Node, root_dir: &str, collection_id: &str) -> String {\n\n fn tree_to_markdown_util(\n\n node: &Node,\n\n level: usize,\n\n markdown: &mut String,\n\n root_dir: &str,\n\n collection_id: &str,\n\n ) {\n\n for x in node.children.iter() {\n\n let mut path = x.document_id(root_dir, collection_id);\n\n let file_name = path\n\n .clone()\n\n .file_name()\n\n .unwrap()\n\n .to_string_lossy()\n\n .to_string();\n\n if x.is_dir {\n\n if let Some(readme) = x.readme() {\n\n path = crate::id::to_document_id(&readme, root_dir, collection_id);\n\n }\n", "file_path": "src/traverse.rs", "rank": 1, "score": 139416.4695379588 }, { "content": "pub fn collection_toc(node: &Node, root_dir: &str, collection_id: &str) -> String {\n\n fn tree_to_toc_util(\n\n node: &Node,\n\n level: usize,\n\n toc_string: &mut String,\n\n root_dir: &str,\n\n collection_id: &str,\n\n ) {\n\n for x in node.children.iter() {\n\n let mut path = x.document_id(root_dir, collection_id);\n\n\n\n let file_name = path\n\n .clone()\n\n .file_name()\n\n .unwrap()\n\n .to_string_lossy()\n\n .to_string();\n\n\n\n if let Some(readme) = x.readme() {\n\n path = crate::id::to_document_id(&readme, root_dir, collection_id);\n", "file_path": "src/traverse.rs", "rank": 2, "score": 137286.37425040003 }, { "content": "pub fn ls_tree(hash: &str, root_dir: &str) -> crate::Result<Vec<crate::FileMode>> {\n\n let files: String = if crate::is_test() {\n\n realm_client::mock(\n\n Some(\"ls_tree\".to_string()),\n\n // serde_json::json! 
({\"hash\": hash, \"git_root\": git_root, \"root_dir\": root_dir}),\n\n serde_json::json!({ \"hash\": hash }),\n\n )\n\n } else {\n\n let cmd = std::process::Command::new(\"git\")\n\n .args(&[\"ls-tree\", \"-r\", \"--name-only\", hash.trim()])\n\n .output()?;\n\n\n\n String::from_utf8(cmd.stdout)?\n\n };\n\n\n\n let files = files.lines();\n\n Ok(files\n\n .into_iter()\n\n .filter_map(|path| {\n\n if path.starts_with(root_dir) {\n\n Some(crate::FileMode::Created(path.to_string()))\n\n } else {\n\n None\n\n }\n\n })\n\n .collect())\n\n}\n\n\n", "file_path": "src/git.rs", "rank": 3, "score": 130889.74635828071 }, { "content": "pub fn ancestors<'a>(node: &'a Node, path: &str) -> Vec<&'a Node> {\n\n fn dir_till_path_util<'a>(node: &'a Node, path: &str, dirs: &mut Vec<&'a Node>) -> bool {\n\n if node.path.eq(path) {\n\n return true;\n\n }\n\n\n\n for node in node.children.iter() {\n\n if node.is_dir && dir_till_path_util(&node, path, dirs) {\n\n dirs.push(node);\n\n return true;\n\n }\n\n if node.path.eq(path) {\n\n return true;\n\n }\n\n }\n\n false\n\n }\n\n\n\n let mut dirs = vec![];\n\n dir_till_path_util(node, path, &mut dirs);\n", "file_path": "src/traverse.rs", "rank": 4, "score": 129745.71811165512 }, { "content": "fn find_chapter_in_book(book: &mdbook::book::Book, name: &str) -> Option<String> {\n\n fn util(book: &[mdbook::book::BookItem], name: &str) -> Option<String> {\n\n for book_item in book.iter() {\n\n match match book_item {\n\n mdbook::book::BookItem::Chapter(ch) => {\n\n if let Some(path) = ch.path.as_ref() {\n\n if path.eq(std::path::Path::new(name)) {\n\n return Some(ch.content.to_string());\n\n }\n\n }\n\n util(&ch.sub_items, name)\n\n }\n\n _ => None,\n\n } {\n\n Some(t) => return Some(t),\n\n None => continue,\n\n }\n\n }\n\n None\n\n }\n\n util(&book.sections, name)\n\n}\n\n\n", "file_path": "src/mdbook/handle.rs", "rank": 5, "score": 122498.30073620568 }, { "content": "fn chapter_title(summary: &mdbook::book::Summary, file_name: &str) -> Option<String> {\n\n self::get_by_name(summary, file_name).map(|x| x.name)\n\n}\n\n\n", "file_path": "src/mdbook/handle.rs", "rank": 6, "score": 122498.30073620568 }, { "content": "pub fn auth_code() -> String {\n\n match std::env::var(\"FT_AUTH_CODE\") {\n\n Ok(code) => code,\n\n Err(_) => panic!(\"FT_AUTH_CODE not found in environment\"),\n\n }\n\n}\n", "file_path": "src/config/env.rs", "rank": 7, "score": 108110.3956257467 }, { "content": "pub fn action<T, B>(url: &str, body: B, tid: Option<String>) -> crate::Result<T>\n\nwhere\n\n T: serde::de::DeserializeOwned,\n\n B: serde::Serialize,\n\n{\n\n let url = crate::url(url);\n\n\n\n if crate::is_test() {\n\n return crate::mock(tid, serde_json::json! 
({\"url\": url.as_str(), \"body\": body}));\n\n }\n\n\n\n let json = match serde_json::to_string(&body) {\n\n Ok(v) => v,\n\n Err(e) => return Err(crate::Error::SerializeError(e)),\n\n };\n\n\n\n crate::handle(crate::client(url.as_str(), reqwest::Method::POST).body(json))\n\n}\n", "file_path": "realm-client/src/action.rs", "rank": 8, "score": 107806.03716915239 }, { "content": "pub fn head() -> crate::Result<String> {\n\n if crate::is_test() {\n\n return Ok(realm_client::mock(\n\n Some(\"head\".to_string()),\n\n serde_json::json!({}),\n\n ));\n\n }\n\n\n\n let output = std::process::Command::new(\"git\")\n\n .arg(\"rev-parse\")\n\n .arg(\"HEAD\")\n\n .output()?;\n\n Ok(String::from_utf8(output.stdout)?.trim().to_string())\n\n}\n\n\n", "file_path": "src/git.rs", "rank": 9, "score": 103961.58934198166 }, { "content": "pub fn to_document_id(path: &str, root_dir: &str, collection_id: &str) -> std::path::PathBuf {\n\n let path_without_root = std::path::Path::new(path)\n\n .strip_prefix(root_dir)\n\n .unwrap_or_else(|_| {\n\n panic!(\n\n \"path `{}` does not start with root_dir `{}`\",\n\n path, root_dir\n\n )\n\n });\n\n std::path::PathBuf::from(collection_id).join(&path_without_root)\n\n}\n", "file_path": "src/id.rs", "rank": 10, "score": 102068.71560293197 }, { "content": "pub fn current_branch() -> crate::Result<String> {\n\n // git rev-parse --abbrev-ref HEAD\n\n let output = std::process::Command::new(\"git\")\n\n .arg(\"rev-parse\")\n\n .arg(\"--abbrev-ref\")\n\n .arg(\"HEAD\")\n\n .output()?;\n\n Ok(String::from_utf8(output.stdout)?.trim().to_string())\n\n}\n", "file_path": "src/git.rs", "rank": 11, "score": 101795.01077786116 }, { "content": "pub fn elapsed(e: std::time::Instant) -> String {\n\n let e = e.elapsed();\n\n let nanos = e.subsec_nanos();\n\n let fraction = match nanos {\n\n t if nanos < 1000 => format!(\"{}ns\", t),\n\n t if nanos < 1_000_000 => format!(\"{:.*}µs\", 3, f64::from(t) / 1000.0),\n\n t => format!(\"{:.*}ms\", 3, f64::from(t) / 1_000_000.0),\n\n };\n\n let secs = e.as_secs();\n\n match secs {\n\n _ if secs == 0 => fraction,\n\n t if secs < 5 => format!(\"{}.{:06}s\", t, nanos / 1000),\n\n t if secs < 60 => format!(\"{}.{:03}s\", t, nanos / 1_000_000),\n\n t if secs < 3600 => format!(\"{}m {}s\", t / 60, t % 60),\n\n t if secs < 86400 => format!(\"{}h {}m\", t / 3600, (t % 3600) / 60),\n\n t => format!(\"{}s\", t),\n\n }\n\n}\n", "file_path": "src/utils.rs", "rank": 12, "score": 98693.5313656749 }, { "content": "pub fn to_ftd_toc(node: &Node, root_dir: &str, collection_id: &str) -> ftd::toc::ToC {\n\n fn to_toc_items(node: &Node, root_dir: &str, collection_id: &str) -> Vec<ftd::toc::TocItem> {\n\n node.children\n\n .iter()\n\n .map(|c| {\n\n let mut path = c.document_id(root_dir, collection_id);\n\n let file_name = path.file_name().unwrap().to_string_lossy().to_string();\n\n\n\n let title = if c.is_dir {\n\n format!(\"`{}/`\", file_name)\n\n } else {\n\n format!(\"`{}`\", file_name)\n\n };\n\n\n\n if let Some(readme) = c.readme() {\n\n path = crate::id::to_document_id(&readme, root_dir, collection_id);\n\n }\n\n\n\n let mut item =\n\n ftd::toc::TocItem::with_title_and_id(&title, &path.to_string_lossy());\n\n item.children = to_toc_items(c, root_dir, collection_id);\n\n item\n\n })\n\n .collect()\n\n }\n\n ftd::ToC {\n\n items: to_toc_items(node, root_dir, collection_id),\n\n }\n\n}\n\n\n", "file_path": "src/traverse.rs", "rank": 13, "score": 95111.17619802813 }, { "content": "fn summary_title(summary: &mdbook::book::Summary) -> Option<String> {\n\n 
summary.title.clone()\n\n}\n\n\n", "file_path": "src/mdbook/handle.rs", "rank": 14, "score": 92849.62486029045 }, { "content": "fn is_summary_contains(summary: &mdbook::book::Summary, file_name: &str) -> bool {\n\n self::get_by_name(summary, file_name).is_some()\n\n}\n\n\n", "file_path": "src/mdbook/handle.rs", "rank": 15, "score": 88027.07448602542 }, { "content": "pub fn mock<T1, T2>(tid: Option<String>, input: T1) -> T2\n\nwhere\n\n T1: serde::Serialize,\n\n T2: serde::de::DeserializeOwned,\n\n{\n\n let tid = match tid {\n\n Some(v) => v,\n\n None => panic!(\"tid is none in test mode\"),\n\n };\n\n\n\n // write to ./tid.url and return content of tid.json\n\n\n\n std::fs::create_dir_all(\"out\").unwrap();\n\n let out = format!(\"out/{}.out.json\", tid.as_str());\n\n std::fs::write(\n\n out.as_str(),\n\n sorted_json::to_json(&serde_json::to_value(input).unwrap()),\n\n )\n\n .unwrap_or_else(|e| panic!(\"failed to write to: {}, err={:?}\", out, e));\n\n\n\n let input = format!(\"{}.in.json\", tid.as_str());\n\n\n\n serde_json::from_str(\n\n std::fs::read_to_string(input.as_str())\n\n .unwrap_or_else(|e| panic!(\"failed to read from: {}, err={:?}\", input, e))\n\n .as_str(),\n\n )\n\n .expect(\"failed to parse json\")\n\n}\n", "file_path": "realm-client/src/lib.rs", "rank": 16, "score": 84410.07453393274 }, { "content": "fn to_ftd_toc(book: &mdbook::book::Book, collection_id: &str) -> ftd::toc::ToC {\n\n fn path_to_doc_id(path: &str, collection_id: &str) -> std::path::PathBuf {\n\n std::path::PathBuf::from(collection_id)\n\n .join(path)\n\n .with_extension(\"\")\n\n }\n\n\n\n fn to_ftd_items(items: &[mdbook::BookItem], collection_id: &str) -> Vec<ftd::toc::TocItem> {\n\n let mut toc_items = vec![];\n\n for item in items.iter() {\n\n match item {\n\n mdbook::BookItem::Chapter(chapter) => {\n\n if let Some(name) = chapter.path.as_ref() {\n\n if name.to_string_lossy().eq(\"title-page.md\") {\n\n continue;\n\n }\n\n }\n\n let id = path_to_doc_id(\n\n &match chapter.path.as_ref() {\n\n Some(p) => p.to_string_lossy().to_string(),\n", "file_path": "src/mdbook/handle.rs", "rank": 17, "score": 82780.75391617374 }, { "content": "// can be moved to separate mod `mdbook`\n\nfn get_by_name(summary: &mdbook::book::Summary, file_name: &str) -> Option<mdbook::book::Link> {\n\n fn find_in_items(\n\n items: &[mdbook::book::SummaryItem],\n\n file_name: &str,\n\n ) -> Option<mdbook::book::Link> {\n\n for item in items {\n\n match match item {\n\n mdbook::book::SummaryItem::Link(link) => {\n\n if let Some(name) = link.location.as_ref() {\n\n if name.to_string_lossy().eq(file_name) {\n\n return Some(link.clone());\n\n }\n\n }\n\n find_in_items(&link.nested_items, file_name)\n\n }\n\n mdbook::book::SummaryItem::PartTitle(_) => None,\n\n mdbook::book::SummaryItem::Separator => None,\n\n } {\n\n Some(s) => return Some(s),\n\n None => continue,\n\n };\n\n }\n\n None\n\n }\n\n\n\n find_in_items(&summary.numbered_chapters, file_name)\n\n .or_else(|| find_in_items(&summary.prefix_chapters, file_name))\n\n .or_else(|| find_in_items(&summary.suffix_chapters, file_name))\n\n}\n\n\n", "file_path": "src/mdbook/handle.rs", "rank": 18, "score": 80423.45210261228 }, { "content": "#[derive(serde_derive::Serialize)]\n\nstruct File {\n\n id: String,\n\n content: String,\n\n}\n\n\n\npub enum Error {\n\n RealmClientError(realm_client::Error),\n\n ContentMismatch { id: String },\n\n}\n\n\n", "file_path": "ft-api/src/bulk_update.rs", "rank": 19, "score": 76711.67619288128 }, { "content": "#[derive(serde_derive::Deserialize)]\n\nstruct 
A<T> {\n\n pub success: bool,\n\n pub result: Option<T>,\n\n pub error: Option<std::collections::HashMap<String, String>>,\n\n}\n\n\n\nimpl<'de, T> serde::de::Deserialize<'de> for ApiResponse<T>\n\nwhere\n\n T: serde::de::DeserializeOwned,\n\n{\n\n fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>\n\n where\n\n D: serde::de::Deserializer<'de>,\n\n {\n\n let a = A::deserialize(deserializer)?;\n\n if a.success {\n\n match a.result {\n\n Some(v) => Ok(ApiResponse(Ok(v))),\n\n None => Err(serde::de::Error::custom(\n\n \"success is true but result is None\",\n", "file_path": "realm-client/src/types.rs", "rank": 20, "score": 76080.97244774207 }, { "content": "#[derive(serde_derive::Serialize)]\n\nstruct BulkUpdateInput {\n\n collection: String,\n\n auth_code: String,\n\n current_hash: String,\n\n new_hash: String,\n\n repo: String,\n\n files: Vec<Action>,\n\n}\n\n\n", "file_path": "ft-api/src/bulk_update.rs", "rank": 21, "score": 73827.71773022128 }, { "content": "fn ancestors(\n\n root_tree: &crate::traverse::Node,\n\n file_path: &str,\n\n root_dir: &str,\n\n collection: &str,\n\n preserve_meta: bool,\n\n) -> Vec<ft_api::bulk_update::Action> {\n\n crate::traverse::ancestors(root_tree, file_path)\n\n .iter()\n\n .filter(|x| !x.readme_exists())\n\n .map(|node| ft_api::bulk_update::Action::Updated {\n\n preserve_meta,\n\n id: node\n\n .document_id(root_dir, collection)\n\n .to_string_lossy()\n\n .to_string(),\n\n content: node.to_markdown(root_dir, collection),\n\n })\n\n .collect()\n\n}\n", "file_path": "src/raw.rs", "rank": 22, "score": 68059.61097178972 }, { "content": "fn index(\n\n tree: &crate::traverse::Node,\n\n config: &crate::Config,\n\n) -> crate::Result<ft_api::bulk_update::Action> {\n\n let readme_content = if let Some(readme) = tree.readme() {\n\n let file = crate::FileMode::Modified(readme);\n\n Some(file.content()?)\n\n } else {\n\n None\n\n };\n\n\n\n let mut content = vec![\n\n ftd::Section::Heading(ftd::Heading::new(\n\n 0,\n\n config\n\n .title\n\n .clone()\n\n .unwrap_or_else(|| format!(\"`{}`\", config.collection.as_str()))\n\n .as_str(),\n\n )),\n", "file_path": "src/raw.rs", "rank": 23, "score": 68059.61097178972 }, { "content": "fn handle(\n\n tree: &crate::traverse::Node,\n\n file: &crate::FileMode,\n\n root: &str,\n\n collection: &str,\n\n preserve_meta: bool,\n\n) -> crate::Result<Vec<ft_api::bulk_update::Action>> {\n\n if !RAW_EXTENSIONS.contains(&file.extension().as_str()) {\n\n return Ok(vec![]);\n\n }\n\n\n\n let id = match file.id_with_extension(root, collection) {\n\n Ok(id) => id,\n\n Err(e) => {\n\n eprintln!(\"{}\", e.to_string());\n\n return Ok(vec![]);\n\n }\n\n };\n\n\n\n Ok(match file {\n", "file_path": "src/raw.rs", "rank": 24, "score": 68059.61097178972 }, { "content": "fn handle(\n\n file: &crate::FileMode,\n\n root_dir: &str,\n\n collection: &str,\n\n preserve_meta: bool,\n\n) -> crate::Result<Vec<ft_api::bulk_update::Action>> {\n\n if file.extension() != \"ftd\" {\n\n return Ok(vec![]);\n\n }\n\n\n\n let id = match file.id(root_dir, collection) {\n\n Ok(id) => id,\n\n Err(e) => {\n\n eprintln!(\"{}\", e.to_string());\n\n return Ok(vec![]);\n\n }\n\n };\n\n\n\n Ok(match file {\n\n crate::types::FileMode::Created(_) => {\n", "file_path": "src/ftd.rs", "rank": 25, "score": 68059.61097178972 }, { "content": "fn main() {\n\n let cmd = clap::App::new(\"ft\")\n\n .author(env!(\"CARGO_PKG_AUTHORS\"))\n\n .version(env!(\"CARGO_PKG_VERSION\"))\n\n .about(env!(\"CARGO_PKG_DESCRIPTION\"))\n\n .arg(\n\n clap::Arg::with_name(\"test\")\n\n 
.short(\"t\")\n\n .long(\"test\")\n\n .required(false)\n\n .value_name(\"TEST\")\n\n .help(\"if to run in test mode\")\n\n .hidden(true)\n\n .takes_value(false),\n\n )\n\n .subcommand(clap::SubCommand::with_name(\"status\").about(\"show the sync status\"))\n\n .subcommand(\n\n clap::SubCommand::with_name(\"sync\").about(\"sync files\").arg(\n\n clap::Arg::with_name(\"all\")\n\n .long(\"all\")\n", "file_path": "src/main.rs", "rank": 26, "score": 68059.61097178972 }, { "content": "fn handle(\n\n summary: &mdbook::book::Summary,\n\n book: &mdbook::book::Book,\n\n config: &crate::Config,\n\n book_config: &mdbook::Config,\n\n file: &crate::FileMode,\n\n root: &str,\n\n collection: &str,\n\n) -> crate::Result<Vec<ft_api::bulk_update::Action>> {\n\n fn title(summary: &mdbook::book::Summary, file_path: &std::path::Path, id: &str) -> String {\n\n match file_path.file_name() {\n\n Some(p) => match self::chapter_title(summary, &p.to_string_lossy().to_string()) {\n\n Some(t) => t,\n\n None => id.to_string(),\n\n },\n\n None => id.to_string(),\n\n }\n\n }\n\n\n\n fn content_with_title(\n", "file_path": "src/mdbook/handle.rs", "rank": 27, "score": 66340.33133502756 }, { "content": "fn index(\n\n summary: &mdbook::book::Summary,\n\n book: &mdbook::book::Book,\n\n config: &crate::Config,\n\n src: &std::path::Path,\n\n) -> crate::Result<ft_api::bulk_update::Action> {\n\n let mut title = self::summary_title(summary).unwrap_or_else(|| config.collection.to_string());\n\n\n\n let mut sections = vec![];\n\n\n\n let title_page = std::path::Path::new(&config.root)\n\n .join(src)\n\n .join(\"title-page.md\");\n\n if title_page.exists() {\n\n let content = std::fs::read_to_string(&title_page)\n\n .map_err(|e| crate::Error::ReadError(e, title_page.to_string_lossy().to_string()))?;\n\n\n\n let (content, content_title) = self::content_with_extract_title(&content);\n\n\n\n if let Some(content_title) = content_title {\n", "file_path": "src/mdbook/handle.rs", "rank": 28, "score": 66340.33133502756 }, { "content": "pub fn handle_files(\n\n config: &crate::Config,\n\n files: &[crate::FileMode],\n\n) -> crate::Result<Vec<ft_api::bulk_update::Action>> {\n\n let mut actions = vec![];\n\n\n\n for file in files.iter() {\n\n actions.append(&mut self::handle(\n\n file,\n\n config.root.as_str(),\n\n config.collection.as_str(),\n\n config.preserve_meta,\n\n )?);\n\n }\n\n Ok(actions)\n\n}\n\n\n", "file_path": "src/ftd.rs", "rank": 29, "score": 63622.56196964311 }, { "content": "pub fn changed_files(\n\n hash1: &str,\n\n hash2: &str,\n\n root_dir: &str,\n\n) -> crate::Result<Vec<crate::FileMode>> {\n\n let files: String = if crate::is_test() {\n\n realm_client::mock(\n\n Some(\"changed_files\".to_string()),\n\n // serde_json::json! ({\"hash1\": hash1, \"hash2\": hash2, \"git_root\": git_root, \"root_dir\": root_dir}),\n\n serde_json::json! 
({\"hash1\": hash1, \"hash2\": hash2}),\n\n )\n\n } else {\n\n let cmd = std::process::Command::new(\"git\")\n\n .args(&[\n\n \"diff\",\n\n \"--name-status\",\n\n \"--no-renames\",\n\n hash1.trim(),\n\n hash2.trim(),\n\n ])\n", "file_path": "src/git.rs", "rank": 30, "score": 63622.56196964311 }, { "content": "pub fn handle_files(\n\n config: &crate::Config,\n\n files: &[crate::FileMode],\n\n) -> crate::Result<Vec<ft_api::bulk_update::Action>> {\n\n let mut actions = vec![];\n\n let tree = crate::traverse::root_tree(&std::path::PathBuf::from(&config.root))?;\n\n\n\n for file in files.iter() {\n\n actions.append(&mut self::handle(\n\n &tree,\n\n &file,\n\n config.root.as_str(),\n\n config.collection.as_str(),\n\n config.preserve_meta,\n\n )?);\n\n }\n\n\n\n if files.iter().any(|v| {\n\n matches!(v, crate::FileMode::Created(_)) || matches!(v, crate::FileMode::Deleted(_))\n\n }) {\n\n actions.push(self::index(&tree, config)?)\n\n }\n\n\n\n Ok(actions)\n\n}\n\n\n", "file_path": "src/raw.rs", "rank": 31, "score": 63622.56196964311 }, { "content": "fn link_preprocessor_ctx(\n\n root: std::path::PathBuf,\n\n config: mdbook::config::Config,\n\n renderer: String,\n\n) -> mdbook::preprocess::PreprocessorContext {\n\n mdbook::preprocess::PreprocessorContext::new(root, config, renderer)\n\n}\n\n\n", "file_path": "src/mdbook/handle.rs", "rank": 32, "score": 63199.54420477752 }, { "content": "fn link_preprocess_book(\n\n ctx: &mdbook::preprocess::PreprocessorContext,\n\n book: mdbook::book::Book,\n\n) -> mdbook::book::Book {\n\n use mdbook::preprocess::Preprocessor;\n\n let link_preprocessor = mdbook::preprocess::LinkPreprocessor;\n\n match link_preprocessor.run(ctx, book) {\n\n Ok(book) => book,\n\n Err(e) => panic!(\"{}\", e),\n\n }\n\n}\n\n\n", "file_path": "src/mdbook/handle.rs", "rank": 33, "score": 63199.54420477752 }, { "content": "pub fn is_test() -> bool {\n\n std::env::args().any(|e| e == \"--test\")\n\n}\n", "file_path": "src/lib.rs", "rank": 34, "score": 62707.7097147289 }, { "content": "pub fn handle_files(\n\n config: &crate::Config,\n\n files: &[crate::FileMode],\n\n) -> crate::Result<Vec<ft_api::bulk_update::Action>> {\n\n let (book_config, mdbook) = {\n\n let book_root = config.root.as_str();\n\n let book_root: std::path::PathBuf = book_root.into();\n\n let config_location = book_root.join(\"book.toml\");\n\n\n\n // the book.json file is no longer used, so we should emit a warning to\n\n // let people know to migrate to book.toml\n\n if book_root.join(\"book.json\").exists() {\n\n eprintln!(\"It appears you are still using book.json for configuration.\");\n\n eprintln!(\"This format is no longer used, so you should migrate to the\");\n\n eprintln!(\"book.toml format.\");\n\n eprintln!(\"Check the user guide for migration information:\");\n\n eprintln!(\"\\thttps://rust-lang.github.io/mdBook/format/config.html\");\n\n }\n\n\n\n let config = if config_location.exists() {\n", "file_path": "src/mdbook/handle.rs", "rank": 35, "score": 62098.84676951119 }, { "content": "pub fn is_test() -> bool {\n\n std::env::args().any(|e| e == \"--test\")\n\n}\n\n\n", "file_path": "realm-client/src/lib.rs", "rank": 36, "score": 59745.77836377107 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn bulk_update(\n\n collection: &str,\n\n current_hash: &str,\n\n new_hash: &str,\n\n repo: &str,\n\n files: Vec<Action>,\n\n auth_code: &str,\n\n platform: String,\n\n client_version: String,\n\n) -> realm_client::Result<()> {\n\n let url = format!(\"/{}/~/bulk-update/\", collection);\n\n\n\n let update = 
BulkUpdateInput {\n\n collection: collection.trim().to_string(),\n\n auth_code: auth_code.trim().to_string(),\n\n current_hash: current_hash.trim().to_string(),\n\n new_hash: new_hash.trim().to_string(),\n\n repo: repo.trim().to_string(),\n\n files,\n\n };\n", "file_path": "ft-api/src/bulk_update.rs", "rank": 37, "score": 59300.91359499996 }, { "content": "pub fn sync_status(\n\n collection: &str,\n\n auth_code: &str,\n\n platform: &str,\n\n client_version: &str,\n\n) -> realm_client::Result<Status> {\n\n realm_client::page(\n\n &format!(\"/{}/~/sync-status/\", collection),\n\n maplit::hashmap! {\"auth_code\" => auth_code, \"platform\" => platform, \"client_version\" => client_version},\n\n Some(\"sync_status\".to_string()),\n\n )\n\n}\n", "file_path": "ft-api/src/sync_status.rs", "rank": 38, "score": 59300.91359499996 }, { "content": "fn to_url_with_query<K, V>(\n\n u: &str,\n\n query: std::collections::HashMap<K, V>,\n\n) -> crate::Result<url::Url>\n\nwhere\n\n K: Into<String> + AsRef<str> + Ord,\n\n V: Into<String> + AsRef<str>,\n\n{\n\n let mut params: Vec<(_, _)> = query.iter().collect();\n\n params.sort_by(|(a, _), (b, _)| a.cmp(b));\n\n\n\n url::Url::parse_with_params(crate::url(u).as_str(), &params)\n\n .map_err(crate::Error::UrlParseError)\n\n}\n\n\n", "file_path": "realm-client/src/page.rs", "rank": 39, "score": 58386.06134008575 }, { "content": " context: p1.name.clone(),\n\n })\n\n }\n\n })\n\n }\n\n}\n\n\n\n#[derive(Debug, Default, Clone)]\n\npub struct Ignored {\n\n pub patterns: Vec<String>,\n\n}\n\n\n\nimpl Ignored {\n\n pub fn from_p1(p1: &ftd::p1::Section) -> ftd::p1::Result<Self> {\n\n Ok(Self {\n\n patterns: if let Some(body) = p1.body.as_ref() {\n\n body.lines()\n\n .into_iter()\n\n .filter(|x| !x.trim().is_empty())\n\n .map(|x| x.to_string())\n", "file_path": "src/config/section.rs", "rank": 40, "score": 57743.10787948722 }, { "content": "}\n\n\n\nimpl FtSync {\n\n pub fn from_p1(p1: &ftd::p1::Section) -> ftd::p1::Result<Self> {\n\n Ok(Self {\n\n mode: p1.header.string(\"mode\")?,\n\n backend: {\n\n let b = p1.header.str(\"backend\")?;\n\n match crate::Backend::from(b) {\n\n Some(v) => v,\n\n None => {\n\n return Err(ftd::p1::Error::InvalidInput {\n\n message: \"invalid backend (allowed: ftd, mdbook, raw)\".to_string(),\n\n context: b.to_string(),\n\n })\n\n }\n\n }\n\n },\n\n root: p1\n\n .header\n", "file_path": "src/config/section.rs", "rank": 41, "score": 57742.92555412772 }, { "content": " ftd::p1::Error::InvalidInput {\n\n message: \"Can not parse index-extra\".to_string(),\n\n context: e.to_string(),\n\n }\n\n })?,\n\n None => {\n\n return Err(ftd::p1::Error::InvalidInput {\n\n message: \"body of index-extra section is empty\".to_string(),\n\n context: \"\".to_string(),\n\n })\n\n }\n\n },\n\n })\n\n }\n\n\n\n pub fn from_meta(p1: &ftd::p1::Section) -> ftd::p1::Result<Self> {\n\n Ok(Self {\n\n body: ftd::Document::new(&vec![ftd::Section::Meta(ftd::Meta::from_p1(p1).map_err(\n\n |e| ftd::p1::Error::InvalidInput {\n\n message: \"Can not parse index-extra\".to_string(),\n\n context: e.to_string(),\n\n },\n\n )?)]),\n\n })\n\n }\n\n}\n", "file_path": "src/config/section.rs", "rank": 42, "score": 57742.59133353875 }, { "content": " .string_optional(\"root\")?\n\n .unwrap_or_else(|| \"\".to_string()), // Empty because it is relative to git root\n\n repo: p1.header.string(\"repo\")?,\n\n collection: p1.header.string(\"collection\")?,\n\n title: p1.header.string_optional(\"title\")?,\n\n preserve_meta: p1.header.bool_with_default(\"preserve-meta\", false)?,\n\n })\n\n 
}\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct IndexExtra {\n\n pub body: ftd::Document,\n\n}\n\n\n\nimpl IndexExtra {\n\n pub fn from_index_extra(p1: &ftd::p1::Section) -> ftd::p1::Result<Self> {\n\n Ok(Self {\n\n body: match p1.body.as_ref() {\n\n Some(b) => ftd::Document::parse(b, \"ft-sync\").map_err(|e| {\n", "file_path": "src/config/section.rs", "rank": 43, "score": 57741.11571745356 }, { "content": "#[derive(Debug)]\n\npub enum Section {\n\n FtSync(FtSync),\n\n Ignored(Ignored),\n\n IndexExtra(IndexExtra),\n\n}\n\n\n\nimpl Section {\n\n pub fn from_p1(p1: &ftd::p1::Section) -> Result<Self, ftd::p1::Error> {\n\n Ok(match p1.name.as_str() {\n\n \"ft-sync\" => Self::FtSync(FtSync::from_p1(p1)?),\n\n \"ignored\" => Self::Ignored(Ignored::from_p1(p1)?),\n\n \"index-extra\" => Self::IndexExtra(IndexExtra::from_index_extra(p1)?),\n\n \"meta\" => Self::IndexExtra(IndexExtra::from_meta(p1)?),\n\n t => {\n\n return Err(ftd::p1::Error::InvalidInput {\n\n message: format!(\n\n \"unknown section {}, allowed sections are: 'ft-sync' and 'ignored'\",\n\n t\n\n ),\n", "file_path": "src/config/section.rs", "rank": 44, "score": 57741.07855201581 }, { "content": " .collect()\n\n } else {\n\n return Err(ftd::p1::Error::InvalidInput {\n\n message: \"body of ignore is empty\".to_string(),\n\n context: \"todo\".to_string(),\n\n });\n\n },\n\n })\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct FtSync {\n\n pub mode: String,\n\n pub backend: crate::Backend,\n\n pub root: String,\n\n pub repo: String,\n\n pub collection: String,\n\n pub title: Option<String>,\n\n pub preserve_meta: bool,\n", "file_path": "src/config/section.rs", "rank": 45, "score": 57739.80256584387 }, { "content": "pub fn page<T, K, V>(\n\n url: &str,\n\n query: std::collections::HashMap<K, V>,\n\n tid: Option<String>,\n\n) -> crate::Result<T>\n\nwhere\n\n T: serde::de::DeserializeOwned,\n\n K: Into<String> + AsRef<str> + Ord,\n\n V: Into<String> + AsRef<str>,\n\n{\n\n let url = to_url_with_query(url, query)?;\n\n\n\n if crate::is_test() {\n\n return crate::mock(tid, serde_json::json! 
({\"url\": url.as_str()}));\n\n }\n\n\n\n crate::handle(crate::client(url.as_str(), reqwest::Method::GET))\n\n}\n", "file_path": "realm-client/src/page.rs", "rank": 46, "score": 55665.34280640101 }, { "content": "pub fn status(config: &crate::Config) -> crate::Result<()> {\n\n let auth_code = match config.auth {\n\n crate::Auth::AuthCode(ref s) => s,\n\n _ => return Ok(()),\n\n };\n\n\n\n let status = ft_api::sync_status(\n\n config.collection.as_str(),\n\n auth_code.as_str(),\n\n &crate::utils::platform()?,\n\n &crate::utils::client_version(),\n\n )?;\n\n\n\n // println!(\"Config: {}\", config_file);\n\n // println!(\"Backend: {}\", config.backend.to_string());\n\n println!(\"Root: {}\", config.root);\n\n println!(\n\n \"Last git synced hash: {}\",\n\n if status.last_synced_hash.is_empty() {\n\n \"Never synced\"\n", "file_path": "src/status.rs", "rank": 47, "score": 54593.907282894594 }, { "content": "pub fn sync(config: &crate::Config, re_sync: bool) -> crate::Result<()> {\n\n let auth_code = match &config.auth {\n\n crate::Auth::AuthCode(s) => s.to_string(),\n\n _ => return Ok(()),\n\n };\n\n\n\n let latest_hash = crate::git::head()?;\n\n\n\n let status = ft_api::sync_status(\n\n config.collection.as_str(),\n\n auth_code.as_str(),\n\n &crate::utils::platform()?,\n\n &crate::utils::client_version(),\n\n )?;\n\n\n\n // Need to handle sync --all\n\n let actions = {\n\n let files = if re_sync || status.last_synced_hash.is_empty() {\n\n crate::git::ls_tree(&latest_hash, config.root.as_str())?\n\n } else {\n", "file_path": "src/sync.rs", "rank": 48, "score": 50280.343314520724 }, { "content": "pub fn import(config: &crate::Config, re_sync: bool) -> crate::Result<()> {\n\n crate::sync(config, re_sync)\n\n}\n", "file_path": "src/import.rs", "rank": 49, "score": 50280.343314520724 }, { "content": "pub fn root_tree(root_dir: &std::path::Path) -> crate::Result<Node> {\n\n fn traverse_tree(root_dir: &std::path::Path) -> crate::Result<Vec<Node>> {\n\n let mut children = vec![];\n\n\n\n for entry in std::fs::read_dir(root_dir)? 
{\n\n let p = entry?.path();\n\n if p.is_dir() {\n\n children.push(Node {\n\n is_dir: true,\n\n path: p.to_string_lossy().to_string(),\n\n children: traverse_tree(&p)?,\n\n });\n\n } else {\n\n children.push(Node {\n\n is_dir: false,\n\n path: p.to_string_lossy().to_string(),\n\n children: vec![],\n\n });\n\n }\n\n }\n", "file_path": "src/traverse.rs", "rank": 50, "score": 49231.07725615907 }, { "content": "pub fn summary_content(src_dir: &std::path::Path) -> mdbook::errors::Result<mdbook::book::Summary> {\n\n use std::io::Read;\n\n let summary_md = src_dir.join(\"SUMMARY.md\");\n\n\n\n let mut summary_content = String::new();\n\n std::fs::File::open(&summary_md)?.read_to_string(&mut summary_content)?;\n\n\n\n let summary = mdbook::book::parse_summary(&summary_content)?;\n\n\n\n // TODO: Will handle it later, this part of code is coming from `mdbook`\n\n // if create_missing {\n\n // create_missing(&src_dir, &summary).with_context(|| \"Unable to create missing chapters\")?;\n\n // }\n\n\n\n Ok(summary)\n\n}\n\n\n", "file_path": "src/mdbook/handle.rs", "rank": 51, "score": 44545.95990119426 }, { "content": "pub fn never_expected<'de, D, T>(_deserializer: D) -> std::result::Result<T, D::Error>\n\nwhere\n\n D: serde::de::Deserializer<'de>,\n\n{\n\n unreachable!(\"must never happen\")\n\n}\n\n\n\n#[derive(Debug, thiserror::Error, serde_derive::Deserialize)]\n\npub enum Error {\n\n #[serde(deserialize_with = \"never_expected\")]\n\n #[error(\"HttpError: {}\", _0)]\n\n HttpError(reqwest::Error),\n\n #[error(\"UnexpectedResponse: {code:?} {body:?}\")]\n\n UnexpectedResponse {\n\n // non 200\n\n body: String,\n\n code: u16,\n\n },\n\n // SpecificError(T),\n\n #[error(\"PageNotFound: {}\", _0)]\n", "file_path": "realm-client/src/types.rs", "rank": 52, "score": 43453.70296428438 }, { "content": "fn deserialize_datetime<'de, D>(deserializer: D) -> Result<chrono::DateTime<chrono::Utc>, D::Error>\n\nwhere\n\n D: serde::de::Deserializer<'de>,\n\n{\n\n use chrono::TimeZone;\n\n\n\n // use our visitor to deserialize an `ActualValue`\n\n let v: i64 = serde::de::Deserialize::deserialize(deserializer)?;\n\n\n\n Ok(chrono::Utc.timestamp_millis(v))\n\n}\n\n\n\n// TODO: define ActionError here and return actual errors that sync status can throw.\n\n\n", "file_path": "ft-api/src/sync_status.rs", "rank": 53, "score": 42912.645658117995 }, { "content": " .output()?;\n\n String::from_utf8(cmd.stdout)?\n\n };\n\n\n\n let files = files.lines();\n\n\n\n Ok(files\n\n .into_iter()\n\n .filter_map(|line: &str| {\n\n let sp = line.split('\\t').collect::<Vec<_>>();\n\n let mode = sp[0].chars().next().unwrap();\n\n let path = sp[1].to_string();\n\n if path.starts_with(root_dir) {\n\n Some(match mode {\n\n 'A' => crate::FileMode::Created(path),\n\n 'M' => crate::FileMode::Modified(path),\n\n 'D' => crate::FileMode::Deleted(path),\n\n _ => panic!(\"file with unknown mode : {}\", line),\n\n })\n\n } else {\n\n None\n\n }\n\n })\n\n .collect::<Vec<_>>())\n\n}\n\n\n", "file_path": "src/git.rs", "rank": 54, "score": 25007.46493017727 }, { "content": " children: vec![super::Node {\n\n is_dir: true,\n\n path: \"docs/a/b/c/d/e\".to_string(),\n\n children: vec![super::Node {\n\n is_dir: false,\n\n path: \"docs/a/b/c/d/e/f.txt\".to_string(),\n\n children: vec![],\n\n }],\n\n }],\n\n },\n\n super::Node {\n\n is_dir: true,\n\n path: \"docs/a/b/c/d/e\".to_string(),\n\n children: vec![super::Node {\n\n is_dir: false,\n\n path: \"docs/a/b/c/d/e/f.txt\".to_string(),\n\n children: vec![],\n\n }],\n\n },\n\n ];\n\n\n\n let test_tree = 
test_node();\n\n let output = super::ancestors(&test_tree, \"docs/a/b/c/d/e/f.txt\");\n\n assert_eq!(tree.iter().collect::<Vec<_>>(), output);\n\n }\n\n}\n", "file_path": "src/traverse.rs", "rank": 55, "score": 25006.859177314705 }, { "content": " children: vec![\n\n Node {\n\n is_dir: true,\n\n path: \"docs/a/b/c/d\".to_string(),\n\n children: vec![Node {\n\n is_dir: true,\n\n path: \"docs/a/b/c/d/e\".to_string(),\n\n children: vec![Node {\n\n is_dir: false,\n\n path: \"docs/a/b/c/d/e/f.txt\".to_string(),\n\n children: vec![],\n\n }],\n\n }],\n\n },\n\n Node {\n\n is_dir: false,\n\n path: \"docs/a/b/c/README.md\".to_string(),\n\n children: vec![],\n\n },\n\n ],\n", "file_path": "src/traverse.rs", "rank": 56, "score": 25006.64513211174 }, { "content": " \"File extension not found: {}\",\n\n self.path().to_string_lossy()\n\n )\n\n })\n\n .to_string_lossy()\n\n .to_string();\n\n\n\n let heading = ftd::Section::Heading(ftd::Heading::new(0, title));\n\n let section = if extension.eq(\"md\") || extension.eq(\"mdx\") {\n\n ftd::Section::Markdown(ftd::Markdown::from_body(self.content()?.as_str()))\n\n } else if extension.eq(\"rst\") {\n\n ftd::Section::Rst(ftd::Rst::from_body(self.content()?.as_str()))\n\n } else {\n\n ftd::Section::Code(\n\n ftd::Code::default()\n\n .with_lang(&extension)\n\n .with_code(self.content()?.as_str()),\n\n )\n\n };\n\n\n", "file_path": "src/types.rs", "rank": 57, "score": 25006.492054490285 }, { "content": " path: \"docs/a/b/c/d/e/f.txt\".to_string(),\n\n children: vec![],\n\n }],\n\n }],\n\n },\n\n super::Node {\n\n is_dir: false,\n\n path: \"docs/a/b/c/README.md\".to_string(),\n\n children: vec![],\n\n },\n\n ],\n\n }],\n\n },\n\n super::Node {\n\n is_dir: true,\n\n path: \"docs/a/b/c\".to_string(),\n\n children: vec![\n\n super::Node {\n\n is_dir: true,\n\n path: \"docs/a/b/c/d\".to_string(),\n", "file_path": "src/traverse.rs", "rank": 58, "score": 25006.412587543884 }, { "content": " is_dir: true,\n\n path: \"docs/a/b/c\".to_string(),\n\n children: vec![\n\n super::Node {\n\n is_dir: true,\n\n path: \"docs/a/b/c/d\".to_string(),\n\n children: vec![super::Node {\n\n is_dir: true,\n\n path: \"docs/a/b/c/d/e\".to_string(),\n\n children: vec![super::Node {\n\n is_dir: false,\n\n path: \"docs/a/b/c/d/e/f.txt\".to_string(),\n\n children: vec![],\n\n }],\n\n }],\n\n },\n\n super::Node {\n\n is_dir: false,\n\n path: \"docs/a/b/c/README.md\".to_string(),\n\n children: vec![],\n", "file_path": "src/traverse.rs", "rank": 59, "score": 25006.294442972532 }, { "content": " children: vec![super::Node {\n\n is_dir: true,\n\n path: \"docs/a/b/c/d/e\".to_string(),\n\n children: vec![super::Node {\n\n is_dir: false,\n\n path: \"docs/a/b/c/d/e/f.txt\".to_string(),\n\n children: vec![],\n\n }],\n\n }],\n\n },\n\n super::Node {\n\n is_dir: false,\n\n path: \"docs/a/b/c/README.md\".to_string(),\n\n children: vec![],\n\n },\n\n ],\n\n },\n\n super::Node {\n\n is_dir: true,\n\n path: \"docs/a/b/c/d\".to_string(),\n", "file_path": "src/traverse.rs", "rank": 60, "score": 25006.28183479797 }, { "content": " Ok(ftd::Document::new(&[heading, section]).convert_to_string())\n\n }\n\n\n\n pub fn raw_content_with_content(&self, title: &str, content: &str) -> String {\n\n let extension = self\n\n .path()\n\n .extension()\n\n .unwrap_or_else(|| {\n\n panic!(\n\n \"File extension not found: {}\",\n\n self.path().to_string_lossy()\n\n )\n\n })\n\n .to_string_lossy()\n\n .to_string();\n\n\n\n let heading = ftd::Section::Heading(ftd::Heading::new(0, title));\n\n let section = if extension.eq(\"md\") || 
extension.eq(\"mdx\") {\n\n ftd::Section::Markdown(ftd::Markdown::from_body(content))\n\n } else if extension.eq(\"rst\") {\n", "file_path": "src/types.rs", "rank": 61, "score": 25006.15098661683 }, { "content": " ftd::Section::Rst(ftd::Rst::from_body(content))\n\n } else {\n\n ftd::Section::Code(\n\n ftd::Code::default()\n\n .with_lang(&extension)\n\n .with_code(content),\n\n )\n\n };\n\n ftd::Document::new(&[heading, section]).convert_to_string()\n\n }\n\n\n\n pub fn path(&self) -> std::path::PathBuf {\n\n std::path::PathBuf::from(match self {\n\n FileMode::Created(v) => v,\n\n FileMode::Deleted(v) => v,\n\n FileMode::Modified(v) => v,\n\n })\n\n }\n\n\n\n pub fn path_str(&self) -> String {\n", "file_path": "src/types.rs", "rank": 62, "score": 25005.956977550777 }, { "content": " - [`b/`](/testuser/index/a/b)\n\n - [`c/`](/testuser/index/a/b/c/README.md)\n\n - [`d/`](/testuser/index/a/b/c/d)\n\n - [`e/`](/testuser/index/a/b/c/d/e)\n\n - [`f.txt`](/testuser/index/a/b/c/d/e/f.txt)\n\n - [`README.md`](/testuser/index/a/b/c/README.md)\n\n\"#\n\n )\n\n }\n\n\n\n #[test]\n\n fn till_dir() {\n\n let tree = vec![\n\n super::Node {\n\n is_dir: true,\n\n path: \"docs/a\".to_string(),\n\n children: vec![super::Node {\n\n is_dir: true,\n\n path: \"docs/a/b\".to_string(),\n\n children: vec![super::Node {\n", "file_path": "src/traverse.rs", "rank": 63, "score": 25005.616714162275 }, { "content": " };\n\n if t == \"index\" {\n\n Ok(collection.to_string())\n\n } else {\n\n Ok(collection.to_string() + \"/\" + t.as_str())\n\n }\n\n }\n\n\n\n pub fn id_with_extension(&self, root_dir: &str, collection: &str) -> Result<String> {\n\n let t = match self.path().strip_prefix(root_dir) {\n\n Ok(path) => path.to_string_lossy().to_string(),\n\n Err(e) => {\n\n let m = format!(\n\n \"File path does not start with root dir: {}, root_dir: {} err: {}\",\n\n self.path().to_string_lossy(),\n\n root_dir,\n\n e.to_string()\n\n );\n\n return Err(crate::error::Error::IDError(m));\n\n }\n", "file_path": "src/types.rs", "rank": 64, "score": 25005.571888162416 }, { "content": " ftd::Section::Markdown(ftd::Markdown::from_body(\n\n &readme_content.unwrap_or_else(|| \"\".to_string()),\n\n )),\n\n ftd::Section::ToC(tree.to_ftd_toc(config.root.as_str(), config.collection.as_str())),\n\n ];\n\n\n\n content.extend_from_slice(&config.index_extra);\n\n\n\n println!(\"Updated: {}\", config.collection.as_str());\n\n Ok(ft_api::bulk_update::Action::Updated {\n\n preserve_meta: config.preserve_meta,\n\n id: config.collection.to_string(),\n\n content: ftd::Document::new(&content).convert_to_string(),\n\n })\n\n}\n\n\n", "file_path": "src/raw.rs", "rank": 65, "score": 25005.25760338936 }, { "content": "#[derive(Debug)]\n\npub enum FileMode {\n\n Deleted(String),\n\n Created(String),\n\n Modified(String),\n\n}\n\n\n\nimpl FileMode {\n\n pub fn id(&self, root_dir: &str, collection: &str) -> Result<String> {\n\n let t = match self.path().strip_prefix(root_dir) {\n\n Ok(path) => path.with_extension(\"\").to_string_lossy().to_string(),\n\n Err(e) => {\n\n let m = format!(\n\n \"File path does not start with root dir: {}, root_dir: {} err: {}\",\n\n self.path().to_string_lossy(),\n\n root_dir,\n\n e.to_string()\n\n );\n\n return Err(crate::error::Error::IDError(m));\n\n }\n", "file_path": "src/types.rs", "rank": 66, "score": 25004.94361637133 }, { "content": " },\n\n ],\n\n }],\n\n }],\n\n },\n\n super::Node {\n\n is_dir: true,\n\n path: \"docs/a/b\".to_string(),\n\n children: vec![super::Node {\n\n is_dir: true,\n\n path: 
\"docs/a/b/c\".to_string(),\n\n children: vec![\n\n super::Node {\n\n is_dir: true,\n\n path: \"docs/a/b/c/d\".to_string(),\n\n children: vec![super::Node {\n\n is_dir: true,\n\n path: \"docs/a/b/c/d/e\".to_string(),\n\n children: vec![super::Node {\n\n is_dir: false,\n", "file_path": "src/traverse.rs", "rank": 67, "score": 25004.663432934907 }, { "content": " self.path().to_string_lossy().to_string()\n\n }\n\n\n\n pub fn extension(&self) -> String {\n\n self.path()\n\n .extension()\n\n .and_then(|v| v.to_str())\n\n .unwrap_or(\"\")\n\n .to_lowercase()\n\n }\n\n}\n", "file_path": "src/types.rs", "rank": 68, "score": 25004.65638446202 }, { "content": "const RAW_EXTENSIONS: [&str; 4] = [\"txt\", \"md\", \"mdx\", \"rst\"];\n\n\n", "file_path": "src/raw.rs", "rank": 69, "score": 25004.6443839338 }, { "content": " markdown.push_str(&format!(\n\n \"{: >width$}- [`{file_name}/`](/{path})\\n\",\n\n \"\",\n\n width = level,\n\n file_name = file_name,\n\n path = path.to_string_lossy()\n\n ));\n\n } else {\n\n markdown.push_str(&format!(\n\n \"{: >width$}- [`{file_name}`](/{path})\\n\",\n\n \"\",\n\n width = level,\n\n file_name = file_name,\n\n path = path.to_string_lossy()\n\n ));\n\n }\n\n if x.is_dir {\n\n tree_to_markdown_util(&x, level + 2, markdown, root_dir, collection_id);\n\n }\n\n }\n\n }\n\n let mut markdown = \"-- markdown:\\n\\n\".to_string();\n\n tree_to_markdown_util(node, 0, &mut markdown, root_dir, collection_id);\n\n markdown\n\n}\n\n\n", "file_path": "src/traverse.rs", "rank": 70, "score": 25004.627182108132 }, { "content": " }\n\n\n\n toc_string.push_str(&format!(\n\n \"{: >width$}- {path}\\n\",\n\n \"\",\n\n width = level,\n\n path = path.to_string_lossy()\n\n ));\n\n if x.is_dir {\n\n toc_string.push_str(&format!(\n\n \"{: >width$}`{path}/`\\n\",\n\n \"\",\n\n width = level + 2,\n\n path = file_name\n\n ));\n\n } else {\n\n toc_string.push_str(&format!(\n\n \"{: >width$}`{path}`\\n\",\n\n \"\",\n\n width = level + 2,\n", "file_path": "src/traverse.rs", "rank": 71, "score": 25004.542897037507 }, { "content": " };\n\n\n\n if t == \"index\" {\n\n Ok(collection.to_string())\n\n } else {\n\n Ok(collection.to_string() + \"/\" + t.as_str())\n\n }\n\n }\n\n\n\n pub fn content(&self) -> crate::Result<String> {\n\n std::fs::read_to_string(self.path())\n\n .map_err(|e| crate::Error::ReadError(e, self.path_str()))\n\n }\n\n\n\n pub fn raw_content(&self, title: &str) -> crate::Result<String> {\n\n let extension = self\n\n .path()\n\n .extension()\n\n .unwrap_or_else(|| {\n\n panic!(\n", "file_path": "src/types.rs", "rank": 72, "score": 25004.517737941227 }, { "content": " })\n\n .map(|x| x.path.to_string())\n\n }\n\n\n\n pub fn readme_exists(&self) -> bool {\n\n self.readme().is_some()\n\n }\n\n\n\n pub fn document_id(&self, root: &str, collection: &str) -> std::path::PathBuf {\n\n crate::id::to_document_id(&self.path, root, collection)\n\n }\n\n\n\n pub fn to_markdown(&self, root: &str, collection: &str) -> String {\n\n self::to_markdown(self, root, collection)\n\n }\n\n\n\n pub fn collection_toc(&self, root: &str, collection: &str) -> String {\n\n self::collection_toc(self, root, collection)\n\n }\n\n\n\n pub fn to_ftd_toc(&self, root: &str, collection: &str) -> ftd::ToC {\n\n self::to_ftd_toc(self, root, collection)\n\n }\n\n}\n\n\n", "file_path": "src/traverse.rs", "rank": 73, "score": 25004.24608375089 }, { "content": " dirs.reverse();\n\n dirs\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::Node;\n\n fn test_node() -> super::Node {\n\n Node {\n\n is_dir: true,\n\n path: 
\"docs\".to_string(),\n\n children: vec![Node {\n\n is_dir: true,\n\n path: \"docs/a\".to_string(),\n\n children: vec![Node {\n\n is_dir: true,\n\n path: \"docs/a/b\".to_string(),\n\n children: vec![Node {\n\n is_dir: true,\n\n path: \"docs/a/b/c\".to_string(),\n", "file_path": "src/traverse.rs", "rank": 74, "score": 25004.16753668274 }, { "content": " path = file_name\n\n ));\n\n }\n\n\n\n if x.is_dir {\n\n tree_to_toc_util(&x, level + 2, toc_string, root_dir, collection_id);\n\n }\n\n }\n\n }\n\n\n\n let mut toc = \"-- toc:\\n\\n\".to_string();\n\n tree_to_toc_util(node, 0, &mut toc, root_dir, collection_id);\n\n toc\n\n}\n\n\n", "file_path": "src/traverse.rs", "rank": 75, "score": 25003.842143430556 }, { "content": "\n\n match ft_cli::import(&config, true) {\n\n Ok(()) => {}\n\n Err(e) => println!(\"{}\", e.to_string()),\n\n }\n\n }\n\n (_, _) => todo!(\"impossible!\"),\n\n };\n\n}\n", "file_path": "src/main.rs", "rank": 76, "score": 25003.656978021456 }, { "content": " println!(\"Created: {}\", id.as_str());\n\n vec![ft_api::bulk_update::Action::Added {\n\n id,\n\n preserve_meta,\n\n content: file.content()?,\n\n }]\n\n }\n\n\n\n crate::types::FileMode::Modified(_) => {\n\n println!(\"Updated: {}\", id.as_str());\n\n vec![ft_api::bulk_update::Action::Updated {\n\n id,\n\n preserve_meta,\n\n content: file.content()?,\n\n }]\n\n }\n\n\n\n crate::types::FileMode::Deleted(_) => {\n\n println!(\"Deleted: {}\", id.as_str());\n\n vec![ft_api::bulk_update::Action::Deleted { id }]\n\n }\n\n })\n\n}\n", "file_path": "src/ftd.rs", "rank": 77, "score": 25003.62115353467 }, { "content": " Raw,\n\n MdBook,\n\n}\n\n\n\nimpl Backend {\n\n pub fn from(s: &str) -> Option<Backend> {\n\n match s {\n\n \"ftd\" => Some(Backend::FTD),\n\n \"raw\" => Some(Backend::Raw),\n\n \"mdbook\" => Some(Backend::MdBook),\n\n _ => None,\n\n }\n\n }\n\n\n\n pub fn is_raw(&self) -> bool {\n\n matches!(self, Backend::Raw)\n\n }\n\n\n\n pub fn is_mdbook(&self) -> bool {\n\n matches!(self, Backend::MdBook)\n", "file_path": "src/types.rs", "rank": 78, "score": 25003.29718873976 }, { "content": " `d/`\n\n - testuser/index/a/b/c/d/e\n\n `e/`\n\n - testuser/index/a/b/c/d/e/f.txt\n\n `f.txt`\n\n - testuser/index/a/b/c/README.md\n\n `README.md`\n\n\"#\n\n .to_string()\n\n )\n\n }\n\n\n\n #[test]\n\n fn to_markdown() {\n\n let node = test_node();\n\n assert_eq!(\n\n super::to_markdown(&node, \"docs\", \"testuser/index\"),\n\n r#\"-- markdown:\n\n\n\n- [`a/`](/testuser/index/a)\n", "file_path": "src/traverse.rs", "rank": 79, "score": 25003.107160168634 }, { "content": "pub(crate) fn platform() -> crate::Result<String> {\n\n if crate::is_test() {\n\n return Ok(\"test-platform\".to_string());\n\n }\n\n\n\n let output = match std::process::Command::new(\"uname\").arg(\"-a\").output() {\n\n Ok(o) => o,\n\n Err(e) => {\n\n return Err(crate::Error::UnknownError(format!(\n\n \"failed to run uname: {:?}\",\n\n e\n\n )))\n\n }\n\n };\n\n\n\n Ok(String::from_utf8(output.stdout)\n\n .unwrap_or_else(|_| \"unknown platform\".to_string())\n\n .trim()\n\n .to_string())\n\n}\n\n\n\npub(crate) fn client_version() -> String {\n\n if crate::is_test() {\n\n return \"test-version\".to_string();\n\n }\n\n env!(\"CARGO_PKG_VERSION\").to_string()\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 80, "score": 25003.096899283708 }, { "content": " crate::types::FileMode::Created(ref file_path) => {\n\n println!(\"Created: {}\", id.as_str());\n\n let mut actions = ancestors(tree, file_path, root, collection, preserve_meta);\n\n 
actions.push(ft_api::bulk_update::Action::Added {\n\n content: file.raw_content(&format!(\"`{}`\", id))?,\n\n id,\n\n preserve_meta,\n\n });\n\n actions\n\n }\n\n\n\n crate::types::FileMode::Modified(_) => {\n\n println!(\"Updated: {}\", id.as_str());\n\n vec![ft_api::bulk_update::Action::Updated {\n\n content: file.raw_content(&format!(\"`{}`\", id))?,\n\n id,\n\n preserve_meta,\n\n }]\n\n }\n\n\n\n crate::types::FileMode::Deleted(ref file_path) => {\n\n println!(\"Deleted: {}\", id.as_str());\n\n let mut actions = ancestors(tree, file_path, root, collection, preserve_meta);\n\n actions.push(ft_api::bulk_update::Action::Deleted { id });\n\n actions\n\n }\n\n })\n\n}\n\n\n", "file_path": "src/raw.rs", "rank": 81, "score": 25002.907339743713 }, { "content": "#[derive(Debug, PartialEq, Ord, Eq, PartialOrd)]\n\npub struct Node {\n\n pub is_dir: bool,\n\n pub path: String,\n\n pub children: Vec<Node>,\n\n}\n\n\n\nimpl Node {\n\n pub fn readme(&self) -> Option<String> {\n\n if !self.is_dir {\n\n return None;\n\n }\n\n self.children\n\n .iter()\n\n .filter(|c| !c.is_dir)\n\n .find(|c| {\n\n let p = std::path::PathBuf::from(&self.path).join(\"readme\");\n\n c.path\n\n .to_lowercase()\n\n .starts_with(&p.to_string_lossy().to_string())\n", "file_path": "src/traverse.rs", "rank": 82, "score": 25002.855869991414 }, { "content": "\n\n match cmd.subcommand() {\n\n (\"status\", _) => {\n\n let config = ft_cli::Config::from_file(\"ft-sync.p1\").expect(\"failed to read config\");\n\n match ft_cli::status(&config) {\n\n Ok(()) => {}\n\n Err(e) => println!(\"{}\", e.to_string()),\n\n }\n\n }\n\n (\"sync\", Some(args)) => {\n\n let config = ft_cli::Config::from_file(\"ft-sync.p1\").expect(\"failed to read config\");\n\n let re_sync = args.args.get(\"all\").is_some();\n\n match ft_cli::sync(&config, re_sync) {\n\n Ok(()) => {}\n\n Err(e) => println!(\"{}\", e.to_string()),\n\n }\n\n }\n\n\n\n (\"import\", Some(args)) => {\n\n let config = ft_cli::Config::from_args(args);\n", "file_path": "src/main.rs", "rank": 83, "score": 25002.720852501967 }, { "content": " crate::git::changed_files(&status.last_synced_hash, &latest_hash, config.root.as_str())?\n\n };\n\n\n\n match config.backend {\n\n crate::Backend::FTD => crate::ftd::handle_files(config, &files)?,\n\n crate::Backend::Raw => crate::raw::handle_files(config, &files)?,\n\n crate::Backend::MdBook => crate::mdbook::handle_files(config, &files)?,\n\n }\n\n };\n\n\n\n // println!(\"{:#?}\", actions);\n\n\n\n let st = std::time::Instant::now();\n\n\n\n ft_api::bulk_update(\n\n config.collection.as_str(),\n\n status.last_synced_hash.as_str(),\n\n latest_hash.as_str(),\n\n config.repo.as_str(),\n\n actions,\n", "file_path": "src/sync.rs", "rank": 84, "score": 25002.539682213075 }, { "content": " }\n\n}\n\n\n\nimpl std::fmt::Display for Backend {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n match self {\n\n Backend::FTD => write!(f, \"ftd\"),\n\n Backend::Raw => write!(f, \"raw\"),\n\n Backend::MdBook => write!(f, \"mdbook\"),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum SyncMode {\n\n LocalToRemote,\n\n RemoteToLocal,\n\n TwoWay,\n\n}\n\n\n", "file_path": "src/types.rs", "rank": 85, "score": 25001.918950002528 }, { "content": " #[error(\"api status code: {}\", _0)]\n\n APIResponseNotOk(String),\n\n\n\n #[error(\"DeserializeError: {}\", _0)]\n\n DeserializeError(String),\n\n\n\n #[error(\"ResponseError: {}\", _0)]\n\n ResponseError(String),\n\n\n\n #[error(\"UnknownError: {}\", _0)]\n\n UnknownError(String),\n\n\n\n 
#[error(\"IDError: {}\", _0)]\n\n IDError(String),\n\n}\n\n\n\nimpl From<realm_client::Error> for Error {\n\n fn from(e: realm_client::Error) -> Self {\n\n Error::RealmError { error: e }\n\n }\n", "file_path": "src/error.rs", "rank": 86, "score": 25001.719816168552 }, { "content": "pub type Result<T> = std::result::Result<T, crate::Error>;\n\n\n\n#[derive(Debug)]\n\npub enum Auth {\n\n SignedIn(User),\n\n AuthCode(String),\n\n Anonymous,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct User {\n\n pub cookie: String,\n\n pub username: String,\n\n pub name: String,\n\n}\n\n\n\n#[derive(Debug)]\n\n#[allow(clippy::upper_case_acronyms)]\n\npub enum Backend {\n\n FTD,\n", "file_path": "src/types.rs", "rank": 87, "score": 25001.561330178178 }, { "content": " children.sort();\n\n\n\n Ok(children)\n\n }\n\n\n\n let root = Node {\n\n is_dir: true,\n\n path: root_dir.to_string_lossy().to_string(),\n\n children: traverse_tree(root_dir)?,\n\n };\n\n Ok(root)\n\n}\n\n\n", "file_path": "src/traverse.rs", "rank": 88, "score": 25001.433821429906 }, { "content": "}\n\n\n\nimpl From<ftd::p1::Error> for Error {\n\n fn from(e: ftd::p1::Error) -> Self {\n\n Error::ConfigFileFTDError { error: e }\n\n }\n\n}\n\n\n\nimpl From<std::io::Error> for Error {\n\n fn from(e: std::io::Error) -> Self {\n\n Error::IOError { error: e }\n\n }\n\n}\n\n\n\nimpl From<std::string::FromUtf8Error> for Error {\n\n fn from(e: std::string::FromUtf8Error) -> Self {\n\n Error::Utf8Error { error: e }\n\n }\n\n}\n", "file_path": "src/error.rs", "rank": 89, "score": 25000.806139003784 }, { "content": "#[derive(thiserror::Error, Debug)]\n\npub enum Error {\n\n #[error(\"cannot parse config file {error:?}\")]\n\n ConfigFileFTDError { error: ftd::p1::Error },\n\n\n\n #[error(\"cannot parse config file {error:?}\")]\n\n ConfigFileParseError { error: String },\n\n\n\n #[error(\"RealmError: {error:?}\")]\n\n RealmError { error: realm_client::Error },\n\n\n\n #[error(\"Utf8Error: {error:?}\")]\n\n Utf8Error { error: std::string::FromUtf8Error },\n\n\n\n #[error(\"IOError: {error:?}\")]\n\n IOError { error: std::io::Error },\n\n\n\n #[error(\"cannot read file: {}, {}\", _0, _1)]\n\n ReadError(std::io::Error, String),\n\n\n", "file_path": "src/error.rs", "rank": 90, "score": 25000.77405553374 }, { "content": " auth_code.as_str(),\n\n crate::utils::platform()?,\n\n crate::utils::client_version(),\n\n )?;\n\n\n\n if crate::is_test() {\n\n println!(\"Synced successfully.\");\n\n } else {\n\n println!(\"Synced successfully: {}.\", crate::utils::elapsed(st));\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "src/sync.rs", "rank": 91, "score": 25000.74713789722 }, { "content": " .required(true),\n\n )\n\n .arg(\n\n clap::Arg::with_name(\"root\")\n\n .long(\"root\")\n\n .allow_hyphen_values(true)\n\n .takes_value(true)\n\n .required(false)\n\n .default_value(\"\"),\n\n )\n\n .arg(\n\n clap::Arg::with_name(\"backend\")\n\n .long(\"backend\")\n\n .allow_hyphen_values(true)\n\n .takes_value(true)\n\n .required(false)\n\n .default_value(\"\"),\n\n ),\n\n )\n\n .get_matches();\n", "file_path": "src/main.rs", "rank": 92, "score": 25000.278574940086 }, { "content": " } else {\n\n status.last_synced_hash.as_str()\n\n }\n\n );\n\n\n\n if crate::is_test() {\n\n // we fix the timezone to IST in test mode so on github etc we get consistent output\n\n // let local: chrono::DateTime<chrono_tz::Asia::Kolkata> =\n\n // chrono::DateTime::from(status.last_updated_on);\n\n let last_updated_on_in_ist = status\n\n .last_updated_on\n\n .with_timezone(&chrono_tz::Asia::Kolkata);\n\n 
println!(\"Last synced on: {:?}\", last_updated_on_in_ist);\n\n } else {\n\n let local: chrono::DateTime<chrono::Local> = chrono::DateTime::from(status.last_updated_on);\n\n println!(\"Last synced on: {:?}\", local);\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "src/status.rs", "rank": 93, "score": 24999.87390133955 }, { "content": "\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n #[test]\n\n fn fbt() {\n\n if fbt_lib::main().is_some() {\n\n panic!(\"test failed\")\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 94, "score": 24998.783355869557 }, { "content": "mod config;\n\npub mod error;\n\nmod ftd;\n\npub mod git;\n\nmod id;\n\nmod import;\n\nmod mdbook;\n\nmod raw;\n\npub mod status;\n\npub mod sync;\n\npub mod traverse;\n\npub mod types;\n\nmod utils;\n\n\n\npub use crate::config::Config;\n\npub use error::Error;\n\npub use import::import;\n\npub use status::status;\n\npub use sync::sync;\n\npub use types::{Auth, Backend, FileMode, Result, SyncMode, User};\n", "file_path": "src/lib.rs", "rank": 95, "score": 24998.783355869557 }, { "content": " .short(\"a\")\n\n .allow_hyphen_values(true)\n\n .help(\"re-sync all document\"),\n\n ),\n\n )\n\n .subcommand(\n\n clap::SubCommand::with_name(\"import\")\n\n .about(\"import book\")\n\n .arg(\n\n clap::Arg::with_name(\"repo\")\n\n .long(\"repo\")\n\n .allow_hyphen_values(true)\n\n .takes_value(true)\n\n .required(true),\n\n )\n\n .arg(\n\n clap::Arg::with_name(\"collection\")\n\n .long(\"collection\")\n\n .allow_hyphen_values(true)\n\n .takes_value(true)\n", "file_path": "src/main.rs", "rank": 96, "score": 24998.783355869557 }, { "content": " }],\n\n }],\n\n }],\n\n }\n\n }\n\n\n\n #[test]\n\n fn collection_toc_test() {\n\n let node = test_node();\n\n assert_eq!(\n\n super::collection_toc(&node, \"docs\", \"testuser/index\"),\n\n r#\"-- toc:\n\n\n\n- testuser/index/a\n\n `a/`\n\n - testuser/index/a/b\n\n `b/`\n\n - testuser/index/a/b/c/README.md\n\n `c/`\n\n - testuser/index/a/b/c/d\n", "file_path": "src/traverse.rs", "rank": 97, "score": 24998.783355869557 }, { "content": "impl Config {\n\n pub fn from_file(file_path: &str) -> crate::Result<Self> {\n\n use std::fs;\n\n let contents = fs::read_to_string(file_path)\n\n .map_err(|v| crate::Error::ReadError(v, file_path.to_string()))?;\n\n Self::parse(contents.as_str(), file_path)\n\n }\n\n\n\n pub fn parse(content: &str, file_path: &str) -> crate::Result<Self> {\n\n let p1 = ftd::p1::parse(content)?;\n\n let mut ft_sync: Option<section::FtSync> = None;\n\n let mut ignored: Vec<section::Ignored> = vec![];\n\n let mut index_extra: Option<section::IndexExtra> = None;\n\n for section in p1 {\n\n let s = section::Section::from_p1(&section)?;\n\n match s {\n\n section::Section::FtSync(sec) => {\n\n if ft_sync.is_none() {\n\n ft_sync = Some(sec)\n\n } else {\n", "file_path": "src/config/mod.rs", "rank": 98, "score": 24219.828489130377 }, { "content": " );\n\n\n\n // println!(\"{:#?}\", summary);\n\n // println!(\"{:#?}\", book);\n\n\n\n let actions = {\n\n let mut actions = vec![];\n\n for file in files.iter() {\n\n actions.append(&mut self::handle(\n\n &summary,\n\n &book,\n\n config,\n\n &book_config,\n\n &file,\n\n &src_dir.to_string_lossy(),\n\n config.collection.as_str(),\n\n )?);\n\n }\n\n actions\n\n };\n\n\n\n Ok(actions)\n\n}\n\n\n", "file_path": "src/mdbook/handle.rs", "rank": 99, "score": 24216.372925371634 } ]
Rust
path_tracing/src/utilities.rs
sansumbrella/path-tracer-rs
9de8797bfa957b82393d1757b84c24357ddc0f02
use super::vector::*;
use rand::distributions::{Distribution, UnitSphereSurface};
use rand::prelude::*;
use std::borrow::Borrow;
use std::ops::{Add, Mul, Sub};

pub fn mix<T, U>(a: T, b: T, t: U) -> T
where
    T: Copy + Add<T, Output = T> + Sub<T, Output = T> + Mul<U, Output = T>,
{
    a + (b - a) * t
}

pub fn random_in_unit_sphere() -> Vec3 {
    let mut rng = rand::thread_rng();
    let sphere = UnitSphereSurface::new();
    Vec3(sphere.sample(&mut rng))
}

pub fn random_in_unit_disk() -> [f64; 2] {
    let mut rng = rand::thread_rng();
    loop {
        let p = Vec3::new(
            2.0 * rng.gen::<f64>() - 1.0,
            2.0 * rng.gen::<f64>() - 1.0,
            0.0,
        );
        if p.length_squared() < 1.0 {
            return [p.0[0], p.0[1]];
        }
    }
}

pub fn rand() -> f64 {
    let mut rng = rand::thread_rng();
    rng.gen::<f64>()
}

pub fn normalize<T>(vector: T) -> Vec3
where
    T: Borrow<Vec3>,
{
    let vector = vector.borrow();
    let &[x, y, z] = &vector.0;
    Vec3::new(x, y, z) / vector.length()
}

pub fn dot<T, U>(a: T, b: U) -> f64
where
    T: Borrow<Vec3>,
    U: Borrow<Vec3>,
{
    let &[ax, ay, az] = &a.borrow().0;
    let &[bx, by, bz] = &b.borrow().0;
    ax * bx + ay * by + az * bz
}

pub fn cross<T, U>(a: T, b: U) -> Vec3
where
    T: Borrow<Vec3>,
    U: Borrow<Vec3>,
{
    let &[ax, ay, az] = &a.borrow().0;
    let &[bx, by, bz] = &b.borrow().0;
    Vec3::new(ay * bz - az * by, -(ax * bz - az * bx), ax * by - ay * bx)
}

pub fn reflect<T, U>(vector: T, normal: U) -> Vec3
where
    T: Borrow<Vec3>,
    U: Borrow<Vec3>,
{
    let vector = vector.borrow();
    let normal = normal.borrow();
    vector - &(normal * 2.0 * dot(vector, normal))
}

pub fn refract<T, U>(vector: T, normal: U, ni_over_nt: f64) -> Option<Vec3>
where
    T: Borrow<Vec3>,
    U: Borrow<Vec3>,
{
    let vector = vector.borrow();
    let normal = normal.borrow();
    let vector = normalize(vector);
    let dt = dot(&vector, normal);
    let discriminant = 1.0 - ni_over_nt * ni_over_nt * (1.0 - dt * dt);
    if discriminant > 0.0 {
        let refracted = (vector - normal * dt) * ni_over_nt - normal * f64::sqrt(discriminant);
        return Some(refracted);
    }
    None
}

pub fn schlick(cosine: f64, refractive_index: f64) -> f64 {
    let r0 = (1.0 - refractive_index) / (1.0 + refractive_index);
    let r0 = r0 * r0;
    r0 + (1.0 - r0) * f64::powi(1.0 - cosine, 5)
}

#[cfg(test)]
mod tests {
    use super::*;
    use approx::assert_relative_eq;

    #[test]
    fn mixing_built_in_types() {
        assert_eq!(mix(1.0, 2.0, 0.5), 1.5);
        assert_eq!(mix(1.0, -1.0, 0.5), 0.0);
        assert_eq!(mix(10 as f32, 20 as f32, 0.5) as i32, 15);
    }

    #[test]
    fn mixing_vector_types() {
        assert_eq!(
            mix(Vec3::new(0.0, 1.0, 2.0), Vec3::new(2.0, 1.0, 0.0), 0.5),
            Vec3::new(1.0, 1.0, 1.0)
        );
    }

    #[test]
    fn sphere_random() {
        assert_relative_eq!(random_in_unit_sphere().length_squared(), 1.0);
    }

    #[test]
    fn trigonometry_functions() {
        assert_relative_eq!(f64::sin(std::f64::consts::PI), 0.0);
    }

    #[test]
    fn dot_product() {
        let a = Vec3::new(1.0, 0.0, 0.0);
        let b = Vec3::new(0.5, 0.5, 0.5);
        assert_eq!(
            dot(&a, &b),
            0.5,
            "Dot product returns a scalar measuring similarity of two vectors"
        );
        assert_eq!(
            dot(&Vec3::new(0.0, -1.0, 0.0), &Vec3::new(0.0, 1.0, 0.0)),
            -1.0,
            "Dot product returns a scalar measuring similarity of two vectors"
        );
    }

    #[test]
    fn cross_product() {
        let x = Vec3::new(1.0, 0.0, 0.0);
        let y = Vec3::new(0.0, 1.0, 0.0);
        let z = Vec3::new(0.0, 0.0, 1.0);
        assert_eq!(
            cross(&x, &y),
            z,
            "Cross product returns a vector orthogonal to both inputs"
        );
        assert_eq!(
            cross(&y, &x),
            Vec3::new(0.0, 0.0, -1.0),
            "Cross product is not commutative"
        );
        assert_eq!(
            cross(&y, &z),
            x,
            "Cross product returns a vector orthogonal to both inputs"
        );
        assert_eq!(
            cross(&z, &x),
            y,
            "Cross product returns a vector orthogonal to both inputs"
        );
    }

    #[test]
    fn normalize_vectors() {
        let b = Vec3::new(1.0, 1.0, 0.0);
        normalize(b);
        normalize(&b);
    }
}
use super::vector::*;
use rand::distributions::{Distribution, UnitSphereSurface};
use rand::prelude::*;
use std::borrow::Borrow;
use std::ops::{Add, Mul, Sub};

pub fn mix<T, U>(a: T, b: T, t: U) -> T
where
    T: Copy + Add<T, Output = T> + Sub<T, Output = T> + Mul<U, Output = T>,
{
    a + (b - a) * t
}

pub fn random_in_unit_sphere() -> Vec3 {
    let mut rng = rand::thread_rng();
    let sphere = UnitSphereSurface::new();
    Vec3(sphere.sample(&mut rng))
}

pub fn random_in_unit_disk() -> [f64; 2] {
    let mut rng = rand::thread_rng();
    loop {
        let p = Vec3::new(
            2.0 * rng.gen::<f64>() - 1.0,
            2.0 * rng.gen::<f64>() - 1.0,
            0.0,
        );
        if p.length_squared() < 1.0 {
            return [p.0[0], p.0[1]];
        }
    }
}

pub fn rand() -> f64 {
    let mut rng = rand::thread_rng();
    rng.gen::<f64>()
}

pub fn normalize<T>(vector: T) -> Vec3
where
    T: Borrow<Vec3>,
{
    let vector = vector.borrow();
    let &[x, y, z] = &vector.0;
    Vec3::new(x, y, z) / vector.length()
}
pub fn cross<T, U>(a: T, b: U) -> Vec3
where
    T: Borrow<Vec3>,
    U: Borrow<Vec3>,
{
    let &[ax, ay, az] = &a.borrow().0;
    let &[bx, by, bz] = &b.borrow().0;
    Vec3::new(ay * bz - az * by, -(ax * bz - az * bx), ax * by - ay * bx)
}

pub fn reflect<T, U>(vector: T, normal: U) -> Vec3
where
    T: Borrow<Vec3>,
    U: Borrow<Vec3>,
{
    let vector = vector.borrow();
    let normal = normal.borrow();
    vector - &(normal * 2.0 * dot(vector, normal))
}

pub fn refract<T, U>(vector: T, normal: U, ni_over_nt: f64) -> Option<Vec3>
where
    T: Borrow<Vec3>,
    U: Borrow<Vec3>,
{
    let vector = vector.borrow();
    let normal = normal.borrow();
    let vector = normalize(vector);
    let dt = dot(&vector, normal);
    let discriminant = 1.0 - ni_over_nt * ni_over_nt * (1.0 - dt * dt);
    if discriminant > 0.0 {
        let refracted = (vector - normal * dt) * ni_over_nt - normal * f64::sqrt(discriminant);
        return Some(refracted);
    }
    None
}

pub fn schlick(cosine: f64, refractive_index: f64) -> f64 {
    let r0 = (1.0 - refractive_index) / (1.0 + refractive_index);
    let r0 = r0 * r0;
    r0 + (1.0 - r0) * f64::powi(1.0 - cosine, 5)
}

#[cfg(test)]
mod tests {
    use super::*;
    use approx::assert_relative_eq;

    #[test]
    fn mixing_built_in_types() {
        assert_eq!(mix(1.0, 2.0, 0.5), 1.5);
        assert_eq!(mix(1.0, -1.0, 0.5), 0.0);
        assert_eq!(mix(10 as f32, 20 as f32, 0.5) as i32, 15);
    }

    #[test]
    fn mixing_vector_types() {
        assert_eq!(
            mix(Vec3::new(0.0, 1.0, 2.0), Vec3::new(2.0, 1.0, 0.0), 0.5),
            Vec3::new(1.0, 1.0, 1.0)
        );
    }

    #[test]
    fn sphere_random() {
        assert_relative_eq!(random_in_unit_sphere().length_squared(), 1.0);
    }

    #[test]
    fn trigonometry_functions() {
        assert_relative_eq!(f64::sin(std::f64::consts::PI), 0.0);
    }

    #[test]
    fn dot_product() {
        let a = Vec3::new(1.0, 0.0, 0.0);
        let b = Vec3::new(0.5, 0.5, 0.5);
        assert_eq!(
            dot(&a, &b),
            0.5,
            "Dot product returns a scalar measuring similarity of two vectors"
        );
        assert_eq!(
            dot(&Vec3::new(0.0, -1.0, 0.0), &Vec3::new(0.0, 1.0, 0.0)),
            -1.0,
            "Dot product returns a scalar measuring similarity of two vectors"
        );
    }

    #[test]
    fn cross_product() {
        let x = Vec3::new(1.0, 0.0, 0.0);
        let y = Vec3::new(0.0, 1.0, 0.0);
        let z = Vec3::new(0.0, 0.0, 1.0);
        assert_eq!(
            cross(&x, &y),
            z,
            "Cross product returns a vector orthogonal to both inputs"
        );
        assert_eq!(
            cross(&y, &x),
            Vec3::new(0.0, 0.0, -1.0),
            "Cross product is not commutative"
        );
        assert_eq!(
            cross(&y, &z),
            x,
            "Cross product returns a vector orthogonal to both inputs"
        );
        assert_eq!(
            cross(&z, &x),
            y,
            "Cross product returns a vector orthogonal to both inputs"
        );
    }

    #[test]
    fn normalize_vectors() {
        let b = Vec3::new(1.0, 1.0, 0.0);
        normalize(b);
        normalize(&b);
    }
}
pub fn dot<T, U>(a: T, b: U) -> f64
where
    T: Borrow<Vec3>,
    U: Borrow<Vec3>,
{
    let &[ax, ay, az] = &a.borrow().0;
    let &[bx, by, bz] = &b.borrow().0;
    ax * bx + ay * by + az * bz
}
function_block-full_function
[ { "content": "pub fn trace_scene(\n\n world: &World,\n\n camera: &Camera,\n\n rows: u32,\n\n columns: u32,\n\n num_samples: u32,\n\n) -> Vec<Vec3> {\n\n let colors: Vec<Vec3> = (0..(rows * columns))\n\n .into_par_iter()\n\n .map(|index| {\n\n let x = index % columns;\n\n let y = index / columns;\n\n (x as i32, y as i32)\n\n })\n\n .map(|(x, y)| {\n\n let mut rng = rand::thread_rng();\n\n (0..num_samples)\n\n .map(|_| (x as f64 + rng.gen::<f64>(), y as f64 + rng.gen::<f64>()))\n\n .map(|(x, y)| (x / columns as f64, 1.0 - y / rows as f64))\n\n .collect::<Vec<(f64, f64)>>()\n", "file_path": "path_tracing/src/lib.rs", "rank": 10, "score": 58489.797917947384 }, { "content": "fn color(world: &World, ray: Ray, depth: u8) -> Vec3 {\n\n if let Some(hit) = world.hit(&ray, 0.001, std::f64::MAX) {\n\n // return (hit.normal + 1.0) * 0.5;\n\n // recurse until you bounce off into the sky\n\n if depth < 50 {\n\n if let Some(reflection) = hit.material.scatter(&ray, &hit) {\n\n // return (*reflection.ray.direction() + 1.0) * 0.5;\n\n return color(world, reflection.ray, depth + 1) * reflection.attenuation;\n\n } else {\n\n return Vec3::new(0.0, 0.0, 0.0);\n\n }\n\n }\n\n }\n\n\n\n let unit_direction = normalize(ray.direction());\n\n let t = 0.5 * (unit_direction.y() + 1.0);\n\n mix(Vec3::new(1.0, 1.0, 1.0), Vec3::new(0.5, 0.7, 1.0), t)\n\n}\n\n\n", "file_path": "path_tracing/src/lib.rs", "rank": 11, "score": 50976.34511630055 }, { "content": "/// Hitable types can reflect rays for tracing\n\npub trait Hitable {\n\n fn hit(&self, ray: &Ray, t_min: f64, t_max: f64) -> Option<HitRecord>;\n\n}\n", "file_path": "path_tracing/src/hitable.rs", "rank": 12, "score": 30713.494584281718 }, { "content": "/// Scattering determines how a ray behaves after hitting a Hitable\n\npub trait Scattering {\n\n fn scatter(&self, ray: &Ray, hit: &HitRecord) -> Option<ScatteredRay>;\n\n}\n\n\n\n/// Lambertian scattering is perfectly diffuse\n\npub struct Lambertian {\n\n pub albedo: Vec3,\n\n}\n\n\n\nimpl Scattering for Lambertian {\n\n fn scatter(&self, _: &Ray, hit: &HitRecord) -> Option<ScatteredRay> {\n\n let target = hit.p + hit.normal + random_in_unit_sphere();\n\n let ray = Ray::new(hit.p, target - hit.p);\n\n Some(ScatteredRay {\n\n ray,\n\n attenuation: self.albedo,\n\n })\n\n }\n\n}\n\n\n", "file_path": "path_tracing/src/scattering.rs", "rank": 13, "score": 30713.494584281718 }, { "content": "fn build_book_scene() -> World {\n\n let mut world = World::new();\n\n let mut rng = rand::thread_rng();\n\n let mut rand = || rng.gen::<f64>();\n\n\n\n world.push(Box::new(Sphere {\n\n center: Vec3::new(0.0, -1000.0, 0.0),\n\n radius: 1000.0,\n\n material: Box::new(Lambertian {\n\n albedo: Vec3::fill(0.5),\n\n }),\n\n }));\n\n\n\n for a in -11..11 {\n\n for b in -11..11 {\n\n let a = a as f64;\n\n let b = b as f64;\n\n let material_choice: f64 = rand();\n\n let center = Vec3::new(a + 0.9 * rand(), 0.2, b + 0.9 * rand());\n\n\n", "file_path": "path-tracer/src/main.rs", "rank": 14, "score": 27831.946626628192 }, { "content": "fn main() -> std::io::Result<()> {\n\n let nx = 900;\n\n let ny = 600;\n\n let ns = 100;\n\n\n\n let look_from = Vec3::new(13.0, 2.0, 3.0);\n\n let look_at = Vec3::new(0.0, 0.0, 0.0);\n\n let dist_to_focus = 10.0;\n\n let aperture = 0.1;\n\n\n\n let camera = Camera::new(\n\n look_from,\n\n look_at,\n\n Vec3::new(0.0, 1.0, 0.0),\n\n 20.0,\n\n nx as f64 / ny as f64,\n\n aperture,\n\n dist_to_focus,\n\n );\n\n\n", "file_path": "path-tracer/src/main.rs", "rank": 15, "score": 26748.83910210157 }, { "content": "use 
super::{dot, Ray, Vec3};\n\nuse super::{HitRecord, Hitable, Scattering};\n\n\n\n/// A Sphere at a given position\n\npub struct Sphere {\n\n pub center: Vec3,\n\n pub radius: f64,\n\n pub material: Box<dyn Scattering + Sync>, // for testability, would be easier not to complect with materials\n\n}\n\n\n\nimpl Hitable for Sphere {\n\n fn hit(&self, ray: &Ray, t_min: f64, t_max: f64) -> Option<HitRecord> {\n\n let oc = ray.origin() - &self.center;\n\n let a = dot(ray.direction(), ray.direction());\n\n let b = dot(oc, ray.direction());\n\n let c = dot(oc, oc) - self.radius * self.radius;\n\n let discriminant = b * b - a * c;\n\n if discriminant > 0.0 {\n\n let t = (-b - f64::sqrt(discriminant)) / a;\n\n if t < t_max && t > t_min {\n", "file_path": "path_tracing/src/sphere.rs", "rank": 16, "score": 23755.291486691363 }, { "content": " None\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::super::Lambertian;\n\n use super::{Hitable, Ray, Sphere, Vec3};\n\n\n\n #[test]\n\n fn sphere_at_origin() {\n\n let sphere = Sphere {\n\n center: Vec3::fill(0.0),\n\n radius: 1.0,\n\n material: Box::new(Lambertian {\n\n albedo: Vec3::fill(1.0),\n\n }),\n\n };\n\n\n\n let ray = Ray::new(Vec3::new(0.0, 0.0, -2.0), Vec3::new(0.0, 0.0, 1.0));\n\n let hit = sphere.hit(&ray, 0.0, std::f64::MAX).unwrap();\n\n assert_eq!(hit.p, Vec3::new(0.0, 0.0, -1.0));\n\n }\n\n}\n", "file_path": "path_tracing/src/sphere.rs", "rank": 17, "score": 23754.07119627817 }, { "content": " let p = ray.point_at_parameter(t);\n\n return Some(HitRecord {\n\n t,\n\n p,\n\n normal: (&p - &self.center) / self.radius,\n\n material: &*self.material,\n\n });\n\n }\n\n\n\n let t = (-b + f64::sqrt(discriminant)) / a;\n\n if t < t_max && t > t_min {\n\n let p = ray.point_at_parameter(t);\n\n return Some(HitRecord {\n\n t,\n\n p,\n\n normal: (&p - &self.center) / self.radius,\n\n material: &*self.material,\n\n });\n\n }\n\n }\n", "file_path": "path_tracing/src/sphere.rs", "rank": 18, "score": 23747.9699645434 }, { "content": "use std::borrow::Borrow;\n\nuse std::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Sub, SubAssign};\n\n\n\n/// Simple vector implementation in 3 dimensions\n\n/// Using the newtype idiom since arrays can be structs directly\n\n#[derive(Debug, PartialEq, Copy)]\n\npub struct Vec3(pub [f64; 3]);\n\n\n\nimpl Vec3 {\n\n pub fn new(x: f64, y: f64, z: f64) -> Vec3 {\n\n Vec3([x, y, z])\n\n }\n\n pub fn fill(v: f64) -> Vec3 {\n\n Vec3([v, v, v])\n\n }\n\n pub fn r(&self) -> f64 {\n\n self.0[0]\n\n }\n\n pub fn g(&self) -> f64 {\n\n self.0[1]\n", "file_path": "path_tracing/src/vector.rs", "rank": 19, "score": 22270.346428539586 }, { "content": " }\n\n pub fn b(&self) -> f64 {\n\n self.0[2]\n\n }\n\n pub fn x(&self) -> f64 {\n\n self.0[0]\n\n }\n\n pub fn y(&self) -> f64 {\n\n self.0[1]\n\n }\n\n pub fn z(&self) -> f64 {\n\n self.0[2]\n\n }\n\n\n\n /// Returns the magnitude squared of the vector.\n\n /// Use to avoid the square root calculation needed for magnitude.\n\n pub fn length_squared(&self) -> f64 {\n\n let &[x, y, z] = &self.0;\n\n x * x + y * y + z * z\n\n }\n", "file_path": "path_tracing/src/vector.rs", "rank": 20, "score": 22266.416051270437 }, { "content": "\n\n /// Returns the magnitude of the vector (Euclidian norm)\n\n pub fn length(&self) -> f64 {\n\n f64::sqrt(self.length_squared())\n\n }\n\n}\n\n\n\nimpl Clone for Vec3 {\n\n fn clone(&self) -> Self {\n\n *self\n\n }\n\n}\n\n\n\nimpl<T> Add<T> for Vec3\n\nwhere\n\n T: Borrow<Vec3>,\n\n{\n\n type Output = Self;\n\n\n\n // Component-wise addition of 
two vectors.\n", "file_path": "path_tracing/src/vector.rs", "rank": 21, "score": 22266.013675649607 }, { "content": " fn mul(self, rhs: T) -> Self {\n\n let &[x, y, z] = &rhs.borrow().0;\n\n Vec3::new(self.0[0] * x, self.0[1] * y, self.0[2] * z)\n\n }\n\n}\n\n\n\nimpl Mul<f64> for Vec3 {\n\n type Output = Self;\n\n\n\n fn mul(self, rhs: f64) -> Self {\n\n let &[x, y, z] = &self.0;\n\n Vec3::new(x * rhs, y * rhs, z * rhs)\n\n }\n\n}\n\n\n\nimpl Mul<f64> for &Vec3 {\n\n type Output = Vec3;\n\n fn mul(self, rhs: f64) -> Vec3 {\n\n let &[x, y, z] = &self.0;\n\n Vec3::new(x * rhs, y * rhs, z * rhs)\n", "file_path": "path_tracing/src/vector.rs", "rank": 22, "score": 22265.357849912118 }, { "content": " fn div_assign(&mut self, rhs: Self) {\n\n self.0[0] /= rhs.0[0];\n\n self.0[1] /= rhs.0[1];\n\n self.0[2] /= rhs.0[2];\n\n }\n\n}\n\n\n\nimpl DivAssign<f64> for Vec3 {\n\n fn div_assign(&mut self, rhs: f64) {\n\n self.0[0] /= rhs;\n\n self.0[1] /= rhs;\n\n self.0[2] /= rhs;\n\n }\n\n}\n\n\n\nimpl<T> Mul<T> for Vec3\n\nwhere\n\n T: Borrow<Vec3>,\n\n{\n\n type Output = Self;\n", "file_path": "path_tracing/src/vector.rs", "rank": 23, "score": 22264.852089465072 }, { "content": "\n\nimpl Sub for &Vec3 {\n\n type Output = Vec3;\n\n\n\n fn sub(self, rhs: Self) -> Vec3 {\n\n Vec3::new(\n\n self.0[0] - rhs.0[0],\n\n self.0[1] - rhs.0[1],\n\n self.0[2] - rhs.0[2],\n\n )\n\n }\n\n}\n\n\n\nimpl SubAssign for Vec3 {\n\n fn sub_assign(&mut self, rhs: Self) {\n\n self.0[0] -= rhs.0[0];\n\n self.0[1] -= rhs.0[1];\n\n self.0[2] -= rhs.0[2];\n\n }\n\n}\n", "file_path": "path_tracing/src/vector.rs", "rank": 24, "score": 22264.274226048397 }, { "content": " }\n\n}\n\n\n\nimpl MulAssign for Vec3 {\n\n fn mul_assign(&mut self, rhs: Self) {\n\n self.0[0] *= rhs.0[0];\n\n self.0[1] *= rhs.0[1];\n\n self.0[2] *= rhs.0[2];\n\n }\n\n}\n\n\n\nimpl Neg for Vec3 {\n\n type Output = Vec3;\n\n\n\n fn neg(self) -> Vec3 {\n\n let &[x, y, z] = &self.0;\n\n Vec3::new(-x, -y, -z)\n\n }\n\n}\n\n\n", "file_path": "path_tracing/src/vector.rs", "rank": 25, "score": 22264.212097009295 }, { "content": "impl Neg for &Vec3 {\n\n type Output = Vec3;\n\n\n\n fn neg(self) -> Vec3 {\n\n let &[x, y, z] = &self.0;\n\n Vec3::new(-x, -y, -z)\n\n }\n\n}\n\n\n\nimpl<T> Sub<T> for Vec3\n\nwhere\n\n T: Borrow<Vec3>,\n\n{\n\n type Output = Self;\n\n\n\n fn sub(self, rhs: T) -> Self {\n\n let &[x, y, z] = &rhs.borrow().0;\n\n Vec3::new(self.0[0] - x, self.0[1] - y, self.0[2] - z)\n\n }\n\n}\n", "file_path": "path_tracing/src/vector.rs", "rank": 26, "score": 22262.764472643037 }, { "content": " fn add(self, rhs: T) -> Self {\n\n let &[x, y, z] = &rhs.borrow().0;\n\n Vec3::new(self.0[0] + x, self.0[1] + y, self.0[2] + z)\n\n }\n\n}\n\n\n\nimpl Add<f64> for Vec3 {\n\n type Output = Self;\n\n\n\n fn add(self, rhs: f64) -> Self {\n\n let &[x, y, z] = &self.0;\n\n Vec3::new(x + rhs, y + rhs, z + rhs)\n\n }\n\n}\n\n\n\nimpl<T> Add<T> for &Vec3\n\nwhere\n\n T: Borrow<Vec3>,\n\n{\n\n type Output = Vec3;\n", "file_path": "path_tracing/src/vector.rs", "rank": 27, "score": 22262.08494714176 }, { "content": "where\n\n T: Borrow<Vec3>,\n\n{\n\n type Output = Self;\n\n\n\n fn div(self, rhs: T) -> Self {\n\n let &[x, y, z] = &rhs.borrow().0;\n\n Vec3::new(self.0[0] / x, self.0[1] / y, self.0[2] / z)\n\n }\n\n}\n\n\n\nimpl Div<f64> for Vec3 {\n\n type Output = Self;\n\n\n\n fn div(self, rhs: f64) -> Self {\n\n Vec3::new(self.0[0] / rhs, self.0[1] / rhs, self.0[2] / rhs)\n\n }\n\n}\n\n\n\nimpl DivAssign for Vec3 {\n", "file_path": "path_tracing/src/vector.rs", "rank": 28, 
"score": 22262.04170180298 }, { "content": "\n\nimpl std::cmp::Eq for Vec3 {}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn multiplication() {\n\n let a = Vec3::new(-1.0, 1.0, 2.0);\n\n let b = Vec3::new(10.0, 10.0, 10.0);\n\n assert_eq!(\n\n a * b,\n\n Vec3::new(-10.0, 10.0, 20.0),\n\n \"Vector multiplication is component-wise\"\n\n );\n\n assert_eq!(a * b, a * &b);\n\n }\n\n\n\n #[test]\n", "file_path": "path_tracing/src/vector.rs", "rank": 29, "score": 22260.886306041863 }, { "content": " fn division() {\n\n let a = Vec3::new(1.0, 2.0, 4.0);\n\n let b = Vec3::new(2.0, 2.0, 2.0);\n\n assert_eq!(\n\n a / b,\n\n Vec3::new(0.5, 1.0, 2.0),\n\n \"Vector division is component-wise\"\n\n );\n\n assert_eq!(a / b, a / &b);\n\n }\n\n\n\n #[test]\n\n fn addition() {\n\n let a = Vec3::new(-1.0, 1.0, 2.0);\n\n let b = Vec3::new(10.0, 10.0, 10.0);\n\n assert_eq!(\n\n a + b,\n\n Vec3::new(9.0, 11.0, 12.0),\n\n \"Vectors can be added together\"\n\n );\n", "file_path": "path_tracing/src/vector.rs", "rank": 30, "score": 22260.175036079858 }, { "content": "\n\n fn add(self, rhs: T) -> Vec3 {\n\n let rhs = rhs.borrow();\n\n Vec3::new(\n\n self.0[0] + rhs.0[0],\n\n self.0[1] + rhs.0[1],\n\n self.0[2] + rhs.0[2],\n\n )\n\n }\n\n}\n\n\n\nimpl AddAssign for Vec3 {\n\n fn add_assign(&mut self, rhs: Self) {\n\n self.0[0] += rhs.0[0];\n\n self.0[1] += rhs.0[1];\n\n self.0[2] += rhs.0[2];\n\n }\n\n}\n\n\n\nimpl<T> Div<T> for Vec3\n", "file_path": "path_tracing/src/vector.rs", "rank": 31, "score": 22259.414635389072 }, { "content": "\n\n assert_eq!(a + b, a + &b);\n\n assert_eq!(a + b, b + a);\n\n }\n\n\n\n #[test]\n\n fn subtraction() {\n\n let a = Vec3::new(1.0, 2.0, 3.0);\n\n let b = Vec3::new(4.0, 5.0, 6.0);\n\n assert_eq!(a - b, Vec3::new(-3.0, -3.0, -3.0));\n\n assert_eq!(b - a, Vec3::new(3.0, 3.0, 3.0));\n\n assert_eq!(b - a, b - &a);\n\n assert_eq!(a - b, a - &b);\n\n assert_ne!(a - b, b - a);\n\n\n\n assert_eq!(-a, Vec3::new(-1.0, -2.0, -3.0));\n\n assert_eq!(-b, Vec3::new(-4.0, -5.0, -6.0));\n\n }\n\n}\n", "file_path": "path_tracing/src/vector.rs", "rank": 32, "score": 22258.020215909262 }, { "content": "//! # Ray Tracing\n\n//!\n\n//! Provides traits for defining geometric objects and materials that\n\n//! govern how rays interact when hitting them.\n\n//!\n\n//! 
Vector math routines and convenience functions for graphics.\n\n//!\n\n\n\nmod camera;\n\nmod hitable;\n\nmod ray;\n\nmod scattering;\n\nmod sphere;\n\nmod utilities;\n\nmod vector;\n\nmod world;\n\n\n\npub use self::camera::*;\n\npub use self::hitable::*;\n\npub use self::ray::*;\n\npub use self::scattering::*;\n\npub use self::sphere::*;\n\npub use self::utilities::*;\n\npub use self::vector::*;\n\npub use self::world::*;\n\nuse rand::prelude::*;\n\nuse rayon::prelude::*;\n\n\n", "file_path": "path_tracing/src/lib.rs", "rank": 34, "score": 10.112696543071921 }, { "content": "use super::vector::*;\n\n\n\npub struct Ray {\n\n origin: Vec3,\n\n direction: Vec3,\n\n}\n\n\n\nimpl Ray {\n\n pub fn new(origin: Vec3, direction: Vec3) -> Ray {\n\n Ray { origin, direction }\n\n }\n\n\n\n pub fn point_at_parameter(&self, t: f64) -> Vec3 {\n\n &self.origin + (&self.direction * t)\n\n }\n\n\n\n pub fn origin(&self) -> &Vec3 {\n\n &self.origin\n\n }\n\n\n", "file_path": "path_tracing/src/ray.rs", "rank": 35, "score": 9.418847058035979 }, { "content": "use super::HitRecord;\n\nuse super::{dot, normalize, rand, random_in_unit_sphere, reflect, refract, schlick, Ray, Vec3};\n\n\n\n/// A Ray after scattering off a Hitable\n\npub struct ScatteredRay {\n\n pub ray: Ray,\n\n pub attenuation: Vec3,\n\n}\n\n\n\n/// Scattering determines how a ray behaves after hitting a Hitable\n", "file_path": "path_tracing/src/scattering.rs", "rank": 36, "score": 9.032928555866597 }, { "content": "use super::{cross, normalize, random_in_unit_disk, Ray, Vec3};\n\n\n\n#[derive(Debug)]\n\npub struct Camera {\n\n pub origin: Vec3,\n\n pub lower_left_corner: Vec3,\n\n pub horizontal: Vec3,\n\n pub vertical: Vec3,\n\n pub lens_radius: f64,\n\n pub u: Vec3,\n\n pub v: Vec3,\n\n}\n\n\n\nimpl Camera {\n\n pub fn new(\n\n origin: Vec3,\n\n target: Vec3,\n\n up: Vec3,\n\n fov: f64,\n\n aspect: f64,\n", "file_path": "path_tracing/src/camera.rs", "rank": 37, "score": 8.796144419996192 }, { "content": "use super::Scattering;\n\nuse super::{Ray, Vec3};\n\n\n\n///\n\n/// HitRecords store information about a ray intersection with a Hitable surface or volume.\n\n///\n\npub struct HitRecord<'a> {\n\n pub t: f64,\n\n pub p: Vec3,\n\n pub normal: Vec3,\n\n pub material: &'a dyn Scattering,\n\n}\n\n\n\nimpl<'a> HitRecord<'a> {\n\n pub fn normal(&self) -> &Vec3 {\n\n &self.normal\n\n }\n\n\n\n pub fn position(&self) -> &Vec3 {\n\n &self.p\n\n }\n\n}\n\n\n\n/// Hitable types can reflect rays for tracing\n", "file_path": "path_tracing/src/hitable.rs", "rank": 38, "score": 8.062358280247707 }, { "content": "extern crate image;\n\n\n\nuse image::RgbImage;\n\nuse path_tracing::{trace_scene, Camera, Dielectric, Lambertian, Metallic, Sphere, Vec3, World};\n\nuse rand::prelude::*;\n\n\n", "file_path": "path-tracer/src/main.rs", "rank": 39, "score": 7.656363163707072 }, { "content": " if (center - Vec3::new(4.0, 0.2, 0.0)).length() > 0.9 {\n\n if material_choice < 0.8 {\n\n world.push(Box::new(Sphere {\n\n center,\n\n radius: 0.2,\n\n material: Box::new(Lambertian {\n\n albedo: Vec3::new(rand() * rand(), rand() * rand(), rand() * rand()),\n\n }),\n\n }))\n\n } else if material_choice < 0.95 {\n\n world.push(Box::new(Sphere {\n\n center,\n\n radius: 0.2,\n\n material: Box::new(Metallic {\n\n albedo: Vec3::new(\n\n 0.5 * rand() * rand(),\n\n 0.5 * rand() * rand(),\n\n 0.5 * rand() * rand(),\n\n ),\n\n roughness: 0.5 * rand(),\n", "file_path": "path-tracer/src/main.rs", "rank": 41, "score": 6.881857253763036 }, { "content": "use super::hitable::*;\n\nuse 
super::Ray;\n\n\n\npub struct World(Vec<Box<dyn Hitable + Sync>>);\n\n\n\nimpl Hitable for World {\n\n fn hit(&self, ray: &Ray, t_min: f64, t_max: f64) -> Option<HitRecord> {\n\n let mut closest = t_max;\n\n let mut found: Option<HitRecord> = None;\n\n for hitable in &self.0 {\n\n if let Some(hit) = hitable.hit(ray, t_min, closest) {\n\n closest = hit.t;\n\n found = Some(hit);\n\n }\n\n }\n\n found\n\n }\n\n}\n\n\n\nimpl World {\n\n pub fn new() -> World {\n\n World(vec![])\n\n }\n\n\n\n pub fn push(&mut self, item: Box<dyn Hitable + Sync>) {\n\n self.0.push(item);\n\n }\n\n}\n", "file_path": "path_tracing/src/world.rs", "rank": 42, "score": 6.8700451465752295 }, { "content": " pub fn direction(&self) -> &Vec3 {\n\n &self.direction\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn point_along_ray() {\n\n let ray = Ray::new(Vec3::new(1.0, 1.0, 1.0), Vec3::new(1.0, 0.0, 0.0));\n\n assert_eq!(ray.point_at_parameter(0.5), Vec3::new(1.5, 1.0, 1.0));\n\n }\n\n}\n", "file_path": "path_tracing/src/ray.rs", "rank": 43, "score": 5.422548853995265 }, { "content": " })\n\n }\n\n}\n\n\n\npub struct NaiveDielectric {\n\n pub refractive_index: f64,\n\n}\n\n\n\nimpl Scattering for NaiveDielectric {\n\n fn scatter(&self, ray: &Ray, hit: &HitRecord) -> Option<ScatteredRay> {\n\n let attenuation = Vec3::new(1.0, 1.0, 1.0);\n\n\n\n let outward_normal: Vec3;\n\n let ni_over_nt: f64;\n\n if dot(ray.direction(), hit.normal()) > 0.0 {\n\n outward_normal = &Vec3::new(0.0, 0.0, 0.0) - hit.normal();\n\n ni_over_nt = self.refractive_index;\n\n } else {\n\n outward_normal = *hit.normal();\n\n ni_over_nt = 1.0 / self.refractive_index;\n", "file_path": "path_tracing/src/scattering.rs", "rank": 44, "score": 5.1207905435583285 }, { "content": "/// Metallic scattering reflects rays at a consistent angle\n\npub struct Metallic {\n\n pub albedo: Vec3,\n\n pub roughness: f64,\n\n}\n\n\n\nimpl Scattering for Metallic {\n\n fn scatter(&self, ray: &Ray, hit: &HitRecord) -> Option<ScatteredRay> {\n\n let reflected = reflect(&normalize(ray.direction()), &hit.normal);\n\n let scattered = Ray::new(hit.p, reflected + random_in_unit_sphere() * self.roughness);\n\n\n\n if dot(scattered.direction(), &hit.normal) > 0.0 {\n\n return Some(ScatteredRay {\n\n attenuation: self.albedo,\n\n ray: scattered,\n\n });\n\n }\n\n\n\n None\n\n }\n", "file_path": "path_tracing/src/scattering.rs", "rank": 45, "score": 5.097296180391748 }, { "content": " }),\n\n }));\n\n\n\n world.push(Box::new(Sphere {\n\n center: Vec3::new(-4.0, 1.0, 0.0),\n\n radius: 1.0,\n\n material: Box::new(Lambertian {\n\n albedo: Vec3::new(0.4, 0.2, 0.1),\n\n }),\n\n }));\n\n\n\n world.push(Box::new(Sphere {\n\n center: Vec3::new(4.0, 1.0, 0.0),\n\n radius: 1.0,\n\n material: Box::new(Metallic {\n\n albedo: Vec3::new(0.7, 0.6, 0.5),\n\n roughness: 0.0,\n\n }),\n\n }));\n\n\n\n world\n\n}\n", "file_path": "path-tracer/src/main.rs", "rank": 47, "score": 4.31960439930406 }, { "content": "}\n\n\n\n/// Dielectric materials refract light, like glass.\n\npub struct Dielectric {\n\n pub refractive_index: f64,\n\n}\n\n\n\nimpl Scattering for Dielectric {\n\n fn scatter(&self, ray: &Ray, hit: &HitRecord) -> Option<ScatteredRay> {\n\n let attenuation = Vec3::new(1.0, 1.0, 1.0);\n\n let outward_normal;\n\n let ni_over_nt;\n\n let cosine;\n\n\n\n if dot(ray.direction(), hit.normal()) > 0.0 {\n\n outward_normal = -hit.normal();\n\n ni_over_nt = self.refractive_index;\n\n let c = dot(ray.direction(), hit.normal()) / ray.direction().length();\n\n cosine = 
f64::sqrt(1.0 - self.refractive_index * self.refractive_index * (1.0 - c * c));\n\n } else {\n", "file_path": "path_tracing/src/scattering.rs", "rank": 50, "score": 4.000249609902504 }, { "content": " lens_radius: aperture / 2.0,\n\n u,\n\n v,\n\n }\n\n }\n\n\n\n pub fn make_ray(&self, u: f64, v: f64) -> Ray {\n\n let rd = random_in_unit_disk();\n\n let offset = (self.u * rd[0] + self.v * rd[1]) * self.lens_radius;\n\n let origin = self.origin + offset;\n\n Ray::new(\n\n origin,\n\n self.lower_left_corner + self.horizontal * u + self.vertical * v - origin,\n\n )\n\n }\n\n}\n", "file_path": "path_tracing/src/camera.rs", "rank": 51, "score": 3.5527598666830063 }, { "content": " }),\n\n }));\n\n } else {\n\n world.push(Box::new(Sphere {\n\n center,\n\n radius: 0.2,\n\n material: Box::new(Dielectric {\n\n refractive_index: 1.5,\n\n }),\n\n }));\n\n }\n\n }\n\n }\n\n }\n\n\n\n world.push(Box::new(Sphere {\n\n center: Vec3::new(0.0, 1.0, 0.0),\n\n radius: 1.0,\n\n material: Box::new(Dielectric {\n\n refractive_index: 1.5,\n", "file_path": "path-tracer/src/main.rs", "rank": 52, "score": 3.520107215077701 }, { "content": " }) // generate N normalized samples per pixel coordinate\n\n .map(|samples| {\n\n samples\n\n .iter()\n\n .map(|(u, v)| {\n\n let ray = camera.make_ray(*u, *v);\n\n return color(world, ray, 0);\n\n })\n\n .fold(Vec3::fill(0.0), |acc, x| acc + x)\n\n / num_samples as f64\n\n }) // accumulate colors for pixels\n\n .map(|linear| Vec3::new(linear.r().sqrt(), linear.g().sqrt(), linear.b().sqrt())) // gamma adjust\n\n .collect();\n\n\n\n colors\n\n}\n", "file_path": "path_tracing/src/lib.rs", "rank": 53, "score": 2.751309820732992 }, { "content": " aperture: f64,\n\n focus_dist: f64,\n\n ) -> Camera {\n\n let theta = fov * std::f64::consts::PI / 180.0;\n\n let half_height = f64::tan(theta / 2.0);\n\n let half_width = aspect * half_height;\n\n let w = normalize(origin - target);\n\n let u = normalize(cross(up, w));\n\n let v = cross(w, u);\n\n\n\n let lower_left_corner =\n\n origin - u * half_width * focus_dist - v * half_height * focus_dist - w * focus_dist;\n\n let horizontal = u * 2.0 * focus_dist * half_width;\n\n let vertical = v * 2.0 * focus_dist * half_height;\n\n\n\n Camera {\n\n origin,\n\n lower_left_corner,\n\n horizontal,\n\n vertical,\n", "file_path": "path_tracing/src/camera.rs", "rank": 54, "score": 2.144813155587931 }, { "content": "# Path Tracer\n\n\n\nA simple path tracer implemented in Rust following the C++ book [Ray Tracing in One Weekend](https://github.com/petershirley/raytracinginoneweekend). 
This is a learning project for both Rust and ray tracing.\n\n\n\nScene output (fewer pixels and more samples than the end-of-book code):\n\n![mapped-image](https://user-images.githubusercontent.com/81553/51091108-eac4c800-1753-11e9-8ed0-08aa7495bef9.png)\n\n\n\n## Building and running\n\n\n\nRun in release mode to avoid having your render take all day:\n\n`cargo run --release`\n", "file_path": "README.md", "rank": 55, "score": 1.0750047554801356 }, { "content": " outward_normal = *hit.normal();\n\n ni_over_nt = 1.0 / self.refractive_index;\n\n cosine = -dot(ray.direction(), hit.normal()) / ray.direction().length();\n\n }\n\n\n\n if let Some(refracted) = refract(ray.direction(), &outward_normal, ni_over_nt) {\n\n let reflect_prob = schlick(cosine, self.refractive_index);\n\n if rand() > reflect_prob {\n\n // refract\n\n return Some(ScatteredRay {\n\n ray: Ray::new(*hit.position(), refracted),\n\n attenuation,\n\n });\n\n }\n\n }\n\n\n\n let reflected = reflect(ray.direction(), hit.normal());\n\n Some(ScatteredRay {\n\n ray: Ray::new(*hit.position(), reflected),\n\n attenuation,\n", "file_path": "path_tracing/src/scattering.rs", "rank": 56, "score": 1.067581581361582 } ]
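The path_tracing excerpts above repeatedly call helpers such as `dot`, `normalize`, `reflect`, `refract`, `schlick` and `random_in_unit_sphere`, which live in the repo's `utilities.rs` — a file not included among these context items. Below is a minimal, self-contained sketch of the standard formulas those names usually denote (as presented in *Ray Tracing in One Weekend*, which the README cites). It deliberately uses plain `[f64; 3]` arrays instead of the repo's `Vec3`, so the types and signatures here are assumptions for illustration, not the actual `utilities.rs`.

```rust
// Standalone sketch (NOT the repo's utilities.rs): the textbook formulas that the
// `reflect`, `refract` and `schlick` calls in the scattering excerpts rely on,
// written against plain [f64; 3] arrays so the example compiles on its own.

fn dot(a: [f64; 3], b: [f64; 3]) -> f64 {
    a[0] * b[0] + a[1] * b[1] + a[2] * b[2]
}

fn scale(v: [f64; 3], s: f64) -> [f64; 3] {
    [v[0] * s, v[1] * s, v[2] * s]
}

fn sub(a: [f64; 3], b: [f64; 3]) -> [f64; 3] {
    [a[0] - b[0], a[1] - b[1], a[2] - b[2]]
}

/// Mirror reflection of `v` about the unit normal `n`: v - 2 (v · n) n.
fn reflect(v: [f64; 3], n: [f64; 3]) -> [f64; 3] {
    sub(v, scale(n, 2.0 * dot(v, n)))
}

/// Snell's-law refraction; returns None on total internal reflection.
fn refract(v: [f64; 3], n: [f64; 3], ni_over_nt: f64) -> Option<[f64; 3]> {
    let len = dot(v, v).sqrt();
    let uv = scale(v, 1.0 / len);
    let dt = dot(uv, n);
    let discriminant = 1.0 - ni_over_nt * ni_over_nt * (1.0 - dt * dt);
    if discriminant > 0.0 {
        Some(sub(
            scale(sub(uv, scale(n, dt)), ni_over_nt),
            scale(n, discriminant.sqrt()),
        ))
    } else {
        None
    }
}

/// Schlick's approximation for reflectance at a dielectric boundary.
fn schlick(cosine: f64, refractive_index: f64) -> f64 {
    let r0 = ((1.0 - refractive_index) / (1.0 + refractive_index)).powi(2);
    r0 + (1.0 - r0) * (1.0 - cosine).powf(5.0)
}

fn main() {
    // A ray travelling diagonally downwards hits a surface whose normal points up.
    let incoming = [1.0, -1.0, 0.0];
    let normal = [0.0, 1.0, 0.0];
    println!("reflected: {:?}", reflect(incoming, normal)); // [1.0, 1.0, 0.0]
    println!("refracted: {:?}", refract(incoming, normal, 1.0 / 1.5));
    println!("schlick(1.0, 1.5) = {}", schlick(1.0, 1.5)); // ~0.04
}
```

Returning an `Option` from `refract` matches how the `Dielectric::scatter` excerpt above falls back to reflection when refraction is impossible (total internal reflection).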
Rust
rust/xaynet-server/src/state_machine/phases/sum2.rs
little-dude/xain-fl
9c421c03bf1b98c7717593c0856fe856b6f338f7
use xaynet_core::{ mask::{Aggregation, MaskObject}, SumDict, SumParticipantPublicKey, }; use crate::state_machine::{ coordinator::MaskDict, phases::{Handler, Phase, PhaseName, PhaseState, Shared, StateError, Unmask}, requests::{StateMachineRequest, Sum2Request}, StateMachine, StateMachineError, }; #[cfg(feature = "metrics")] use crate::metrics; use tokio::time::{timeout, Duration}; #[derive(Debug)] pub struct Sum2 { sum_dict: SumDict, model_agg: Aggregation, scalar_agg: Aggregation, model_mask_dict: MaskDict, scalar_mask_dict: MaskDict, } #[cfg(test)] impl Sum2 { pub fn sum_dict(&self) -> &SumDict { &self.sum_dict } pub fn aggregation(&self) -> &Aggregation { &self.model_agg } pub fn mask_dict(&self) -> &MaskDict { &self.model_mask_dict } pub fn scalar_agg(&self) -> &Aggregation { &self.scalar_agg } pub fn scalar_mask_dict(&self) -> &MaskDict { &self.scalar_mask_dict } } #[async_trait] impl Phase for PhaseState<Sum2> where Self: Handler, { const NAME: PhaseName = PhaseName::Sum2; async fn run(&mut self) -> Result<(), StateError> { let min_time = self.shared.state.min_sum_time; debug!("in sum2 phase for a minimum of {} seconds", min_time); self.process_during(Duration::from_secs(min_time)).await?; let time_left = self.shared.state.max_sum_time - min_time; timeout(Duration::from_secs(time_left), self.process_until_enough()).await??; info!( "{} sum2 messages handled (min {} required)", self.mask_count(), self.shared.state.min_sum_count ); Ok(()) } fn next(self) -> Option<StateMachine> { Some( PhaseState::<Unmask>::new( self.shared, self.inner.model_agg, self.inner.scalar_agg, self.inner.model_mask_dict, self.inner.scalar_mask_dict, ) .into(), ) } } impl PhaseState<Sum2> where Self: Handler + Phase, { async fn process_until_enough(&mut self) -> Result<(), StateError> { while !self.has_enough_sum2s() { debug!( "{} sum2 messages handled (min {} required)", self.mask_count(), self.shared.state.min_sum_count ); self.process_single().await?; } Ok(()) } } impl Handler for PhaseState<Sum2> { fn handle_request(&mut self, req: StateMachineRequest) -> Result<(), StateMachineError> { match req { StateMachineRequest::Sum2(sum2_req) => { metrics!( self.shared.io.metrics_tx, metrics::message::sum2::increment(self.shared.state.round_id, Self::NAME) ); self.handle_sum2(sum2_req) } _ => Err(StateMachineError::MessageRejected), } } } impl PhaseState<Sum2> { pub fn new( shared: Shared, sum_dict: SumDict, model_agg: Aggregation, scalar_agg: Aggregation, ) -> Self { info!("state transition"); Self { inner: Sum2 { sum_dict, model_agg, scalar_agg, model_mask_dict: MaskDict::new(), scalar_mask_dict: MaskDict::new(), }, shared, } } fn handle_sum2(&mut self, req: Sum2Request) -> Result<(), StateMachineError> { let Sum2Request { participant_pk, model_mask, scalar_mask, } = req; self.add_mask(&participant_pk, model_mask, scalar_mask) } fn add_mask( &mut self, pk: &SumParticipantPublicKey, model_mask: MaskObject, scalar_mask: MaskObject, ) -> Result<(), StateMachineError> { if self.inner.sum_dict.remove(pk).is_none() { return Err(StateMachineError::MessageRejected); } if let Some(count) = self.inner.model_mask_dict.get_mut(&model_mask) { *count += 1; } else { self.inner.model_mask_dict.insert(model_mask, 1); } if let Some(count) = self.inner.scalar_mask_dict.get_mut(&scalar_mask) { *count += 1; } else { self.inner.scalar_mask_dict.insert(scalar_mask, 1); } Ok(()) } fn mask_count(&self) -> usize { let sum1 = self.inner.model_mask_dict.values().sum(); let sum2: usize = self.inner.scalar_mask_dict.values().sum(); if sum1 != 
sum2 { warn!( "unexpected difference in mask sum count: {} vs {}", sum1, sum2 ); } sum1 } fn has_enough_sum2s(&self) -> bool { self.mask_count() >= self.shared.state.min_sum_count } } #[cfg(test)] mod test { use super::*; use crate::state_machine::{ events::Event, tests::{builder::StateMachineBuilder, utils}, }; use xaynet_core::{ common::RoundSeed, crypto::{ByteObject, EncryptKeyPair}, mask::{FromPrimitives, Model}, SumDict, }; #[tokio::test] pub async fn sum2_to_unmask() { let n_updaters = 1; let n_summers = 1; let seed = RoundSeed::generate(); let sum_ratio = 0.5; let update_ratio = 1.0; let coord_keys = EncryptKeyPair::generate(); let model_size = 4; let mut summer = utils::generate_summer(&seed, sum_ratio, update_ratio); let ephm_pk = utils::ephm_pk(&summer.compose_sum_message(coord_keys.public)); let mut sum_dict = SumDict::new(); sum_dict.insert(summer.pk, ephm_pk); let updater = utils::generate_updater(&seed, sum_ratio, update_ratio); let scalar = 1.0 / (n_updaters as f64 * update_ratio); let model = Model::from_primitives(vec![0; model_size].into_iter()).unwrap(); let msg = updater.compose_update_message(coord_keys.public, &sum_dict, scalar, model.clone()); let masked_model = utils::masked_model(&msg); let masked_scalar = utils::masked_scalar(&msg); let local_seed_dict = utils::local_seed_dict(&msg); let mut aggregation = Aggregation::new(utils::mask_settings().into(), model_size); aggregation.aggregate(masked_model.clone()); let mut scalar_agg = Aggregation::new(utils::mask_settings().into(), 1); scalar_agg.aggregate(masked_scalar.clone()); let sum2 = Sum2 { sum_dict, model_agg: aggregation, scalar_agg, model_mask_dict: MaskDict::new(), scalar_mask_dict: MaskDict::new(), }; let (state_machine, request_tx, events) = StateMachineBuilder::new() .with_seed(seed.clone()) .with_phase(sum2) .with_sum_ratio(sum_ratio) .with_update_ratio(update_ratio) .with_min_sum(n_summers) .with_min_update(n_updaters) .with_mask_config(utils::mask_settings().into()) .build(); assert!(state_machine.is_sum2()); let msg = summer .compose_sum2_message(coord_keys.public, &local_seed_dict, masked_model.data.len()) .unwrap(); let req = async { request_tx.msg(&msg).await.unwrap() }; let transition = async { state_machine.next().await.unwrap() }; let ((), state_machine) = tokio::join!(req, transition); assert!(state_machine.is_unmask()); let PhaseState { inner: unmask_state, .. } = state_machine.into_unmask_phase_state(); assert_eq!(unmask_state.mask_dict().len(), 1); let (mask, count) = unmask_state.mask_dict().iter().next().unwrap().clone(); assert_eq!(*count, 1); let unmasked_model = unmask_state .aggregation() .unwrap() .clone() .unmask(mask.clone()); assert_eq!(unmasked_model, model); assert_eq!( events.phase_listener().get_latest(), Event { round_id: 0, event: PhaseName::Sum2, } ); } }
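In the `sum2.rs` file code above, `add_mask` first removes the sender from the sum dictionary and then counts identical model and scalar masks, while `mask_count` sums those counts to decide when enough sum2 messages have arrived; `next()` finally hands both dictionaries to the Unmask phase. The snippet below illustrates only that occurrence-counting pattern — `MaskDict` and `MaskObject` are not defined in this excerpt, so a `HashMap<String, usize>` with string keys stands in for them.

```rust
use std::collections::HashMap;

// Illustration only: judging by how MaskDict is used above (`get_mut`,
// `insert(mask, 1)`, `values().sum()`), it behaves like a map from a mask value
// to the number of sum participants that submitted it.
fn add_mask(dict: &mut HashMap<String, usize>, mask: String) {
    // Equivalent to the get_mut / insert pair in `PhaseState::<Sum2>::add_mask`.
    *dict.entry(mask).or_insert(0) += 1;
}

fn mask_count(dict: &HashMap<String, usize>) -> usize {
    dict.values().sum()
}

fn main() {
    let mut model_masks = HashMap::new();
    add_mask(&mut model_masks, "mask-a".to_string());
    add_mask(&mut model_masks, "mask-a".to_string());
    add_mask(&mut model_masks, "mask-b".to_string());

    // Two distinct masks, three submissions in total; the filled dictionaries are
    // what the sum2 phase passes on to the Unmask phase in `next()`.
    assert_eq!(model_masks.len(), 2);
    assert_eq!(mask_count(&model_masks), 3);
    println!(
        "{} submissions over {} distinct masks",
        mask_count(&model_masks),
        model_masks.len()
    );
}
```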
use xaynet_core::{ mask::{Aggregation, MaskObject}, SumDict, SumParticipantPublicKey, }; use crate::state_machine::{ coordinator::MaskDict, phases::{Handler, Phase, PhaseName, PhaseState, Shared, StateError, Unmask}, requests::{StateMachineRequest, Sum2Request}, StateMachine, StateMachineError, }; #[cfg(feature = "metrics")] use crate::metrics; use tokio::time::{timeout, Duration}; #[derive(Debug)] pub struct Sum2 { sum_dict: SumDict, model_agg: Aggregation, scalar_agg: Aggregation, model_mask_dict: MaskDict, scalar_mask_dict: MaskDict, } #[cfg(test)] impl Sum2 { pub fn sum_dict(&self) -> &SumDict { &self.sum_dict } pub fn aggregation(&self) -> &Aggregation { &self.model_agg } pub fn mask_dict(&self) -> &MaskDict { &self.model_mask_dict } pub fn scalar_agg(&self) -> &Aggregation { &self.scalar_agg } pub fn scalar_mask_dict(&self) -> &MaskDict { &self.scalar_mask_dict } } #[async_trait] impl Phase for PhaseState<Sum2> where Self: Handler, { const NAME: PhaseName = PhaseName::Sum2; async fn run(&mut self) -> Result<(), StateError> { let min_time = self.shared.state.min_sum_time; debug!("in sum2 phase for a minimum of {} seconds", min_time); self.process_during(Duration::from_secs(min_time)).await?; let time_left = self.shared.state.max_sum_time - min_time; timeout(Duration::from_secs(time_left), self.process_until_enough()).await??; info!( "{} sum2 messages handled (min {} required)", self.mask_count(), self.shared.state.min_sum_count ); Ok(()) } fn next(self) -> Option<StateMachine> { Some( PhaseState::<Unmask>::new( self.shared, self.inner.model_agg, self.inner.scalar_agg, self.inner.model_mask_dict, self.inner.scalar_mask_dict, ) .into(), ) } } impl PhaseState<Sum2> where Self: Handler + Phase, { async fn process_until_enough(&mut self) -> Result<(), StateError> { while !self.has_enough_sum2s() { debug!( "{} sum2 messages handled (min {} required)", self.mask_count(), self.shared.state.min_sum_count ); self.process_single().await?; } Ok(()) } } impl Handler for PhaseState<Sum2> { fn handle_request(&mut self, req: StateMachineRequest) -> Result<(), StateMachineError> {
} } impl PhaseState<Sum2> { pub fn new( shared: Shared, sum_dict: SumDict, model_agg: Aggregation, scalar_agg: Aggregation, ) -> Self { info!("state transition"); Self { inner: Sum2 { sum_dict, model_agg, scalar_agg, model_mask_dict: MaskDict::new(), scalar_mask_dict: MaskDict::new(), }, shared, } } fn handle_sum2(&mut self, req: Sum2Request) -> Result<(), StateMachineError> { let Sum2Request { participant_pk, model_mask, scalar_mask, } = req; self.add_mask(&participant_pk, model_mask, scalar_mask) } fn add_mask( &mut self, pk: &SumParticipantPublicKey, model_mask: MaskObject, scalar_mask: MaskObject, ) -> Result<(), StateMachineError> { if self.inner.sum_dict.remove(pk).is_none() { return Err(StateMachineError::MessageRejected); } if let Some(count) = self.inner.model_mask_dict.get_mut(&model_mask) { *count += 1; } else { self.inner.model_mask_dict.insert(model_mask, 1); } if let Some(count) = self.inner.scalar_mask_dict.get_mut(&scalar_mask) { *count += 1; } else { self.inner.scalar_mask_dict.insert(scalar_mask, 1); } Ok(()) } fn mask_count(&self) -> usize { let sum1 = self.inner.model_mask_dict.values().sum(); let sum2: usize = self.inner.scalar_mask_dict.values().sum(); if sum1 != sum2 { warn!( "unexpected difference in mask sum count: {} vs {}", sum1, sum2 ); } sum1 } fn has_enough_sum2s(&self) -> bool { self.mask_count() >= self.shared.state.min_sum_count } } #[cfg(test)] mod test { use super::*; use crate::state_machine::{ events::Event, tests::{builder::StateMachineBuilder, utils}, }; use xaynet_core::{ common::RoundSeed, crypto::{ByteObject, EncryptKeyPair}, mask::{FromPrimitives, Model}, SumDict, }; #[tokio::test] pub async fn sum2_to_unmask() { let n_updaters = 1; let n_summers = 1; let seed = RoundSeed::generate(); let sum_ratio = 0.5; let update_ratio = 1.0; let coord_keys = EncryptKeyPair::generate(); let model_size = 4; let mut summer = utils::generate_summer(&seed, sum_ratio, update_ratio); let ephm_pk = utils::ephm_pk(&summer.compose_sum_message(coord_keys.public)); let mut sum_dict = SumDict::new(); sum_dict.insert(summer.pk, ephm_pk); let updater = utils::generate_updater(&seed, sum_ratio, update_ratio); let scalar = 1.0 / (n_updaters as f64 * update_ratio); let model = Model::from_primitives(vec![0; model_size].into_iter()).unwrap(); let msg = updater.compose_update_message(coord_keys.public, &sum_dict, scalar, model.clone()); let masked_model = utils::masked_model(&msg); let masked_scalar = utils::masked_scalar(&msg); let local_seed_dict = utils::local_seed_dict(&msg); let mut aggregation = Aggregation::new(utils::mask_settings().into(), model_size); aggregation.aggregate(masked_model.clone()); let mut scalar_agg = Aggregation::new(utils::mask_settings().into(), 1); scalar_agg.aggregate(masked_scalar.clone()); let sum2 = Sum2 { sum_dict, model_agg: aggregation, scalar_agg, model_mask_dict: MaskDict::new(), scalar_mask_dict: MaskDict::new(), }; let (state_machine, request_tx, events) = StateMachineBuilder::new() .with_seed(seed.clone()) .with_phase(sum2) .with_sum_ratio(sum_ratio) .with_update_ratio(update_ratio) .with_min_sum(n_summers) .with_min_update(n_updaters) .with_mask_config(utils::mask_settings().into()) .build(); assert!(state_machine.is_sum2()); let msg = summer .compose_sum2_message(coord_keys.public, &local_seed_dict, masked_model.data.len()) .unwrap(); let req = async { request_tx.msg(&msg).await.unwrap() }; let transition = async { state_machine.next().await.unwrap() }; let ((), state_machine) = tokio::join!(req, transition); 
assert!(state_machine.is_unmask()); let PhaseState { inner: unmask_state, .. } = state_machine.into_unmask_phase_state(); assert_eq!(unmask_state.mask_dict().len(), 1); let (mask, count) = unmask_state.mask_dict().iter().next().unwrap().clone(); assert_eq!(*count, 1); let unmasked_model = unmask_state .aggregation() .unwrap() .clone() .unmask(mask.clone()); assert_eq!(unmasked_model, model); assert_eq!( events.phase_listener().get_latest(), Event { round_id: 0, event: PhaseName::Sum2, } ); } }
match req { StateMachineRequest::Sum2(sum2_req) => { metrics!( self.shared.io.metrics_tx, metrics::message::sum2::increment(self.shared.state.round_id, Self::NAME) ); self.handle_sum2(sum2_req) } _ => Err(StateMachineError::MessageRejected), }
if_condition
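The `middle` snippet and the `if_condition` strategy label above revolve around one decision: during sum2, `handle_request` accepts only `StateMachineRequest::Sum2` and rejects every other message with `StateMachineError::MessageRejected`. A stripped-down, self-contained version of that dispatch is sketched below; the enum variants and the error type are simplified stand-ins, not the xaynet types.

```rust
// Minimal stand-in for the phase-specific request dispatch shown in `middle`.
#[derive(Debug)]
enum Request {
    Sum,
    Update,
    Sum2 { participant: String },
}

#[derive(Debug, PartialEq)]
enum Error {
    MessageRejected,
}

fn handle_request(req: Request) -> Result<(), Error> {
    match req {
        // Only the variant that belongs to the current phase is handled...
        Request::Sum2 { participant } => {
            println!("accepting sum2 message from {}", participant);
            Ok(())
        }
        // ...everything else is rejected, mirroring the `_ =>` arm above.
        _ => Err(Error::MessageRejected),
    }
}

fn main() {
    assert!(handle_request(Request::Sum2 { participant: "pk-1".into() }).is_ok());
    assert_eq!(handle_request(Request::Sum), Err(Error::MessageRejected));
    assert_eq!(handle_request(Request::Update), Err(Error::MessageRejected));
}
```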
[ { "content": "/// A trait that must be implemented by a state to handle a request.\n\npub trait Handler {\n\n /// Handles a request.\n\n fn handle_request(&mut self, req: StateMachineRequest) -> Result<(), StateMachineError>;\n\n}\n\n\n\n/// I/O interfaces.\n\n#[derive(Debug)]\n\npub struct IO {\n\n /// The request receiver half.\n\n pub(in crate::state_machine) request_rx: RequestReceiver,\n\n /// The event publisher.\n\n pub(in crate::state_machine) events: EventPublisher,\n\n #[cfg(feature = \"metrics\")]\n\n /// The metrics sender half.\n\n pub(in crate::state_machine) metrics_tx: MetricsSender,\n\n}\n\n\n\n/// A struct that contains the coordinator state and the I/O interfaces that is shared and\n\n/// accessible by all `PhaseState`s.\n\n#[derive(Debug)]\n", "file_path": "rust/xaynet-server/src/state_machine/phases/mod.rs", "rank": 0, "score": 155457.945200643 }, { "content": "/// Sign and encrypt the given message using the given round\n\n/// parameters and particpant keys.\n\npub fn encrypt_message(\n\n message: &Message,\n\n round_params: &RoundParameters,\n\n participant_signing_keys: &SigningKeyPair,\n\n) -> Vec<u8> {\n\n let serialized = serialize_message(message, participant_signing_keys);\n\n round_params.pk.encrypt(&serialized[..])\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/services/tests/utils.rs", "rank": 1, "score": 153116.789527887 }, { "content": "fn ready_ok<T, E>(t: T) -> Ready<Result<T, E>> {\n\n future::ready(Ok(t))\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/services/messages/multipart/service.rs", "rank": 2, "score": 150606.40512834582 }, { "content": "struct PhaseFilterLayer {\n\n phase: EventListener<PhaseName>,\n\n}\n\n\n\nimpl<S> Layer<S> for PhaseFilterLayer {\n\n type Service = PhaseFilter<S>;\n\n\n\n fn layer(&self, service: S) -> PhaseFilter<S> {\n\n PhaseFilter {\n\n phase: self.phase.clone(),\n\n next_svc: service,\n\n }\n\n }\n\n}\n\n\n\n/// A service for verifying the signature of PET messages\n\n///\n\n/// Since this is a CPU-intensive task for large messages, this\n\n/// service offloads the processing to a `rayon` thread-pool to avoid\n\n/// overloading the tokio thread-pool with blocking tasks.\n", "file_path": "rust/xaynet-server/src/services/messages/message_parser.rs", "rank": 3, "score": 144949.2402333724 }, { "content": "/// Converts a PET message handler into a `warp` filter.\n\nfn with_message_handler(\n\n handler: PetMessageHandler,\n\n) -> impl Filter<Extract = (PetMessageHandler,), Error = Infallible> + Clone {\n\n warp::any().map(move || handler.clone())\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/rest.rs", "rank": 4, "score": 144683.92454128535 }, { "content": "/// Extract the masked scalar from an update message\n\n///\n\n/// # Panic\n\n///\n\n/// Panic if this message is not an update message\n\npub fn masked_scalar(msg: &Message) -> MaskObject {\n\n if let Payload::Update(Update { masked_scalar, .. }) = &msg.payload {\n\n masked_scalar.clone()\n\n } else {\n\n panic!(\"not an update message\");\n\n }\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/state_machine/tests/utils.rs", "rank": 5, "score": 143853.9365477831 }, { "content": "/// Extract the masked model from an update message\n\n///\n\n/// # Panic\n\n///\n\n/// Panic if this message is not an update message\n\npub fn masked_model(msg: &Message) -> MaskObject {\n\n if let Payload::Update(Update { masked_model, .. 
}) = &msg.payload {\n\n masked_model.clone()\n\n } else {\n\n panic!(\"not an update message\");\n\n }\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/state_machine/tests/utils.rs", "rank": 6, "score": 143853.9365477831 }, { "content": "#[derive(Debug, Clone)]\n\nstruct PhaseFilter<S> {\n\n /// A listener to retrieve the current phase\n\n phase: EventListener<PhaseName>,\n\n /// Next service to be called\n\n next_svc: S,\n\n}\n\n\n\nimpl<T, S> Service<RawMessage<T>> for PhaseFilter<S>\n\nwhere\n\n T: AsRef<[u8]> + Send + 'static,\n\n S: Service<RawMessage<T>, Response = Message, Error = ServiceError>,\n\n S::Future: Sync + Send + 'static,\n\n{\n\n type Response = Message;\n\n type Error = ServiceError;\n\n type Future = BoxedServiceFuture<Self::Response, Self::Error>;\n\n\n\n fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {\n\n self.next_svc.poll_ready(cx)\n\n }\n", "file_path": "rust/xaynet-server/src/services/messages/message_parser.rs", "rank": 7, "score": 141329.50250114527 }, { "content": "pub fn init_shared() -> (Shared, EventSubscriber, RequestSender) {\n\n let coordinator_state =\n\n CoordinatorState::new(pet_settings(), mask_settings(), model_settings());\n\n\n\n let (event_publisher, event_subscriber) = EventPublisher::init(\n\n coordinator_state.round_id,\n\n coordinator_state.keys.clone(),\n\n coordinator_state.round_params.clone(),\n\n PhaseName::Idle,\n\n );\n\n\n\n let (request_rx, request_tx) = RequestReceiver::new();\n\n (\n\n Shared::new(\n\n coordinator_state,\n\n event_publisher,\n\n request_rx,\n\n #[cfg(feature = \"metrics\")]\n\n MetricsSender(),\n\n ),\n\n event_subscriber,\n\n request_tx,\n\n )\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/state_machine/tests/utils.rs", "rank": 8, "score": 139892.51927730482 }, { "content": "#[proc_macro]\n\npub fn metrics(input: TokenStream) -> TokenStream {\n\n let Send { sender, metrics } = parse_macro_input!(input as Send);\n\n\n\n TokenStream::from(quote! {\n\n #[cfg(feature = \"metrics\")]\n\n {\n\n #(#sender.send(#metrics);)*\n\n }\n\n })\n\n}\n", "file_path": "rust/xaynet-macros/src/lib.rs", "rank": 9, "score": 136986.19659830406 }, { "content": "/// Checks validity of phase time ranges.\n\nfn validate_phase_times(s: &PetSettings) -> Result<(), ValidationError> {\n\n if s.min_sum_time <= s.max_sum_time && s.min_update_time <= s.max_update_time {\n\n Ok(())\n\n } else {\n\n Err(ValidationError::new(\"invalid phase time range(s)\"))\n\n }\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/settings.rs", "rank": 10, "score": 134902.0208589152 }, { "content": "pub fn serialize_message(message: &Message, participant_signing_keys: &SigningKeyPair) -> Vec<u8> {\n\n let mut buf = vec![0; message.buffer_length()];\n\n message.to_bytes(&mut buf, &participant_signing_keys.secret);\n\n buf\n\n}\n", "file_path": "rust/xaynet-server/src/services/tests/utils.rs", "rank": 11, "score": 130331.62279160626 }, { "content": "/// Simulate a participant generating keys and crafting a valid sum\n\n/// message for the given round parameters. 
The keys generated by the\n\n/// participants are returned along with the message.\n\npub fn new_sum_message(round_params: &RoundParameters) -> (Message, SigningKeyPair) {\n\n let signing_keys = SigningKeyPair::generate();\n\n let sum = Sum {\n\n sum_signature: signing_keys\n\n .secret\n\n .sign_detached(&[round_params.seed.as_slice(), b\"sum\"].concat()),\n\n ephm_pk: PublicEncryptKey::generate(),\n\n };\n\n let message = Message::new_sum(signing_keys.public.clone(), round_params.pk, sum);\n\n (message, signing_keys)\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/services/tests/utils.rs", "rank": 12, "score": 130156.98804852206 }, { "content": "#[async_trait]\n\npub trait Phase {\n\n /// Name of the current phase\n\n const NAME: PhaseName;\n\n\n\n /// Run this phase to completion\n\n async fn run(&mut self) -> Result<(), StateError>;\n\n\n\n /// Moves from this state to the next state.\n\n fn next(self) -> Option<StateMachine>;\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/state_machine/phases/mod.rs", "rank": 13, "score": 127826.23657973898 }, { "content": "/// Extract the local seed dictioanry from an update message\n\n///\n\n/// # Panic\n\n///\n\n/// Panic if this message is not an update message\n\npub fn local_seed_dict(msg: &Message) -> LocalSeedDict {\n\n if let Payload::Update(Update {\n\n local_seed_dict, ..\n\n }) = &msg.payload\n\n {\n\n local_seed_dict.clone()\n\n } else {\n\n panic!(\"not an update message\");\n\n }\n\n}\n", "file_path": "rust/xaynet-server/src/state_machine/tests/utils.rs", "rank": 14, "score": 125271.07511096215 }, { "content": "// // override the old state with the new one\n\n// db.save(\"client_state\", serialized_client);\n\n// }\n\nfn main() -> Result<(), ()> {\n\n let opt = Opt::from_args();\n\n\n\n let _fmt_subscriber = FmtSubscriber::builder()\n\n .with_env_filter(EnvFilter::from_default_env())\n\n .with_ansi(true)\n\n .init();\n\n\n\n // create a new client\n\n let client = MobileClient::init(&opt.url, get_participant_settings()).unwrap();\n\n // serialize the current client state (and save it on the phone)\n\n let mut bytes = client.serialize();\n\n\n\n // simulate the regular execution of perform_task on the phone\n\n loop {\n\n // load local model\n\n let model = Model::from_primitives(vec![1; opt.len as usize].into_iter()).unwrap();\n\n bytes = perform_task(&opt.url, &bytes, model);\n\n pause();\n\n }\n\n}\n\n\n", "file_path": "rust/examples/mobile-client.rs", "rank": 15, "score": 124678.2976125742 }, { "content": "/// Extract the ephemeral public key from a sum message.\n\n///\n\n/// # Panic\n\n///\n\n/// Panic if this message is not a sum message\n\npub fn ephm_pk(msg: &Message) -> SumParticipantEphemeralPublicKey {\n\n if let Payload::Sum(Sum { ephm_pk, .. 
}) = &msg.payload {\n\n *ephm_pk\n\n } else {\n\n panic!(\"not a sum message\");\n\n }\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/state_machine/tests/utils.rs", "rank": 16, "score": 123405.62489639211 }, { "content": "/// Construct a [`Fetcher`] service\n\npub fn fetcher(event_subscriber: &EventSubscriber) -> impl Fetcher + Sync + Send + Clone + 'static {\n\n let round_params = ServiceBuilder::new()\n\n .buffer(100)\n\n .concurrency_limit(100)\n\n .layer(FetcherLayer)\n\n .service(RoundParamsService::new(event_subscriber));\n\n\n\n let mask_length = ServiceBuilder::new()\n\n .buffer(100)\n\n .concurrency_limit(100)\n\n .layer(FetcherLayer)\n\n .service(MaskLengthService::new(event_subscriber));\n\n\n\n let model = ServiceBuilder::new()\n\n .buffer(100)\n\n .concurrency_limit(100)\n\n .layer(FetcherLayer)\n\n .service(ModelService::new(event_subscriber));\n\n\n\n let sum_dict = ServiceBuilder::new()\n", "file_path": "rust/xaynet-server/src/services/fetchers/mod.rs", "rank": 17, "score": 122906.9572437598 }, { "content": "fn deserialize_redis_url<'de, D>(deserializer: D) -> Result<ConnectionInfo, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n struct ConnectionInfoVisitor;\n\n\n\n impl<'de> Visitor<'de> for ConnectionInfoVisitor {\n\n type Value = ConnectionInfo;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n write!(\n\n formatter,\n\n \"redis://[<username>][:<passwd>@]<hostname>[:port][/<db>]\"\n\n )\n\n }\n\n\n\n fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>\n\n where\n\n E: de::Error,\n\n {\n", "file_path": "rust/xaynet-server/src/settings.rs", "rank": 18, "score": 118836.63523371481 }, { "content": "fn ready_err<T, E>(e: E) -> Ready<Result<T, E>> {\n\n future::ready(Err(e))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::iter;\n\n\n\n use tokio_test::assert_ready;\n\n use tower_test::mock::Spawn;\n\n use xaynet_core::crypto::{ByteObject, PublicEncryptKey, Signature};\n\n\n\n use super::*;\n\n\n\n fn spawn_svc() -> Spawn<MultipartHandler> {\n\n Spawn::new(MultipartHandler::new())\n\n }\n\n\n\n fn sum() -> (Vec<u8>, Sum) {\n\n let mut start_byte: u8 = 0xff;\n", "file_path": "rust/xaynet-server/src/services/messages/multipart/service.rs", "rank": 19, "score": 117589.7658757795 }, { "content": "pub fn enable_logging() {\n\n let _fmt_subscriber = FmtSubscriber::builder()\n\n .with_env_filter(EnvFilter::from_default_env())\n\n .with_ansi(true)\n\n .init();\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/state_machine/tests/utils.rs", "rank": 20, "score": 114629.89978491726 }, { "content": "#[derive(Debug, Clone)]\n\nstruct Parser;\n\n\n\nimpl<T> Service<RawMessage<T>> for Parser\n\nwhere\n\n T: AsRef<[u8]> + Send + 'static,\n\n{\n\n type Response = Message;\n\n type Error = ServiceError;\n\n type Future = future::Ready<Result<Self::Response, Self::Error>>;\n\n\n\n fn poll_ready(&mut self, _cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {\n\n Poll::Ready(Ok(()))\n\n }\n\n\n\n fn call(&mut self, req: RawMessage<T>) -> Self::Future {\n\n let bytes = req.buffer.inner();\n\n future::ready(Message::from_bytes(&bytes).map_err(ServiceError::Parsing))\n\n }\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/services/messages/message_parser.rs", "rank": 21, "score": 111225.35643297227 }, { "content": "/// A type that hold a un-parsed message\n\nstruct RawMessage<T> {\n\n /// The buffer that contains the message to parse\n\n buffer: Arc<MessageBuffer<T>>,\n\n}\n\n\n\nimpl<T> Clone for RawMessage<T> {\n\n fn clone(&self) -> Self 
{\n\n Self {\n\n buffer: self.buffer.clone(),\n\n }\n\n }\n\n}\n\n\n\nimpl<T> From<MessageBuffer<T>> for RawMessage<T> {\n\n fn from(buffer: MessageBuffer<T>) -> Self {\n\n RawMessage {\n\n buffer: Arc::new(buffer),\n\n }\n\n }\n\n}\n\n\n\n/// A service that wraps a buffer `T` representing a message into a\n\n/// [`RawMessage<T>`]\n", "file_path": "rust/xaynet-server/src/services/messages/message_parser.rs", "rank": 22, "score": 109517.84129882575 }, { "content": "struct BufferWrapperLayer;\n\n\n\nimpl<S> Layer<S> for BufferWrapperLayer {\n\n type Service = BufferWrapper<S>;\n\n\n\n fn layer(&self, service: S) -> BufferWrapper<S> {\n\n BufferWrapper(service)\n\n }\n\n}\n\n\n\n/// A service that discards messages that are not expected in the current phase\n", "file_path": "rust/xaynet-server/src/services/messages/message_parser.rs", "rank": 23, "score": 108128.86872254437 }, { "content": "struct SignatureVerifierLayer {\n\n thread_pool: Arc<ThreadPool>,\n\n}\n\n\n\nimpl<S> Layer<S> for SignatureVerifierLayer {\n\n type Service = ConcurrencyLimit<SignatureVerifier<S>>;\n\n\n\n fn layer(&self, service: S) -> Self::Service {\n\n let limit = self.thread_pool.current_num_threads();\n\n // FIXME: we actually want to limit the concurrency of just\n\n // the SignatureVerifier middleware. Right now we're limiting\n\n // the whole stack of services.\n\n ConcurrencyLimit::new(\n\n SignatureVerifier {\n\n thread_pool: self.thread_pool.clone(),\n\n next_svc: service,\n\n },\n\n limit,\n\n )\n\n }\n\n}\n\n\n\n/// A service that verifies the coordinator public key embedded in PET\n\n/// messsages\n", "file_path": "rust/xaynet-server/src/services/messages/message_parser.rs", "rank": 24, "score": 108128.86872254437 }, { "content": "pub fn pet_settings() -> PetSettings {\n\n PetSettings {\n\n sum: 0.4,\n\n update: 0.5,\n\n min_sum_count: 1,\n\n min_update_count: 3,\n\n ..Default::default()\n\n }\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/state_machine/tests/utils.rs", "rank": 25, "score": 106265.91434996424 }, { "content": "pub fn mask_settings() -> MaskSettings {\n\n MaskSettings {\n\n group_type: GroupType::Prime,\n\n data_type: DataType::F32,\n\n bound_type: BoundType::B0,\n\n model_type: ModelType::M3,\n\n }\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/state_machine/tests/utils.rs", "rank": 26, "score": 106265.91434996424 }, { "content": "pub fn model_settings() -> ModelSettings {\n\n ModelSettings { size: 1 }\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/state_machine/tests/utils.rs", "rank": 27, "score": 106265.91434996424 }, { "content": "struct CoordinatorPublicKeyValidatorLayer {\n\n keys: EventListener<EncryptKeyPair>,\n\n}\n\n\n\nimpl<S> Layer<S> for CoordinatorPublicKeyValidatorLayer {\n\n type Service = CoordinatorPublicKeyValidator<S>;\n\n\n\n fn layer(&self, service: S) -> CoordinatorPublicKeyValidator<S> {\n\n CoordinatorPublicKeyValidator {\n\n keys: self.keys.clone(),\n\n next_svc: service,\n\n }\n\n }\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/services/messages/message_parser.rs", "rank": 28, "score": 105259.2320665955 }, { "content": "#[derive(Debug, Clone)]\n\nstruct SignatureVerifier<S> {\n\n /// Thread-pool the CPU-intensive tasks are offloaded to.\n\n thread_pool: Arc<ThreadPool>,\n\n /// The service to be called after the [`SignatureVerifier`]\n\n next_svc: S,\n\n}\n\n\n\nimpl<T, S> Service<RawMessage<T>> for SignatureVerifier<S>\n\nwhere\n\n T: AsRef<[u8]> + Sync + Send + 'static,\n\n S: Service<RawMessage<T>, Response = Message, Error = ServiceError>\n\n + 
Clone\n\n + Sync\n\n + Send\n\n + 'static,\n\n S::Future: Sync + Send + 'static,\n\n{\n\n type Response = Message;\n\n type Error = ServiceError;\n\n type Future = BoxedServiceFuture<Self::Response, Self::Error>;\n", "file_path": "rust/xaynet-server/src/services/messages/message_parser.rs", "rank": 29, "score": 104509.13099031724 }, { "content": "/// An interface to convert a collection of primitive values into an iterator of numerical values.\n\n///\n\n/// This trait is used to convert primitive types ([`f32`], [`f64`], [`i32`], [`i64`]) into a\n\n/// [`Model`], which has its own internal representation of the weights. The opposite trait is\n\n/// [`IntoPrimitives`].\n\npub trait FromPrimitives<P: Debug>: Sized {\n\n /// Creates an iterator from primitive values that yields converted numerical values.\n\n ///\n\n /// # Errors\n\n /// Yields an error for the first encountered primitive value that can't be converted into a\n\n /// numerical value due to not being finite.\n\n fn from_primitives<I: Iterator<Item = P>>(iter: I) -> Result<Self, PrimitiveCastError<P>>;\n\n\n\n /// Creates an iterator from primitive values that yields converted numerical values.\n\n ///\n\n /// If a primitive value cannot be directly converted into a numerical value due to not being\n\n /// finite, it is clamped.\n\n fn from_primitives_bounded<I: Iterator<Item = P>>(iter: I) -> Self;\n\n}\n\n\n\nimpl IntoPrimitives<i32> for Model {\n\n fn into_primitives(self) -> Box<dyn Iterator<Item = Result<i32, ModelCastError>>> {\n\n Box::new(self.0.into_iter().map(|i| {\n\n i.to_integer().to_i32().ok_or_else(|| ModelCastError {\n\n weight: i,\n", "file_path": "rust/xaynet-core/src/mask/model.rs", "rank": 30, "score": 103311.60104905226 }, { "content": "#[derive(Debug, Clone)]\n\nstruct CoordinatorPublicKeyValidator<S> {\n\n /// A listener to retrieve the latest coordinator keys\n\n keys: EventListener<EncryptKeyPair>,\n\n /// Next service to be called\n\n next_svc: S,\n\n}\n\n\n\nimpl<T, S> Service<RawMessage<T>> for CoordinatorPublicKeyValidator<S>\n\nwhere\n\n T: AsRef<[u8]> + Send + 'static,\n\n S: Service<RawMessage<T>, Response = Message, Error = ServiceError>,\n\n S::Future: Sync + Send + 'static,\n\n{\n\n type Response = Message;\n\n type Error = ServiceError;\n\n type Future = BoxedServiceFuture<Self::Response, Self::Error>;\n\n\n\n fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {\n\n self.next_svc.poll_ready(cx)\n\n }\n", "file_path": "rust/xaynet-server/src/services/messages/message_parser.rs", "rank": 31, "score": 101639.49433436838 }, { "content": "/// Checks PET settings.\n\nfn validate_pet(s: &PetSettings) -> Result<(), ValidationError> {\n\n validate_phase_times(s)?;\n\n validate_fractions(s)\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/settings.rs", "rank": 32, "score": 101178.89867685089 }, { "content": "/// Checks pathological cases of deadlocks.\n\nfn validate_fractions(s: &PetSettings) -> Result<(), ValidationError> {\n\n if 0. < s.sum\n\n && s.sum < 1.\n\n && 0. < s.update\n\n && s.update < 1.\n\n && 0. 
< s.sum + s.update - s.sum * s.update\n\n && s.sum + s.update - s.sum * s.update < 1.\n\n {\n\n Ok(())\n\n } else {\n\n Err(ValidationError::new(\"starvation\"))\n\n }\n\n}\n\n\n\n#[derive(Debug, Validate, Deserialize, Clone, Copy)]\n\n/// REST API settings.\n\npub struct ApiSettings {\n\n /// The address to which the REST API should be bound.\n\n ///\n\n /// # Examples\n", "file_path": "rust/xaynet-server/src/settings.rs", "rank": 33, "score": 101178.89867685089 }, { "content": "#[derive(Clone)]\n\nstruct RawDecryptor {\n\n /// A listener to retrieve the latest coordinator keys. These are\n\n /// necessary for decrypting messages and verifying their\n\n /// signature.\n\n keys_events: EventListener<EncryptKeyPair>,\n\n\n\n /// Thread-pool the CPU-intensive tasks are offloaded to.\n\n thread_pool: Arc<ThreadPool>,\n\n}\n\n\n\nimpl<T> Service<T> for RawDecryptor\n\nwhere\n\n T: AsRef<[u8]> + Sync + Send + 'static,\n\n{\n\n type Response = Vec<u8>;\n\n type Error = ServiceError;\n\n #[allow(clippy::type_complexity)]\n\n type Future =\n\n Pin<Box<dyn Future<Output = Result<Self::Response, Self::Error>> + 'static + Send + Sync>>;\n\n\n", "file_path": "rust/xaynet-server/src/services/messages/decryptor.rs", "rank": 34, "score": 101017.72547648224 }, { "content": "#[derive(Debug, Clone)]\n\nstruct BufferWrapper<S>(S);\n\n\n\nimpl<S, T> Service<T> for BufferWrapper<S>\n\nwhere\n\n T: AsRef<[u8]> + Send + 'static,\n\n S: Service<RawMessage<T>, Response = Message, Error = ServiceError>,\n\n S::Future: Sync + Send + 'static,\n\n{\n\n type Response = Message;\n\n type Error = ServiceError;\n\n type Future = BoxedServiceFuture<Self::Response, Self::Error>;\n\n\n\n fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {\n\n self.0.poll_ready(cx)\n\n }\n\n\n\n fn call(&mut self, req: T) -> Self::Future {\n\n debug!(\"creating a RawMessage request\");\n\n match MessageBuffer::new(req) {\n\n Ok(buffer) => {\n", "file_path": "rust/xaynet-server/src/services/messages/message_parser.rs", "rank": 35, "score": 100363.05548412848 }, { "content": "/// Create an [`EventPublisher`]/[`EventSubscriber`] pair with default\n\n/// values similar to those produced in practice when instantiating a\n\n/// new coordinator.\n\npub fn new_event_channels() -> (EventPublisher, EventSubscriber) {\n\n let keys = EncryptKeyPair::generate();\n\n let params = RoundParameters {\n\n pk: keys.public.clone(),\n\n sum: 0.0,\n\n update: 0.0,\n\n seed: RoundSeed::generate(),\n\n };\n\n let phase = PhaseName::Idle;\n\n let round_id = 0;\n\n EventPublisher::init(round_id, keys, params, phase)\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/services/tests/utils.rs", "rank": 36, "score": 99354.32384723991 }, { "content": "/// An interface for serializable message types.\n\n///\n\n/// See also [`FromBytes`] for deserialization.\n\npub trait ToBytes {\n\n /// The length of the buffer for encoding the type.\n\n fn buffer_length(&self) -> usize;\n\n\n\n /// Serialize the type in the given buffer.\n\n ///\n\n /// # Panics\n\n /// This method may panic if the given buffer is too small. 
Thus, [`buffer_length()`] must be\n\n /// called prior to calling this, and a large enough buffer must be provided.\n\n ///\n\n /// [`buffer_length()`]: #method.buffer_length\n\n fn to_bytes<T: AsMut<[u8]> + AsRef<[u8]>>(&self, buffer: &mut T);\n\n}\n\n\n", "file_path": "rust/xaynet-core/src/message/traits.rs", "rank": 37, "score": 98721.49583115129 }, { "content": "/// An interface for deserializable message types.\n\n///\n\n/// See also [`ToBytes`] for serialization.\n\npub trait FromBytes: Sized {\n\n /// Deserialize the type from the given buffer.\n\n ///\n\n /// # Errors\n\n /// May fail if certain parts of the deserialized buffer don't pass message validity checks.\n\n fn from_bytes<T: AsRef<[u8]>>(buffer: &T) -> Result<Self, DecodeError>;\n\n}\n\n\n\nimpl<T> FromBytes for T\n\nwhere\n\n T: ByteObject,\n\n{\n\n fn from_bytes<U: AsRef<[u8]>>(buffer: &U) -> Result<Self, DecodeError> {\n\n Self::from_slice(buffer.as_ref())\n\n .ok_or_else(|| anyhow!(\"failed to deserialize byte object\"))\n\n }\n\n}\n\n\n\nimpl<T> ToBytes for T\n\nwhere\n", "file_path": "rust/xaynet-core/src/message/traits.rs", "rank": 38, "score": 94276.55416521442 }, { "content": "/// Extracts a participant public key from a request body\n\nfn part_pk() -> impl Filter<Extract = (ParticipantPublicKey,), Error = warp::Rejection> + Clone {\n\n warp::body::bytes().and_then(|body: Bytes| async move {\n\n if let Some(pk) = ParticipantPublicKey::from_slice(body.bytes()) {\n\n Ok(pk)\n\n } else {\n\n Err(warp::reject::custom(InvalidPublicKey))\n\n }\n\n })\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/rest.rs", "rank": 39, "score": 85840.8604520305 }, { "content": "fn deserialize_env_filter<'de, D>(deserializer: D) -> Result<EnvFilter, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n struct EnvFilterVisitor;\n\n\n\n impl<'de> Visitor<'de> for EnvFilterVisitor {\n\n type Value = EnvFilter;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n write!(formatter, \"a valid tracing filter directive: https://docs.rs/tracing-subscriber/0.2.6/tracing_subscriber/filter/struct.EnvFilter.html#directives\")\n\n }\n\n\n\n fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>\n\n where\n\n E: de::Error,\n\n {\n\n EnvFilter::try_new(value)\n\n .map_err(|_| de::Error::invalid_value(serde::de::Unexpected::Str(value), &self))\n\n }\n\n }\n\n\n\n deserializer.deserialize_str(EnvFilterVisitor)\n\n}\n", "file_path": "rust/xaynet-server/src/settings.rs", "rank": 40, "score": 85819.99598114849 }, { "content": "/// Generates a secure pseudo-random integer.\n\n///\n\n/// Draws from a uniform distribution over the integers between zero (included) and\n\n/// `max_int` (excluded). 
Employs the `ChaCha20` stream cipher as a PRNG.\n\npub fn generate_integer(prng: &mut ChaCha20Rng, max_int: &BigUint) -> BigUint {\n\n if max_int.is_zero() {\n\n return BigUint::zero();\n\n }\n\n let mut bytes = max_int.to_bytes_le();\n\n let mut rand_int = max_int.clone();\n\n while &rand_int >= max_int {\n\n prng.fill_bytes(&mut bytes);\n\n rand_int = BigUint::from_bytes_le(&bytes);\n\n }\n\n rand_int\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use num::traits::{pow::Pow, Num};\n\n use rand::SeedableRng;\n\n\n\n use super::*;\n\n\n", "file_path": "rust/xaynet-core/src/crypto/prng.rs", "rank": 41, "score": 83301.67411626896 }, { "content": "pub fn generate_updater(seed: &RoundSeed, sum_ratio: f64, update_ratio: f64) -> Participant {\n\n loop {\n\n let mut participant = Participant::new().unwrap();\n\n participant.compute_signatures(seed.as_slice());\n\n match participant.check_task(sum_ratio, update_ratio) {\n\n Task::Update => return participant,\n\n _ => {}\n\n }\n\n }\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/state_machine/tests/utils.rs", "rank": 42, "score": 80629.16925786785 }, { "content": "pub fn generate_summer(seed: &RoundSeed, sum_ratio: f64, update_ratio: f64) -> Participant {\n\n loop {\n\n let mut participant = Participant::new().unwrap();\n\n participant.compute_signatures(seed.as_slice());\n\n match participant.check_task(sum_ratio, update_ratio) {\n\n Task::Sum => return participant,\n\n _ => {}\n\n }\n\n }\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/state_machine/tests/utils.rs", "rank": 43, "score": 80629.16925786785 }, { "content": "\n\nconst SUM_SIGNATURE_RANGE: Range<usize> = range(0, ParticipantTaskSignature::LENGTH);\n\n\n\n#[derive(Clone, Debug, Eq, PartialEq, Hash)]\n\n/// A wrapper around a buffer that contains a [`Sum2`] message.\n\n///\n\n/// It provides getters and setters to access the different fields of the message safely.\n\npub struct Sum2Buffer<T> {\n\n inner: T,\n\n}\n\n\n\nimpl<T: AsRef<[u8]>> Sum2Buffer<T> {\n\n /// Performs bound checks for the various message fields on `bytes` and returns a new\n\n /// [`Sum2Buffer`].\n\n ///\n\n /// # Errors\n\n /// Fails if the `bytes` are smaller than a minimal-sized sum2 message buffer.\n\n pub fn new(bytes: T) -> Result<Self, DecodeError> {\n\n let buffer = Self { inner: bytes };\n\n buffer\n", "file_path": "rust/xaynet-core/src/message/payload/sum2.rs", "rank": 44, "score": 80271.01947231237 }, { "content": " }\n\n}\n\n\n\n#[derive(Eq, PartialEq, Clone, Debug)]\n\n/// A high level representation of a sum2 message.\n\n///\n\n/// These messages are sent by sum participants during the sum2 phase.\n\npub struct Sum2 {\n\n /// The signature of the round seed and the word \"sum\".\n\n ///\n\n /// This is used to determine whether a participant is selected for the sum task.\n\n pub sum_signature: ParticipantTaskSignature,\n\n\n\n /// A model mask computed by the participant.\n\n pub model_mask: MaskObject,\n\n\n\n /// A scalar mask computed by the participant.\n\n pub scalar_mask: MaskObject,\n\n}\n\n\n", "file_path": "rust/xaynet-core/src/message/payload/sum2.rs", "rank": 45, "score": 80268.61283483794 }, { "content": " .check_buffer_length()\n\n .context(\"not a valid Sum2Buffer\")?;\n\n Ok(buffer)\n\n }\n\n\n\n /// Returns a `Sum2Buffer` with the given `bytes` without performing bound checks.\n\n ///\n\n /// This means that accessing the message fields may panic.\n\n pub fn new_unchecked(bytes: T) -> Self {\n\n Self { inner: bytes }\n\n }\n\n\n\n /// Performs bound checks for the various message 
fields on this buffer.\n\n pub fn check_buffer_length(&self) -> Result<(), DecodeError> {\n\n let len = self.inner.as_ref().len();\n\n if len < SUM_SIGNATURE_RANGE.end {\n\n return Err(anyhow!(\n\n \"invalid buffer length: {} < {}\",\n\n len,\n\n SUM_SIGNATURE_RANGE.end\n", "file_path": "rust/xaynet-core/src/message/payload/sum2.rs", "rank": 46, "score": 80267.26697453049 }, { "content": "impl ToBytes for Sum2 {\n\n fn buffer_length(&self) -> usize {\n\n SUM_SIGNATURE_RANGE.end + self.model_mask.buffer_length() + self.scalar_mask.buffer_length()\n\n }\n\n\n\n fn to_bytes<T: AsMut<[u8]> + AsRef<[u8]>>(&self, buffer: &mut T) {\n\n let mut writer = Sum2Buffer::new_unchecked(buffer.as_mut());\n\n self.sum_signature.to_bytes(&mut writer.sum_signature_mut());\n\n self.model_mask.to_bytes(&mut writer.model_mask_mut());\n\n self.scalar_mask.to_bytes(&mut writer.scalar_mask_mut());\n\n }\n\n}\n\n\n\nimpl FromBytes for Sum2 {\n\n fn from_bytes<T: AsRef<[u8]>>(buffer: &T) -> Result<Self, DecodeError> {\n\n let reader = Sum2Buffer::new(buffer.as_ref())?;\n\n Ok(Self {\n\n sum_signature: ParticipantTaskSignature::from_bytes(&reader.sum_signature())\n\n .context(\"invalid sum signature\")?,\n\n model_mask: MaskObject::from_bytes(&reader.model_mask())\n", "file_path": "rust/xaynet-core/src/message/payload/sum2.rs", "rank": 47, "score": 80261.70894539072 }, { "content": "\n\n#[cfg(test)]\n\npub(in crate::message) mod tests {\n\n pub(in crate::message) use super::tests_helpers as helpers;\n\n use super::*;\n\n\n\n #[test]\n\n fn buffer_read() {\n\n let bytes = helpers::sum2().1;\n\n let buffer = Sum2Buffer::new(&bytes).unwrap();\n\n assert_eq!(buffer.sum_signature(), &helpers::signature().1[..]);\n\n let expected = helpers::mask().1;\n\n assert_eq!(&buffer.model_mask()[..expected.len()], &expected[..]);\n\n assert_eq!(buffer.scalar_mask(), &helpers::mask_1().1[..]);\n\n }\n\n\n\n #[test]\n\n fn buffer_write() {\n\n let mut bytes = vec![0xff; 110];\n\n {\n", "file_path": "rust/xaynet-core/src/message/payload/sum2.rs", "rank": 48, "score": 80261.52466056468 }, { "content": "//! Sum2 message payloads.\n\n//!\n\n//! See the [message module] documentation since this is a private module anyways.\n\n//!\n\n//! 
[message module]: ../index.html\n\n\n\nuse std::ops::Range;\n\n\n\nuse anyhow::{anyhow, Context};\n\n\n\nuse crate::{\n\n crypto::ByteObject,\n\n mask::object::{serialization::MaskObjectBuffer, MaskObject},\n\n message::{\n\n traits::{FromBytes, ToBytes},\n\n utils::range,\n\n DecodeError,\n\n },\n\n ParticipantTaskSignature,\n\n};\n", "file_path": "rust/xaynet-core/src/message/payload/sum2.rs", "rank": 49, "score": 80259.03148647556 }, { "content": " (object(), bytes())\n\n }\n\n\n\n pub fn mask_1() -> (MaskObject, Vec<u8>) {\n\n use crate::mask::object::serialization::tests::{bytes_1, object_1};\n\n (object_1(), bytes_1())\n\n }\n\n\n\n pub fn sum2() -> (Sum2, Vec<u8>) {\n\n let mut bytes = signature().1;\n\n bytes.extend(mask().1);\n\n bytes.extend(mask_1().1);\n\n let sum2 = Sum2 {\n\n sum_signature: signature().0,\n\n model_mask: mask().0,\n\n scalar_mask: mask_1().0,\n\n };\n\n (sum2, bytes)\n\n }\n\n}\n", "file_path": "rust/xaynet-core/src/message/payload/sum2.rs", "rank": 50, "score": 80258.67731799056 }, { "content": " .context(\"invalid model mask\")?,\n\n scalar_mask: MaskObject::from_bytes(&reader.scalar_mask())\n\n .context(\"invalid scalar mask\")?,\n\n })\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\npub(in crate::message) mod tests_helpers {\n\n use super::*;\n\n use crate::{crypto::ByteObject, mask::object::MaskObject};\n\n\n\n pub fn signature() -> (ParticipantTaskSignature, Vec<u8>) {\n\n let bytes = vec![0x99; ParticipantTaskSignature::LENGTH];\n\n let signature = ParticipantTaskSignature::from_slice(&bytes[..]).unwrap();\n\n (signature, bytes)\n\n }\n\n\n\n pub fn mask() -> (MaskObject, Vec<u8>) {\n\n use crate::mask::object::serialization::tests::{bytes, object};\n", "file_path": "rust/xaynet-core/src/message/payload/sum2.rs", "rank": 51, "score": 80256.97976839583 }, { "content": " pub fn model_mask_mut(&mut self) -> &mut [u8] {\n\n let offset = self.model_mask_offset();\n\n &mut self.inner.as_mut()[offset..]\n\n }\n\n\n\n /// Gets a mutable reference to the scalar mask field.\n\n ///\n\n /// # Panics\n\n /// Accessing the field may panic if the buffer has not been checked before.\n\n pub fn scalar_mask_mut(&mut self) -> &mut [u8] {\n\n let offset = self.scalar_mask_offset();\n\n &mut self.inner.as_mut()[offset..]\n\n }\n\n}\n\n\n\nimpl<'a, T: AsRef<[u8]> + ?Sized> Sum2Buffer<&'a T> {\n\n /// Gets a reference to the sum signature field.\n\n ///\n\n /// # Panics\n\n /// Accessing the field may panic if the buffer has not been checked before.\n", "file_path": "rust/xaynet-core/src/message/payload/sum2.rs", "rank": 52, "score": 80255.13901681274 }, { "content": " fn scalar_mask_offset(&self) -> usize {\n\n let model_mask =\n\n MaskObjectBuffer::new_unchecked(&self.inner.as_ref()[self.model_mask_offset()..]);\n\n self.model_mask_offset() + model_mask.len()\n\n }\n\n}\n\n\n\nimpl<T: AsRef<[u8]> + AsMut<[u8]>> Sum2Buffer<T> {\n\n /// Gets a mutable reference to the sum signature field.\n\n ///\n\n /// # Panics\n\n /// Accessing the field may panic if the buffer has not been checked before.\n\n pub fn sum_signature_mut(&mut self) -> &mut [u8] {\n\n &mut self.inner.as_mut()[SUM_SIGNATURE_RANGE]\n\n }\n\n\n\n /// Gets a mutable reference to the model mask field.\n\n ///\n\n /// # Panics\n\n /// Accessing the field may panic if the buffer has not been checked before.\n", "file_path": "rust/xaynet-core/src/message/payload/sum2.rs", "rank": 53, "score": 80253.69114632062 }, { "content": " assert_eq!(buf, bytes);\n\n }\n\n\n\n #[test]\n\n fn decode() {\n\n let (sum2, bytes) = 
helpers::sum2();\n\n let parsed = Sum2::from_bytes(&bytes).unwrap();\n\n assert_eq!(parsed, sum2);\n\n }\n\n}\n", "file_path": "rust/xaynet-core/src/message/payload/sum2.rs", "rank": 54, "score": 80251.12421714676 }, { "content": " let mut buffer = Sum2Buffer::new_unchecked(&mut bytes);\n\n buffer\n\n .sum_signature_mut()\n\n .copy_from_slice(&helpers::signature().1[..]);\n\n let expected = helpers::mask().1;\n\n buffer.model_mask_mut()[..expected.len()].copy_from_slice(&expected[..]);\n\n buffer\n\n .scalar_mask_mut()\n\n .copy_from_slice(&helpers::mask_1().1[..]);\n\n }\n\n assert_eq!(&bytes[..], &helpers::sum2().1[..]);\n\n }\n\n\n\n #[test]\n\n fn encode() {\n\n let (sum2, bytes) = helpers::sum2();\n\n assert_eq!(sum2.buffer_length(), bytes.len());\n\n\n\n let mut buf = vec![0xff; sum2.buffer_length()];\n\n sum2.to_bytes(&mut buf);\n", "file_path": "rust/xaynet-core/src/message/payload/sum2.rs", "rank": 55, "score": 80250.95763570249 }, { "content": " pub fn sum_signature(&self) -> &'a [u8] {\n\n &self.inner.as_ref()[SUM_SIGNATURE_RANGE]\n\n }\n\n\n\n /// Gets a reference to the model mask field.\n\n ///\n\n /// # Panics\n\n /// Accessing the field may panic if the buffer has not been checked before.\n\n pub fn model_mask(&self) -> &'a [u8] {\n\n let offset = self.model_mask_offset();\n\n &self.inner.as_ref()[offset..]\n\n }\n\n\n\n /// Gets a reference to the scalar mask field.\n\n ///\n\n /// # Panics\n\n /// Accessing the field may panic if the buffer has not been checked before.\n\n pub fn scalar_mask(&self) -> &'a [u8] {\n\n let offset = self.scalar_mask_offset();\n\n &self.inner.as_ref()[offset..]\n", "file_path": "rust/xaynet-core/src/message/payload/sum2.rs", "rank": 56, "score": 80249.8646414593 }, { "content": " ));\n\n }\n\n\n\n // Check the length of the model mask field\n\n let _ = MaskObjectBuffer::new(&self.inner.as_ref()[self.model_mask_offset()..])\n\n .context(\"invalid model mask field\")?;\n\n\n\n // Check the length of the scalar mask field\n\n let _ = MaskObjectBuffer::new(&self.inner.as_ref()[self.scalar_mask_offset()..])\n\n .context(\"invalid scalar mask field\")?;\n\n\n\n Ok(())\n\n }\n\n\n\n /// Gets the offset of the model mask field.\n\n fn model_mask_offset(&self) -> usize {\n\n SUM_SIGNATURE_RANGE.end\n\n }\n\n\n\n /// Gets the offset of the scalar mask field.\n", "file_path": "rust/xaynet-core/src/message/payload/sum2.rs", "rank": 57, "score": 80248.4171050924 }, { "content": "\n\n#[async_trait]\n\nimpl Phase for PhaseState<Unmask> {\n\n const NAME: PhaseName = PhaseName::Unmask;\n\n\n\n /// Run the unmasking phase\n\n async fn run(&mut self) -> Result<(), StateError> {\n\n metrics!(\n\n self.shared.io.metrics_tx,\n\n metrics::masks::total_number::update(\n\n self.inner.model_mask_dict.len(),\n\n self.shared.state.round_id,\n\n Self::NAME\n\n )\n\n );\n\n\n\n let global_model = self.end_round()?;\n\n\n\n info!(\"broadcasting the new global model\");\n\n self.shared\n", "file_path": "rust/xaynet-server/src/state_machine/phases/unmask.rs", "rank": 58, "score": 79482.40961948875 }, { "content": "use std::{cmp::Ordering, sync::Arc};\n\n\n\nuse xaynet_core::mask::{Aggregation, MaskObject, Model};\n\n\n\nuse crate::state_machine::{\n\n coordinator::MaskDict,\n\n events::ModelUpdate,\n\n phases::{Idle, Phase, PhaseName, PhaseState, Shared, StateError},\n\n RoundFailed,\n\n StateMachine,\n\n};\n\n\n\n#[cfg(feature = \"metrics\")]\n\nuse crate::metrics;\n\n\n\n/// Unmask state\n\n#[derive(Debug)]\n\npub struct Unmask {\n\n /// The aggregator for masked 
models.\n\n model_agg: Option<Aggregation>,\n", "file_path": "rust/xaynet-server/src/state_machine/phases/unmask.rs", "rank": 59, "score": 79475.53451942855 }, { "content": " .io\n\n .events\n\n .broadcast_model(ModelUpdate::New(Arc::new(global_model)));\n\n\n\n Ok(())\n\n }\n\n\n\n /// Moves from the unmask state to the next state.\n\n ///\n\n /// See the [module level documentation](../index.html) for more details.\n\n fn next(self) -> Option<StateMachine> {\n\n info!(\"going back to idle phase\");\n\n Some(PhaseState::<Idle>::new(self.shared).into())\n\n }\n\n}\n\n\n\nimpl PhaseState<Unmask> {\n\n /// Creates a new unmask state.\n\n pub fn new(\n\n shared: Shared,\n", "file_path": "rust/xaynet-server/src/state_machine/phases/unmask.rs", "rank": 60, "score": 79467.50163752107 }, { "content": "\n\n /// The aggregator for masked scalars.\n\n scalar_agg: Option<Aggregation>,\n\n\n\n /// The model mask dictionary built during the sum2 phase.\n\n model_mask_dict: MaskDict,\n\n\n\n /// The scalar mask dictionary built during the sum2 phase.\n\n scalar_mask_dict: MaskDict,\n\n}\n\n\n\n#[cfg(test)]\n\nimpl Unmask {\n\n pub fn aggregation(&self) -> Option<&Aggregation> {\n\n self.model_agg.as_ref()\n\n }\n\n pub fn mask_dict(&self) -> &MaskDict {\n\n &self.model_mask_dict\n\n }\n\n}\n", "file_path": "rust/xaynet-server/src/state_machine/phases/unmask.rs", "rank": 61, "score": 79464.55098705199 }, { "content": " model_agg: Aggregation,\n\n scalar_agg: Aggregation,\n\n model_mask_dict: MaskDict,\n\n scalar_mask_dict: MaskDict,\n\n ) -> Self {\n\n info!(\"state transition\");\n\n Self {\n\n inner: Unmask {\n\n model_agg: Some(model_agg),\n\n scalar_agg: Some(scalar_agg),\n\n model_mask_dict,\n\n scalar_mask_dict,\n\n },\n\n shared,\n\n }\n\n }\n\n\n\n /// Freezes the mask dictionary.\n\n fn freeze_mask_dict(&mut self) -> Result<(MaskObject, MaskObject), RoundFailed> {\n\n if self.inner.model_mask_dict.is_empty() {\n", "file_path": "rust/xaynet-server/src/state_machine/phases/unmask.rs", "rank": 62, "score": 79460.61119418935 }, { "content": " // Safe unwrap: State::<Unmask>::new always creates Some(aggregation)\n\n let model_agg = self.inner.model_agg.take().unwrap();\n\n let scalar_agg = self.inner.scalar_agg.take().unwrap();\n\n\n\n model_agg\n\n .validate_unmasking(&model_mask)\n\n .map_err(RoundFailed::from)?;\n\n scalar_agg\n\n .validate_unmasking(&scalar_mask)\n\n .map_err(RoundFailed::from)?;\n\n\n\n let model = model_agg.unmask(model_mask);\n\n let scalar = scalar_agg.unmask(scalar_mask);\n\n\n\n Ok(Aggregation::correct(model, scalar))\n\n }\n\n}\n", "file_path": "rust/xaynet-server/src/state_machine/phases/unmask.rs", "rank": 63, "score": 79452.30487567233 }, { "content": " .inner\n\n .scalar_mask_dict\n\n .drain()\n\n .fold(\n\n (None, 0_usize),\n\n |(unique_mask, unique_count), (mask, count)| match unique_count.cmp(&count) {\n\n Ordering::Less => (Some(mask), count),\n\n Ordering::Greater => (unique_mask, unique_count),\n\n Ordering::Equal => (None, unique_count),\n\n },\n\n )\n\n .0\n\n .ok_or(RoundFailed::AmbiguousMasks)?;\n\n\n\n Ok((model_mask, scalar_mask))\n\n }\n\n\n\n fn end_round(&mut self) -> Result<Model, RoundFailed> {\n\n let (model_mask, scalar_mask) = self.freeze_mask_dict()?;\n\n\n", "file_path": "rust/xaynet-server/src/state_machine/phases/unmask.rs", "rank": 64, "score": 79446.88519158897 }, { "content": " return Err(RoundFailed::NoMask);\n\n }\n\n\n\n let model_mask = self\n\n .inner\n\n .model_mask_dict\n\n .drain()\n\n .fold(\n\n (None, 0_usize),\n\n |(unique_mask, 
unique_count), (mask, count)| match unique_count.cmp(&count) {\n\n Ordering::Less => (Some(mask), count),\n\n Ordering::Greater => (unique_mask, unique_count),\n\n Ordering::Equal => (None, unique_count),\n\n },\n\n )\n\n .0\n\n .ok_or(RoundFailed::AmbiguousMasks)?;\n\n\n\n // TODO remove duplication\n\n let scalar_mask = self\n", "file_path": "rust/xaynet-server/src/state_machine/phases/unmask.rs", "rank": 65, "score": 79442.82929803901 }, { "content": "fn dummy_sum_dict() -> SumDict {\n\n let mut dict = HashMap::new();\n\n dict.insert(\n\n PublicSigningKey::fill_with(0xaa),\n\n PublicEncryptKey::fill_with(0xcc),\n\n );\n\n dict.insert(\n\n PublicSigningKey::fill_with(0xbb),\n\n PublicEncryptKey::fill_with(0xdd),\n\n );\n\n dict\n\n}\n\n\n\n#[tokio::test]\n\nasync fn test_sum_dict_svc() {\n\n let (mut publisher, subscriber) = new_event_channels();\n\n\n\n let mut task = Spawn::new(SumDictService::new(&subscriber));\n\n assert_ready!(task.poll_ready()).unwrap();\n\n\n", "file_path": "rust/xaynet-server/src/services/tests/fetchers.rs", "rank": 82, "score": 77482.69353814822 }, { "content": "fn redis_type_error(desc: &'static str, details: Option<String>) -> RedisError {\n\n if let Some(details) = details {\n\n RedisError::from((ErrorKind::TypeError, desc, details))\n\n } else {\n\n RedisError::from((ErrorKind::TypeError, desc))\n\n }\n\n}\n\n\n\n/// Implements ['FromRedisValue'] and ['ToRedisArgs'] for types that implement ['ByteObject'].\n\n/// The Redis traits as well as the crypto types are both defined in foreign crates.\n\n/// To bypass the restrictions of orphan rule, we use `Newtypes` for the crypto types.\n\n///\n\n/// Each crypto type has two `Newtypes`, one for reading and one for writing.\n\n/// The difference between `Read` and `Write` is that the write `Newtype` does not take the\n\n/// ownership of the value but only a reference. This allows us to use references in the\n\n/// [`Client`] methods. 
The `Read` Newtype also implements [`ToRedisArgs`] to reduce the\n\n/// conversion overhead that you would get if you wanted to reuse a `Read` value for another\n\n/// Redis query.\n\n///\n\n/// Example:\n", "file_path": "rust/xaynet-server/src/storage/impls.rs", "rank": 83, "score": 76248.64943196566 }, { "content": "#[derive(Debug, StructOpt)]\n\n#[structopt(name = \"Test Drive\")]\n\nstruct Opt {\n\n #[structopt(\n\n default_value = \"http://127.0.0.1:8081\",\n\n short,\n\n help = \"The URL of the coordinator\"\n\n )]\n\n url: String,\n\n #[structopt(default_value = \"4\", short, help = \"The length of the model\")]\n\n len: u32,\n\n}\n\n\n", "file_path": "rust/examples/mobile-client.rs", "rank": 84, "score": 67362.77289069298 }, { "content": "#[derive(Debug, StructOpt)]\n\n#[structopt(name = \"Test Drive\")]\n\nstruct Opt {\n\n #[structopt(\n\n default_value = \"http://127.0.0.1:8081\",\n\n short,\n\n help = \"The URL of the coordinator\"\n\n )]\n\n url: String,\n\n #[structopt(default_value = \"4\", short, help = \"The length of the model\")]\n\n len: u32,\n\n #[structopt(\n\n default_value = \"1\",\n\n short,\n\n help = \"The time period at which to poll for service data, in seconds\"\n\n )]\n\n period: u64,\n\n #[structopt(default_value = \"10\", short, help = \"The number of clients\")]\n\n nb_client: u32,\n\n}\n\n\n\n/// Test-drive script of a (local, but networked) federated\n", "file_path": "rust/examples/test-drive-net.rs", "rank": 85, "score": 66294.08776068858 }, { "content": "struct Send {\n\n sender: Expr,\n\n metrics: Vec<Expr>,\n\n}\n\n\n\nimpl Parse for Send {\n\n fn parse(input: ParseStream) -> Result<Self> {\n\n // metrics!(sender, metric_1);\n\n let sender = input.parse()?; // sender\n\n let mut metrics = Vec::new();\n\n\n\n // at least one metric is required, otherwise parse will fail.\n\n input.parse::<Token![,]>()?; // ,\n\n let metric = input.parse()?; // metric_1\n\n metrics.push(metric);\n\n\n\n // metrics!(sender, metric_1, metric_N);\n\n loop {\n\n if input.is_empty() {\n\n break;\n", "file_path": "rust/xaynet-macros/src/lib.rs", "rank": 86, "score": 66279.93120259121 }, { "content": "fn pause() {\n\n let mut stdout = stdout();\n\n stdout.write_all(b\"Press Enter to continue...\").unwrap();\n\n stdout.flush().unwrap();\n\n stdin().read_exact(&mut [0]).unwrap();\n\n}\n\n\n", "file_path": "rust/examples/mobile-client.rs", "rank": 87, "score": 66264.73120818 }, { "content": "#[derive(Debug)]\n\nstruct InvalidPublicKey;\n\n\n\nimpl warp::reject::Reject for InvalidPublicKey {}\n\n\n\n/// Handles `warp` rejections of bad requests.\n\nasync fn handle_reject(err: warp::Rejection) -> Result<impl warp::Reply, Infallible> {\n\n let code = if err.is_not_found() {\n\n StatusCode::NOT_FOUND\n\n } else if let Some(InvalidPublicKey) = err.find() {\n\n StatusCode::BAD_REQUEST\n\n } else {\n\n error!(\"unhandled rejection: {:?}\", err);\n\n StatusCode::INTERNAL_SERVER_ERROR\n\n };\n\n // reply with empty body; the status code is the interesting part\n\n Ok(warp::reply::with_status(Vec::new(), code))\n\n}\n", "file_path": "rust/xaynet-server/src/rest.rs", "rank": 88, "score": 64299.32091176561 }, { "content": "#[async_trait]\n\npub trait Fetcher {\n\n /// Fetch the parameters for the current round\n\n async fn round_params(&mut self) -> Result<RoundParamsResponse, FetchError>;\n\n\n\n /// Fetch the mask length for the current round. 
The sum\n\n /// participants need this value during the sum2 phase to derive\n\n /// masks from the update participant's masking seeds.\n\n async fn mask_length(&mut self) -> Result<MaskLengthResponse, FetchError>;\n\n\n\n /// Fetch the latest global model.\n\n async fn model(&mut self) -> Result<ModelResponse, FetchError>;\n\n\n\n /// Fetch the global seed dictionary. Each sum2 participant needs a\n\n /// different portion of that dictionary.\n\n async fn seed_dict(&mut self) -> Result<SeedDictResponse, FetchError>;\n\n\n\n /// Fetch the sum dictionary. The update participants need this\n\n /// dictionary to encrypt their masking seed for each sum\n\n /// participant.\n\n async fn sum_dict(&mut self) -> Result<SumDictResponse, FetchError>;\n\n}\n\n\n\n/// An error returned by the [`Fetcher`]'s method.\n\npub type FetchError = anyhow::Error;\n\n\n", "file_path": "rust/xaynet-server/src/services/fetchers/mod.rs", "rank": 89, "score": 59261.33555055328 }, { "content": "#[async_trait]\n\npub trait ApiClient {\n\n type Error: ::std::fmt::Debug + ::std::error::Error + 'static;\n\n\n\n /// Retrieve the current round parameters\n\n async fn get_round_params(&mut self) -> Result<RoundParameters, Self::Error>;\n\n\n\n /// Retrieve the current sum dictionary, if available\n\n async fn get_sums(&mut self) -> Result<Option<SumDict>, Self::Error>;\n\n\n\n /// Retrieve the current seed dictionary for the given sum\n\n /// participant, if available.\n\n async fn get_seeds(\n\n &mut self,\n\n pk: SumParticipantPublicKey,\n\n ) -> Result<Option<UpdateSeedDict>, Self::Error>;\n\n\n\n /// Retrieve the current model/mask length, if available\n\n async fn get_mask_length(&mut self) -> Result<Option<u64>, Self::Error>;\n\n\n\n /// Retrieve the current global model, if available.\n\n async fn get_model(&mut self) -> Result<Option<Model>, Self::Error>;\n\n\n\n /// Send an encrypted and signed PET message to the coordinator.\n\n async fn send_message(&mut self, msg: Vec<u8>) -> Result<(), Self::Error>;\n\n}\n", "file_path": "rust/xaynet-client/src/api/mod.rs", "rank": 90, "score": 59261.33555055328 }, { "content": "fn get_participant_settings() -> ParticipantSettings {\n\n sodiumoxide::init().unwrap();\n\n\n\n let secret_key = MobileClient::create_participant_secret_key();\n\n ParticipantSettings {\n\n secret_key,\n\n aggregation_config: AggregationConfig {\n\n mask: MaskConfig {\n\n group_type: GroupType::Prime,\n\n data_type: DataType::F32,\n\n bound_type: BoundType::B0,\n\n model_type: ModelType::M3,\n\n },\n\n scalar: 1_f64,\n\n },\n\n }\n\n}\n\n\n\n// // How a Dart API could look like:\n\n\n", "file_path": "rust/examples/mobile-client.rs", "rank": 91, "score": 58814.49166001135 }, { "content": "fn dummy_config() -> MaskConfig {\n\n MaskConfig {\n\n group_type: GroupType::Prime,\n\n data_type: DataType::F32,\n\n bound_type: BoundType::B0,\n\n model_type: ModelType::M3,\n\n }\n\n}\n", "file_path": "rust/xaynet-client/src/participant.rs", "rank": 92, "score": 58814.49166001135 }, { "content": "#[async_trait]\n\npub trait LocalModel {\n\n async fn get_local_model(&mut self) -> Option<Model>;\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct ClientState<Type> {\n\n participant: Participant<Type>,\n\n round_params: RoundParameters,\n\n}\n\n\n\nimpl<Type> ClientState<Type> {\n\n async fn check_round_freshness<T: ApiClient>(\n\n &self,\n\n api: &mut T,\n\n ) -> Result<(), ClientError<T::Error>> {\n\n debug!(\"fetching round parameters\");\n\n let round_params = api.get_round_params().await?;\n\n if 
round_params.seed != self.round_params.seed {\n\n info!(\"new round parameters\");\n\n Err(ClientError::RoundOutdated)\n", "file_path": "rust/xaynet-client/src/mobile_client/client.rs", "rank": 93, "score": 58380.926843165274 }, { "content": "fn dummy_seed_dict() -> SeedDict {\n\n let mut dict = HashMap::new();\n\n dict.insert(PublicSigningKey::fill_with(0xaa), dummy_update_dict());\n\n dict.insert(PublicSigningKey::fill_with(0xbb), dummy_update_dict());\n\n dict\n\n}\n\n\n", "file_path": "rust/xaynet-server/src/services/tests/fetchers.rs", "rank": 94, "score": 56291.70571185902 }, { "content": "/// An interface for slicing into cryptographic byte objects.\n\npub trait ByteObject: Sized {\n\n /// Length in bytes of this object\n\n const LENGTH: usize;\n\n\n\n /// Creates a new object with all the bytes initialized to `0`.\n\n fn zeroed() -> Self;\n\n\n\n /// Gets the object byte representation.\n\n fn as_slice(&self) -> &[u8];\n\n\n\n /// Creates an object from the given buffer.\n\n ///\n\n /// # Errors\n\n /// Returns `None` if the length of the byte-slice isn't equal to the length of the object.\n\n fn from_slice(bytes: &[u8]) -> Option<Self>;\n\n\n\n /// Creates an object from the given buffer.\n\n ///\n\n /// # Panics\n\n /// Panics if the length of the byte-slice isn't equal to the length of the object.\n", "file_path": "rust/xaynet-core/src/crypto/mod.rs", "rank": 95, "score": 55744.29858549506 }, { "content": "fn dummy_update_dict() -> UpdateSeedDict {\n\n let mut dict = HashMap::new();\n\n dict.insert(\n\n PublicSigningKey::fill_with(0x11),\n\n EncryptedMaskSeed::fill_with(0x11),\n\n );\n\n dict.insert(\n\n PublicSigningKey::fill_with(0x22),\n\n EncryptedMaskSeed::fill_with(0x22),\n\n );\n\n dict\n\n}\n\n\n\n#[tokio::test]\n\nasync fn test_seed_dict_svc() {\n\n let (mut publisher, subscriber) = new_event_channels();\n\n\n\n let mut task = Spawn::new(SeedDictService::new(&subscriber));\n\n assert_ready!(task.poll_ready()).unwrap();\n\n\n", "file_path": "rust/xaynet-server/src/services/tests/fetchers.rs", "rank": 96, "score": 55521.97815553008 }, { "content": "struct LocalModelCache(Option<Model>);\n\n\n\nimpl LocalModelCache {\n\n fn set_local_model(&mut self, model: Model) {\n\n self.0 = Some(model);\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl LocalModel for LocalModelCache {\n\n async fn get_local_model(&mut self) -> Option<Model> {\n\n self.0.clone()\n\n }\n\n}\n", "file_path": "rust/xaynet-client/src/mobile_client/mod.rs", "rank": 97, "score": 54528.709367955395 }, { "content": "/// An interface to convert a collection of numerical values into an iterator of primitive values.\n\n///\n\n/// This trait is used to convert a [`Model`], which has its own internal representation of the\n\n/// weights, into primitive types ([`f32`], [`f64`], [`i32`], [`i64`]). 
The opposite trait is\n\n/// [`FromPrimitives`].\n\npub trait IntoPrimitives<P: 'static>: Sized {\n\n /// Creates an iterator from numerical values that yields converted primitive values.\n\n ///\n\n /// # Errors\n\n /// Yields an error for each numerical value that can't be converted into a primitive value.\n\n fn into_primitives(self) -> Box<dyn Iterator<Item = Result<P, ModelCastError>>>;\n\n\n\n /// Creates an iterator from numerical values that yields converted primitive values.\n\n ///\n\n /// # Errors\n\n /// Yields an error for each numerical value that can't be converted into a primitive value.\n\n fn to_primitives(&self) -> Box<dyn Iterator<Item = Result<P, ModelCastError>>>;\n\n\n\n /// Consume this model and into an iterator that yields `P` values.\n\n ///\n\n /// # Panics\n\n /// Panics if a numerical value can't be converted into a primitive value.\n\n fn into_primitives_unchecked(self) -> Box<dyn Iterator<Item = P>> {\n\n Box::new(\n\n self.into_primitives()\n\n .map(|res| res.expect(\"conversion to primitive type failed\")),\n\n )\n\n }\n\n}\n\n\n", "file_path": "rust/xaynet-core/src/mask/model.rs", "rank": 98, "score": 51268.66426698577 }, { "content": " /// A tag for [`Sum2`] messages\n\n Sum2,\n\n}\n\n\n\nimpl TryFrom<u8> for Tag {\n\n type Error = DecodeError;\n\n\n\n fn try_from(value: u8) -> Result<Self, Self::Error> {\n\n Ok(match value {\n\n 1 => Tag::Sum,\n\n 2 => Tag::Update,\n\n 3 => Tag::Sum2,\n\n _ => return Err(anyhow!(\"invalid tag {}\", value)),\n\n })\n\n }\n\n}\n\n\n\nimpl Into<u8> for Tag {\n\n fn into(self) -> u8 {\n\n match self {\n", "file_path": "rust/xaynet-core/src/message/message.rs", "rank": 99, "score": 50082.242690241874 } ]
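The unmask-phase snippets above (from rust/xaynet-server/src/state_machine/phases/unmask.rs) reduce the mask dictionary to the mask that was submitted strictly more often than any other, and treat a tie as an ambiguous round. A minimal, self-contained sketch of just that selection rule is shown below; the `String` keys, `usize` counts, and the helper name `pick_unique_max` are illustrative stand-ins chosen for the sketch, not types or functions from the repository.

use std::cmp::Ordering;
use std::collections::HashMap;

/// Returns the key with the strictly highest count, or `None` when the
/// maximum count is shared by more than one key (the "ambiguous" case).
/// An empty map also yields `None`; the snippet above rejects that case
/// earlier as a separate error.
fn pick_unique_max(counts: HashMap<String, usize>) -> Option<String> {
    counts
        .into_iter()
        .fold((None, 0usize), |(winner, best), (key, count)| {
            match best.cmp(&count) {
                Ordering::Less => (Some(key), count), // new strict maximum
                Ordering::Greater => (winner, best),  // keep the current winner
                Ordering::Equal => (None, best),      // tie at the maximum: no unique winner
            }
        })
        .0
}

fn main() {
    let mut counts = HashMap::new();
    counts.insert("mask_a".to_string(), 3);
    counts.insert("mask_b".to_string(), 1);
    assert_eq!(pick_unique_max(counts), Some("mask_a".to_string()));

    let mut tied = HashMap::new();
    tied.insert("mask_a".to_string(), 2);
    tied.insert("mask_b".to_string(), 2);
    // A tie corresponds to the RoundFailed::AmbiguousMasks error in the snippet.
    assert_eq!(pick_unique_max(tied), None);
}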
Rust
src/ppu/ppu_memory.rs
bheisler/Corrosion
5ca2b3a03825c3d58623df774a8596de32b46812
use super::Color; use super::TilePattern; use cart::Cart; use memory::MemSegment; use std::cell::UnsafeCell; use std::rc::Rc; pub struct PPUMemory { cart: Rc<UnsafeCell<Cart>>, vram: Box<[u8; 0x0F00]>, palette: [Color; 0x20], } impl PPUMemory { pub fn new(cart: Rc<UnsafeCell<Cart>>) -> PPUMemory { PPUMemory { cart: cart, vram: Box::new([0u8; 0x0F00]), palette: [Color::from_bits_truncate(0); 0x20], } } } fn get_tile_addr(tile_id: u8, plane: u8, fine_y_scroll: u16, tile_table: u16) -> u16 { let mut tile_addr = 0u16; tile_addr |= fine_y_scroll; tile_addr |= plane as u16; tile_addr |= (tile_id as u16) << 4; tile_addr |= tile_table; tile_addr } impl PPUMemory { pub fn read_bypass_palette(&mut self, idx: u16) -> u8 { let idx = self.translate_vram_address(idx); self.vram[idx] } fn translate_vram_address(&self, idx: u16) -> usize { let idx = idx & 0x0FFF; let nametable_num = (idx / 0x0400) as usize; let idx_in_nametable = idx % 0x400; let table: &[u16; 4] = unsafe { (*self.cart.get()).get_mirroring_table() }; let translated = table[nametable_num] + idx_in_nametable; translated as usize % self.vram.len() } #[cfg(feature = "vectorize")] pub fn get_palettes(&self) -> (::simd::u8x16, ::simd::u8x16) { let palette_bytes: &[u8; 0x20] = unsafe { ::std::mem::transmute(&self.palette) }; ( ::simd::u8x16::load(palette_bytes, 0), ::simd::u8x16::load(palette_bytes, 16), ) } #[cfg(not(feature = "vectorize"))] pub fn read_palette(&self, idx: super::PaletteIndex) -> Color { self.palette[idx.to_index()] } pub fn read_tile_pattern( &mut self, tile_id: u8, fine_y_scroll: u16, tile_table: u16, ) -> TilePattern { let lo_addr = get_tile_addr(tile_id, 0, fine_y_scroll, tile_table); let hi_addr = get_tile_addr(tile_id, 8, fine_y_scroll, tile_table); TilePattern { lo: self.read(lo_addr), hi: self.read(hi_addr), } } #[allow(dead_code)] pub fn dump_nametable(&mut self, idx: u16) { let start_idx = 0x2000 + (idx * 0x400); println!("Nametable {}:", idx); self.print_columns(start_idx..(start_idx + 0x3C0), 32) } #[allow(dead_code)] pub fn dump_attribute_table(&mut self, idx: u16) { let start_idx = 0x2000 + (idx * 0x400); println!("Attribute table {}:", idx); self.print_columns((start_idx + 0x3C0)..(start_idx + 0x400), 32); } } impl MemSegment for PPUMemory { fn read(&mut self, idx: u16) -> u8 { match idx { 0x0000...0x1FFF => unsafe { (*self.cart.get()).chr_read(idx) }, 0x2000...0x3EFF => self.read_bypass_palette(idx), 0x3F00...0x3FFF => self.palette[(idx & 0x1F) as usize].bits(), x => invalid_address!(x), } } fn write(&mut self, idx: u16, val: u8) { match idx { 0x0000...0x1FFF => unsafe { (*self.cart.get()).chr_write(idx, val) }, 0x2000...0x3EFF => { let idx = self.translate_vram_address(idx); self.vram[idx] = val; } 0x3F00...0x3FFF => { let val = Color::from_bits_truncate(val); let idx = (idx & 0x001F) as usize; match idx { 0x10 => self.palette[0x00] = val, 0x00 => self.palette[0x10] = val, 0x14 => self.palette[0x04] = val, 0x04 => self.palette[0x14] = val, 0x18 => self.palette[0x08] = val, 0x08 => self.palette[0x18] = val, 0x1C => self.palette[0x0C] = val, 0x0C => self.palette[0x1C] = val, _ => (), }; self.palette[idx] = val; } x => invalid_address!(x), } } } #[cfg(test)] mod tests { use cart::ScreenMode; use memory::MemSegment; use ppu::{Color, PPU}; use ppu::tests::*; #[test] fn ppu_can_read_write_palette() { let mut ppu = create_test_ppu(); ppu.reg.v = 0x3F00; ppu.write(0x2007, 12); ppu.reg.v = 0x3F00; assert_eq!(ppu.ppu_mem.palette[0], Color::from_bits_truncate(12)); ppu.reg.v = 0x3F01; ppu.write(0x2007, 212); ppu.reg.v = 
0x3F01; assert_eq!(ppu.read(0x2007), 212 & 0x3F); } #[test] fn test_palette_mirroring() { let mut ppu = create_test_ppu(); let mirrors = [0x3F10, 0x3F14, 0x3F18, 0x3F1C]; let targets = [0x3F00, 0x3F04, 0x3F08, 0x3F0C]; for x in 0..4 { ppu.reg.v = targets[x]; ppu.write(0x2007, 12); ppu.reg.v = mirrors[x]; assert_eq!(ppu.read(0x2007), 12); ppu.reg.v = mirrors[x]; ppu.write(0x2007, 12); ppu.reg.v = targets[x]; assert_eq!(ppu.read(0x2007), 12); } } fn to_nametable_idx(idx: u16, tbl: u16) -> u16 { 0x2000 + (0x0400 * tbl) + idx } fn assert_mirrored(ppu: &mut PPU, tbl1: u16, tbl2: u16) { for idx in 0x0000..0x0400 { let tbl1_idx = to_nametable_idx(idx, tbl1); let tbl2_idx = to_nametable_idx(idx, tbl2); println!( "Translated: tbl1: {:04X}, tbl2: {:04X}", ppu.ppu_mem.translate_vram_address(tbl1_idx), ppu.ppu_mem.translate_vram_address(tbl2_idx), ); ppu.ppu_mem.write(tbl1_idx, 0xFF); assert_eq!(0xFF, ppu.ppu_mem.read(tbl2_idx)); ppu.ppu_mem.write(tbl2_idx, 0x61); assert_eq!(0x61, ppu.ppu_mem.read(tbl1_idx)); } } fn assert_not_mirrored(ppu: &mut PPU, tbl1: u16, tbl2: u16) { for idx in 0x0000..0x0400 { let tbl1_idx = to_nametable_idx(idx, tbl1); let tbl2_idx = to_nametable_idx(idx, tbl2); println!( "Translated: tbl1: {:04X}, tbl2: {:04X}", ppu.ppu_mem.translate_vram_address(tbl1_idx), ppu.ppu_mem.translate_vram_address(tbl2_idx), ); ppu.ppu_mem.write(tbl1_idx, 0x00); ppu.ppu_mem.write(tbl2_idx, 0x00); ppu.ppu_mem.write(tbl1_idx, 0xFF); assert_eq!(0x00, ppu.ppu_mem.read(tbl2_idx)); ppu.ppu_mem.write(tbl2_idx, 0x61); assert_eq!(0xFF, ppu.ppu_mem.read(tbl1_idx)); } } #[test] fn single_screen_mirroring_mirrors_both_ways() { let mut ppu = create_test_ppu_with_mirroring(ScreenMode::OneScreenLow); assert_mirrored(&mut ppu, 0, 1); assert_mirrored(&mut ppu, 1, 2); assert_mirrored(&mut ppu, 2, 3); } #[test] fn four_screen_mirroring_mirrors_both_ways() { let mut ppu = create_test_ppu_with_mirroring(ScreenMode::FourScreen); assert_not_mirrored(&mut ppu, 0, 1); assert_not_mirrored(&mut ppu, 1, 2); assert_not_mirrored(&mut ppu, 2, 3); } #[test] fn horizontal_mirroring_mirrors_horizontally() { let mut ppu = create_test_ppu_with_mirroring(ScreenMode::Horizontal); assert_mirrored(&mut ppu, 0, 1); assert_mirrored(&mut ppu, 2, 3); assert_not_mirrored(&mut ppu, 0, 2); assert_not_mirrored(&mut ppu, 1, 3); } #[test] fn vertical_mirroring_mirrors_vertically() { let mut ppu = create_test_ppu_with_mirroring(ScreenMode::Vertical); assert_not_mirrored(&mut ppu, 0, 1); assert_not_mirrored(&mut ppu, 2, 3); assert_mirrored(&mut ppu, 0, 2); assert_mirrored(&mut ppu, 1, 3); } }
use super::Color; use super::TilePattern; use cart::Cart; use memory::MemSegment; use std::cell::UnsafeCell; use std::rc::Rc; pub struct PPUMemory { cart: Rc<UnsafeCell<Cart>>, vram: Box<[u8; 0x0F00]>, palette: [Color; 0x20], } impl PPUMemory { pub fn new(cart: Rc<UnsafeCell<Cart>>) -> PPUMemory { PPUMemory { cart: cart, vram: Box::new([0u8; 0x0F00]), palette: [Color::from_bits_truncate(0); 0x20], } } } fn get_tile_addr(tile_id: u8, plane: u8, fine_y_scroll: u16, tile_table: u16) -> u16 { let mut tile_addr = 0u16; tile_addr |= fine_y_scroll; tile_addr |= plane as u16; tile_addr |= (tile_id as u16) << 4; tile_addr |= tile_table; tile_addr } impl PPUMemory { pub fn read_bypass_palette(&mut self, idx: u16) -> u8 { let idx = self.translate_vram_address(idx); self.vram[idx] } fn translate_vram_address(&self, idx: u16) -> usize { let idx = idx & 0x0FFF; let nametable_num = (idx / 0x0400) as usize; let idx_in_nametable = idx % 0x400; let table: &[u16; 4] = unsafe { (*self.cart.get()).get_mirroring_table() }; let translated = table[nametable_num] + idx_in_nametable; translated as usize % self.vram.len() } #[cfg(feature = "vectorize")] pub fn get_palettes(&self) -> (::simd::u8x16, ::simd::u8x16) { let palette_bytes: &[u8; 0x20] = unsafe { ::std::mem::transmute(&self.palette) }; ( ::simd::u8x16::load(palette_bytes, 0), ::simd::u8x16::load(palette_bytes, 16), ) } #[cfg(not(feature = "vectorize"))] pub fn read_palette(&self, idx: super::PaletteIndex) -> Color { self.palette[idx.to_index()] } pub fn read_tile_pattern( &mut self, tile_id: u8, fine_y_scroll: u16, tile_table: u16, ) -> TilePattern { let lo_addr = get_tile_addr(tile_id, 0, fine_y_scroll, tile_table); let hi_addr = get_tile_addr(tile_id, 8, fine_y_scroll, tile_table); TilePattern { lo: self.read(lo_addr), hi: self.read(hi_addr), } } #[allow(dead_code)] pub fn dump_nametable(&mut self, idx: u16) { let start_idx = 0x2000 + (idx * 0x400); println!("Nametable {}:", idx); self.print_columns(start_idx..(start_idx + 0x3C0), 32) } #[allow(dead_code)] pub fn dump_attribute_table(&mut self, idx: u16) { let start_idx = 0x2000 + (idx * 0x400); println!("Attribute table {}:", idx); self.print_columns((start_idx + 0x3C0)..(start_idx + 0x400), 32); } } impl MemSegment for PPUMemory { fn read(&mut self, idx: u16) -> u8 { match idx { 0x0000...0x1FFF => unsafe { (*self.cart.get()).chr_read(idx) }, 0x2000...0x3EFF => self.read_bypass_palette(idx), 0x3F00...0x3FFF => self.palette[(idx & 0x1F) as usize].bits(), x => invalid_address!(x), } } fn write(&mut self, idx: u16, val: u8) { match idx { 0x0000...0x1FFF => unsafe { (*self.cart.get()).chr_write(idx, val) }, 0x2000...0x3EFF => { let idx = self.translate_vram_address(idx); self.vram[idx] = val; } 0x3F00...0x3FFF => { let val = Color::from_bits_truncate(val); let idx = (idx & 0x001F) as usize; match idx { 0x10 => self.palette[0x00] = val, 0x00 => self.palette[0x10] = val, 0x14 => self.palette[0x04] = val, 0x04 => self.palette[0x14] = val, 0x18 => self.palette[0x08] = val, 0x08 => self.palette[0x18] = val, 0x1C => self.palette[0x0C] = val, 0x0C => self.palette[0x1C] = val, _ => (), }; self.palette[idx] = val; } x => invalid_address!(x), } } } #[cfg(test)] mod tests { use cart::ScreenMode; use memory::MemSegment; use ppu::{Color, PPU}; use ppu::tests::*; #[test] fn ppu_can_read_write_palette() { let mut ppu = create_test_ppu(); ppu.reg.v = 0x3F00; ppu.write(0x2007, 12); ppu.reg.v = 0x3F00; assert_eq!(ppu.ppu_mem.palette[0], Color::from_bits_truncate(12)); ppu.reg.v = 0x3F01; ppu.write(0x2007, 212); ppu.reg.v = 
0x3F01; assert_eq!(ppu.read(0x2007), 212 & 0x3F); } #[test] fn test_palette_mirroring() { let mut ppu = create_test_ppu(); let mirrors = [0x3F10, 0x3F14, 0x3F18, 0x3F1C]; let targets = [0x3F00, 0x3F04, 0x3F08, 0x3F0C]; for x in 0..4 { ppu.reg.v = targets[x]; ppu.write(0x2007, 12); ppu.reg.v = mirrors[x]; assert_eq!(ppu.read(0x2007), 12); ppu.reg.v = mirrors[x]; ppu.write(0x2007, 12); ppu.reg.v = targets[x]; assert_eq!(ppu.read(0x2007), 12); } } fn to_nametable_idx(idx: u16, tbl: u16) -> u16 { 0x2000 + (0x0400 * tbl) + idx } fn assert_mirrored(ppu: &mut PPU, tbl1: u16, tbl2: u16) { for idx in 0x0000..0x0400 { let tbl1_idx = to_nametable_idx(idx, tbl1); let tbl2_idx = to_nametable_idx(idx, tbl2); println!( "Translated: tbl1: {:04X}, tbl2: {:04X}", ppu.ppu_mem.translate_vram_address(tbl1_idx), ppu.ppu_mem.translate_vram_address(tbl2_idx), ); ppu.ppu_mem.write(tbl1_idx, 0xFF); assert_eq!(0xFF, ppu.ppu_mem.read(tbl2_idx)); ppu.ppu_mem.write(tbl2_idx, 0x61); assert_eq!(0x61, ppu.ppu_mem.read(tbl1_idx)); } } fn assert_not_mirrored(ppu: &mut PPU, tbl1: u16, tbl2: u16) { for idx in 0x0000..0x0400 { let tbl1_idx = to_nametable_idx(idx, tbl1); let tbl2_idx = to_nametable_idx(idx, tbl2); println!( "Translated: tbl1: {:04X}, tbl2: {:04X}", ppu.ppu_mem.translate_vram_address(tbl1_idx), ppu.ppu_mem.translate_vram_address(tbl2_idx), ); ppu.ppu_mem.write(tbl1_idx, 0x00); ppu.ppu_mem.write(tbl2_idx, 0x00); ppu.ppu_mem.write(tbl1_idx, 0xFF); assert_eq!(0x00, ppu.ppu_mem.read(tbl2_idx)); ppu.ppu_mem.write(tbl2_idx, 0x61); assert_eq!(0xFF, ppu.ppu_mem.read(tbl1_idx)); } } #[test] fn single_screen_mirroring_mirrors_both_ways() { let mut ppu = create_test_ppu_with_mirroring(ScreenMode::OneScreenLow); assert_mirrored(&mut ppu, 0, 1); assert_mirrored(&mut ppu, 1, 2); assert_mirrored(&mut ppu, 2, 3); } #[test]
#[test] fn horizontal_mirroring_mirrors_horizontally() { let mut ppu = create_test_ppu_with_mirroring(ScreenMode::Horizontal); assert_mirrored(&mut ppu, 0, 1); assert_mirrored(&mut ppu, 2, 3); assert_not_mirrored(&mut ppu, 0, 2); assert_not_mirrored(&mut ppu, 1, 3); } #[test] fn vertical_mirroring_mirrors_vertically() { let mut ppu = create_test_ppu_with_mirroring(ScreenMode::Vertical); assert_not_mirrored(&mut ppu, 0, 1); assert_not_mirrored(&mut ppu, 2, 3); assert_mirrored(&mut ppu, 0, 2); assert_mirrored(&mut ppu, 1, 3); } }
fn four_screen_mirroring_mirrors_both_ways() { let mut ppu = create_test_ppu_with_mirroring(ScreenMode::FourScreen); assert_not_mirrored(&mut ppu, 0, 1); assert_not_mirrored(&mut ppu, 1, 2); assert_not_mirrored(&mut ppu, 2, 3); }
function_block-full_function
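The translate_vram_address code in the ppu_memory.rs row above maps a $2000-$3EFF PPU address onto internal VRAM by masking the address into a 4 KiB window, picking the 1 KiB nametable it falls in, and adding that nametable's base offset from the cartridge's mirroring table. The sketch below reproduces only that lookup in isolation; the HORIZONTAL and VERTICAL table values and the 2 KiB vram_len are assumptions made for the sketch, not values taken from the repository.

/// Illustrative mirroring tables: each entry is the base offset of one of the
/// four logical nametables inside internal VRAM.
const HORIZONTAL: [u16; 4] = [0x000, 0x000, 0x400, 0x400];
const VERTICAL: [u16; 4] = [0x000, 0x400, 0x000, 0x400];

fn translate(addr: u16, table: &[u16; 4], vram_len: usize) -> usize {
    let idx = addr & 0x0FFF;                      // fold $2000-$3EFF into a 4 KiB window
    let nametable_num = (idx / 0x0400) as usize;  // which of the four logical nametables
    let idx_in_nametable = idx % 0x0400;          // offset inside that nametable
    (table[nametable_num] + idx_in_nametable) as usize % vram_len
}

fn main() {
    let vram_len = 0x0800; // assume 2 KiB of internal VRAM for this sketch
    // Horizontal mirroring: $2000 and $2400 resolve to the same physical byte,
    // matching the mirrored(0, 1) assertion in the tests above.
    assert_eq!(
        translate(0x2000, &HORIZONTAL, vram_len),
        translate(0x2400, &HORIZONTAL, vram_len)
    );
    // Vertical mirroring pairs $2000 with $2800 instead (mirrored(0, 2)).
    assert_eq!(
        translate(0x2000, &VERTICAL, vram_len),
        translate(0x2800, &VERTICAL, vram_len)
    );
}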
[ { "content": "#[test]\n\nfn blargg_ppu_test_vram_access() {\n\n let mut hashes: HashMap<u32, &'static str> = HashMap::new();\n\n let commands: HashMap<u32, &'static str> = HashMap::new();\n\n\n\n hashes.insert(18, \"cb15f68f631c1d409beefb775bcff990286096fb\");\n\n\n\n run_system_test(\n\n 19,\n\n Path::new(\"nes-test-roms/blargg_ppu_tests_2005.09.15b/vram_access.nes\"),\n\n hashes,\n\n commands,\n\n );\n\n}\n\n\n", "file_path": "src/tests/mod.rs", "rank": 1, "score": 162266.16046087412 }, { "content": "#[test]\n\nfn blargg_ppu_test_palette_ram() {\n\n let mut hashes: HashMap<u32, &'static str> = HashMap::new();\n\n let commands: HashMap<u32, &'static str> = HashMap::new();\n\n\n\n hashes.insert(18, \"cb15f68f631c1d409beefb775bcff990286096fb\");\n\n\n\n run_system_test(\n\n 19,\n\n Path::new(\"nes-test-roms/blargg_ppu_tests_2005.09.15b/palette_ram.nes\"),\n\n hashes,\n\n commands,\n\n );\n\n}\n\n\n", "file_path": "src/tests/mod.rs", "rank": 2, "score": 162266.16046087415 }, { "content": "fn copy_to_texture(buf: &[Color; SCREEN_BUFFER_SIZE], buffer: &mut [u8], pitch: usize) {\n\n for y in 0..SCREEN_HEIGHT {\n\n for x in 0..SCREEN_WIDTH {\n\n let nes_idx = y * SCREEN_WIDTH + x;\n\n let color = buf[nes_idx];\n\n let pal_idx = color.bits() as usize * 3;\n\n let offset = y * pitch + x * 3;\n\n buffer[offset] = PALETTE[pal_idx];\n\n buffer[offset + 1] = PALETTE[pal_idx + 1];\n\n buffer[offset + 2] = PALETTE[pal_idx + 2];\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> Screen for SDLScreen<'a> {\n\n fn draw(&mut self, buf: &[Color; SCREEN_BUFFER_SIZE]) {\n\n self.texture\n\n .with_lock(None, |buffer: &mut [u8], pitch: usize| {\n\n copy_to_texture(buf, buffer, pitch);\n\n })\n", "file_path": "src/screen/sdl.rs", "rank": 3, "score": 154930.48584535485 }, { "content": "#[cfg(not(feature = \"debug_features\"))]\n\nfn disasm_function(_: &mut CPU, _: u16) {}\n\n\n\nimpl Default for Dispatcher {\n\n fn default() -> Dispatcher {\n\n Dispatcher::new()\n\n }\n\n}\n\n\n\n#[cfg(target_arch = \"x86_64\")]\n\nimpl Dispatcher {\n\n pub fn new() -> Dispatcher {\n\n Dispatcher {\n\n table: FnvHashMap::default(),\n\n compiling: FnvHashSet::default(),\n\n }\n\n }\n\n\n\n fn get_rom_addr(&self, addr: u16, cpu: &CPU) -> RomAddress {\n\n unsafe { (*cpu.cart.get()).prg_rom_address(addr) }\n\n }\n", "file_path": "src/cpu/dispatcher.rs", "rank": 4, "score": 151272.96419123188 }, { "content": "fn prg_ram_addr(idx: u16) -> u16 {\n\n idx - 0x6000\n\n}\n\n\n", "file_path": "src/mappers/mmc1.rs", "rank": 5, "score": 149901.311253489 }, { "content": "fn standard_mapping_tables(mode: ScreenMode) -> &'static [u16; 4] {\n\n match mode {\n\n ScreenMode::Vertical => &VERTICAL,\n\n ScreenMode::Horizontal => &HORIZONTAL,\n\n ScreenMode::OneScreenHigh => &ONE_SCREEN_HIGH,\n\n ScreenMode::OneScreenLow => &ONE_SCREEN_LOW,\n\n ScreenMode::FourScreen => &FOUR_SCREEN,\n\n }\n\n}\n\n\n\n#[derive(Debug, Eq, PartialEq, Hash, Clone, Copy)]\n\npub struct RomAddress {\n\n pub window_id: usize,\n\n pub offset: u16,\n\n}\n\n\n", "file_path": "src/mappers/mod.rs", "rank": 6, "score": 139825.9292592497 }, { "content": "#[test]\n\nfn blargg_apu_test_len_table() {\n\n let mut hashes: HashMap<u32, &'static str> = HashMap::new();\n\n let commands: HashMap<u32, &'static str> = HashMap::new();\n\n\n\n hashes.insert(13, \"90a61bd003c5794713aa5f207b9b70c8862d892b\");\n\n\n\n run_system_test(\n\n 14,\n\n Path::new(\"nes-test-roms/apu_test/rom_singles/2-len_table.nes\"),\n\n hashes,\n\n commands,\n\n );\n\n}\n\n\n", "file_path": "src/tests/mod.rs", "rank": 7, "score": 
136165.2974599061 }, { "content": "#[cfg(test)]\n\npub fn create_test_mapper(prg_rom: Vec<u8>, chr_rom: Vec<u8>, mode: ScreenMode) -> Box<Mapper> {\n\n let path_buf = ::std::path::PathBuf::new();\n\n let path = path_buf.as_path();\n\n let mut params = MapperParams::simple(path, prg_rom, chr_rom);\n\n params.mirroring_mode = mode;\n\n Mapper::new(0, params)\n\n}\n", "file_path": "src/mappers/mod.rs", "rank": 8, "score": 135375.9961323205 }, { "content": "#[test]\n\nfn blargg_ppu_test_sprite_ram() {\n\n let mut hashes: HashMap<u32, &'static str> = HashMap::new();\n\n let commands: HashMap<u32, &'static str> = HashMap::new();\n\n\n\n hashes.insert(18, \"cb15f68f631c1d409beefb775bcff990286096fb\");\n\n\n\n run_system_test(\n\n 19,\n\n Path::new(\"nes-test-roms/blargg_ppu_tests_2005.09.15b/sprite_ram.nes\"),\n\n hashes,\n\n commands,\n\n );\n\n}\n\n\n", "file_path": "src/tests/mod.rs", "rank": 9, "score": 135242.15939071853 }, { "content": "pub fn run_benchmark(\n\n bencher: &mut Bencher,\n\n file_name: &Path,\n\n commands: HashMap<u32, &'static str>,\n\n settings: Settings,\n\n) {\n\n\n\n let cart = ::cart::Cart::read(file_name).expect(\"Failed to read ROM File\");\n\n let mut builder = ::EmulatorBuilder::new(cart, settings);\n\n builder.io = Box::new(test_io::TestIO::new(commands));\n\n\n\n let mut emulator = builder.build();\n\n\n\n while !emulator.rendering_enabled() {\n\n assert!(!emulator.halted());\n\n emulator.run_frame();\n\n }\n\n\n\n bencher.iter(|| {\n\n assert!(!emulator.halted());\n\n emulator.run_frame();\n\n });\n\n}\n", "file_path": "src/tests/bench.rs", "rank": 10, "score": 129371.07453886792 }, { "content": "fn cyc_to_px(ppu_cyc: u64) -> usize {\n\n let mut pixel: usize = 0;\n\n let mut rem = ppu_cyc;\n\n\n\n rem += 241 * CYCLES_PER_SCANLINE; // Skip to the position at power-on.\n\n\n\n let (frames, rem_t) = div_rem(rem, CYCLES_PER_FRAME);\n\n rem = rem_t;\n\n pixel += frames as usize * SCREEN_BUFFER_SIZE;\n\n\n\n // Skip the pre-render scanline.\n\n rem = rem.saturating_sub(CYCLES_PER_SCANLINE);\n\n\n\n // Cut off the VBLANK scanlines.\n\n rem = cmp::min(rem, SCREEN_HEIGHT as u64 * CYCLES_PER_SCANLINE);\n\n\n\n let (scanlines, rem_t) = div_rem(rem, CYCLES_PER_SCANLINE);\n\n rem = rem_t;\n\n pixel += scanlines as usize * SCREEN_WIDTH;\n\n\n", "file_path": "src/ppu/mod.rs", "rank": 11, "score": 125688.21365465313 }, { "content": "fn parse(string: &str) -> u8 {\n\n string.char_indices().filter(|&(_, c)| c != '.').fold(\n\n 0u8,\n\n |acc,\n\n (idx,\n\n _)| {\n\n acc | 1u8 << (7 - idx)\n\n },\n\n )\n\n}\n\n\n\nimpl MemSegment for TestIO {\n\n fn read(&mut self, idx: u16) -> u8 {\n\n match idx {\n\n 0x4016 => OPEN_BUS | self.controller1.shift(),\n\n 0x4017 => OPEN_BUS | self.controller2.shift(),\n\n x => invalid_address!(x),\n\n }\n\n }\n\n\n", "file_path": "src/tests/test_io.rs", "rank": 12, "score": 125565.97510689212 }, { "content": "fn read_key(state: &KeyboardState, key: Scancode, val: u8) -> u8 {\n\n if state.is_scancode_pressed(key) {\n\n val\n\n } else {\n\n 0\n\n }\n\n}\n\n\n\nimpl IO for SdlIO {\n\n #[cfg_attr(rustfmt, rustfmt_skip)]\n\n fn poll(&mut self) {\n\n let pump_ref = self.event_pump.borrow();\n\n let state = KeyboardState::new(&*pump_ref);\n\n\n\n let c1 =\n\n read_key(&state, Scancode::Z, A) |\n\n read_key(&state, Scancode::X, B) |\n\n read_key(&state, Scancode::Return, START) |\n\n read_key(&state, Scancode::Backspace, SELECT) |\n\n read_key(&state, Scancode::Up, UP) |\n\n read_key(&state, Scancode::Down, DOWN) |\n\n read_key(&state, Scancode::Right, RIGHT) 
|\n\n read_key(&state, Scancode::Left, LEFT);\n\n self.controller1.load(c1);\n\n }\n\n}\n", "file_path": "src/io/sdl.rs", "rank": 13, "score": 124664.65968861576 }, { "content": "pub fn compile(\n\n addr: u16,\n\n cpu: &mut CPU,\n\n dispatcher: &mut Dispatcher,\n\n) -> FnvHashMap<u16, ExecutableBlock> {\n\n let analysis = Analyst::new(cpu).analyze(addr);\n\n Compiler::new(cpu, dispatcher, analysis).compile_block()\n\n}\n\n\n\n// rcx and sub-sections thereof are the general-purpose scratch register.\n\n// Sometimes r8 and rax are used as scratch registers as well\n\ndynasm!(this\n\n ; .alias cpu, rbx\n\n ; .alias ram, rdx\n\n ; .alias arg, r8b\n\n ; .alias arg_w, r8w\n\n ; .alias n_a, r9b\n\n ; .alias n_x, r10b\n\n ; .alias n_y, r11b\n\n ; .alias n_p, r12b\n", "file_path": "src/cpu/x86_64_compiler/mod.rs", "rank": 14, "score": 123300.84571858565 }, { "content": "fn parse_rom(input: &[u8]) -> IResult<&[u8], Rom> {\n\n do_parse!(input,\n\n tag!(b\"NES\\x1A\") >>\n\n prg_pages: be_u8 >>\n\n chr_pages: be_u8 >>\n\n flags_6: bits!(tuple!(\n\n take_bits!(u8, 4),\n\n map_opt!(take_bits!(u8, 4), Flags6::from_bits))) >>\n\n flags_7: bits!(tuple!(\n\n take_bits!(u8, 4),\n\n map_opt!(take_bits!(u8, 4), Flags7::from_bits))) >>\n\n call!(validate_not_nes2, flags_7.1) >>\n\n prg_ram_pages: be_u8 >>\n\n flags_9: map_opt!(be_u8, Flags9::from_bits) >>\n\n tag!([0u8; 6]) >>\n\n //Skip the trainer if there is one\n\n cond!(flags_6.1.contains(TRAINER), take!(TRAINER_LENGTH)) >>\n\n prg_rom: take!(prg_pages as usize * PRG_ROM_PAGE_SIZE) >>\n\n chr_rom: take!(chr_pages as usize * CHR_ROM_PAGE_SIZE) >>\n\n ( Rom {\n", "file_path": "src/cart/ines.rs", "rank": 15, "score": 121662.9576805167 }, { "content": "#[cfg(feature = \"debug_features\")]\n\nfn disasm_function(cpu: &mut CPU, addr: u16) {\n\n ::cpu::disasm::Disassembler::new(cpu).disasm_function(addr);\n\n}\n\n\n", "file_path": "src/cpu/dispatcher.rs", "rank": 16, "score": 120326.75046634645 }, { "content": "#[bench]\n\nfn bench_sprite(b: &mut Bencher) {\n\n run_benchmark(\n\n b,\n\n Path::new(\"nes-test-roms/other/SPRITE.NES\"),\n\n HashMap::new(),\n\n Default::default(),\n\n );\n\n}\n\n\n", "file_path": "src/tests/bench.rs", "rank": 17, "score": 118579.58397364483 }, { "content": "#[bench]\n\nfn bench_blocks(b: &mut Bencher) {\n\n run_benchmark(\n\n b,\n\n Path::new(\"nes-test-roms/other/BLOCKS.NES\"),\n\n HashMap::new(),\n\n Default::default(),\n\n );\n\n}\n\n\n", "file_path": "src/tests/bench.rs", "rank": 18, "score": 118579.58397364483 }, { "content": "fn validate_not_nes2(input: &[u8], flags_7: Flags7) -> IResult<&[u8], ()> {\n\n if (flags_7.bits() & 0b0000_1100) == 0b0000_1000 {\n\n IResult::Error(error_code!(ErrorKind::Custom(1)))\n\n } else {\n\n IResult::Done(input, ())\n\n }\n\n}\n\n\n", "file_path": "src/cart/ines.rs", "rank": 19, "score": 116704.41900023902 }, { "content": "fn run_system_test(\n\n frames: u32,\n\n file_name: &Path,\n\n hashes: HashMap<u32, &'static str>,\n\n commands: HashMap<u32, &'static str>,\n\n) {\n\n\n\n let cart = ::cart::Cart::read(file_name).expect(\"Failed to read ROM File\");\n\n let settings = Settings {\n\n jit: true,\n\n ..Default::default()\n\n };\n\n let mut builder = ::EmulatorBuilder::new(cart, settings);\n\n builder.io = Box::new(test_io::TestIO::new(commands));\n\n builder.screen = Box::new(hash_screen::HashVerifier::new(hashes));\n\n builder.screen = Box::new(hash_screen::HashPrinter::new(builder.screen));\n\n\n\n let mut emulator = builder.build();\n\n\n\n for _ in 0..frames {\n\n 
assert!(!emulator.halted());\n\n emulator.run_frame();\n\n }\n\n}\n", "file_path": "src/tests/mod.rs", "rank": 20, "score": 116499.31621243953 }, { "content": "fn to_page_num(addr: u16) -> usize {\n\n assert!(addr >= 0x8000);\n\n ((addr >> 12) & 0b0111) as usize\n\n}\n\n\n\nimpl MappingTable {\n\n /// Create a MappingTable from the given PRM ROM data and minimum window\n\n /// size (in units of BANK_SIZE bytes)\n\n pub fn new(rom: Vec<u8>, min_window_size: usize) -> MappingTable {\n\n assert!(min_window_size <= 8);\n\n let mut banks: Vec<RomBank> = vec![];\n\n let bank_count = rom.len() / BANK_SIZE;\n\n let mut remaining_rom = rom;\n\n for _ in 0..bank_count {\n\n let mut current_bank = remaining_rom;\n\n remaining_rom = current_bank.split_off(BANK_SIZE);\n\n banks.push(RomBank::new(current_bank));\n\n }\n\n\n\n MappingTable {\n", "file_path": "src/mappers/bank.rs", "rank": 21, "score": 115288.78373621272 }, { "content": "fn get_fine_scroll(size: u16, screen_dist: u16, sprite_dist: u16, flip: bool) -> u16 {\n\n let scroll = screen_dist - sprite_dist;\n\n if flip { (size - 1) - scroll } else { scroll }\n\n}\n\n\n\nimpl SpriteRenderer {\n\n pub fn render(\n\n &mut self,\n\n buffer: &mut [PaletteIndex; SCREEN_BUFFER_SIZE],\n\n reg: &mut PPUReg,\n\n start: usize,\n\n stop: usize,\n\n ) {\n\n self.draw(buffer, reg, start, stop)\n\n }\n\n\n\n pub fn sprite_eval(&mut self, scanline: u16, reg: &PPUReg, mem: &mut PPUMemory) {\n\n if scanline + 1 >= SCREEN_HEIGHT as u16 {\n\n return;\n\n }\n", "file_path": "src/ppu/sprite_rendering.rs", "rank": 22, "score": 113623.24316613888 }, { "content": "#[bench]\n\nfn bench_cpu_sprite_jit(b: &mut Bencher) {\n\n run_benchmark(\n\n b,\n\n Path::new(\"nes-test-roms/other/SPRITE.NES\"),\n\n HashMap::new(),\n\n cpu_benchmark_settings(true),\n\n );\n\n}\n\n\n", "file_path": "src/tests/bench.rs", "rank": 23, "score": 111550.64854400695 }, { "content": "#[bench]\n\nfn bench_cpu_blocks_jit(b: &mut Bencher) {\n\n run_benchmark(\n\n b,\n\n Path::new(\"nes-test-roms/other/BLOCKS.NES\"),\n\n HashMap::new(),\n\n cpu_benchmark_settings(true),\n\n );\n\n}\n\n\n", "file_path": "src/tests/bench.rs", "rank": 24, "score": 111550.64854400695 }, { "content": "#[bench]\n\nfn bench_cpu_blocks_no_jit(b: &mut Bencher) {\n\n run_benchmark(\n\n b,\n\n Path::new(\"nes-test-roms/other/BLOCKS.NES\"),\n\n HashMap::new(),\n\n cpu_benchmark_settings(false),\n\n );\n\n}\n\n\n", "file_path": "src/tests/bench.rs", "rank": 25, "score": 111550.64854400695 }, { "content": "#[bench]\n\nfn bench_cpu_sprite_no_jit(b: &mut Bencher) {\n\n run_benchmark(\n\n b,\n\n Path::new(\"nes-test-roms/other/SPRITE.NES\"),\n\n HashMap::new(),\n\n cpu_benchmark_settings(false),\n\n );\n\n}\n\n\n", "file_path": "src/tests/bench.rs", "rank": 26, "score": 111550.64854400695 }, { "content": "#[test]\n\nfn oam_read() {\n\n let mut hashes: HashMap<u32, &'static str> = HashMap::new();\n\n let commands: HashMap<u32, &'static str> = HashMap::new();\n\n\n\n hashes.insert(27, \"cc2447362cceb400803a18c2e4b5d5d4e4aa2ea7\");\n\n\n\n run_system_test(\n\n 28,\n\n Path::new(\"nes-test-roms/oam_read/oam_read.nes\"),\n\n hashes,\n\n commands,\n\n );\n\n}\n\n\n", "file_path": "src/tests/mod.rs", "rank": 27, "score": 110357.82520644902 }, { "content": "#[test]\n\nfn blargg_apu_test_irq_flag() {\n\n let mut hashes: HashMap<u32, &'static str> = HashMap::new();\n\n let commands: HashMap<u32, &'static str> = HashMap::new();\n\n\n\n hashes.insert(18, \"09e4ad012c8fddfd8e3b4cc6d1b395c5062768c2\");\n\n\n\n run_system_test(\n\n 19,\n\n 
Path::new(\"nes-test-roms/apu_test/rom_singles/3-irq_flag.nes\"),\n\n hashes,\n\n commands,\n\n );\n\n}\n\n\n", "file_path": "src/tests/mod.rs", "rank": 28, "score": 109159.81327701543 }, { "content": "#[test]\n\nfn blargg_apu_test_len_ctr() {\n\n let mut hashes: HashMap<u32, &'static str> = HashMap::new();\n\n let commands: HashMap<u32, &'static str> = HashMap::new();\n\n\n\n hashes.insert(18, \"ea9ac1696a5cec416f0a9f34c052815ca59850d5\");\n\n\n\n run_system_test(\n\n 19,\n\n Path::new(\"nes-test-roms/apu_test/rom_singles/1-len_ctr.nes\"),\n\n hashes,\n\n commands,\n\n );\n\n}\n\n\n", "file_path": "src/tests/mod.rs", "rank": 29, "score": 109159.81327701543 }, { "content": "#[test]\n\nfn sprite_hit_alignment() {\n\n let mut hashes: HashMap<u32, &'static str> = HashMap::new();\n\n let commands: HashMap<u32, &'static str> = HashMap::new();\n\n\n\n hashes.insert(31, \"33815f5682dda683d1a9fe7495f6358c0e741a9d\");\n\n\n\n run_system_test(\n\n 32,\n\n Path::new(\"nes-test-roms/sprite_hit_tests_2005.10.05/02.alignment.nes\"),\n\n hashes,\n\n commands,\n\n );\n\n}\n\n\n", "file_path": "src/tests/mod.rs", "rank": 30, "score": 106052.80841741877 }, { "content": "#[test]\n\nfn sprite_hit_basics() {\n\n let mut hashes: HashMap<u32, &'static str> = HashMap::new();\n\n let commands: HashMap<u32, &'static str> = HashMap::new();\n\n\n\n hashes.insert(33, \"1437c48bb22dd3be0d37449171d2120e13877326\");\n\n\n\n run_system_test(\n\n 33,\n\n Path::new(\"nes-test-roms/sprite_hit_tests_2005.10.05/01.basics.nes\"),\n\n hashes,\n\n commands,\n\n );\n\n}\n\n\n", "file_path": "src/tests/mod.rs", "rank": 31, "score": 106052.80841741877 }, { "content": "#[test]\n\nfn sprite_hit_flip() {\n\n let mut hashes: HashMap<u32, &'static str> = HashMap::new();\n\n let commands: HashMap<u32, &'static str> = HashMap::new();\n\n\n\n hashes.insert(21, \"e16e43e5efdeacfd999a8ea031fa5058ec202f96\");\n\n\n\n run_system_test(\n\n 22,\n\n Path::new(\"nes-test-roms/sprite_hit_tests_2005.10.05/04.flip.nes\"),\n\n hashes,\n\n commands,\n\n );\n\n}\n\n\n", "file_path": "src/tests/mod.rs", "rank": 32, "score": 106052.80841741877 }, { "content": "#[test]\n\nfn sprite_hit_corners() {\n\n let mut hashes: HashMap<u32, &'static str> = HashMap::new();\n\n let commands: HashMap<u32, &'static str> = HashMap::new();\n\n\n\n hashes.insert(21, \"760203cab0bc4df16bda48438f67a91e8a152fb9\");\n\n\n\n run_system_test(\n\n 22,\n\n Path::new(\"nes-test-roms/sprite_hit_tests_2005.10.05/03.corners.nes\"),\n\n hashes,\n\n commands,\n\n );\n\n}\n\n\n", "file_path": "src/tests/mod.rs", "rank": 33, "score": 106052.80841741877 }, { "content": "#[test]\n\nfn verify_completes_nestest() {\n\n let mut hashes: HashMap<u32, &'static str> = HashMap::new();\n\n let mut commands: HashMap<u32, &'static str> = HashMap::new();\n\n\n\n // Run the main tests\n\n commands.insert(10, \"....T...|........\");\n\n hashes.insert(35, \"2bfe5ffe2fae65fa730c04735a3b25115c5fb65e\");\n\n\n\n // Switch to the unofficial opcode tests and run them\n\n commands.insert(40, \".....S..|........\");\n\n commands.insert(45, \"....T...|........\");\n\n hashes.insert(65, \"0b6895e6ff0e8be76e805a067be6ebec89e7d6ad\");\n\n\n\n run_system_test(\n\n 70,\n\n Path::new(\"nes-test-roms/other/nestest.nes\"),\n\n hashes,\n\n commands,\n\n );\n\n}\n\n\n", "file_path": "src/tests/mod.rs", "rank": 34, "score": 106052.80841741877 }, { "content": "pub fn cpu_benchmark_settings(jit: bool) -> Settings {\n\n Settings {\n\n jit: jit,\n\n graphics_enabled: false,\n\n sound_enabled: false,\n\n ..Default::default()\n\n 
}\n\n}\n\n\n", "file_path": "src/tests/bench.rs", "rank": 35, "score": 105581.90481210615 }, { "content": "fn ppu_to_cpu_cyc(ppu_cyc: u64) -> u64 {\n\n let (div, rem) = div_rem(ppu_cyc, 3);\n\n if rem == 0 {\n\n div\n\n } else {\n\n div + 1\n\n }\n\n}\n\n\n", "file_path": "src/ppu/mod.rs", "rank": 36, "score": 102567.88256699315 }, { "content": "fn start_emulator(cart: Cart, config: Config) {\n\n let sdl = corrosion::sdl2::init().unwrap();\n\n let event_pump = Rc::new(RefCell::new(sdl.event_pump().unwrap()));\n\n\n\n let mut builder =\n\n EmulatorBuilder::new_sdl(cart, make_emulator_settings(&config), &sdl, &event_pump);\n\n\n\n if let Some(file) = get_movie_file() {\n\n let fm2io = corrosion::io::fm2::FM2IO::read(file).unwrap();\n\n builder.io = Box::new(fm2io)\n\n }\n\n\n\n let mut emulator = builder.build();\n\n\n\n let mut stopwatch = Stopwatch::start_new();\n\n let smoothing = 0.9;\n\n let mut avg_frame_time = 0.0f64;\n\n let mousepick_enabled = config.get_bool(\"debug.mousepick\").unwrap_or(false);\n\n loop {\n\n if pump_events(&event_pump) || emulator.halted() {\n", "file_path": "app/src/main.rs", "rank": 37, "score": 101723.34980075159 }, { "content": "fn hash_screen(buf: &[Color; SCREEN_BUFFER_SIZE]) -> Digest {\n\n let newbuf: Vec<u8> = buf.iter().map(|col: &Color| col.bits()).collect();\n\n\n\n let mut s = Sha1::new();\n\n s.update(&newbuf);\n\n s.digest()\n\n}\n\n\n\n#[allow(dead_code)]\n\npub struct HashPrinter {\n\n frames: u32,\n\n\n\n delegate: Box<Screen>,\n\n}\n\n\n\n#[allow(dead_code)]\n\nimpl HashPrinter {\n\n pub fn new(delegate: Box<Screen>) -> HashPrinter {\n\n HashPrinter {\n\n frames: 0,\n", "file_path": "src/tests/hash_screen.rs", "rank": 38, "score": 100694.52528804677 }, { "content": "fn cpu_to_ppu_cyc(cpu_cyc: u64) -> u64 {\n\n cpu_cyc * 3\n\n}\n\n\n", "file_path": "src/ppu/mod.rs", "rank": 39, "score": 96821.08680252555 }, { "content": "fn parse(string: &str) -> u8 {\n\n string.char_indices().filter(|&(_, c)| c != '.').fold(\n\n 0u8,\n\n |acc,\n\n (idx,\n\n _)| {\n\n acc | 1u8 << (7 - idx)\n\n },\n\n )\n\n}\n\n\n\nimpl MemSegment for FM2IO {\n\n fn read(&mut self, idx: u16) -> u8 {\n\n match idx {\n\n 0x4016 => OPEN_BUS | self.controller1.shift(),\n\n 0x4017 => OPEN_BUS | self.controller2.shift(),\n\n x => invalid_address!(x),\n\n }\n\n }\n\n\n", "file_path": "src/io/fm2.rs", "rank": 40, "score": 90662.88701115365 }, { "content": "#[derive(Debug)]\n\nstruct Interval {\n\n start: usize,\n\n end: usize,\n\n}\n\n\n\nimpl Interval {\n\n fn new(start: usize, end: usize) -> Interval {\n\n Interval {\n\n start: start,\n\n end: end,\n\n }\n\n }\n\n\n\n fn intersects_with(&self, other: &Interval) -> bool {\n\n self.start < other.end && self.end > other.start\n\n }\n\n\n\n fn intersection(&self, other: &Interval) -> Interval {\n\n Interval {\n\n start: cmp::max(self.start, other.start),\n", "file_path": "src/ppu/sprite_rendering.rs", "rank": 41, "score": 86101.65612090549 }, { "content": "#[derive(Debug, Copy, Clone)]\n\nstruct SpriteDetails {\n\n idx: usize,\n\n x: u8,\n\n attr: OAMAttr,\n\n tile: TilePattern,\n\n}\n\nconst NO_SPRITE: SpriteDetails = SpriteDetails {\n\n idx: 0xFF,\n\n x: 0xFF,\n\n attr: OAMAttr { bits: 0 },\n\n tile: ::ppu::NO_TILE,\n\n};\n\nconst EMPTY_SECONDARY_OAM_LINE: [SpriteDetails; 8] = [NO_SPRITE; 8];\n\n\n\nimpl SpriteDetails {\n\n fn do_get_pixel(&self, x: u16) -> PaletteIndex {\n\n let fine_x = get_fine_scroll(8, x, self.x as u16, self.attr.contains(FLIP_HORZ));\n\n let attr = self.attr;\n\n let color_id = 
self.tile.get_color_in_pattern(fine_x as u32);\n\n PaletteIndex::from_unpacked(PaletteSet::Sprite, attr.palette(), color_id)\n", "file_path": "src/ppu/sprite_rendering.rs", "rank": 42, "score": 83182.44922194342 }, { "content": "#[derive(Debug, Copy, Clone, PartialEq)]\n\nstruct TileAttribute {\n\n bits: u8,\n\n}\n\n\n\nimpl TileAttribute {\n\n fn new(bits: u8) -> TileAttribute {\n\n TileAttribute { bits: bits }\n\n }\n\n\n\n fn get_palette(&self, x: u16, y: u16) -> u8 {\n\n let y = y % SCREEN_HEIGHT as u16;\n\n let mut at = self.bits;\n\n if y & 0x10 != 0 {\n\n at >>= 4\n\n }\n\n if x & 0x10 != 0 {\n\n at >>= 2\n\n }\n\n at & 0x03\n\n }\n", "file_path": "src/ppu/background_rendering.rs", "rank": 43, "score": 83182.44922194342 }, { "content": "#[derive(Debug, Copy, Clone)]\n\nstruct OAMEntry {\n\n y: u16,\n\n tile: u8,\n\n attr: OAMAttr,\n\n x: u8,\n\n}\n\n\n\nimpl OAMEntry {\n\n fn is_on_scanline(&self, scanline: u16, sprite_height: u16) -> bool {\n\n self.y <= scanline && scanline < self.y + sprite_height\n\n }\n\n\n\n fn build_details(\n\n &self,\n\n idx: usize,\n\n sl: u16,\n\n reg: &PPUReg,\n\n mem: &mut PPUMemory,\n\n ) -> SpriteDetails {\n\n let tile_id = self.tile;\n", "file_path": "src/ppu/sprite_rendering.rs", "rank": 44, "score": 83182.44922194342 }, { "content": "pub trait Mapper {\n\n fn prg_rom_read(&mut self, idx: u16) -> &RomBank;\n\n fn prg_rom_write(&mut self, idx: u16, val: u8) -> &mut RomBank;\n\n\n\n /// Returns a struct which uniquely identifies the ROM cell backing the\n\n /// given address.\n\n fn prg_rom_address(&self, idx: u16) -> RomAddress;\n\n\n\n fn prg_ram_read(&mut self, idx: u16) -> u8;\n\n fn prg_ram_write(&mut self, idx: u16, val: u8);\n\n\n\n fn chr_read(&mut self, idx: u16) -> u8;\n\n fn chr_write(&mut self, idx: u16, val: u8);\n\n\n\n fn get_mirroring_table(&self) -> &[u16; 4];\n\n}\n\n\n\npub struct MapperParams<'a> {\n\n pub prg_rom: Vec<u8>,\n\n pub chr_rom: Vec<u8>,\n", "file_path": "src/mappers/mod.rs", "rank": 45, "score": 82592.18385628305 }, { "content": "pub trait Screen {\n\n fn draw(&mut self, buf: &[Color; SCREEN_BUFFER_SIZE]);\n\n}\n\n\n\npub struct DummyScreen;\n\n\n\nimpl Default for DummyScreen {\n\n fn default() -> DummyScreen {\n\n DummyScreen\n\n }\n\n}\n\n\n\nimpl Screen for DummyScreen {\n\n fn draw(&mut self, _: &[Color; SCREEN_BUFFER_SIZE]) {}\n\n}\n", "file_path": "src/screen/mod.rs", "rank": 46, "score": 82592.18385628305 }, { "content": "pub trait AudioOut {\n\n fn play(&mut self, buffer: &[Sample]);\n\n fn sample_rate(&self) -> f64;\n\n}\n\n\n\npub struct DummyAudioOut;\n\n\n\nimpl AudioOut for DummyAudioOut {\n\n fn play(&mut self, _: &[Sample]) {}\n\n fn sample_rate(&self) -> f64 {\n\n 44100.0\n\n }\n\n}\n", "file_path": "src/audio/mod.rs", "rank": 47, "score": 82592.18385628305 }, { "content": "#[derive(Debug, Copy, Clone)]\n\nstruct AccumulatorAddressingMode;\n\nimpl AddressingMode for AccumulatorAddressingMode {\n\n fn read(self, cpu: &mut CPU) -> u8 {\n\n cpu.regs.a\n\n }\n\n fn write(self, cpu: &mut CPU, val: u8) {\n\n cpu.regs.a = val\n\n }\n\n fn tick_cycle(self, _: &mut CPU) {}\n\n fn untick_cycle(self, _: &mut CPU) {}\n\n}\n\n\n", "file_path": "src/cpu/mod.rs", "rank": 48, "score": 81775.08978684712 }, { "content": "#[derive(Debug, Copy, Clone)]\n\nstruct MemoryAddressingMode {\n\n ptr_base: u16,\n\n ptr: u16,\n\n}\n\nimpl AddressingMode for MemoryAddressingMode {\n\n fn read(self, cpu: &mut CPU) -> u8 {\n\n cpu.read(self.ptr)\n\n }\n\n fn write(self, cpu: &mut CPU, val: u8) {\n\n cpu.write(self.ptr, val)\n\n }\n\n 
fn tick_cycle(self, cpu: &mut CPU) {\n\n cpu.inc_page_cycle(self.ptr_base, self.ptr)\n\n }\n\n fn untick_cycle(self, cpu: &mut CPU) {\n\n cpu.dec_page_cycle(self.ptr_base, self.ptr)\n\n }\n\n}\n\n\n\nbitflags! {\n", "file_path": "src/cpu/mod.rs", "rank": 49, "score": 81775.08978684712 }, { "content": "#[derive(Debug, Copy, Clone)]\n\nstruct ImmediateAddressingMode;\n\nimpl AddressingMode for ImmediateAddressingMode {\n\n fn read(self, cpu: &mut CPU) -> u8 {\n\n cpu.load_incr_pc()\n\n }\n\n fn write(self, _: &mut CPU, val: u8) {\n\n panic!(\"Tried to write {:02X} to an immediate address.\", val)\n\n }\n\n fn tick_cycle(self, _: &mut CPU) {}\n\n fn untick_cycle(self, _: &mut CPU) {}\n\n}\n\n\n", "file_path": "src/cpu/mod.rs", "rank": 50, "score": 81775.08978684712 }, { "content": "fn div_rem(num: u64, den: u64) -> (u64, u64) {\n\n (num / den, num % den)\n\n}\n\n\n", "file_path": "src/ppu/mod.rs", "rank": 51, "score": 80661.93854061386 }, { "content": "struct Compiler<'a> {\n\n asm: ::dynasmrt::x64::Assembler,\n\n cpu: &'a mut CPU,\n\n dispatcher: &'a mut Dispatcher,\n\n analysis: BlockAnalysis,\n\n\n\n entry_point: u16,\n\n pc: u16,\n\n current_instruction: u16,\n\n current_instr_analysis: InstructionAnalysis,\n\n\n\n branch_targets: FnvHashMap<u16, DynamicLabel>,\n\n}\n\n\n\nimpl<'a> Compiler<'a> {\n\n fn new(\n\n cpu: &'a mut CPU,\n\n dispatcher: &'a mut Dispatcher,\n\n analysis: BlockAnalysis,\n\n ) -> Compiler<'a> {\n", "file_path": "src/cpu/x86_64_compiler/mod.rs", "rank": 52, "score": 79229.97201003722 }, { "content": "pub fn new(params: MapperParams) -> Box<Mapper> {\n\n let chr_ram = if params.chr_rom.is_empty() {\n\n vec![0u8; 0x2000].into_boxed_slice()\n\n } else {\n\n vec![0u8; 0].into_boxed_slice()\n\n };\n\n\n\n let prg_ram: Box<MemSegment> = if params.has_battery_backed_ram {\n\n Box::new(\n\n BatteryBackedRam::new(params.rom_path, params.prg_ram_size as u32).unwrap(),\n\n )\n\n } else {\n\n Box::new(VolatileRam::new(params.prg_ram_size as usize))\n\n };\n\n\n\n let mut mapper = MMC1 {\n\n regs: Regs {\n\n control: Ctrl {\n\n mode: PrgMode::FixLast,\n\n mirroring: super::standard_mapping_tables(ScreenMode::OneScreenLow),\n", "file_path": "src/mappers/mmc1.rs", "rank": 53, "score": 78681.81515667572 }, { "content": "pub fn new(params: MapperParams) -> Box<Mapper> {\n\n let chr_ram = if params.chr_rom.is_empty() {\n\n vec![0u8; 0x2000].into_boxed_slice()\n\n } else {\n\n vec![0u8; 0].into_boxed_slice()\n\n };\n\n\n\n let mut prg_rom_table = MappingTable::new(params.prg_rom, 8);\n\n let bank_count = prg_rom_table.bank_count();\n\n for page in 0..8 {\n\n prg_rom_table.map_page(page, page % bank_count);\n\n }\n\n\n\n Box::new(Mapper000 {\n\n prg_rom: prg_rom_table,\n\n chr_rom: params.chr_rom.into_boxed_slice(),\n\n chr_ram: chr_ram,\n\n prg_ram: vec![0u8; params.prg_ram_size].into_boxed_slice(),\n\n mode: super::standard_mapping_tables(params.mirroring_mode),\n\n })\n", "file_path": "src/mappers/mapper000.rs", "rank": 54, "score": 78681.81515667572 }, { "content": "fn draw_segment(\n\n pattern_line: &[TilePattern; TILES_PER_LINE],\n\n attr_line: &[u8; TILES_PER_LINE],\n\n pixel_line: &mut [PaletteIndex],\n\n fine_x_scroll: usize,\n\n start: usize,\n\n stop: usize,\n\n) {\n\n for (pixel, item) in pixel_line.iter_mut().enumerate().take(stop).skip(start) {\n\n let displayed_pixel = pixel + fine_x_scroll;\n\n render_single_pixel(pattern_line, attr_line, displayed_pixel, item);\n\n }\n\n}\n\n\n", "file_path": "src/ppu/background_rendering.rs", "rank": 55, "score": 78131.88426385428 }, { 
"content": "fn render_single_pixel(\n\n pattern_line: &[TilePattern],\n\n attr_line: &[u8],\n\n displayed_pixel: usize,\n\n item: &mut PaletteIndex,\n\n) {\n\n let tile_idx = displayed_pixel / 8;\n\n let pattern = pattern_line[tile_idx];\n\n let fine_x = displayed_pixel as u32 & 0x07;\n\n let color_id = pattern.get_color_in_pattern(fine_x);\n\n\n\n let palette_id = attr_line[tile_idx];\n\n\n\n *item = PaletteIndex::from_packed(color_id | palette_id);\n\n}\n\n\n\nimpl BackgroundRenderer {\n\n pub fn render(\n\n &mut self,\n\n buffer: &mut [PaletteIndex; SCREEN_BUFFER_SIZE],\n", "file_path": "src/ppu/background_rendering.rs", "rank": 56, "score": 75496.34905770577 }, { "content": "pub trait IO: MemSegment {\n\n fn poll(&mut self);\n\n}\n\n\n\npub enum DummyIO {\n\n Dummy,\n\n}\n\n\n\nimpl DummyIO {\n\n pub fn new() -> DummyIO {\n\n DummyIO::Dummy\n\n }\n\n}\n\n\n\nimpl MemSegment for DummyIO {\n\n fn read(&mut self, _: u16) -> u8 {\n\n 0\n\n }\n\n\n\n fn write(&mut self, _: u16, _: u8) {\n\n ()\n\n }\n\n}\n\n\n\nimpl IO for DummyIO {\n\n fn poll(&mut self) {\n\n ()\n\n }\n\n}\n", "file_path": "src/io/mod.rs", "rank": 57, "score": 75031.19074045238 }, { "content": "fn trampoline_to_nes(\n\n f: fn(*mut CPU, *mut [u8; 0x800]) -> (),\n\n cpu: *mut CPU,\n\n ram: *mut [u8; 0x800],\n\n) {\n\n unsafe {\n\n asm!(\n\n \"\n\n ${:comment} Recieve the function pointer, CPU and RAM\n\n mov rax, $0\n\n mov rbx, $1\n\n mov rdx, $2\n\n\n\n ${:comment} Load registers, etc. from struct\n\n xor r8, r8\n\n movzx r9, byte ptr [rbx+$3]\n\n movzx r10, byte ptr [rbx+$4]\n\n movzx r11, byte ptr [rbx+$5]\n\n movzx r12, byte ptr [rbx+$6]\n\n movzx r13, byte ptr [rbx+$7]\n", "file_path": "src/cpu/x86_64_compiler/mod.rs", "rank": 58, "score": 74153.31451011963 }, { "content": " pub fn prg_rom_write(&mut self, idx: u16, val: u8) -> &mut RomBank {\n\n self.mapper.prg_rom_write(idx, val)\n\n }\n\n pub fn prg_rom_address(&self, idx: u16) -> RomAddress {\n\n self.mapper.prg_rom_address(idx)\n\n }\n\n pub fn prg_ram_read(&mut self, idx: u16) -> u8 {\n\n self.mapper.prg_ram_read(idx)\n\n }\n\n pub fn prg_ram_write(&mut self, idx: u16, val: u8) {\n\n self.mapper.prg_ram_write(idx, val)\n\n }\n\n pub fn chr_read(&mut self, idx: u16) -> u8 {\n\n self.mapper.chr_read(idx)\n\n }\n\n pub fn chr_write(&mut self, idx: u16, val: u8) {\n\n self.mapper.chr_write(idx, val)\n\n }\n\n\n\n pub fn new(mapper: Box<Mapper>) -> Cart {\n", "file_path": "src/cart/mod.rs", "rank": 59, "score": 66037.51415786422 }, { "content": " Cart {\n\n mapper: mapper,\n\n system: System::NES,\n\n tv: TvFormat::NTSC,\n\n }\n\n }\n\n\n\n pub fn get_mirroring_table(&self) -> &[u16; 4] {\n\n self.mapper.get_mirroring_table()\n\n }\n\n\n\n pub fn read(path: &Path) -> Result<Cart, RomReadError> {\n\n let mut file = try!(File::open(path));\n\n let mut buf = vec![];\n\n try!(file.read_to_end(&mut buf));\n\n let rom = try!(Rom::parse(&buf));\n\n\n\n let mapper = rom.mapper();\n\n let screen_mode = rom.screen_mode();\n\n let system = rom.system();\n", "file_path": "src/cart/mod.rs", "rank": 60, "score": 66029.17649567519 }, { "content": " pub enum RomReadError {\n\n Io(err: io::Error) {\n\n display(\"IO Error: {}\", err)\n\n description(err.description())\n\n cause(err)\n\n from()\n\n }\n\n Parse(err: RomError) {\n\n display(\"ROM Error: {}\", err)\n\n description(err.description())\n\n cause(err)\n\n from()\n\n }\n\n }\n\n}\n\n\n\nimpl Cart {\n\n pub fn prg_rom_read(&mut self, idx: u16) -> &RomBank {\n\n self.mapper.prg_rom_read(idx)\n\n }\n", "file_path": 
"src/cart/mod.rs", "rank": 61, "score": 66026.99358396338 }, { "content": "pub mod ines;\n\n\n\n\n\nuse cart::ines::{Rom, RomError};\n\nuse mappers::{Mapper, MapperParams, RomAddress, RomBank};\n\nuse std::fs::File;\n\nuse std::io;\n\nuse std::io::prelude::*;\n\nuse std::path::Path;\n\n\n\n#[derive(PartialEq, Debug, Clone, Copy)]\n\npub enum ScreenMode {\n\n Horizontal,\n\n Vertical,\n\n FourScreen,\n\n OneScreenLow,\n\n OneScreenHigh,\n\n}\n\n\n\n#[derive(PartialEq, Debug, Clone, Copy)]\n", "file_path": "src/cart/mod.rs", "rank": 62, "score": 66023.10814396724 }, { "content": "pub enum System {\n\n NES,\n\n Vs,\n\n PC10,\n\n}\n\n\n\n#[derive(PartialEq, Debug, Clone, Copy)]\n\npub enum TvFormat {\n\n NTSC,\n\n PAL,\n\n}\n\n\n\npub struct Cart {\n\n mapper: Box<Mapper>,\n\n pub system: System,\n\n pub tv: TvFormat,\n\n}\n\n\n\nquick_error! {\n\n #[derive(Debug)]\n", "file_path": "src/cart/mod.rs", "rank": 63, "score": 66018.96068837564 }, { "content": " let tv = rom.tv_system();\n\n let sram = rom.sram();\n\n let (prg_rom, chr_rom, prg_ram_size) = (rom.prg_rom, rom.chr_rom, rom.prg_ram_size);\n\n\n\n let params = MapperParams {\n\n prg_rom: prg_rom,\n\n chr_rom: chr_rom,\n\n\n\n prg_ram_size: prg_ram_size,\n\n\n\n rom_path: path,\n\n\n\n has_battery_backed_ram: sram,\n\n mirroring_mode: screen_mode,\n\n };\n\n\n\n let mapper = Mapper::new(mapper as u16, params);\n\n Ok(Cart {\n\n mapper: mapper,\n\n system: system,\n\n tv: tv,\n\n })\n\n }\n\n}\n", "file_path": "src/cart/mod.rs", "rank": 64, "score": 66017.23072651478 }, { "content": "mod hash_screen;\n\nmod test_io;\n\nmod bench;\n\n\n\nuse Settings;\n\nuse std::collections::HashMap;\n\nuse std::path::Path;\n\n\n\n#[test]\n", "file_path": "src/tests/mod.rs", "rank": 65, "score": 65990.69550778296 }, { "content": "\n\n fn write(&mut self, idx: u16, val: u8) {\n\n match idx % 8 {\n\n 0x0004 => {\n\n self.sprite_data.write(self.reg.oamaddr as u16, val);\n\n self.reg.incr_oamaddr();\n\n }\n\n 0x0007 => {\n\n self.ppu_mem.write(self.reg.v, val);\n\n self.reg.incr_ppuaddr();\n\n }\n\n _ => self.reg.write(idx, val),\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use cart::{Cart, ScreenMode};\n", "file_path": "src/ppu/mod.rs", "rank": 66, "score": 65143.08852141728 }, { "content": " }\n\n }\n\n\n\n #[cfg(feature = \"vectorize\")]\n\n fn colorize(&mut self, start: usize, stop: usize) {\n\n use std::mem;\n\n use std::cmp;\n\n use simd::u8x16;\n\n use simd::x86::ssse3::Ssse3U8x16;\n\n\n\n let (background_pal, sprite_pal) = self.ppu_mem.get_palettes();\n\n let index_bytes: &[u8; SCREEN_BUFFER_SIZE] =\n\n unsafe { mem::transmute(&self.palette_buffer) };\n\n let color_bytes: &mut [u8; SCREEN_BUFFER_SIZE] =\n\n unsafe { mem::transmute(&mut self.screen_buffer) };\n\n\n\n let mut start = start;\n\n\n\n while start < stop {\n\n start = cmp::min(start, SCREEN_BUFFER_SIZE - 16);\n", "file_path": "src/ppu/mod.rs", "rank": 67, "score": 65134.73223243545 }, { "content": " let palette_idx = u8x16::load(index_bytes, start);\n\n\n\n let table: u8x16 = palette_idx >> 4;\n\n let use_sprite_table = table.ne(u8x16::splat(0));\n\n let color_id = palette_idx & u8x16::splat(0b0000_1111);\n\n\n\n let background_shuf = background_pal.shuffle_bytes(color_id);\n\n let sprite_shuf = sprite_pal.shuffle_bytes(color_id);\n\n\n\n let final_color = use_sprite_table.select(sprite_shuf, background_shuf);\n\n final_color.store(&mut *color_bytes, start);\n\n start += 16;\n\n }\n\n }\n\n\n\n #[cfg(not(feature = \"vectorize\"))]\n\n fn colorize(&mut self, 
start: usize, stop: usize) {\n\n let color_slice = &mut self.screen_buffer[start..stop];\n\n let index_slice = &self.palette_buffer[start..stop];\n\n\n", "file_path": "src/ppu/mod.rs", "rank": 68, "score": 65134.475607598324 }, { "content": " addr |= (palette_id & 0x03) << 2;\n\n addr |= color_id & 0x03;\n\n PaletteIndex { addr: addr }\n\n }\n\n\n\n #[cfg(not(feature = \"vectorize\"))]\n\n fn to_index(self) -> usize {\n\n self.addr as usize\n\n }\n\n\n\n fn is_transparent(&self) -> bool {\n\n self.addr == 0\n\n }\n\n}\n\n\n\n#[derive(Debug, Copy, Clone)]\n\npub struct TilePattern {\n\n lo: u8,\n\n hi: u8,\n\n}\n", "file_path": "src/ppu/mod.rs", "rank": 69, "score": 65134.216388369154 }, { "content": " use mappers::create_test_mapper;\n\n use memory::MemSegment;\n\n use ppu::ppu_reg::PPUCtrl;\n\n use screen::DummyScreen;\n\n use std::cell::UnsafeCell;\n\n use std::rc::Rc;\n\n\n\n pub fn create_test_ppu() -> PPU {\n\n create_test_ppu_with_rom(vec![0u8; 0x1000])\n\n }\n\n\n\n pub fn create_test_ppu_with_rom(chr_rom: Vec<u8>) -> PPU {\n\n let mapper = create_test_mapper(vec![0u8; 0x1000], chr_rom, ScreenMode::FourScreen);\n\n let cart = Cart::new(mapper);\n\n let settings = Settings {\n\n graphics_enabled: true,\n\n ..Default::default()\n\n };\n\n PPU::new(\n\n Rc::new(settings),\n", "file_path": "src/ppu/mod.rs", "rank": 70, "score": 65132.45388821286 }, { "content": " addr: u8,\n\n}\n\n\n\nconst TRANSPARENT: PaletteIndex = PaletteIndex { addr: 0x00 };\n\n\n\nimpl PaletteIndex {\n\n pub fn from_packed(addr: u8) -> PaletteIndex {\n\n if addr & 0x03 == 0 {\n\n PaletteIndex { addr: 0 }\n\n } else {\n\n PaletteIndex { addr: addr }\n\n }\n\n }\n\n\n\n pub fn from_unpacked(set: PaletteSet, palette_id: u8, color_id: u8) -> PaletteIndex {\n\n if color_id == 0 {\n\n return PaletteIndex { addr: 0 };\n\n }\n\n let mut addr: u8 = 0x00;\n\n addr |= set.table();\n", "file_path": "src/ppu/mod.rs", "rank": 71, "score": 65131.53949329605 }, { "content": " Rc::new(UnsafeCell::new(cart)),\n\n Box::new(DummyScreen::default()),\n\n )\n\n }\n\n\n\n pub fn create_test_ppu_with_mirroring(mode: ScreenMode) -> PPU {\n\n let mapper = create_test_mapper(vec![0u8; 0x1000], vec![0u8; 0x1000], mode);\n\n let cart = Cart::new(mapper);\n\n let settings = Settings {\n\n graphics_enabled: true,\n\n ..Default::default()\n\n };\n\n PPU::new(\n\n Rc::new(settings),\n\n Rc::new(UnsafeCell::new(cart)),\n\n Box::new(DummyScreen::default()),\n\n )\n\n }\n\n\n\n #[test]\n", "file_path": "src/ppu/mod.rs", "rank": 72, "score": 65130.84445244395 }, { "content": "\n\npub const NO_TILE: TilePattern = TilePattern { lo: 0, hi: 0 };\n\n\n\nimpl Default for TilePattern {\n\n fn default() -> TilePattern {\n\n NO_TILE\n\n }\n\n}\n\n\n\nimpl TilePattern {\n\n fn get_color_in_pattern(&self, fine_x: u32) -> u8 {\n\n let lo = self.lo;\n\n let hi = self.hi;\n\n let shift = 0x07 - fine_x;\n\n let color_id_lo = lo.wrapping_shr(shift) & 0x01;\n\n let color_id_hi = (hi.wrapping_shr(shift) & 0x01) << 1;\n\n color_id_lo | color_id_hi\n\n }\n\n}\n\n\n", "file_path": "src/ppu/mod.rs", "rank": 73, "score": 65130.635872224455 }, { "content": "\n\npub const SCREEN_WIDTH: usize = 256;\n\npub const SCREEN_HEIGHT: usize = 240;\n\npub const SCREEN_BUFFER_SIZE: usize = SCREEN_WIDTH * SCREEN_HEIGHT;\n\n\n\nconst CYCLES_PER_SCANLINE: u64 = 341;\n\nconst SCANLINES_PER_FRAME: u64 = 262;\n\nconst CYCLES_PER_FRAME: u64 = CYCLES_PER_SCANLINE * SCANLINES_PER_FRAME;\n\n\n\n#[derive(Debug, Copy, Clone, PartialEq)]\n\n#[repr(C)]\n\npub struct Color(u8);\n\nimpl Color {\n\n fn 
from_bits_truncate(val: u8) -> Color {\n\n Color(val & 0b0011_1111)\n\n }\n\n\n\n pub fn bits(&self) -> u8 {\n\n self.0\n\n }\n", "file_path": "src/ppu/mod.rs", "rank": 74, "score": 65129.99683684146 }, { "content": " pub fn frame(&self) -> u32 {\n\n self.frame\n\n }\n\n\n\n #[cfg(feature = \"debug_features\")]\n\n pub fn mouse_pick(&self, px_x: i32, px_y: i32) {\n\n self.background_data.mouse_pick(&self.reg, px_x, px_y);\n\n self.sprite_data.mouse_pick(px_x, px_y);\n\n }\n\n\n\n pub fn rendering_enabled(&self) -> bool {\n\n self.reg.ppumask.rendering_enabled()\n\n }\n\n}\n\n\n\nimpl MemSegment for PPU {\n\n fn read(&mut self, idx: u16) -> u8 {\n\n match idx % 8 {\n\n 0x0004 => self.sprite_data.read(self.reg.oamaddr as u16),\n\n 0x0007 => {\n", "file_path": "src/ppu/mod.rs", "rank": 75, "score": 65129.46064595364 }, { "content": "use Settings;\n\nuse cart::Cart;\n\nuse memory::MemSegment;\n\nuse screen::Screen;\n\nuse std::cell::UnsafeCell;\n\nuse std::cmp;\n\nuse std::default::Default;\n\nuse std::rc::Rc;\n\n\n\nmod ppu_reg;\n\nuse ppu::ppu_reg::*;\n\n\n\nmod ppu_memory;\n\nuse ppu::ppu_memory::*;\n\n\n\nmod sprite_rendering;\n\nuse ppu::sprite_rendering::*;\n\n\n\nmod background_rendering;\n\nuse ppu::background_rendering::*;\n", "file_path": "src/ppu/mod.rs", "rank": 76, "score": 65128.87162512211 }, { "content": " for (src, dest) in index_slice.iter().zip(color_slice.iter_mut()) {\n\n *dest = self.ppu_mem.read_palette(*src);\n\n }\n\n }\n\n\n\n #[cfg(feature = \"debug_features\")]\n\n pub fn cycle(&self) -> u16 {\n\n self.cyc\n\n }\n\n\n\n #[cfg(feature = \"debug_features\")]\n\n pub fn scanline(&self) -> i16 {\n\n self.sl\n\n }\n\n\n\n #[cfg(feature = \"debug_features\")]\n\n pub fn vram_addr(&self) -> u16 {\n\n self.reg.v\n\n }\n\n\n", "file_path": "src/ppu/mod.rs", "rank": 77, "score": 65128.83726776704 }, { "content": "}\n\n\n\n#[derive(Debug, Copy, Clone, PartialEq)]\n\npub enum PaletteSet {\n\n Background,\n\n Sprite,\n\n}\n\n\n\nimpl PaletteSet {\n\n fn table(&self) -> u8 {\n\n match *self {\n\n PaletteSet::Background => 0x00,\n\n PaletteSet::Sprite => 0x10,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Copy, Clone, PartialEq)]\n\n#[repr(C)]\n\npub struct PaletteIndex {\n", "file_path": "src/ppu/mod.rs", "rank": 78, "score": 65128.15993109605 }, { "content": " rem = rem.saturating_sub(1); // Skip idle cycle\n\n rem = cmp::min(rem, SCREEN_WIDTH as u64); // Cut off HBLANK\n\n\n\n pixel += rem as usize;\n\n pixel\n\n}\n\n\n\nimpl PPU {\n\n pub fn new(settings: Rc<Settings>, cart: Rc<UnsafeCell<Cart>>, screen: Box<Screen>) -> PPU {\n\n PPU {\n\n settings: settings,\n\n\n\n reg: Default::default(),\n\n ppudata_read_buffer: 0,\n\n ppu_mem: PPUMemory::new(cart),\n\n\n\n palette_buffer: Box::new([TRANSPARENT; SCREEN_BUFFER_SIZE]),\n\n screen_buffer: Box::new([Color::from_bits_truncate(0x00); SCREEN_BUFFER_SIZE]),\n\n screen: screen,\n\n\n", "file_path": "src/ppu/mod.rs", "rank": 79, "score": 65127.84685337779 }, { "content": " let mut chr_rom = vec![0u8; 0x2000];\n\n chr_rom[0x0ABC] = 12;\n\n chr_rom[0x0DBA] = 212;\n\n let mut ppu = create_test_ppu_with_rom(chr_rom);\n\n\n\n ppu.reg.v = 0x0ABC;\n\n ppu.read(0x2007); // Dummy Read\n\n assert_eq!(ppu.read(0x2007), 12);\n\n\n\n ppu.reg.v = 0x0DBA;\n\n ppu.read(0x2007); // Dummy Read\n\n assert_eq!(ppu.read(0x2007), 212);\n\n }\n\n\n\n #[test]\n\n fn ppu_can_read_write_vram() {\n\n let mut ppu = create_test_ppu();\n\n\n\n ppu.reg.v = 0x2ABC;\n\n ppu.write(0x2007, 12);\n", "file_path": "src/ppu/mod.rs", "rank": 80, "score": 65124.747970058044 }, { 
"content": "pub struct PPU {\n\n settings: Rc<Settings>,\n\n\n\n reg: PPUReg,\n\n ppudata_read_buffer: u8,\n\n ppu_mem: PPUMemory,\n\n\n\n screen: Box<Screen>,\n\n palette_buffer: Box<[PaletteIndex; SCREEN_BUFFER_SIZE]>,\n\n screen_buffer: Box<[Color; SCREEN_BUFFER_SIZE]>,\n\n\n\n sprite_data: SpriteRenderer,\n\n background_data: BackgroundRenderer,\n\n\n\n global_cyc: u64,\n\n cyc: u16,\n\n sl: i16,\n\n frame: u32,\n\n\n\n next_vblank_ppu_cyc: u64,\n\n next_vblank_cpu_cyc: u64,\n\n}\n\n\n\n#[derive(Copy, Debug, PartialEq, Clone)]\n\npub enum StepResult {\n\n NMI,\n\n Continue,\n\n}\n\n\n", "file_path": "src/ppu/mod.rs", "rank": 81, "score": 65124.42844190489 }, { "content": " if self.reg.ppumask.contains(S_SPR) {\n\n self.sprite_data\n\n .render(&mut self.palette_buffer, &mut self.reg, start_px, stop_px);\n\n }\n\n\n\n self.colorize(start_px, stop_px);\n\n }\n\n\n\n if hit_nmi {\n\n StepResult::NMI\n\n } else {\n\n StepResult::Continue\n\n }\n\n }\n\n\n\n /// Returns the CPU cycle number representing the next time the CPU should\n\n /// run the PPU. When the CPU cycle reaches this number, the CPU must run\n\n /// the PPU.\n\n pub fn requested_run_cycle(&self) -> u64 {\n\n self.next_vblank_cpu_cyc\n", "file_path": "src/ppu/mod.rs", "rank": 82, "score": 65121.42629959519 }, { "content": " fn reading_oamdata_doesnt_increment_oamaddr() {\n\n let mut ppu = create_test_ppu();\n\n ppu.reg.oamaddr = 0;\n\n ppu.read(0x2004);\n\n assert_eq!(ppu.reg.oamaddr, 0);\n\n }\n\n\n\n #[test]\n\n fn writing_oamdata_increments_oamaddr() {\n\n let mut ppu = create_test_ppu();\n\n ppu.reg.oamaddr = 0;\n\n ppu.write(0x2004, 12);\n\n assert_eq!(ppu.reg.oamaddr, 1);\n\n ppu.reg.oamaddr = 255;\n\n ppu.write(0x2004, 12);\n\n assert_eq!(ppu.reg.oamaddr, 0);\n\n }\n\n\n\n #[test]\n\n fn ppu_can_read_chr_rom() {\n", "file_path": "src/ppu/mod.rs", "rank": 83, "score": 65121.36965503801 }, { "content": " let mut ppu = create_test_ppu();\n\n ppu.reg.v = 0x3F16;\n\n ppu.write(0x2007, 21);\n\n ppu.reg.v = 0x3F16;\n\n assert_eq!(ppu.read(0x2007), 21);\n\n }\n\n\n\n #[test]\n\n fn accessing_ppudata_increments_ppuaddr() {\n\n let mut ppu = create_test_ppu();\n\n ppu.reg.v = 0x2000;\n\n ppu.read(0x2007);\n\n assert_eq!(ppu.reg.v, 0x2001);\n\n ppu.write(0x2007, 0);\n\n assert_eq!(ppu.reg.v, 0x2002);\n\n }\n\n\n\n #[test]\n\n fn accessing_ppudata_increments_ppuaddr_by_32_when_ctrl_flag_is_set() {\n\n let mut ppu = create_test_ppu();\n\n ppu.reg.ppuctrl = PPUCtrl::new(0b0000_0100);\n\n ppu.reg.v = 0x2000;\n\n ppu.read(0x2007);\n\n assert_eq!(ppu.reg.v, 0x2020);\n\n ppu.write(0x2007, 0);\n\n assert_eq!(ppu.reg.v, 0x2040);\n\n }\n\n}\n", "file_path": "src/ppu/mod.rs", "rank": 84, "score": 65121.30783768476 }, { "content": " ppu.reg.v = 0x2ABC;\n\n ppu.read(0x2007); // Dummy read\n\n assert_eq!(ppu.read(0x2007), 12);\n\n\n\n ppu.reg.v = 0x2DBA;\n\n ppu.write(0x2007, 212);\n\n ppu.reg.v = 0x2DBA;\n\n ppu.read(0x2007); // Dummy Read\n\n assert_eq!(ppu.read(0x2007), 212);\n\n\n\n // Mirroring\n\n ppu.reg.v = 0x2EFC;\n\n ppu.write(0x2007, 128);\n\n ppu.reg.v = 0x3EFC;\n\n ppu.read(0x2007); // Dummy Read\n\n assert_eq!(ppu.read(0x2007), 128);\n\n }\n\n\n\n #[test]\n\n fn ppu_needs_no_dummy_read_for_palette_data() {\n", "file_path": "src/ppu/mod.rs", "rank": 85, "score": 65120.54622524237 }, { "content": " }\n\n }\n\n match (self.cyc, self.sl) {\n\n (_, -1) => self.prerender_scanline(),\n\n\n\n // Visible scanlines\n\n (0, 0...239) => {\n\n if self.settings.graphics_enabled {\n\n self.sprite_data\n\n .sprite_eval(self.sl as u16, &self.reg, 
&mut self.ppu_mem)\n\n }\n\n }\n\n (_, 0...239) => (),\n\n\n\n (_, 240) => (), //Post-render idle scanline\n\n (1, 241) => self.start_vblank(hit_nmi),\n\n (_, 241...260) => (), //VBlank lines\n\n _ => (),\n\n }\n\n }\n", "file_path": "src/ppu/mod.rs", "rank": 86, "score": 65120.13829493372 }, { "content": " let addr = self.reg.v;\n\n match addr {\n\n 0x0000...0x3EFF => {\n\n let old_buffer = self.ppudata_read_buffer;\n\n self.ppudata_read_buffer = self.ppu_mem.read(addr);\n\n self.reg.incr_ppuaddr();\n\n old_buffer\n\n }\n\n 0x3F00...0x3FFF => {\n\n let read_result = self.ppu_mem.read(addr);\n\n self.reg.incr_ppuaddr();\n\n self.ppudata_read_buffer = self.ppu_mem.read_bypass_palette(addr);\n\n read_result\n\n }\n\n x => invalid_address!(x),\n\n }\n\n }\n\n _ => self.reg.read(idx),\n\n }\n\n }\n", "file_path": "src/ppu/mod.rs", "rank": 87, "score": 65119.743351017154 }, { "content": " sprite_data: Default::default(),\n\n background_data: Default::default(),\n\n\n\n global_cyc: 0,\n\n cyc: 0,\n\n sl: 241,\n\n frame: 0,\n\n\n\n next_vblank_ppu_cyc: 1,\n\n next_vblank_cpu_cyc: ppu_to_cpu_cyc(1),\n\n }\n\n }\n\n\n\n pub fn run_to(&mut self, cpu_cycle: u64) -> StepResult {\n\n let start = self.global_cyc;\n\n let stop = cpu_to_ppu_cyc(cpu_cycle);\n\n\n\n let start_px = cyc_to_px(start);\n\n let delta_px = cyc_to_px(stop) - start_px;\n\n let start_px = start_px % SCREEN_BUFFER_SIZE;\n", "file_path": "src/ppu/mod.rs", "rank": 88, "score": 65116.77900491698 }, { "content": "\n\n fn prerender_scanline(&mut self) {\n\n if self.cyc == 1 {\n\n self.reg.ppustat.remove(VBLANK | SPRITE_0 | SPRITE_OVERFLOW);\n\n }\n\n if self.cyc == 339 && self.frame % 2 == 1 {\n\n self.tick_cycle()\n\n }\n\n }\n\n\n\n fn start_vblank(&mut self, hit_nmi: &mut bool) {\n\n self.next_vblank_ppu_cyc += CYCLES_PER_FRAME;\n\n self.next_vblank_cpu_cyc = ppu_to_cpu_cyc(self.next_vblank_ppu_cyc);\n\n\n\n let buf = &self.screen_buffer;\n\n self.screen.draw(buf);\n\n\n\n if self.frame > 0 {\n\n self.reg.ppustat.insert(VBLANK);\n\n *hit_nmi |= self.reg.ppuctrl.generate_vblank_nmi();\n", "file_path": "src/ppu/mod.rs", "rank": 89, "score": 65116.29607524837 }, { "content": " let stop_px = start_px + delta_px;\n\n\n\n let rendering_enabled = self.reg.ppumask.rendering_enabled();\n\n\n\n let mut hit_nmi = false;\n\n while self.global_cyc < stop {\n\n self.tick_cycle();\n\n self.run_cycle(rendering_enabled, &mut hit_nmi);\n\n }\n\n\n\n if self.settings.graphics_enabled {\n\n if self.reg.ppumask.contains(S_BCK) {\n\n self.background_data\n\n .render(&mut self.palette_buffer, start_px, stop_px, &self.reg);\n\n } else {\n\n let slice = &mut self.palette_buffer[start_px..stop_px];\n\n for dest in slice.iter_mut() {\n\n *dest = TRANSPARENT;\n\n }\n\n }\n", "file_path": "src/ppu/mod.rs", "rank": 90, "score": 65116.23735319523 }, { "content": " }\n\n\n\n fn tick_cycle(&mut self) {\n\n self.global_cyc += 1;\n\n self.cyc += 1;\n\n if self.cyc == 341 {\n\n self.cyc = 0;\n\n self.sl += 1;\n\n if self.sl == 261 {\n\n self.sl = -1;\n\n self.frame += 1;\n\n }\n\n }\n\n }\n\n\n\n fn run_cycle(&mut self, rendering_enabled: bool, hit_nmi: &mut bool) {\n\n if let -1...239 = self.sl {\n\n if rendering_enabled && self.settings.graphics_enabled {\n\n self.background_data\n\n .run_cycle(self.cyc, self.sl, &mut self.reg, &mut self.ppu_mem);\n", "file_path": "src/ppu/mod.rs", "rank": 91, "score": 65116.05636309128 }, { "content": "#[cfg(not(feature = \"debug_features\"))]\n\nfn mouse_pick(_: &Rc<RefCell<EventPump>>, _: &Emulator) {}\n\n\n", "file_path": 
"app/src/main.rs", "rank": 92, "score": 58944.43604335991 }, { "content": "#[cfg(target_arch = \"x86_64\")]\n\nstruct Block {\n\n code: ExecutableBlock,\n\n locked: bool,\n\n}\n\n\n", "file_path": "src/cpu/dispatcher.rs", "rank": 93, "score": 57725.83393029243 }, { "content": "/// Represents the frequency-sweep units used by the two square channels.\n\nstruct Sweep {\n\n enable: bool,\n\n period: u8,\n\n negate: bool,\n\n shift: u8,\n\n\n\n is_square2: bool,\n\n divider: u8,\n\n reload: bool,\n\n}\n\n\n\nimpl Sweep {\n\n fn new(is_square2: bool) -> Sweep {\n\n Sweep {\n\n enable: false,\n\n period: 0,\n\n negate: false,\n\n shift: 0,\n\n\n\n is_square2: is_square2,\n", "file_path": "src/apu/square.rs", "rank": 94, "score": 57725.42270708186 }, { "content": "struct MMC1 {\n\n regs: Regs,\n\n\n\n accumulator: u8,\n\n write_counter: u8,\n\n\n\n prg_rom: MappingTable,\n\n chr_ram: Box<[u8]>,\n\n prg_ram: Box<MemSegment>,\n\n}\n\n\n\nimpl MMC1 {\n\n fn update_mapping(&mut self) {\n\n match self.regs.control.mode {\n\n PrgMode::Switch32Kb => self.prg_rom\n\n .map_pages_linear(0..8, (self.regs.prg_bank & 0b0000_1110) * 8),\n\n PrgMode::FixFirst => {\n\n self.prg_rom.map_pages_linear(0..4, 0);\n\n self.prg_rom\n\n .map_pages_linear(4..8, (self.regs.prg_bank & 0b0000_1111) * 4);\n", "file_path": "src/mappers/mmc1.rs", "rank": 95, "score": 57721.53728702296 }, { "content": "struct BufferOut {\n\n samples: [Sample; BUFFER_SIZE],\n\n input_counter: usize,\n\n playback_counter: usize,\n\n input_samples: usize,\n\n too_slow: bool,\n\n condvar: Arc<Condvar>,\n\n}\n\n\n\nimpl AudioCallback for BufferOut {\n\n type Channel = Sample;\n\n\n\n fn callback(&mut self, out: &mut [Sample]) {\n\n {\n\n let out_iter = out.iter_mut();\n\n let in_iter = self.samples\n\n .iter()\n\n .cycle()\n\n .skip(self.playback_counter)\n\n .take(self.input_samples);\n", "file_path": "src/audio/sdl.rs", "rank": 96, "score": 57721.53728702296 }, { "content": "#[derive(Debug, Clone, PartialEq)]\n\nstruct Regs {\n\n control: Ctrl,\n\n\n\n chr_0: u8,\n\n chr_1: u8,\n\n prg_bank: usize,\n\n}\n\n\n", "file_path": "src/mappers/mmc1.rs", "rank": 97, "score": 57721.53728702296 }, { "content": "#[derive(Debug, Clone, PartialEq)]\n\nstruct Ctrl {\n\n mode: PrgMode,\n\n mirroring: &'static [u16; 4], // TODO: Add chr mode\n\n}\n\n\n", "file_path": "src/mappers/mmc1.rs", "rank": 98, "score": 57721.53728702296 }, { "content": "struct Mapper000 {\n\n prg_rom: MappingTable,\n\n chr_rom: Box<[u8]>,\n\n chr_ram: Box<[u8]>,\n\n prg_ram: Box<[u8]>,\n\n\n\n mode: &'static [u16; 4],\n\n}\n\n\n", "file_path": "src/mappers/mapper000.rs", "rank": 99, "score": 57721.53728702296 } ]
Rust
lib/src/api/tests.rs
untoldwind/t-rust-less
e4ecd17b624e303626b55350b06319e1632a5743
use crate::{ api::{ Identity, PasswordStrength, Secret, SecretAttachment, SecretEntry, SecretEntryMatch, SecretList, SecretListFilter, SecretProperties, SecretType, SecretVersion, SecretVersionRef, Status, ZeroizeDateTime, }, memguard::SecretBytes, }; use chrono::{TimeZone, Utc}; use quickcheck::{quickcheck, Arbitrary, Gen}; use std::collections::{BTreeMap, HashMap}; use super::{Command, PasswordGeneratorCharsParam, PasswordGeneratorParam, PasswordGeneratorWordsParam, StoreConfig}; use crate::memguard::ZeroizeBytesBuffer; impl Arbitrary for Identity { fn arbitrary(g: &mut Gen) -> Self { Identity { id: String::arbitrary(g), name: String::arbitrary(g), email: String::arbitrary(g), hidden: bool::arbitrary(g), } } } impl Arbitrary for ZeroizeDateTime { fn arbitrary(g: &mut Gen) -> Self { ZeroizeDateTime::from(Utc.timestamp_millis(u32::arbitrary(g) as i64)) } } impl Arbitrary for Status { fn arbitrary(g: &mut Gen) -> Self { Status { locked: bool::arbitrary(g), unlocked_by: Option::arbitrary(g), autolock_at: Option::arbitrary(g), version: String::arbitrary(g), autolock_timeout: u64::arbitrary(g), } } } impl Arbitrary for SecretType { fn arbitrary(g: &mut Gen) -> Self { match g.choose(&[0, 1, 2, 3, 4, 5]).unwrap() { 0 => SecretType::Login, 1 => SecretType::Note, 2 => SecretType::Licence, 3 => SecretType::Wlan, 4 => SecretType::Password, _ => SecretType::Other, } } } impl Arbitrary for SecretListFilter { fn arbitrary(g: &mut Gen) -> Self { SecretListFilter { url: Option::arbitrary(g), tag: Option::arbitrary(g), secret_type: Option::arbitrary(g), name: Option::arbitrary(g), deleted: bool::arbitrary(g), } } } impl Arbitrary for SecretEntry { fn arbitrary(g: &mut Gen) -> Self { SecretEntry { id: String::arbitrary(g), name: String::arbitrary(g), secret_type: SecretType::arbitrary(g), tags: Vec::arbitrary(g), urls: Vec::arbitrary(g), timestamp: ZeroizeDateTime::arbitrary(g), deleted: bool::arbitrary(g), } } } impl Arbitrary for SecretEntryMatch { fn arbitrary(g: &mut Gen) -> Self { SecretEntryMatch { entry: SecretEntry::arbitrary(g), name_score: isize::arbitrary(g), name_highlights: Vec::arbitrary(g), url_highlights: Vec::arbitrary(g), tags_highlights: Vec::arbitrary(g), } } } impl Arbitrary for SecretList { fn arbitrary(g: &mut Gen) -> Self { SecretList { all_tags: Vec::arbitrary(g), entries: vec![SecretEntryMatch::arbitrary(g)], } } } impl Arbitrary for SecretAttachment { fn arbitrary(g: &mut Gen) -> Self { SecretAttachment { name: String::arbitrary(g), mime_type: String::arbitrary(g), content: Vec::arbitrary(g), } } } impl Arbitrary for SecretProperties { fn arbitrary(g: &mut Gen) -> Self { let keys = Vec::<String>::arbitrary(g); let mut properties = BTreeMap::new(); for key in keys { properties.insert(key, String::arbitrary(g)); } SecretProperties::new(properties) } } impl Arbitrary for SecretVersion { fn arbitrary(g: &mut Gen) -> Self { SecretVersion { secret_id: String::arbitrary(g), secret_type: SecretType::arbitrary(g), timestamp: ZeroizeDateTime::arbitrary(g), name: String::arbitrary(g), tags: Vec::arbitrary(g), urls: Vec::arbitrary(g), properties: SecretProperties::arbitrary(g), attachments: Vec::arbitrary(g), deleted: bool::arbitrary(g), recipients: Vec::arbitrary(g), } } } impl Arbitrary for SecretVersionRef { fn arbitrary(g: &mut Gen) -> Self { SecretVersionRef { block_id: String::arbitrary(g), timestamp: ZeroizeDateTime::arbitrary(g), } } } impl Arbitrary for PasswordStrength { fn arbitrary(g: &mut Gen) -> Self { let entropy = f64::arbitrary(g); let crack_time = f64::arbitrary(g); 
PasswordStrength { entropy: if entropy.is_finite() { entropy } else { 0.0 }, crack_time: if crack_time.is_finite() { crack_time } else { 0.0 }, crack_time_display: String::arbitrary(g), score: u8::arbitrary(g), } } } impl Arbitrary for Secret { fn arbitrary(g: &mut Gen) -> Self { Secret { id: String::arbitrary(g), secret_type: SecretType::arbitrary(g), current: SecretVersion::arbitrary(g), current_block_id: String::arbitrary(g), versions: Vec::arbitrary(g), password_strengths: HashMap::arbitrary(g), } } } impl Arbitrary for StoreConfig { fn arbitrary(g: &mut Gen) -> Self { StoreConfig { name: String::arbitrary(g), store_url: String::arbitrary(g), remote_url: Option::arbitrary(g), sync_interval_sec: u32::arbitrary(g), client_id: String::arbitrary(g), autolock_timeout_secs: u64::arbitrary(g), default_identity_id: Option::arbitrary(g), } } } impl Arbitrary for PasswordGeneratorParam { fn arbitrary(g: &mut Gen) -> Self { match g.choose(&[0, 1]).unwrap() { 0 => PasswordGeneratorParam::Chars(PasswordGeneratorCharsParam { num_chars: u8::arbitrary(g), include_uppers: bool::arbitrary(g), include_numbers: bool::arbitrary(g), include_symbols: bool::arbitrary(g), require_upper: bool::arbitrary(g), require_number: bool::arbitrary(g), require_symbol: bool::arbitrary(g), exclude_similar: bool::arbitrary(g), exclude_ambiguous: bool::arbitrary(g), }), _ => PasswordGeneratorParam::Words(PasswordGeneratorWordsParam { num_words: u8::arbitrary(g), delim: char::arbitrary(g), }), } } } impl Arbitrary for SecretBytes { fn arbitrary(g: &mut Gen) -> Self { SecretBytes::from(Vec::arbitrary(g)) } } impl Arbitrary for Command { fn arbitrary(g: &mut Gen) -> Self { match g .choose(&[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, ]) .unwrap() { 0 => Command::ListStores, 1 => Command::UpsertStoreConfig(StoreConfig::arbitrary(g)), 2 => Command::DeleteStoreConfig(String::arbitrary(g)), 3 => Command::GetDefaultStore, 4 => Command::SetDefaultStore(String::arbitrary(g)), 5 => Command::GenerateId, 6 => Command::GeneratePassword(PasswordGeneratorParam::arbitrary(g)), 7 => Command::PollEvents(u64::arbitrary(g)), 8 => Command::Status(String::arbitrary(g)), 9 => Command::Lock(String::arbitrary(g)), 10 => Command::Unlock { store_name: String::arbitrary(g), identity_id: String::arbitrary(g), passphrase: SecretBytes::arbitrary(g), }, 11 => Command::Identities(String::arbitrary(g)), 12 => Command::AddIdentity { store_name: String::arbitrary(g), identity: Identity::arbitrary(g), passphrase: SecretBytes::arbitrary(g), }, 13 => Command::ChangePassphrase { store_name: String::arbitrary(g), passphrase: SecretBytes::arbitrary(g), }, 14 => Command::List { store_name: String::arbitrary(g), filter: SecretListFilter::arbitrary(g), }, 15 => Command::UpdateIndex(String::arbitrary(g)), 16 => Command::Add { store_name: String::arbitrary(g), secret_version: SecretVersion::arbitrary(g), }, 17 => Command::Get { store_name: String::arbitrary(g), secret_id: String::arbitrary(g), }, 18 => Command::GetVersion { store_name: String::arbitrary(g), block_id: String::arbitrary(g), }, 19 => Command::SecretToClipboard { store_name: String::arbitrary(g), block_id: String::arbitrary(g), properties: Vec::arbitrary(g), }, 20 => Command::ClipboardIsDone, 21 => Command::ClipboardCurrentlyProviding, 22 => Command::ClipboardProvideNext, _ => Command::ClipboardDestroy, } } } #[test] fn identity_capnp_serialization() { fn check_serialize(identity: Identity) -> bool { let mut buf = ZeroizeBytesBuffer::with_capacity(8192); 
rmp_serde::encode::write_named(&mut buf, &identity).unwrap(); let deserialized: Identity = rmp_serde::from_read_ref(&buf).unwrap(); identity == deserialized } quickcheck(check_serialize as fn(Identity) -> bool); } #[test] fn status_capnp_serialization() { fn check_serialize(status: Status) -> bool { let mut buf = ZeroizeBytesBuffer::with_capacity(8192); rmp_serde::encode::write_named(&mut buf, &status).unwrap(); let deserialized: Status = rmp_serde::from_read_ref(&buf).unwrap(); status == deserialized } quickcheck(check_serialize as fn(Status) -> bool); } #[test] fn secret_list_filter_capnp_serialization() { fn check_serialize(filter: SecretListFilter) -> bool { let mut buf = ZeroizeBytesBuffer::with_capacity(8192); rmp_serde::encode::write_named(&mut buf, &filter).unwrap(); let deserialized: SecretListFilter = rmp_serde::from_read_ref(&buf).unwrap(); filter == deserialized } quickcheck(check_serialize as fn(SecretListFilter) -> bool); } #[test] fn secret_list_capnp_serialization() { fn check_serialize(list: SecretList) -> bool { let mut buf = ZeroizeBytesBuffer::with_capacity(8192); rmp_serde::encode::write_named(&mut buf, &list).unwrap(); let deserialized: SecretList = rmp_serde::from_read_ref(&buf).unwrap(); list == deserialized } quickcheck(check_serialize as fn(SecretList) -> bool); } #[test] fn secret_version_capnp_serialization() { fn check_serialize(secret_version: SecretVersion) -> bool { let mut buf = ZeroizeBytesBuffer::with_capacity(8192); rmp_serde::encode::write_named(&mut buf, &secret_version).unwrap(); let deserialized: SecretVersion = rmp_serde::from_read_ref(&buf).unwrap(); secret_version == deserialized } quickcheck(check_serialize as fn(SecretVersion) -> bool); } #[test] fn password_strength_capnp_serialization() { fn check_serialize(password_strength: PasswordStrength) -> bool { let mut buf = ZeroizeBytesBuffer::with_capacity(8192); rmp_serde::encode::write_named(&mut buf, &password_strength).unwrap(); let deserialized: PasswordStrength = rmp_serde::from_read_ref(&buf).unwrap(); password_strength == deserialized } quickcheck(check_serialize as fn(PasswordStrength) -> bool); } #[test] fn secret_capnp_serialization() { fn check_serialize(secret: Secret) -> bool { let mut buf = ZeroizeBytesBuffer::with_capacity(8192); rmp_serde::encode::write_named(&mut buf, &secret).unwrap(); let deserialized: Secret = rmp_serde::from_read_ref(&buf).unwrap(); secret == deserialized } quickcheck(check_serialize as fn(Secret) -> bool); } #[test] fn command_serialization() { fn check_serialize(command: Command) -> bool { let mut buf = ZeroizeBytesBuffer::with_capacity(8192); rmp_serde::encode::write_named(&mut buf, &command).unwrap(); let deserialized: Command = rmp_serde::from_read_ref(&buf).unwrap(); command == deserialized } quickcheck(check_serialize as fn(Command) -> bool); }
use crate::{ api::{ Identity, PasswordStrength, Secret, SecretAttachment, SecretEntry, SecretEntryMatch, SecretList, SecretListFilter, SecretProperties, SecretType, SecretVersion, SecretVersionRef, Status, ZeroizeDateTime, }, memguard::SecretBytes, }; use chrono::{TimeZone, Utc}; use quickcheck::{quickcheck, Arbitrary, Gen}; use std::collections::{BTreeMap, HashMap}; use super::{Command, PasswordGeneratorCharsParam, PasswordGeneratorParam, PasswordGeneratorWordsParam, StoreConfig}; use crate::memguard::ZeroizeBytesBuffer; impl Arbitrary for Identity { fn arbitrary(g: &mut Gen) -> Self { Identity { id: String::arbitrary(g), name: String::arbitrary(g), email: String::arbitrary(g), hidden: bool::arbitrary(g), } } } impl Arbitrary for ZeroizeDateTime { fn arbitrary(g: &mut Gen) -> Self { ZeroizeDateTime::from(Utc.timestamp_millis(u32::arbitrary(g) as i64)) } } impl Arbitrary for Status { fn arbitrary(g: &mut Gen) -> Self { Status { locked: bool::arbitrary(g), unlocked_by: Option::arbitrary(g), autolock_at: Option::arbitrary(g), version: String::arbitrary(g), autolock_timeout: u64::arbitrary(g), } } } impl Arbitrary for SecretType { fn arbitrary(g: &mut Gen) -> Self {
} } impl Arbitrary for SecretListFilter { fn arbitrary(g: &mut Gen) -> Self { SecretListFilter { url: Option::arbitrary(g), tag: Option::arbitrary(g), secret_type: Option::arbitrary(g), name: Option::arbitrary(g), deleted: bool::arbitrary(g), } } } impl Arbitrary for SecretEntry { fn arbitrary(g: &mut Gen) -> Self { SecretEntry { id: String::arbitrary(g), name: String::arbitrary(g), secret_type: SecretType::arbitrary(g), tags: Vec::arbitrary(g), urls: Vec::arbitrary(g), timestamp: ZeroizeDateTime::arbitrary(g), deleted: bool::arbitrary(g), } } } impl Arbitrary for SecretEntryMatch { fn arbitrary(g: &mut Gen) -> Self { SecretEntryMatch { entry: SecretEntry::arbitrary(g), name_score: isize::arbitrary(g), name_highlights: Vec::arbitrary(g), url_highlights: Vec::arbitrary(g), tags_highlights: Vec::arbitrary(g), } } } impl Arbitrary for SecretList { fn arbitrary(g: &mut Gen) -> Self { SecretList { all_tags: Vec::arbitrary(g), entries: vec![SecretEntryMatch::arbitrary(g)], } } } impl Arbitrary for SecretAttachment { fn arbitrary(g: &mut Gen) -> Self { SecretAttachment { name: String::arbitrary(g), mime_type: String::arbitrary(g), content: Vec::arbitrary(g), } } } impl Arbitrary for SecretProperties { fn arbitrary(g: &mut Gen) -> Self { let keys = Vec::<String>::arbitrary(g); let mut properties = BTreeMap::new(); for key in keys { properties.insert(key, String::arbitrary(g)); } SecretProperties::new(properties) } } impl Arbitrary for SecretVersion { fn arbitrary(g: &mut Gen) -> Self { SecretVersion { secret_id: String::arbitrary(g), secret_type: SecretType::arbitrary(g), timestamp: ZeroizeDateTime::arbitrary(g), name: String::arbitrary(g), tags: Vec::arbitrary(g), urls: Vec::arbitrary(g), properties: SecretProperties::arbitrary(g), attachments: Vec::arbitrary(g), deleted: bool::arbitrary(g), recipients: Vec::arbitrary(g), } } } impl Arbitrary for SecretVersionRef { fn arbitrary(g: &mut Gen) -> Self { SecretVersionRef { block_id: String::arbitrary(g), timestamp: ZeroizeDateTime::arbitrary(g), } } } impl Arbitrary for PasswordStrength { fn arbitrary(g: &mut Gen) -> Self { let entropy = f64::arbitrary(g); let crack_time = f64::arbitrary(g); PasswordStrength { entropy: if entropy.is_finite() { entropy } else { 0.0 }, crack_time: if crack_time.is_finite() { crack_time } else { 0.0 }, crack_time_display: String::arbitrary(g), score: u8::arbitrary(g), } } } impl Arbitrary for Secret { fn arbitrary(g: &mut Gen) -> Self { Secret { id: String::arbitrary(g), secret_type: SecretType::arbitrary(g), current: SecretVersion::arbitrary(g), current_block_id: String::arbitrary(g), versions: Vec::arbitrary(g), password_strengths: HashMap::arbitrary(g), } } } impl Arbitrary for StoreConfig { fn arbitrary(g: &mut Gen) -> Self { StoreConfig { name: String::arbitrary(g), store_url: String::arbitrary(g), remote_url: Option::arbitrary(g), sync_interval_sec: u32::arbitrary(g), client_id: String::arbitrary(g), autolock_timeout_secs: u64::arbitrary(g), default_identity_id: Option::arbitrary(g), } } } impl Arbitrary for PasswordGeneratorParam { fn arbitrary(g: &mut Gen) -> Self { match g.choose(&[0, 1]).unwrap() { 0 => PasswordGeneratorParam::Chars(PasswordGeneratorCharsParam { num_chars: u8::arbitrary(g), include_uppers: bool::arbitrary(g), include_numbers: bool::arbitrary(g), include_symbols: bool::arbitrary(g), require_upper: bool::arbitrary(g), require_number: bool::arbitrary(g), require_symbol: bool::arbitrary(g), exclude_similar: bool::arbitrary(g), exclude_ambiguous: bool::arbitrary(g), }), _ => 
PasswordGeneratorParam::Words(PasswordGeneratorWordsParam { num_words: u8::arbitrary(g), delim: char::arbitrary(g), }), } } } impl Arbitrary for SecretBytes { fn arbitrary(g: &mut Gen) -> Self { SecretBytes::from(Vec::arbitrary(g)) } } impl Arbitrary for Command { fn arbitrary(g: &mut Gen) -> Self { match g .choose(&[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, ]) .unwrap() { 0 => Command::ListStores, 1 => Command::UpsertStoreConfig(StoreConfig::arbitrary(g)), 2 => Command::DeleteStoreConfig(String::arbitrary(g)), 3 => Command::GetDefaultStore, 4 => Command::SetDefaultStore(String::arbitrary(g)), 5 => Command::GenerateId, 6 => Command::GeneratePassword(PasswordGeneratorParam::arbitrary(g)), 7 => Command::PollEvents(u64::arbitrary(g)), 8 => Command::Status(String::arbitrary(g)), 9 => Command::Lock(String::arbitrary(g)), 10 => Command::Unlock { store_name: String::arbitrary(g), identity_id: String::arbitrary(g), passphrase: SecretBytes::arbitrary(g), }, 11 => Command::Identities(String::arbitrary(g)), 12 => Command::AddIdentity { store_name: String::arbitrary(g), identity: Identity::arbitrary(g), passphrase: SecretBytes::arbitrary(g), }, 13 => Command::ChangePassphrase { store_name: String::arbitrary(g), passphrase: SecretBytes::arbitrary(g), }, 14 => Command::List { store_name: String::arbitrary(g), filter: SecretListFilter::arbitrary(g), }, 15 => Command::UpdateIndex(String::arbitrary(g)), 16 => Command::Add { store_name: String::arbitrary(g), secret_version: SecretVersion::arbitrary(g), }, 17 => Command::Get { store_name: String::arbitrary(g), secret_id: String::arbitrary(g), }, 18 => Command::GetVersion { store_name: String::arbitrary(g), block_id: String::arbitrary(g), }, 19 => Command::SecretToClipboard { store_name: String::arbitrary(g), block_id: String::arbitrary(g), properties: Vec::arbitrary(g), }, 20 => Command::ClipboardIsDone, 21 => Command::ClipboardCurrentlyProviding, 22 => Command::ClipboardProvideNext, _ => Command::ClipboardDestroy, } } } #[test] fn identity_capnp_serialization() { fn check_serialize(identity: Identity) -> bool { let mut buf = ZeroizeBytesBuffer::with_capacity(8192); rmp_serde::encode::write_named(&mut buf, &identity).unwrap(); let deserialized: Identity = rmp_serde::from_read_ref(&buf).unwrap(); identity == deserialized } quickcheck(check_serialize as fn(Identity) -> bool); } #[test] fn status_capnp_serialization() { fn check_serialize(status: Status) -> bool { let mut buf = ZeroizeBytesBuffer::with_capacity(8192); rmp_serde::encode::write_named(&mut buf, &status).unwrap(); let deserialized: Status = rmp_serde::from_read_ref(&buf).unwrap(); status == deserialized } quickcheck(check_serialize as fn(Status) -> bool); } #[test] fn secret_list_filter_capnp_serialization() { fn check_serialize(filter: SecretListFilter) -> bool { let mut buf = ZeroizeBytesBuffer::with_capacity(8192); rmp_serde::encode::write_named(&mut buf, &filter).unwrap(); let deserialized: SecretListFilter = rmp_serde::from_read_ref(&buf).unwrap(); filter == deserialized } quickcheck(check_serialize as fn(SecretListFilter) -> bool); } #[test] fn secret_list_capnp_serialization() { fn check_serialize(list: SecretList) -> bool { let mut buf = ZeroizeBytesBuffer::with_capacity(8192); rmp_serde::encode::write_named(&mut buf, &list).unwrap(); let deserialized: SecretList = rmp_serde::from_read_ref(&buf).unwrap(); list == deserialized } quickcheck(check_serialize as fn(SecretList) -> bool); } #[test] fn secret_version_capnp_serialization() { fn 
check_serialize(secret_version: SecretVersion) -> bool { let mut buf = ZeroizeBytesBuffer::with_capacity(8192); rmp_serde::encode::write_named(&mut buf, &secret_version).unwrap(); let deserialized: SecretVersion = rmp_serde::from_read_ref(&buf).unwrap(); secret_version == deserialized } quickcheck(check_serialize as fn(SecretVersion) -> bool); } #[test] fn password_strength_capnp_serialization() { fn check_serialize(password_strength: PasswordStrength) -> bool { let mut buf = ZeroizeBytesBuffer::with_capacity(8192); rmp_serde::encode::write_named(&mut buf, &password_strength).unwrap(); let deserialized: PasswordStrength = rmp_serde::from_read_ref(&buf).unwrap(); password_strength == deserialized } quickcheck(check_serialize as fn(PasswordStrength) -> bool); } #[test] fn secret_capnp_serialization() { fn check_serialize(secret: Secret) -> bool { let mut buf = ZeroizeBytesBuffer::with_capacity(8192); rmp_serde::encode::write_named(&mut buf, &secret).unwrap(); let deserialized: Secret = rmp_serde::from_read_ref(&buf).unwrap(); secret == deserialized } quickcheck(check_serialize as fn(Secret) -> bool); } #[test] fn command_serialization() { fn check_serialize(command: Command) -> bool { let mut buf = ZeroizeBytesBuffer::with_capacity(8192); rmp_serde::encode::write_named(&mut buf, &command).unwrap(); let deserialized: Command = rmp_serde::from_read_ref(&buf).unwrap(); command == deserialized } quickcheck(check_serialize as fn(Command) -> bool); }
match g.choose(&[0, 1, 2, 3, 4, 5]).unwrap() { 0 => SecretType::Login, 1 => SecretType::Note, 2 => SecretType::Licence, 3 => SecretType::Wlan, 4 => SecretType::Password, _ => SecretType::Other, }
if_condition
[ { "content": "fn secret_to_clipboard(properties: &'static [&'static str]) -> impl Fn(&mut Cursive) {\n\n move |s: &mut Cursive| {\n\n let maybe_secret = {\n\n let secret_view = s.find_name::<SecretView>(\"secret_view\").unwrap();\n\n secret_view.current_secret()\n\n };\n\n let state = s.user_data::<ListUIState>().unwrap();\n\n\n\n if let Some(secret) = maybe_secret {\n\n state\n\n .service\n\n .secret_to_clipboard(&state.store_name, &secret.current_block_id, properties)\n\n .ok_or_exit(\"Copy to clipboard\");\n\n }\n\n }\n\n}\n\n\n", "file_path": "cli/src/commands/list_secrets.rs", "rank": 0, "score": 196272.26463459083 }, { "content": "fn add_secrets_versions(secrets_store: &dyn SecretsStore, ids_with_passphrase: &[(Identity, SecretBytes)]) {\n\n let version1 = SecretVersion {\n\n secret_id: \"secret1\".to_string(),\n\n secret_type: SecretType::Login,\n\n timestamp: Utc::now().into(),\n\n name: \"First secret\".to_string(),\n\n tags: vec![],\n\n urls: vec![],\n\n properties: Default::default(),\n\n attachments: vec![],\n\n deleted: false,\n\n recipients: ids_with_passphrase.iter().map(|(id, _)| id.id.clone()).collect(),\n\n };\n\n\n\n assert_that(&secrets_store.unlock(&ids_with_passphrase[0].0.id, ids_with_passphrase[0].1.clone())).is_ok();\n\n\n\n assert_that(&secrets_store.add(version1)).is_ok();\n\n\n\n assert_that(&secrets_store.update_index()).is_ok();\n\n\n\n let secret = secrets_store.get(\"secret1\").unwrap();\n\n\n\n assert_that(&secret.id).is_equal_to(\"secret1\".to_string());\n\n assert_that(&secret.current.name).is_equal_to(\"First secret\".to_string());\n\n}\n\n\n", "file_path": "lib/src/secrets_store/tests.rs", "rank": 1, "score": 179262.78704981238 }, { "content": "fn update_name_filter(s: &mut Cursive, name_filter: &str, _: usize) {\n\n let next_entries: Vec<SecretEntryMatch> = {\n\n let state = s.user_data::<ListUIState>().unwrap();\n\n state.filter.name = if name_filter.is_empty() {\n\n None\n\n } else {\n\n Some(name_filter.to_string())\n\n };\n\n\n\n let mut list = state.secrets_store.list(&state.filter).ok_or_exit(\"List entries\");\n\n list.entries.sort();\n\n list.entries.drain(..).collect()\n\n };\n\n\n\n let mut entry_select = s.find_name::<SelectView<SecretEntry>>(\"entry_list\").unwrap();\n\n let mut secret_view = s.find_name::<SecretView>(\"secret_view\").unwrap();\n\n match next_entries.first() {\n\n Some(new_selection) => secret_view.show_secret(&new_selection.entry.id),\n\n None => secret_view.clear(),\n\n }\n\n entry_select.clear();\n\n entry_select.add_all(next_entries.into_iter().map(entry_list_item));\n\n}\n\n\n", "file_path": "cli/src/commands/list_secrets.rs", "rank": 3, "score": 166311.4023866 }, { "content": "fn create_identity(s: &mut Cursive) {\n\n let identity = Identity {\n\n id: s.find_name::<EditView>(\"id\").unwrap().get_content().to_string(),\n\n name: s.find_name::<EditView>(\"name\").unwrap().get_content().to_string(),\n\n email: s.find_name::<EditView>(\"email\").unwrap().get_content().to_string(),\n\n hidden: false,\n\n };\n\n let passphrase = s.find_name::<PasswordView>(\"passphrase\").unwrap().get_content();\n\n\n\n if identity.id.is_empty() {\n\n s.add_layer(Dialog::info(\"Id must not be empty\"));\n\n return;\n\n }\n\n if identity.name.is_empty() {\n\n s.add_layer(Dialog::info(\"Name must not be empty\"));\n\n return;\n\n }\n\n if identity.email.is_empty() {\n\n s.add_layer(Dialog::info(\"Email must not be empty\"));\n\n return;\n\n }\n\n\n\n let secrets_store: &Arc<dyn SecretsStore> = s.user_data().unwrap();\n\n match 
secrets_store.add_identity(identity, passphrase) {\n\n Ok(_) => s.quit(),\n\n Err(error) => s.add_layer(Dialog::info(format!(\"Failed to create identity: {}\", error))),\n\n }\n\n}\n", "file_path": "cli/src/commands/add_identity.rs", "rank": 4, "score": 162065.73898485143 }, { "content": "pub fn unlock_store(siv: &mut CursiveRunnable, secrets_store: &Arc<dyn SecretsStore>, name: &str) -> Status {\n\n if !atty::is(Stream::Stdout) {\n\n println!(\"Please use a terminal\");\n\n process::exit(1);\n\n }\n\n\n\n let identities = secrets_store.identities().ok_or_exit(\"Get identities\");\n\n\n\n if identities.is_empty() {\n\n println!(\"Store does not have any identities to unlock\");\n\n process::exit(1)\n\n }\n\n\n\n unlock_dialog(siv, secrets_store, name, identities);\n\n\n\n let status = secrets_store.status().ok_or_exit(\"Get status\");\n\n\n\n if status.locked {\n\n println!(\"Unlock failed\");\n\n process::exit(1);\n\n }\n\n\n\n status\n\n}\n\n\n", "file_path": "cli/src/commands/unlock.rs", "rank": 5, "score": 156165.31415649533 }, { "content": "pub fn add_identity_dialog(siv: &mut Cursive, secrets_store: Arc<dyn SecretsStore>, title: &str) {\n\n siv.set_user_data(secrets_store);\n\n siv.add_layer(\n\n Dialog::around(\n\n LinearLayout::vertical()\n\n .child(TextView::new(\"Id\"))\n\n .child(EditView::new().content(generate_id(40)).disabled().with_name(\"id\"))\n\n .child(DummyView {})\n\n .child(TextView::new(\"Name\"))\n\n .child(EditView::new().with_name(\"name\").fixed_width(50))\n\n .child(DummyView {})\n\n .child(TextView::new(\"Email\"))\n\n .child(EditView::new().with_name(\"email\").fixed_width(50))\n\n .child(DummyView {})\n\n .child(TextView::new(\"Passphrase\"))\n\n .child(PasswordView::new(100).with_name(\"passphrase\")),\n\n )\n\n .title(title)\n\n .button(\"Create\", create_identity)\n\n .button(\"Abort\", Cursive::quit)\n\n .padding_left(5)\n\n .padding_right(5)\n\n .padding_top(1)\n\n .padding_bottom(1),\n\n )\n\n}\n\n\n", "file_path": "cli/src/commands/add_identity.rs", "rank": 6, "score": 155689.9788190982 }, { "content": "fn update_status(s: &mut Cursive) {\n\n let next_status = {\n\n let state = s.user_data::<ListUIState>().unwrap();\n\n let now = Utc::now();\n\n if state.last_update.is_none() || (now - state.last_update.unwrap()).num_milliseconds() > 400 {\n\n state.service.check_autolock();\n\n state.last_update.replace(now);\n\n match state.secrets_store.status() {\n\n Ok(status) => {\n\n state.status_text.set_content(status_text(&status));\n\n Some(status)\n\n }\n\n _ => None,\n\n }\n\n } else {\n\n None\n\n }\n\n };\n\n if next_status.is_some() && next_status.unwrap().locked {\n\n s.quit()\n\n }\n\n}\n\n\n", "file_path": "cli/src/commands/list_secrets.rs", "rank": 7, "score": 148351.67323034254 }, { "content": "fn clean_zero_bytes(mut data: SecretBytes) -> SecretBytes {\n\n data.borrow_mut().iter_mut().for_each(|b| {\n\n if *b == 0 {\n\n *b = 255;\n\n }\n\n });\n\n data\n\n}\n\n\n", "file_path": "lib/src/secrets_store/padding/tests.rs", "rank": 8, "score": 145928.98275836874 }, { "content": "fn update_selection(s: &mut Cursive, entry: &SecretEntry) {\n\n let mut secret_view = s.find_name::<SecretView>(\"secret_view\").unwrap();\n\n secret_view.show_secret(&entry.id);\n\n}\n\n\n", "file_path": "cli/src/commands/list_secrets.rs", "rank": 9, "score": 144104.78304528486 }, { "content": "fn add_identity(\n\n secrets_store: &dyn SecretsStore,\n\n id: &str,\n\n name: &str,\n\n email: &str,\n\n passphrase: &str,\n\n) -> SecretStoreResult<Identity> {\n\n let id = Identity 
{\n\n id: id.to_string(),\n\n name: name.to_string(),\n\n email: email.to_string(),\n\n hidden: false,\n\n };\n\n\n\n secrets_store.add_identity(id.clone(), secret_from_str(passphrase))?;\n\n\n\n Ok(id)\n\n}\n\n\n", "file_path": "lib/src/secrets_store/tests.rs", "rank": 11, "score": 139490.48841267507 }, { "content": "pub fn lock(service: Arc<dyn TrustlessService>, store_name: String) {\n\n let secrets_store = service\n\n .open_store(&store_name)\n\n .ok_or_exit(format!(\"Failed opening store {}: \", store_name));\n\n\n\n let status = secrets_store.status().ok_or_exit(\"Get status\");\n\n\n\n if !status.locked {\n\n secrets_store.lock().ok_or_exit(\"Lock store\");\n\n }\n\n}\n", "file_path": "cli/src/commands/lock.rs", "rank": 13, "score": 137567.96552270357 }, { "content": "fn add_identities_test(secrets_store: &dyn SecretsStore) -> Vec<(Identity, SecretBytes)> {\n\n let id1 = add_identity(secrets_store, \"identity1\", \"Name1\", \"Email1\", \"Passphrase1\").unwrap();\n\n let id2 = add_identity(secrets_store, \"identity2\", \"Name2\", \"Email2\", \"Passphrase2\").unwrap();\n\n\n\n let mut identities = secrets_store.identities().unwrap();\n\n identities.sort_by(|i1, i2| i1.id.cmp(&i2.id));\n\n\n\n assert_that(&identities).is_equal_to(vec![id1.clone(), id2.clone()]);\n\n\n\n assert_that(&add_identity(\n\n secrets_store,\n\n \"identity1\",\n\n \"Name1\",\n\n \"Email1\",\n\n \"Passphrase1\",\n\n ))\n\n .is_err_containing(SecretStoreError::Conflict);\n\n\n\n assert_that(&secrets_store.unlock(\"identity1\", secret_from_str(\"Passphrase2\")))\n\n .is_err_containing(SecretStoreError::InvalidPassphrase);\n", "file_path": "lib/src/secrets_store/tests.rs", "rank": 15, "score": 133709.73802677522 }, { "content": "fn list_secrets_ui(siv: &mut CursiveRunnable, initial_state: ListUIState, status: Status) {\n\n let mut name_search = EditView::new();\n\n if let Some(name_filter) = &initial_state.filter.name {\n\n name_search.set_content(name_filter.to_string());\n\n }\n\n name_search.set_on_edit(update_name_filter);\n\n\n\n let secrets_store = initial_state.secrets_store.clone();\n\n\n\n siv.set_fps(2);\n\n siv.add_global_callback(Key::Esc, Cursive::quit);\n\n siv.add_global_callback(\n\n Event::CtrlChar('a'),\n\n secret_to_clipboard(&[PROPERTY_USERNAME, PROPERTY_PASSWORD, PROPERTY_TOTP_URL]),\n\n );\n\n siv.add_global_callback(Event::CtrlChar('u'), secret_to_clipboard(&[PROPERTY_USERNAME]));\n\n siv.add_global_callback(Event::CtrlChar('p'), secret_to_clipboard(&[PROPERTY_PASSWORD]));\n\n siv.add_global_callback(Event::CtrlChar('o'), secret_to_clipboard(&[PROPERTY_TOTP_URL]));\n\n siv.add_global_callback(Event::Refresh, update_status);\n\n siv.add_fullscreen_layer(\n", "file_path": "cli/src/commands/list_secrets.rs", "rank": 17, "score": 123942.37703208507 }, { "content": "fn store_config(s: &mut Cursive) {\n\n let service = s.user_data::<Arc<dyn TrustlessService>>().unwrap().clone();\n\n let store_name = s.find_name::<EditView>(\"store_name\").unwrap().get_content();\n\n let store_path = expand_path(&s.find_name::<EditView>(\"store_dir\").unwrap().get_content());\n\n let autolock_timeout = s.find_name::<EditView>(\"autolock_timeout\").unwrap().get_content();\n\n let autolock_timeout_secs = try_with_dialog!(\n\n autolock_timeout.parse::<u64>(),\n\n s,\n\n \"Autolock timeout has to be a positive integer:\\n{}\"\n\n );\n\n let store_configs = try_with_dialog!(service.list_stores(), s, \"Failed reading existing configuration:\\n{}\");\n\n let client_id = match store_configs\n\n .iter()\n\n .find(|config| 
config.name.as_str() == store_name.as_str())\n\n {\n\n Some(previous) => previous.client_id.clone(),\n\n None => generate_id(64),\n\n };\n\n\n\n if store_path.is_empty() {\n", "file_path": "cli/src/commands/init.rs", "rank": 18, "score": 116956.07143615151 }, { "content": "fn do_unlock_store(s: &mut Cursive) {\n\n let secrets_store = s.user_data::<Arc<dyn SecretsStore>>().unwrap().clone();\n\n let maybe_identity = s.find_name::<SelectView>(\"identity\").unwrap().selection();\n\n let passphrase = s.find_name::<PasswordView>(\"passphrase\").unwrap().get_content();\n\n let identity_id = match maybe_identity {\n\n Some(id) => id,\n\n _ => {\n\n s.add_layer(Dialog::info(\"No identity selected\"));\n\n return;\n\n }\n\n };\n\n\n\n if let Err(error) = secrets_store.unlock(&identity_id, passphrase) {\n\n s.add_layer(Dialog::info(format!(\"Unable to unlock store:\\n{}\", error)));\n\n return;\n\n }\n\n\n\n s.quit()\n\n}\n", "file_path": "cli/src/commands/unlock.rs", "rank": 19, "score": 116956.07143615151 }, { "content": "fn xorbytes(src1: &[u8], src2: &[u8], tgt: &mut [u8]) {\n\n for ((s1, s2), t) in src1.iter().zip(src2).zip(tgt) {\n\n *t = *s1 ^ *s2\n\n }\n\n}\n\n\n\nimpl RustX25519ChaCha20Poly1305Cipher {\n\n fn unpack_public(key: &[u8]) -> x25519_dalek_ng::PublicKey {\n\n let mut raw = [0u8; 32];\n\n\n\n raw.copy_from_slice(key.borrow());\n\n\n\n x25519_dalek_ng::PublicKey::from(raw)\n\n }\n\n\n\n fn unpack_private(key: &PrivateKey) -> x25519_dalek_ng::StaticSecret {\n\n let mut raw = [0u8; 32]; // StaticSecrets takes ownership of this an clears it on drop\n\n\n\n raw.copy_from_slice(&key.borrow());\n\n\n", "file_path": "lib/src/secrets_store/cipher/rust_x25519_chacha20_poly1305.rs", "rank": 20, "score": 116693.19454539294 }, { "content": "pub fn add_identity(service: Arc<dyn TrustlessService>, store_name: String) {\n\n if !atty::is(Stream::Stdout) {\n\n println!(\"Please use a terminal\");\n\n process::exit(1);\n\n }\n\n\n\n let secrets_store = service\n\n .open_store(&store_name)\n\n .ok_or_exit(format!(\"Failed opening store {}: \", store_name));\n\n let mut siv = create_tui();\n\n\n\n siv.add_global_callback(Key::Esc, Cursive::quit);\n\n\n\n add_identity_dialog(&mut siv, secrets_store, \"Add identity\");\n\n\n\n siv.run();\n\n}\n\n\n", "file_path": "cli/src/commands/add_identity.rs", "rank": 21, "score": 115587.94827618303 }, { "content": "pub fn list_identities(service: Arc<dyn TrustlessService>, store_name: String) {\n\n let secrets_store = service\n\n .open_store(&store_name)\n\n .ok_or_exit(format!(\"Failed opening store {}: \", store_name));\n\n let identities = secrets_store.identities().ok_or_exit(\"Failed listing identities: \");\n\n\n\n serde_json::to_writer(io::stdout(), &identities).ok_or_exit(\"Failed dumping identities: \");\n\n}\n", "file_path": "cli/src/commands/list_identities.rs", "rank": 22, "score": 115587.94827618306 }, { "content": "#[bench]\n\nfn bench_openssl_rsa(b: &mut Bencher) {\n\n let mut rng = thread_rng();\n\n let private = Rsa::generate(4096).unwrap();\n\n let public = Rsa::public_key_from_der(&private.public_key_to_der().unwrap()).unwrap();\n\n\n\n b.iter(|| {\n\n let mut message = [0u8; 32];\n\n rng.fill_bytes(&mut message[..]);\n\n\n\n let mut crypled_key_buffer = vec![0u8; public.size() as usize];\n\n let crypted_len = public\n\n .public_encrypt(&message[..], &mut crypled_key_buffer, Padding::PKCS1_OAEP)\n\n .unwrap();\n\n\n\n let mut target = vec![0u8; crypted_len];\n\n\n\n let target_len = private\n\n 
.private_decrypt(&crypled_key_buffer[0..crypted_len], &mut target, Padding::PKCS1_OAEP)\n\n .unwrap();\n\n\n\n assert_slices_equal(&target[0..target_len], &message[..]);\n\n });\n\n}\n\n\n", "file_path": "lib/benches/rsa_raw.rs", "rank": 23, "score": 114776.8840961371 }, { "content": "#[bench]\n\nfn bench_rust_aes(b: &mut Bencher) {\n\n let mut rng = thread_rng();\n\n\n\n b.iter(|| {\n\n let mut seal_key = [0u8; 32];\n\n let mut nonce = [0u8; 12];\n\n let message = b\"Hello, secret\";\n\n\n\n rng.fill_bytes(&mut seal_key[..]);\n\n rng.fill_bytes(&mut nonce[..]);\n\n\n\n let cipher = Aes256Gcm::new(GenericArray::from_slice(&seal_key[..]));\n\n let public_data = cipher\n\n .encrypt(GenericArray::from_slice(&nonce[0..12]), &message[..])\n\n .unwrap();\n\n let decrypted = cipher\n\n .decrypt(GenericArray::from_slice(&nonce[0..12]), &public_data[..])\n\n .unwrap();\n\n\n\n assert_slices_equal(&decrypted, message);\n\n });\n\n}\n", "file_path": "lib/benches/aes_raw.rs", "rank": 24, "score": 114776.8840961371 }, { "content": "#[bench]\n\nfn bench_openssl_aes(b: &mut Bencher) {\n\n let mut rng = thread_rng();\n\n\n\n b.iter(|| {\n\n let mut seal_key = [0u8; 32];\n\n let mut nonce = [0u8; 12];\n\n let message = b\"Hello, secret\";\n\n let mut tag = [0u8; 16];\n\n\n\n rng.fill_bytes(&mut seal_key[..]);\n\n rng.fill_bytes(&mut nonce[..]);\n\n\n\n let mut public_data = symm::encrypt_aead(\n\n symm::Cipher::aes_256_gcm(),\n\n &seal_key[..],\n\n Some(&nonce[..]),\n\n &[],\n\n message,\n\n &mut tag[..],\n\n )\n", "file_path": "lib/benches/aes_raw.rs", "rank": 25, "score": 114776.8840961371 }, { "content": "#[bench]\n\nfn bench_rust_rsa(b: &mut Bencher) {\n\n let mut rng = thread_rng();\n\n let private = RSAPrivateKey::new(&mut rng, 4096).unwrap();\n\n let public = private.to_public_key();\n\n\n\n b.iter(|| {\n\n let mut message = [0u8; 32];\n\n rng.fill_bytes(&mut message[..]);\n\n\n\n let crypled_key_buffer = public\n\n .encrypt(&mut rng, PaddingScheme::new_oaep::<sha1::Sha1>(), &message[..])\n\n .unwrap();\n\n\n\n let target = private\n\n .decrypt(PaddingScheme::new_oaep::<sha1::Sha1>(), &crypled_key_buffer[..])\n\n .unwrap();\n\n\n\n assert_slices_equal(&target[..], &message[..]);\n\n });\n\n}\n", "file_path": "lib/benches/rsa_raw.rs", "rank": 26, "score": 114776.8840961371 }, { "content": "fn entry_list_item(entry_match: SecretEntryMatch) -> (StyledString, SecretEntry) {\n\n let name = &entry_match.entry.name;\n\n let mut styled_name = StyledString::new();\n\n let mut last = 0usize;\n\n\n\n for highlight in entry_match.name_highlights.iter() {\n\n if *highlight > last {\n\n styled_name.append_plain(name.chars().skip(last).take(highlight - last).collect::<String>());\n\n }\n\n styled_name.append_styled(\n\n name.chars().skip(*highlight).take(1).collect::<String>(),\n\n Effect::Reverse,\n\n );\n\n last = highlight + 1;\n\n }\n\n styled_name.append_plain(name.chars().skip(last).collect::<String>());\n\n\n\n (styled_name, entry_match.entry.clone())\n\n}\n\n\n", "file_path": "cli/src/commands/list_secrets.rs", "rank": 27, "score": 113373.92681792029 }, { "content": "//#[cfg(feature = \"rust_crypto\")]\n\nfn test_rust_rsa_aes_gcm(b: &mut Bencher) {\n\n common_data_encrypt_decrypt(&RUST_RSA_AES_GCM, b);\n\n}\n", "file_path": "lib/benches/ciphers.rs", "rank": 28, "score": 112723.7225361444 }, { "content": "#[bench]\n\nfn test_openssl_rsa_aes_gcm(b: &mut Bencher) {\n\n common_data_encrypt_decrypt(&OPEN_SSL_RSA_AES_GCM, b);\n\n}\n\n\n", "file_path": "lib/benches/ciphers.rs", "rank": 29, "score": 
112723.7225361444 }, { "content": "#[bench]\n\nfn test_rust_x25519_chacha20_poly1305(b: &mut Bencher) {\n\n common_data_encrypt_decrypt(&RUST_X25519CHA_CHA20POLY1305, b);\n\n}\n\n\n\n#[bench]\n", "file_path": "lib/benches/ciphers.rs", "rank": 30, "score": 112723.7225361444 }, { "content": "pub fn list_secrets(service: Arc<dyn TrustlessService>, store_name: String, filter: SecretListFilter) {\n\n let secrets_store = service\n\n .open_store(&store_name)\n\n .ok_or_exit(format!(\"Failed opening store {}: \", store_name));\n\n\n\n let mut status = secrets_store.status().ok_or_exit(\"Get status\");\n\n\n\n if atty::is(Stream::Stdout) {\n\n if status.locked {\n\n let mut siv = create_tui();\n\n status = unlock_store(&mut siv, &secrets_store, &store_name);\n\n siv.quit();\n\n }\n\n let mut siv = create_tui();\n\n\n\n let initial_state = ListUIState {\n\n service,\n\n store_name,\n\n secrets_store,\n\n filter,\n", "file_path": "cli/src/commands/list_secrets.rs", "rank": 31, "score": 109606.57590185362 }, { "content": "pub fn set_text_list<I, S>(mut text_list: text_list::Builder, texts: I) -> capnp::Result<()>\n\nwhere\n\n I: IntoIterator<Item = S>,\n\n S: AsRef<str>,\n\n{\n\n for (idx, text) in texts.into_iter().enumerate() {\n\n text_list.set(idx as u32, capnp::text::new_reader(text.as_ref().as_bytes())?);\n\n }\n\n Ok(())\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Zeroize)]\n\n#[zeroize(drop)]\n\npub struct ClipboardProviding {\n\n pub store_name: String,\n\n pub block_id: String,\n\n pub secret_name: String,\n\n pub property: String,\n\n}\n", "file_path": "lib/src/api/mod.rs", "rank": 32, "score": 109545.52916434382 }, { "content": "#[test]\n\n#[cfg_attr(debug_assertions, ignore)]\n\nfn test_multi_lane_secrets_store() {\n\n let (secrets_store, _) = open_secrets_store(\n\n \"test\",\n\n \"multilane+memory://\",\n\n None,\n\n \"node1\",\n\n Duration::from_secs(300),\n\n Arc::new(TestEventHub),\n\n )\n\n .unwrap();\n\n\n\n common_secrets_store_tests(secrets_store)\n\n}\n", "file_path": "lib/src/secrets_store/tests.rs", "rank": 34, "score": 105798.81979523512 }, { "content": "#[allow(clippy::type_complexity)]\n\npub fn open_secrets_store(\n\n name: &str,\n\n url: &str,\n\n maybe_remote_url: Option<&str>,\n\n node_id: &str,\n\n autolock_timeout: Duration,\n\n event_hub: Arc<dyn EventHub>,\n\n) -> SecretStoreResult<(Arc<dyn SecretsStore>, Option<Arc<SyncBlockStore>>)> {\n\n let (scheme, block_store_url) = match url.find('+') {\n\n Some(idx) => (&url[..idx], &url[idx + 1..]),\n\n _ => return Err(SecretStoreError::InvalidStoreUrl(url.to_string())),\n\n };\n\n\n\n let mut block_store = open_block_store(block_store_url, node_id)?;\n\n\n\n let sync_block_store = match maybe_remote_url {\n\n Some(remote_url) => {\n\n let remote = open_block_store(remote_url, node_id)?;\n\n\n\n let sync_block_store = Arc::new(SyncBlockStore::new(block_store, remote));\n", "file_path": "lib/src/secrets_store/mod.rs", "rank": 35, "score": 104596.09478107262 }, { "content": "fn secret_from_str(s: &str) -> SecretBytes {\n\n let raw = s.as_bytes().to_vec();\n\n\n\n SecretBytes::from(raw)\n\n}\n\n\n", "file_path": "lib/src/secrets_store/tests.rs", "rank": 36, "score": 103617.92333565494 }, { "content": "fn filter_set(candidates: &mut Vec<u8>, set: &[u8], params: &PasswordGeneratorCharsParam) {\n\n for ch in set {\n\n if params.exclude_similar && SIMILAR_CHARS.contains(ch) {\n\n continue;\n\n }\n\n if params.exclude_ambiguous && AMBIGOUS_CHARS.contains(ch) {\n\n continue;\n\n }\n\n 
candidates.push(*ch);\n\n }\n\n}\n\n\n", "file_path": "lib/src/service/pw_generator/chars.rs", "rank": 38, "score": 101236.64217368315 }, { "content": "fn common_data_encrypt_decrypt<T>(cipher: &T, b: &mut Bencher)\n\nwhere\n\n T: Cipher,\n\n{\n\n let mut rng = thread_rng();\n\n\n\n let id1 = \"recipient1\";\n\n let (public_key1, private_key1) = cipher.generate_key_pair().unwrap();\n\n\n\n b.iter(|| {\n\n let private_data = SecretBytes::random(&mut rng, 1234);\n\n let mut message = capnp::message::Builder::new_default();\n\n\n\n let mut block = message.init_root::<block::Builder>();\n\n let headers = block.reborrow().init_headers(1);\n\n\n\n let crypted_data = cipher\n\n .encrypt(&[(id1, public_key1.clone())], &private_data, headers.get(0))\n\n .unwrap();\n\n block.set_content(&crypted_data);\n", "file_path": "lib/benches/ciphers.rs", "rank": 40, "score": 99689.4047991806 }, { "content": "pub fn try_remote_service() -> ServiceResult<Option<impl TrustlessService>> {\n\n let stream = match PipeClient::connect(DAEMON_PIPE_NAME) {\n\n Ok(pipe) => pipe,\n\n Err(error) if error.kind() == ErrorKind::NotFound => return Ok(None),\n\n Err(error) => return Err(error.into()),\n\n };\n\n\n\n Ok(Some(RemoteTrustlessService::new(stream)))\n\n}\n", "file_path": "lib/src/service/windows.rs", "rank": 41, "score": 99411.3024722521 }, { "content": "pub fn try_remote_service() -> ServiceResult<Option<impl TrustlessService>> {\n\n let socket_path = daemon_socket_path();\n\n\n\n if !socket_path.exists() {\n\n return Ok(None);\n\n }\n\n\n\n let stream = UnixStream::connect(socket_path)?;\n\n\n\n Ok(Some(RemoteTrustlessService::new(stream)))\n\n}\n", "file_path": "lib/src/service/unix.rs", "rank": 42, "score": 99411.3024722521 }, { "content": "fn common_secrets_store_tests(secrets_store: Arc<dyn SecretsStore>) {\n\n let initial_status = secrets_store.status().unwrap();\n\n\n\n assert_that(&initial_status.autolock_at).is_none();\n\n assert_that(&initial_status.locked).is_true();\n\n\n\n let initial_identities = secrets_store.identities().unwrap();\n\n\n\n assert_that(&initial_identities).is_empty();\n\n\n\n let ids_with_passphrase = add_identities_test(secrets_store.as_ref());\n\n\n\n add_secrets_versions(secrets_store.as_ref(), &ids_with_passphrase);\n\n}\n\n\n", "file_path": "lib/src/secrets_store/tests.rs", "rank": 43, "score": 97995.81011858178 }, { "content": "pub fn initialize_store(name: &str) -> StoreResult<DropboxInitializer> {\n\n let oauth2_flow = Oauth2Type::PKCE(PkceCode::new());\n\n let auth_url = AuthorizeUrlBuilder::new(APP_KEY, &oauth2_flow)\n\n .redirect_uri(REDIRECT_URL)\n\n .build();\n\n let server_handle = start_authcode_server()?;\n\n\n\n Ok(DropboxInitializer {\n\n name: name.to_string(),\n\n oauth2_flow,\n\n auth_url,\n\n server_handle,\n\n })\n\n}\n\n\n", "file_path": "lib/src/block_store/dropbox/initialize.rs", "rank": 44, "score": 97823.18025483142 }, { "content": "fn sort_ring_ids(ring_ids: Vec<RingId>) -> Vec<String> {\n\n let mut ids: Vec<String> = ring_ids\n\n .into_iter()\n\n .map(|(id, version)| format!(\"{}.{}\", id, version))\n\n .collect();\n\n ids.sort();\n\n ids\n\n}\n\n\n", "file_path": "lib/src/block_store/tests.rs", "rank": 45, "score": 96022.99413440526 }, { "content": "#[test]\n\nfn test_process_change_logs() {\n\n let mut test_store: TestStore = Default::default();\n\n let mut index: Index = Default::default();\n\n\n\n for i in 0..10 {\n\n for j in 0..5 {\n\n test_store.add_secret_version(&format!(\"Secret_{}\", i), j)\n\n }\n\n }\n\n\n\n assert_that(\n\n 
&index.process_change_logs(&[test_store.make_changelog(\"test_node\")], |block_id| {\n\n Ok(test_store.versions.get(block_id).cloned())\n\n }),\n\n )\n\n .is_ok();\n\n\n\n let filter = Default::default();\n\n let mut all_matches = index.filter_entries(&filter).unwrap();\n\n\n", "file_path": "lib/src/secrets_store/index_tests.rs", "rank": 46, "score": 94997.85215739033 }, { "content": "#[test]\n\nfn test_non_zero_padding() {\n\n let mut rng = thread_rng();\n\n\n\n common_padding_tests::<NonZeroPadding>(clean_zero_bytes(SecretBytes::random(&mut rng, 127)));\n\n common_padding_tests::<NonZeroPadding>(clean_zero_bytes(SecretBytes::random(&mut rng, 128)));\n\n common_padding_tests::<NonZeroPadding>(clean_zero_bytes(SecretBytes::random(&mut rng, 129)));\n\n common_padding_tests::<NonZeroPadding>(clean_zero_bytes(SecretBytes::random(&mut rng, 137)));\n\n common_padding_tests::<NonZeroPadding>(clean_zero_bytes(SecretBytes::random(&mut rng, 1234)));\n\n common_padding_tests::<NonZeroPadding>(clean_zero_bytes(SecretBytes::random(&mut rng, 12345)));\n\n common_padding_tests::<NonZeroPadding>(clean_zero_bytes(SecretBytes::random(&mut rng, 123_456)));\n\n}\n\n\n", "file_path": "lib/src/secrets_store/padding/tests.rs", "rank": 47, "score": 94997.85215739033 }, { "content": "fn pick_char_from<R: Rng>(rng: &mut R, set: &[u8], params: &PasswordGeneratorCharsParam) -> u8 {\n\n let mut candidates = Vec::with_capacity(set.len());\n\n filter_set(&mut candidates, set, params);\n\n\n\n *candidates.choose(rng).unwrap()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use spectral::prelude::*;\n\n\n\n #[test]\n\n fn test_generate_chars() {\n\n let pw1 = generate_chars(&PasswordGeneratorCharsParam {\n\n num_chars: 14,\n\n include_uppers: false,\n\n include_numbers: false,\n\n include_symbols: false,\n\n require_number: false,\n", "file_path": "lib/src/service/pw_generator/chars.rs", "rank": 48, "score": 93716.19683635201 }, { "content": "fn sort_ring_ids(ring_ids: Vec<RingId>) -> Vec<String> {\n\n let mut ids: Vec<String> = ring_ids\n\n .into_iter()\n\n .map(|(id, version)| format!(\"{}.{}\", id, version))\n\n .collect();\n\n ids.sort();\n\n ids\n\n}\n\n\n", "file_path": "lib/src/block_store/sync/synchronize_tests.rs", "rank": 49, "score": 93310.99047599488 }, { "content": "fn common_test_ring(store: &dyn BlockStore, rng: &mut ThreadRng) {\n\n let ring1a = rng\n\n .sample_iter(distributions::Standard)\n\n .take(200 * 8)\n\n .collect::<Vec<u8>>();\n\n let ring1b = rng.sample_iter(distributions::Standard).take(200).collect::<Vec<u8>>();\n\n let ring2a = rng\n\n .sample_iter(distributions::Standard)\n\n .take(300 * 8)\n\n .collect::<Vec<u8>>();\n\n let ring2b = rng\n\n .sample_iter(distributions::Standard)\n\n .take(300 * 8)\n\n .collect::<Vec<u8>>();\n\n\n\n assert_that!(store.list_ring_ids()).is_ok_containing(vec![]);\n\n assert_that!(store.store_ring(\"ring1\", 0, &ring1a)).is_ok();\n\n assert_that!(store.get_ring(\"ring1\")).is_ok_containing((0u64, ZeroingWords::from(ring1a.as_ref())));\n\n assert_that!(store.list_ring_ids().map(sort_ring_ids)).is_ok_containing(vec![\"ring1.0\".to_string()]);\n\n\n", "file_path": "lib/src/block_store/tests.rs", "rank": 50, "score": 93264.85963274584 }, { "content": "fn common_test_index(store: &dyn BlockStore, rng: &mut ThreadRng) {\n\n let node1 = rng\n\n .sample_iter(distributions::Alphanumeric)\n\n .map(char::from)\n\n .take(40)\n\n .collect::<String>();\n\n let node1_index1 = rng\n\n .sample_iter(distributions::Standard)\n\n .take(200 * 8)\n\n 
.collect::<Vec<u8>>();\n\n let node1_index2 = rng\n\n .sample_iter(distributions::Standard)\n\n .take(200 * 8)\n\n .collect::<Vec<u8>>();\n\n let node2 = rng\n\n .sample_iter(distributions::Alphanumeric)\n\n .map(char::from)\n\n .take(40)\n\n .collect::<String>();\n\n let node2_index1 = rng\n", "file_path": "lib/src/block_store/tests.rs", "rank": 51, "score": 93264.85963274584 }, { "content": "fn common_padding_tests<T>(data: SecretBytes)\n\nwhere\n\n T: Padding,\n\n{\n\n for pad_align in &[100, 128, 200, 256, 1000, 1024] {\n\n let padded = T::pad_secret_data(&data.borrow(), *pad_align).unwrap();\n\n\n\n assert!(padded.len() % *pad_align == 0);\n\n\n\n let padded_borrow = padded.borrow();\n\n\n\n let unpadded = T::unpad_data(&padded_borrow).unwrap();\n\n\n\n assert_slices_equal(unpadded, &data.borrow());\n\n }\n\n}\n\n\n", "file_path": "lib/src/secrets_store/padding/tests.rs", "rank": 52, "score": 93130.2580509748 }, { "content": "#[test]\n\n#[cfg(feature = \"rust_crypto\")]\n\n#[cfg_attr(debug_assertions, ignore)]\n\nfn test_rust_rsa_aes_gcm() {\n\n common_chiper_tests(&crate::secrets_store::cipher::RUST_RSA_AES_GCM);\n\n}\n", "file_path": "lib/src/secrets_store/cipher/tests.rs", "rank": 53, "score": 93120.36899064248 }, { "content": "#[test]\n\nfn test_rust_x25519_chacha20_poly1305() {\n\n common_chiper_tests(&RUST_X25519CHA_CHA20POLY1305);\n\n}\n\n\n", "file_path": "lib/src/secrets_store/cipher/tests.rs", "rank": 54, "score": 93120.36899064248 }, { "content": "#[test]\n\n#[cfg(feature = \"openssl\")]\n\nfn test_openssl_rsa_aes_gcm() {\n\n common_chiper_tests(&crate::secrets_store::cipher::OPEN_SSL_RSA_AES_GCM);\n\n}\n\n\n", "file_path": "lib/src/secrets_store/cipher/tests.rs", "rank": 55, "score": 93120.36899064248 }, { "content": "#[test]\n\nfn test_non_zero_padding_quick() {\n\n #[allow(clippy::needless_pass_by_value)]\n\n fn check_padding(mut data: Vec<u8>) -> bool {\n\n for b in &mut data[..] 
{\n\n if *b == 0 {\n\n *b = 255;\n\n }\n\n }\n\n common_padding_tests::<NonZeroPadding>(SecretBytes::from(data));\n\n true\n\n }\n\n\n\n quickcheck(check_padding as fn(Vec<u8>) -> bool);\n\n}\n\n\n", "file_path": "lib/src/secrets_store/padding/tests.rs", "rank": 56, "score": 93120.36899064248 }, { "content": "#[test]\n\nfn test_randon_front_back_padding() {\n\n let mut rng = thread_rng();\n\n\n\n common_padding_tests::<RandomFrontBack>(SecretBytes::random(&mut rng, 127));\n\n common_padding_tests::<RandomFrontBack>(SecretBytes::random(&mut rng, 128));\n\n common_padding_tests::<RandomFrontBack>(SecretBytes::random(&mut rng, 129));\n\n common_padding_tests::<RandomFrontBack>(SecretBytes::random(&mut rng, 137));\n\n common_padding_tests::<RandomFrontBack>(SecretBytes::random(&mut rng, 1234));\n\n common_padding_tests::<RandomFrontBack>(SecretBytes::random(&mut rng, 12345));\n\n common_padding_tests::<RandomFrontBack>(SecretBytes::random(&mut rng, 123_456));\n\n}\n\n\n", "file_path": "lib/src/secrets_store/padding/tests.rs", "rank": 57, "score": 93120.36899064248 }, { "content": "fn fixture_test<T>(\n\n cipher: &T,\n\n seal_key_raw: &[u8],\n\n seal_nonce: &[u8],\n\n maybe_public_key: Option<&[u8]>,\n\n crypted_key: &[u8],\n\n messages: &[&[u8]],\n\n) where\n\n T: Cipher,\n\n{\n\n let seal_key = SecretBytes::from_secured(seal_key_raw);\n\n let private_kex = cipher.open_private_key(&seal_key, seal_nonce, crypted_key).unwrap();\n\n\n\n for message in messages {\n\n let words = Word::allocate_zeroed_vec(message.len());\n\n unsafe {\n\n copy_nonoverlapping(message.as_ptr(), words.as_ptr() as *mut u8, words.len() * 8);\n\n }\n\n let mut message_payload: &[u8] = Word::words_to_bytes(&words);\n\n let message_reader =\n", "file_path": "lib/src/secrets_store/cipher/fixture_tests.rs", "rank": 58, "score": 91924.84394417114 }, { "content": "fn common_test_blocks_commits(store: &dyn BlockStore, rng: &mut ThreadRng) {\n\n assert_that(&store.get_block(\"00000000000\")).is_err_containing(StoreError::InvalidBlock(\"00000000000\".to_string()));\n\n\n\n let block1 = rng\n\n .sample_iter(distributions::Standard)\n\n .take(200 * 8)\n\n .collect::<Vec<u8>>();\n\n let block2 = rng\n\n .sample_iter(distributions::Standard)\n\n .take(200 * 8)\n\n .collect::<Vec<u8>>();\n\n let block3 = rng\n\n .sample_iter(distributions::Standard)\n\n .take(200 * 8)\n\n .collect::<Vec<u8>>();\n\n\n\n let block1_id = store.add_block(&block1).unwrap();\n\n let block2_id = store.add_block(&block2).unwrap();\n\n let block3_id = store.add_block(&block3).unwrap();\n\n\n", "file_path": "lib/src/block_store/tests.rs", "rank": 59, "score": 91851.84656335 }, { "content": "fn generate_id(length: usize) -> String {\n\n let rng = thread_rng();\n\n\n\n rng\n\n .sample_iter(distributions::Alphanumeric)\n\n .map(char::from)\n\n .take(length)\n\n .collect::<String>()\n\n}\n", "file_path": "cli/src/commands/mod.rs", "rank": 60, "score": 91733.68147773149 }, { "content": "#[test]\n\nfn test_randon_front_back_padding_quick() {\n\n #[allow(clippy::needless_pass_by_value)]\n\n fn check_padding(data: Vec<u8>) -> bool {\n\n common_padding_tests::<RandomFrontBack>(SecretBytes::from(data));\n\n true\n\n }\n\n\n\n quickcheck(check_padding as fn(Vec<u8>) -> bool);\n\n}\n", "file_path": "lib/src/secrets_store/padding/tests.rs", "rank": 61, "score": 91345.52764842293 }, { "content": "fn status_text(status: &Status) -> String {\n\n if status.locked {\n\n \" Locked\".to_string()\n\n } else {\n\n match status.autolock_at {\n\n Some(autolock_at) => {\n\n let timeout 
= autolock_at - Utc::now();\n\n\n\n format!(\" Unlocked {}s\", timeout.num_seconds())\n\n }\n\n None => \" Unlocked\".to_string(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "cli/src/commands/list_secrets.rs", "rank": 62, "score": 87428.95010354569 }, { "content": "fn recv_result<S, E>(reader: &mut MutexGuard<S>) -> Result<CommandResult, E>\n\nwhere\n\n S: Read,\n\n E: From<std::io::Error> + From<rmp_serde::decode::Error>,\n\n{\n\n let len = reader.read_u32::<LittleEndian>()? as usize;\n\n let mut buf = Zeroizing::from(vec![0; len]);\n\n\n\n reader.read_exact(&mut buf)?;\n\n\n\n Ok(rmp_serde::from_read_ref(buf.as_slice())?)\n\n}\n\n\n", "file_path": "lib/src/service/remote.rs", "rank": 63, "score": 85823.9379626034 }, { "content": "fn write_command<S, E>(writer: &mut MutexGuard<S>, command: Command) -> Result<(), E>\n\nwhere\n\n S: Write,\n\n E: From<std::io::Error> + From<rmp_serde::encode::Error>,\n\n{\n\n let mut message = ZeroizeBytesBuffer::with_capacity(1024);\n\n rmp_serde::encode::write_named(&mut message, &command)?;\n\n\n\n writer.write_u32::<LittleEndian>(message.len() as u32)?;\n\n writer.write_all(&message)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "lib/src/service/remote.rs", "rank": 64, "score": 84256.87808912838 }, { "content": "pub fn generate_block_id(data: &[u8]) -> String {\n\n let mut hasher = Sha256::new();\n\n\n\n hasher.update(data);\n\n\n\n HEXLOWER.encode(&hasher.finalize())\n\n}\n", "file_path": "lib/src/block_store/mod.rs", "rank": 65, "score": 84208.20490439222 }, { "content": "pub fn default_store_dir(store_name: &str) -> PathBuf {\n\n let home_dir = dirs::home_dir().unwrap_or_else(|| PathBuf::from(\".\"));\n\n\n\n dirs::document_dir().unwrap_or(home_dir).join(store_name)\n\n}\n\n\n", "file_path": "cli/src/config.rs", "rank": 66, "score": 84184.99814134682 }, { "content": "fn common_chiper_tests<T>(cipher: &T)\n\nwhere\n\n T: Cipher,\n\n{\n\n common_private_seal_open(cipher);\n\n common_data_encrypt_decrypt(cipher);\n\n}\n\n\n", "file_path": "lib/src/secrets_store/cipher/tests.rs", "rank": 67, "score": 83776.6255945783 }, { "content": "fn common_data_encrypt_decrypt<T>(cipher: &T)\n\nwhere\n\n T: Cipher,\n\n{\n\n let mut rng = thread_rng();\n\n let private_data = SecretBytes::random(&mut rng, 1234);\n\n\n\n let id1 = \"recipient1\";\n\n let id2 = \"recipient2\";\n\n let (public_key1, private_key1) = cipher.generate_key_pair().unwrap();\n\n let (public_key2, private_key2) = cipher.generate_key_pair().unwrap();\n\n\n\n let mut message = capnp::message::Builder::new_default();\n\n\n\n let mut block = message.init_root::<block::Builder>();\n\n let headers = block.reborrow().init_headers(1);\n\n\n\n let crypted_data = cipher\n\n .encrypt(&[(id1, public_key1), (id2, public_key2)], &private_data, headers.get(0))\n\n .unwrap();\n", "file_path": "lib/src/secrets_store/cipher/tests.rs", "rank": 68, "score": 82096.23290870282 }, { "content": "fn common_private_seal_open<T>(cipher: &T)\n\nwhere\n\n T: Cipher,\n\n{\n\n let (public_key, private_key) = cipher.generate_key_pair().unwrap();\n\n\n\n assert_that(&public_key.len()).is_greater_than_or_equal_to(30);\n\n\n\n let mut rng = thread_rng();\n\n let nonce = iter::repeat(())\n\n .map(|_| rng.sample(distributions::Standard))\n\n .take(cipher.seal_min_nonce_length())\n\n .collect::<Vec<u8>>();\n\n let seal_key = SecretBytes::random(&mut rng, cipher.seal_key_length());\n\n\n\n let crypted_private = cipher.seal_private_key(&seal_key, &nonce, &private_key).unwrap();\n\n let decrypted_private = cipher.open_private_key(&seal_key, 
&nonce, &crypted_private).unwrap();\n\n\n\n assert_slices_equal(&decrypted_private.borrow(), &private_key.borrow());\n\n}\n\n\n", "file_path": "lib/src/secrets_store/cipher/tests.rs", "rank": 69, "score": 82096.23290870282 }, { "content": "fn assert_slices_equal(actual: &[u8], expected: &[u8]) {\n\n assert!(actual == expected)\n\n}\n\n\n", "file_path": "lib/src/secrets_store/padding/tests.rs", "rank": 70, "score": 80402.25197541293 }, { "content": "fn assert_slices_equal(actual: &[u8], expected: &[u8]) {\n\n assert!(actual == expected)\n\n}\n\n\n", "file_path": "lib/src/secrets_store/cipher/tests.rs", "rank": 71, "score": 80402.25197541293 }, { "content": "pub fn import_v1(service: Arc<dyn TrustlessService>, store_name: String, maybe_file_name: Option<&str>) {\n\n let secrets_store = service\n\n .open_store(&store_name)\n\n .ok_or_exit(format!(\"Failed opening store {}: \", store_name));\n\n\n\n let status = secrets_store.status().ok_or_exit(\"Get status\");\n\n\n\n let import_stream: Box<dyn BufRead> = match maybe_file_name {\n\n Some(file_name) => {\n\n let file = File::open(file_name).ok_or_exit(format!(\"Failed opening {}\", file_name));\n\n Box::new(BufReader::new(file))\n\n }\n\n None => {\n\n if status.locked {\n\n eprintln!(\"Store is locked! Cannot unlock store when importing from stdin (duh).\");\n\n process::exit(1);\n\n }\n\n Box::new(BufReader::new(stdin()))\n\n }\n\n };\n", "file_path": "cli/src/commands/import.rs", "rank": 72, "score": 78092.09206363859 }, { "content": "pub fn unlock(service: Arc<dyn TrustlessService>, store_name: String) {\n\n let secrets_store = service\n\n .open_store(&store_name)\n\n .ok_or_exit(format!(\"Failed opening store {}: \", store_name));\n\n\n\n let status = secrets_store.status().ok_or_exit(\"Get status\");\n\n\n\n if status.locked {\n\n let mut siv = create_tui();\n\n\n\n unlock_store(&mut siv, &secrets_store, &store_name);\n\n }\n\n}\n\n\n", "file_path": "cli/src/commands/unlock.rs", "rank": 73, "score": 77060.06456008201 }, { "content": "pub fn status(service: Arc<dyn TrustlessService>, store_name: String) {\n\n let secrets_store = service\n\n .open_store(&store_name)\n\n .ok_or_exit(format!(\"Failed opening store {}: \", store_name));\n\n let status = secrets_store.status().ok_or_exit(\"Get status\");\n\n\n\n if atty::is(Stream::Stdout) {\n\n println!();\n\n println!(\"Client version: {}\", style(env!(\"CARGO_PKG_VERSION\")).with(Color::Cyan));\n\n println!(\"Store version : {}\", style(status.version.clone()).with(Color::Cyan));\n\n println!(\n\n \"Status : {}\",\n\n if status.locked {\n\n style(\"Locked\").with(Color::Green)\n\n } else {\n\n style(\"Unlocked\").with(Color::Red)\n\n }\n\n )\n\n } else {\n\n println!(\"Client version: {}\", env!(\"CARGO_PKG_VERSION\"));\n\n println!(\"Store version : {}\", status.version);\n\n }\n\n}\n", "file_path": "cli/src/commands/status.rs", "rank": 74, "score": 77060.06456008201 }, { "content": "fn create_list_view(state: &ListUIState) -> ResizedView<LinearLayout> {\n\n let mut entry_select = SelectView::new();\n\n let mut list = state.secrets_store.list(&state.filter).ok_or_exit(\"List entries\");\n\n let initial_selected = list.entries.first().map(|e| e.entry.id.clone());\n\n entry_select.add_all(list.entries.drain(..).into_iter().map(entry_list_item));\n\n entry_select.set_on_select(update_selection);\n\n LinearLayout::horizontal()\n\n .child(entry_select.with_name(\"entry_list\").scrollable())\n\n .child(\n\n SecretView::new(\n\n state.service.clone(),\n\n state.store_name.clone(),\n\n 
state.secrets_store.clone(),\n\n initial_selected,\n\n )\n\n .with_name(\"secret_view\"),\n\n )\n\n .full_screen()\n\n}\n", "file_path": "cli/src/commands/list_secrets.rs", "rank": 75, "score": 75857.94731684419 }, { "content": "pub fn init(service: Arc<dyn TrustlessService>, maybe_store_name: Option<String>) {\n\n if !atty::is(Stream::Stdout) {\n\n println!(\"Please use a terminal\");\n\n process::exit(1);\n\n }\n\n\n\n let store_name = maybe_store_name.unwrap_or_else(|| \"t-rust-less-store\".to_string());\n\n let store_configs = match service.list_stores() {\n\n Ok(configs) => configs,\n\n Err(err) => {\n\n exit_with_error(\n\n format!(\"Checking exsting configuration for store {}: \", store_name),\n\n err,\n\n );\n\n unreachable!()\n\n }\n\n };\n\n let maybe_config = store_configs\n\n .iter()\n\n .find(|config| config.name.as_str() == store_name.as_str());\n", "file_path": "cli/src/commands/init.rs", "rank": 76, "score": 73071.89224217241 }, { "content": "fn main() {\n\n let outdir = match env::var_os(\"OUT_DIR\") {\n\n Some(outdir) => outdir,\n\n None => {\n\n eprintln!(\"OUT_DIR environment variable not defined.\");\n\n process::exit(1);\n\n }\n\n };\n\n fs::create_dir_all(&outdir).unwrap();\n\n\n\n let mut app = cli::app();\n\n app.gen_completions(\"t-rust-less\", Shell::Bash, &outdir);\n\n app.gen_completions(\"t-rust-less\", Shell::Fish, &outdir);\n\n app.gen_completions(\"t-rust-less\", Shell::Zsh, &outdir);\n\n app.gen_completions(\"t-rust-less\", Shell::PowerShell, &outdir);\n\n}\n", "file_path": "cli/build.rs", "rank": 77, "score": 71073.83872117789 }, { "content": "fn main() {\n\n let outdir = match env::var_os(\"OUT_DIR\") {\n\n Some(outdir) => outdir,\n\n None => {\n\n eprintln!(\"OUT_DIR environment variable not defined.\");\n\n process::exit(1);\n\n }\n\n };\n\n fs::create_dir_all(&outdir).unwrap();\n\n\n\n let mut app = cli::app();\n\n app.gen_completions(\"t-rust-less-daemon\", Shell::Bash, &outdir);\n\n app.gen_completions(\"t-rust-less-daemon\", Shell::Fish, &outdir);\n\n app.gen_completions(\"t-rust-less-daemon\", Shell::Zsh, &outdir);\n\n app.gen_completions(\"t-rust-less-daemon\", Shell::PowerShell, &outdir);\n\n}\n", "file_path": "daemon/build.rs", "rank": 78, "score": 71073.83872117789 }, { "content": "pub fn generate_words(params: &PasswordGeneratorWordsParam) -> String {\n\n let mut rng = thread_rng();\n\n\n\n WORDLIST\n\n .choose_multiple(&mut rng, params.num_words as usize)\n\n .join(&params.delim.to_string())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use spectral::prelude::*;\n\n\n\n #[test]\n\n fn test_generate_words() {\n\n let pw1 = generate_words(&PasswordGeneratorWordsParam {\n\n num_words: 3,\n\n delim: '.',\n\n });\n\n\n", "file_path": "lib/src/service/pw_generator/words.rs", "rank": 79, "score": 70493.10166534263 }, { "content": "pub fn password_generate_param_from_args(args: &ArgMatches) -> PasswordGeneratorParam {\n\n if args.is_present(\"words\") {\n\n PasswordGeneratorParam::Words(PasswordGeneratorWordsParam {\n\n num_words: args.value_of(\"length\").and_then(|v| v.parse::<u8>().ok()).unwrap_or(4),\n\n delim: args.value_of(\"delim\").and_then(|v| v.chars().next()).unwrap_or('.'),\n\n })\n\n } else {\n\n PasswordGeneratorParam::Chars(PasswordGeneratorCharsParam {\n\n num_chars: args.value_of(\"length\").and_then(|v| v.parse::<u8>().ok()).unwrap_or(16),\n\n include_uppers: !args.is_present(\"exclude-uppers\"),\n\n include_numbers: !args.is_present(\"exclude-numbers\"),\n\n include_symbols: 
!args.is_present(\"exclude-symbols\"),\n\n require_upper: args.is_present(\"require-upper\"),\n\n require_number: args.is_present(\"require-number\"),\n\n require_symbol: args.is_present(\"require-symbol\"),\n\n exclude_ambiguous: !args.is_present(\"include-ambiguous\"),\n\n exclude_similar: !args.is_present(\"include-similar\"),\n\n })\n\n }\n\n}\n\n\n", "file_path": "cli/src/commands/generate.rs", "rank": 80, "score": 70476.01602871015 }, { "content": "pub fn generate_chars(params: &PasswordGeneratorCharsParam) -> String {\n\n let mut rng = thread_rng();\n\n let mut pool = Vec::with_capacity(params.num_chars as usize);\n\n\n\n if params.require_upper {\n\n pool.push(pick_char_from(&mut rng, UPPERS, params));\n\n }\n\n if params.require_number {\n\n pool.push(pick_char_from(&mut rng, NUMBERS, params));\n\n }\n\n if params.require_symbol {\n\n pool.push(pick_char_from(&mut rng, SYMBOLS, params));\n\n }\n\n let candidates = create_base_set(params);\n\n while pool.len() < params.num_chars as usize {\n\n pool.push(*candidates.choose(&mut rng).unwrap());\n\n }\n\n\n\n pool.shuffle(&mut rng);\n\n\n\n String::from_utf8(pool).unwrap()\n\n}\n\n\n", "file_path": "lib/src/service/pw_generator/chars.rs", "rank": 81, "score": 70444.32743565226 }, { "content": "fn create_base_set(params: &PasswordGeneratorCharsParam) -> Vec<u8> {\n\n let mut candidates = Vec::with_capacity(LOWERS.len() + UPPERS.len() + NUMBERS.len() + SYMBOLS.len());\n\n\n\n filter_set(&mut candidates, LOWERS, params);\n\n if params.include_uppers {\n\n filter_set(&mut candidates, UPPERS, params);\n\n }\n\n if params.include_numbers {\n\n filter_set(&mut candidates, NUMBERS, params);\n\n }\n\n if params.include_symbols {\n\n filter_set(&mut candidates, SYMBOLS, params);\n\n }\n\n\n\n candidates\n\n}\n\n\n", "file_path": "lib/src/service/pw_generator/chars.rs", "rank": 82, "score": 69707.99988965309 }, { "content": "fn main() {\n\n let matches = cli::app().get_matches();\n\n\n\n let mut log_builder = env_logger::Builder::from_default_env();\n\n\n\n if matches.is_present(\"debug\") {\n\n log_builder.filter(None, log::LevelFilter::Debug);\n\n } else {\n\n log_builder.filter(None, log::LevelFilter::Error);\n\n }\n\n log_builder.target(env_logger::Target::Stderr);\n\n log_builder.init();\n\n\n\n let service = create_service().ok_or_exit(\"Failed creating service\");\n\n let maybe_store_name = matches\n\n .value_of(\"store\")\n\n .map(str::to_string)\n\n .or_else(|| service.get_default_store().ok_or_exit(\"Get default store\"));\n\n\n\n if matches.subcommand_matches(\"init\").is_some() {\n", "file_path": "cli/src/main.rs", "rank": 83, "score": 69573.55724966736 }, { "content": "fn uninitialized() {\n\n if atty::is(Stream::Stdout) {\n\n println!();\n\n println!(\"{}\", style(\"No default store found\").with(Color::Red));\n\n println!();\n\n println!(\n\n \"t-rust-less was unable to find a default store in configuration at '{}'.\",\n\n config_file().to_string_lossy()\n\n );\n\n println!(\"Probably t-rust-less has not been initialized yet. 
You may fix this problem with 't-rust-less init'\");\n\n println!();\n\n } else {\n\n error!(\n\n \"Missing default store in configuration: {}\",\n\n config_file().to_string_lossy()\n\n );\n\n }\n\n process::exit(1)\n\n}\n\n\n", "file_path": "cli/src/main.rs", "rank": 84, "score": 69573.55724966736 }, { "content": "fn main() {\n\n env_logger::Builder::from_default_env()\n\n .filter(None, log::LevelFilter::Debug)\n\n .target(env_logger::Target::Stderr)\n\n .init();\n\n\n\n let service = match create_service() {\n\n Ok(service) => service,\n\n Err(error) => {\n\n error!(\"Failed creating service: {}\", error);\n\n process::exit(1);\n\n }\n\n };\n\n\n\n let mut processor = match processor::Processor::new(service, stdin(), stdout()) {\n\n Ok(processor) => processor,\n\n Err(error) => {\n\n error!(\"Failed creating processor: {}\", error);\n\n process::exit(1);\n\n }\n\n };\n\n\n\n if let Err(error) = processor.process() {\n\n error!(\"Error: {}\", error);\n\n process::exit(1);\n\n }\n\n}\n", "file_path": "native/src/main.rs", "rank": 85, "score": 69573.55724966736 }, { "content": "fn main() {\n\n env_logger::Builder::from_default_env()\n\n .filter(None, log::LevelFilter::Debug)\n\n .target(env_logger::Target::Stderr)\n\n .init();\n\n\n\n //fixtures::generate_fixtures();\n\n clipboard::experimental_clipboard();\n\n}\n", "file_path": "experiments/src/main.rs", "rank": 86, "score": 69573.55724966736 }, { "content": "pub fn open_block_store(url: &str, node_id: &str) -> StoreResult<Arc<dyn BlockStore>> {\n\n let store_url = Url::parse(url)?;\n\n\n\n match store_url.scheme() {\n\n \"file\" => Ok(Arc::new(local_dir::LocalDirBlockStore::new(\n\n store_url.to_file_path().unwrap(),\n\n node_id,\n\n )?)),\n\n \"memory\" => Ok(Arc::new(memory::MemoryBlockStore::new(node_id))),\n\n #[cfg(feature = \"sled\")]\n\n \"sled\" => Ok(Arc::new(sled::SledBlockStore::new(\n\n store_url.to_file_path().unwrap(),\n\n node_id,\n\n )?)),\n\n #[cfg(feature = \"dropbox\")]\n\n \"dropbox\" => Ok(Arc::new(dropbox::DropboxBlockStore::new(\n\n store_url.username(),\n\n store_url.host_str().unwrap(),\n\n node_id,\n\n )?)),\n\n _ => Err(StoreError::InvalidStoreUrl(url.to_string())),\n\n }\n\n}\n\n\n", "file_path": "lib/src/block_store/mod.rs", "rank": 87, "score": 66983.58871415048 }, { "content": "fn unlock_dialog(\n\n siv: &mut CursiveRunnable,\n\n secrets_store: &Arc<dyn SecretsStore>,\n\n name: &str,\n\n identities: Vec<Identity>,\n\n) {\n\n siv.set_user_data(secrets_store.clone());\n\n siv.add_global_callback(Key::Esc, Cursive::quit);\n\n siv.add_layer(\n\n Dialog::around(\n\n LinearLayout::vertical()\n\n .child(TextView::new(\"Identity\"))\n\n .child(\n\n SelectView::new()\n\n .with_all(\n\n identities\n\n .into_iter()\n\n .map(|i| (format!(\"{} <{}>\", i.name, i.email), i.id.clone())),\n\n )\n\n .with_name(\"identity\")\n", "file_path": "cli/src/commands/unlock.rs", "rank": 88, "score": 66867.32874534692 }, { "content": "#[test]\n\nfn test_totp_long() {\n\n let totp_url = \"otpauth://totp/[email protected]?secret=LPD4D5FLWUBYFEB66SKYQGJBDS5HWYNT&period=60&digits=8\";\n\n let otpauth = OTPAuthUrl::parse(totp_url).unwrap();\n\n\n\n assert_that(&otpauth.algorithm).is_equal_to(OTPAlgorithm::SHA1);\n\n assert_that(&otpauth.digits).is_equal_to(8);\n\n assert_that(&otpauth.issuer).is_none();\n\n assert_that(&otpauth.account_name).is_equal_to(\"[email protected]\".to_string());\n\n\n\n assert_that(&otpauth.generate(1_556_733_830)).is_equal_to((\"03744419\".to_string(), 1_556_733_840));\n\n 
assert_that(&otpauth.generate(1_556_733_904)).is_equal_to((\"84237990\".to_string(), 1_556_733_960));\n\n\n\n assert_that(&otpauth.to_url()).is_equal_to(\n\n \"otpauth://totp/someone%40somewhere.com?secret=LPD4D5FLWUBYFEB66SKYQGJBDS5HWYNT&period=60&digits=8\".to_string(),\n\n );\n\n}\n", "file_path": "lib/src/otp/tests.rs", "rank": 89, "score": 65642.74304107296 }, { "content": "#[test]\n\nfn test_totp_std() {\n\n let totp_url = \"otpauth://totp/Example:[email protected]?secret=JBSWY3DPEHPK3PXP&issuer=Example\";\n\n let otpauth = OTPAuthUrl::parse(totp_url).unwrap();\n\n\n\n assert_that(&otpauth.algorithm).is_equal_to(OTPAlgorithm::SHA1);\n\n assert_that(&otpauth.digits).is_equal_to(6);\n\n assert_that(&otpauth.issuer).is_equal_to(Some(\"Example\".to_string()));\n\n assert_that(&otpauth.account_name).is_equal_to(\"[email protected]\".to_string());\n\n\n\n assert_that(&otpauth.generate(1_556_733_311)).is_equal_to((\"184557\".to_string(), 1_556_733_330));\n\n assert_that(&otpauth.generate(1_556_733_406)).is_equal_to((\"757120\".to_string(), 1_556_733_420));\n\n\n\n assert_that(&otpauth.to_url())\n\n .is_equal_to(\"otpauth://totp/Example:someone%40somewhere.com?secret=JBSWY3DPEHPK3PXP&issuer=Example\".to_string());\n\n}\n\n\n", "file_path": "lib/src/otp/tests.rs", "rank": 90, "score": 65642.74304107296 }, { "content": "#[test]\n\nfn test_memory_store() {\n\n let store = open_block_store(\"memory://\", \"node1\").unwrap();\n\n\n\n common_store_tests(store);\n\n}\n\n\n", "file_path": "lib/src/block_store/tests.rs", "rank": 91, "score": 64493.32403171076 }, { "content": "#[cfg(feature = \"sled\")]\n\n#[test]\n\nfn test_sled_store() {\n\n let tempdir = Builder::new().prefix(\"t-rust-less-test\").tempdir().unwrap();\n\n #[cfg(not(windows))]\n\n let url = format!(\"sled://{}\", tempdir.path().to_string_lossy());\n\n #[cfg(windows)]\n\n let url = format!(\"sled:///{}\", tempdir.path().to_string_lossy().replace('\\\\', \"/\"));\n\n\n\n let store = open_block_store(url.as_str(), \"node1\").unwrap();\n\n\n\n common_store_tests(store);\n\n}\n", "file_path": "lib/src/block_store/tests.rs", "rank": 92, "score": 64493.32403171076 }, { "content": "pub fn experimental_clipboard() {\n\n let clipboard = Arc::new(Clipboard::new(DummyProvider { counter: 0 }, Arc::new(TestEventHub)).unwrap());\n\n\n\n thread::spawn({\n\n let cloned = clipboard.clone();\n\n move || {\n\n thread::sleep(Duration::from_secs(30));\n\n info!(\"Destroy clipboard\");\n\n cloned.destroy();\n\n }\n\n });\n\n\n\n loop {\n\n let mut buffer = String::new();\n\n std::io::stdin().read_line(&mut buffer).unwrap();\n\n\n\n clipboard.provide_next();\n\n\n\n if buffer.trim() == \"c\" {\n\n break;\n\n }\n\n }\n\n clipboard.destroy();\n\n // clipboard.wait().unwrap();\n\n}\n", "file_path": "experiments/src/clipboard.rs", "rank": 93, "score": 63794.320532127735 }, { "content": "pub fn generate_fixtures() {\n\n let cipher = &RUST_RSA_AES_GCM;\n\n\n\n let mut rng = thread_rng();\n\n let seal_nonce = iter::repeat(())\n\n .map(|_| rng.sample(distributions::Standard))\n\n .take(cipher.seal_min_nonce_length())\n\n .collect::<Vec<u8>>();\n\n let seal_key = SecretBytes::random(&mut rng, cipher.seal_key_length());\n\n\n\n let (public_key, private_key) = cipher.generate_key_pair().unwrap();\n\n\n\n let crypted_private = cipher.seal_private_key(&seal_key, &seal_nonce, &private_key).unwrap();\n\n\n\n println!(\"const SEAL_NONCE : &[u8] = &hex!(\\\"{}\\\");\", hex::encode(seal_nonce));\n\n println!(\n\n \"const SEAL_KEY : &[u8] = &hex!(\\\"{}\\\");\",\n\n 
hex::encode(seal_key.borrow().as_bytes())\n\n );\n\n println!(\"const PUBLIC_KEY : &[u8] = &hex!(\\\"{}\\n\\\");\", long_hex(&public_key));\n", "file_path": "experiments/src/fixtures.rs", "rank": 94, "score": 63794.320532127735 }, { "content": "#[test]\n\nfn test_local_dir_store() {\n\n let tempdir = Builder::new().prefix(\"t-rust-less-test\").tempdir().unwrap();\n\n #[cfg(not(windows))]\n\n let url = format!(\"file://{}\", tempdir.path().to_string_lossy());\n\n #[cfg(windows)]\n\n let url = format!(\"file:///{}\", tempdir.path().to_string_lossy().replace('\\\\', \"/\"));\n\n\n\n let store = open_block_store(&url, \"node1\").unwrap();\n\n\n\n common_store_tests(store);\n\n}\n\n\n", "file_path": "lib/src/block_store/tests.rs", "rank": 95, "score": 63412.35704776 }, { "content": "fn test_block_sync(\n\n rng: &mut ThreadRng,\n\n local_store: Arc<dyn BlockStore>,\n\n remote_store: Arc<dyn BlockStore>,\n\n sync_store: Arc<SyncBlockStore>,\n\n) {\n\n let block1 = rng\n\n .sample_iter(distributions::Standard)\n\n .take(200 * 8)\n\n .collect::<Vec<u8>>();\n\n let block2 = rng\n\n .sample_iter(distributions::Standard)\n\n .take(200 * 8)\n\n .collect::<Vec<u8>>();\n\n let block3 = rng\n\n .sample_iter(distributions::Standard)\n\n .take(200 * 8)\n\n .collect::<Vec<u8>>();\n\n\n\n let block1_id = local_store.add_block(&block1).unwrap();\n", "file_path": "lib/src/block_store/sync/synchronize_tests.rs", "rank": 96, "score": 62393.90405920039 }, { "content": "fn test_ring_sync(\n\n rng: &mut ThreadRng,\n\n local_store: Arc<dyn BlockStore>,\n\n remote_store: Arc<dyn BlockStore>,\n\n sync_store: Arc<SyncBlockStore>,\n\n) {\n\n let ring1a = rng\n\n .sample_iter(distributions::Standard)\n\n .take(200 * 8)\n\n .collect::<Vec<u8>>();\n\n let ring1b = rng.sample_iter(distributions::Standard).take(200).collect::<Vec<u8>>();\n\n let ring2a = rng\n\n .sample_iter(distributions::Standard)\n\n .take(300 * 8)\n\n .collect::<Vec<u8>>();\n\n let ring2b = rng\n\n .sample_iter(distributions::Standard)\n\n .take(300 * 8)\n\n .collect::<Vec<u8>>();\n\n\n", "file_path": "lib/src/block_store/sync/synchronize_tests.rs", "rank": 97, "score": 62393.90405920039 }, { "content": "#[test]\n\nfn test_sync_memory() {\n\n let mut rng = thread_rng();\n\n let local_store = open_block_store(\"memory://\", \"local\").unwrap();\n\n let remote_store = open_block_store(\"memory://\", \"remote\").unwrap();\n\n let sync_store = Arc::new(SyncBlockStore::new(local_store.clone(), remote_store.clone()));\n\n\n\n test_ring_sync(&mut rng, local_store.clone(), remote_store.clone(), sync_store.clone());\n\n test_block_sync(&mut rng, local_store, remote_store, sync_store);\n\n}\n", "file_path": "lib/src/block_store/sync/synchronize_tests.rs", "rank": 98, "score": 62393.90405920039 }, { "content": "pub fn default_autolock_timeout() -> Duration {\n\n Duration::from_secs(300)\n\n}\n", "file_path": "cli/src/config.rs", "rank": 99, "score": 58951.41320468322 } ]